Remove useless files from upstream
This commit is contained in: parent 1b1f6ce2c2 · commit 3f10edb533
31 changed files with 0 additions and 1162 deletions
.github/workflows/main.yml (vendored): 28 deletions
@@ -1,28 +0,0 @@
name: Python package

on: [push]

jobs:
  build:

    runs-on: ubuntu-18.04
    strategy:
      max-parallel: 4
      matrix:
        python-version: [2.7, 3.5, 3.6, 3.7]

    steps:
    - uses: actions/checkout@v1
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v1
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install -r tests/requirements.txt
        pelican-themes -i ../attila
    - name: Test with pytest
      run: |
        cd tests
        pytest

fabfile.py (vendored): 76 deletions
@@ -1,76 +0,0 @@
from fabric.api import *
import fabric.contrib.project as project
import os
import shutil
import sys
import SocketServer

from pelican.server import ComplexHTTPRequestHandler

# Local path configuration (can be absolute or relative to fabfile)
env.deploy_path = 'output'
DEPLOY_PATH = env.deploy_path

# Remote server configuration
production = 'root@localhost:22'
dest_path = '/var/www'

# Rackspace Cloud Files configuration settings
env.cloudfiles_username = 'my_rackspace_username'
env.cloudfiles_api_key = 'my_rackspace_api_key'
env.cloudfiles_container = 'my_cloudfiles_container'

# Github Pages configuration
env.github_pages_branch = "gh-pages"

# Port for `serve`
PORT = 8000

def clean():
    """Remove generated files"""
    if os.path.isdir(DEPLOY_PATH):
        shutil.rmtree(DEPLOY_PATH)
        os.makedirs(DEPLOY_PATH)

def build():
    """Build local version of site"""
    local('asciidoctor -D {deploy_path} *.adoc'.format(**env))
    local('cp *.png {deploy_path}'.format(**env))
    local('mv {deploy_path}/README.html {deploy_path}/index.html'.format(**env))

def rebuild():
    """`clean` then `build`"""
    clean()
    build()

def serve():
    """Serve site at http://localhost:8000/"""
    os.chdir(env.deploy_path)

    class AddressReuseTCPServer(SocketServer.TCPServer):
        allow_reuse_address = True

    server = AddressReuseTCPServer(('', PORT), ComplexHTTPRequestHandler)

    sys.stderr.write('Serving on port {0} ...\n'.format(PORT))
    server.serve_forever()

def reserve():
    """`build`, then `serve`"""
    build()
    serve()

def cf_upload():
    """Publish to Rackspace Cloud Files"""
    rebuild()
    with lcd(DEPLOY_PATH):
        local('swift -v -A https://auth.api.rackspacecloud.com/v1.0 '
              '-U {cloudfiles_username} '
              '-K {cloudfiles_api_key} '
              'upload -c {cloudfiles_container} .'.format(**env))

def gh_pages():
    """Publish to GitHub Pages"""
    rebuild()
    local("ghp-import -b {github_pages_branch} {deploy_path}".format(**env))
    local("git push origin {github_pages_branch}".format(**env))

@@ -1,7 +0,0 @@
:title: With Cover Images
:date: 2018-04-29 00:45
:author: arul
:category: foo
:tags: footag
:slug: with-cover-images
:cover: /assets/images/article_cover.jpg

@@ -1,7 +0,0 @@
:title: With Cover Images
:date: 2018-04-29 00:45
:author: arul
:category: foo
:tags: footag
:slug: with-http-cover-images
:cover: http://example.com/cover.jpg

@@ -1,7 +0,0 @@
:title: With OG Cover Images
:date: 2018-04-29 00:59
:author: raj
:category: bar
:tags: bartag
:slug: with-og-cover-images
:og_image: /assets/images/og_cover.jpg

@@ -1,6 +0,0 @@
:title: Without Cover Images
:date: 2018-04-29 00:55
:author: arul
:category: foo
:tags: footag
:slug: without-cover-images

@@ -1 +0,0 @@
cover.jpg

@@ -1 +0,0 @@
cover.jpg

Binary file not shown. Before size: 3.6 KiB

@@ -1 +0,0 @@
cover.jpg

@@ -1 +0,0 @@
cover.jpg

Binary file not shown. Before size: 26 KiB

@@ -1 +0,0 @@
cover.jpg

@@ -1 +0,0 @@
cover.jpg

@@ -1 +0,0 @@
cover.jpg

@@ -1 +0,0 @@
cover.jpg

@@ -1 +0,0 @@
cover.jpg

@@ -1 +0,0 @@
cover.jpg

@@ -1,7 +0,0 @@
:title: Page With Cover Images
:date: 2018-04-29 00:45
:author: arul
:category: foo
:tags: footag
:slug: page-with-cover-images
:cover: assets/images/page_cover.jpg

@@ -1,7 +0,0 @@
:title: Page With Cover Images
:date: 2018-04-29 00:45
:author: arul
:category: foo
:tags: footag
:slug: page-with-http-cover-images
:cover: http://example.com/page-cover.jpg

@@ -1,7 +0,0 @@
:title: Page With OG Cover Images
:date: 2018-04-29 00:59
:author: raj
:category: bar
:tags: bartag
:slug: page-with-og-cover-images
:og_image: assets/images/og_cover.jpg

@@ -1,6 +0,0 @@
:title: Page Without Cover Images
:date: 2018-04-29 00:55
:author: arul
:category: foo
:tags: footag
:slug: page-without-cover-images

@@ -1,139 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals

AUTHOR = u'Zutrinken'
SITENAME = u'Attila Demo'
SITESUBTITLE = u'Blog description here.'
SITEURL = ''

PATH = 'content'

DEFAULT_DATE = 'fs'

DEFAULT_DATE_FORMAT = '%d %b %Y'

TIMEZONE = 'Asia/Calcutta'

DEFAULT_LANG = u'en'

# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None

# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
         ('Python.org', 'http://python.org/'),
         ('Jinja2', 'http://jinja.pocoo.org/'),
         ('You can modify those links in your config file', '#'),)

# Social widget
SOCIAL = (('Facebook', 'http://facebook.com/arulraj.net'),
          ('Twitter', 'http://twitter.com/arulrajnet')
          )

# Pagination
DEFAULT_PAGINATION = 3
PAGINATION_PATTERNS = (
    (1, '{base_name}/', '{base_name}/index.html'),
    (2, '{base_name}/page/{number}/', '{base_name}/page/{number}/index.html'),
)

# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True

STATIC_PATHS = ['assets']

EXTRA_PATH_METADATA = {
    'assets/robots.txt': {'path': 'robots.txt'},
    'assets/favicon.ico': {'path': 'favicon.ico'},
    'assets/CNAME': {'path': 'CNAME'}
}

# Post and Pages path
ARTICLE_URL = '{date:%Y}/{date:%m}/{slug}.html'
ARTICLE_SAVE_AS = '{date:%Y}/{date:%m}/{slug}.html'
PAGE_URL = 'pages/{slug}/'
PAGE_SAVE_AS = 'pages/{slug}/index.html'
YEAR_ARCHIVE_SAVE_AS = '{date:%Y}/index.html'
MONTH_ARCHIVE_SAVE_AS = '{date:%Y}/{date:%m}/index.html'

# Tags and Category path
CATEGORY_URL = 'category/{slug}'
CATEGORY_SAVE_AS = 'category/{slug}/index.html'
CATEGORIES_SAVE_AS = 'catgegories.html'
TAG_URL = 'tag/{slug}'
TAG_SAVE_AS = 'tag/{slug}/index.html'
TAGS_SAVE_AS = 'tags.html'

# Author
AUTHOR_URL = 'author/{slug}'
AUTHOR_SAVE_AS = 'author/{slug}/index.html'
AUTHORS_SAVE_AS = 'authors.html'

### Plugins

# PLUGIN_PATHS = [
#   'pelican-plugins'
# ]

# PLUGINS = [
#   'sitemap',
#   'neighbors',
#   'assets'
# ]

# Sitemap
SITEMAP = {
    'format': 'xml',
    'priorities': {
        'articles': 0.5,
        'indexes': 0.5,
        'pages': 0.5
    },
    'changefreqs': {
        'articles': 'monthly',
        'indexes': 'daily',
        'pages': 'monthly'
    }
}

# Comments
DISQUS_SITENAME = "attilademo"

# Analytics
GOOGLE_ANALYTICS = "UA-3546274-12"

THEME = 'attila'

### Theme specific settings

COLOR_SCHEME_CSS = 'github.css'

CSS_OVERRIDE = ['assets/css/myblog.css']

# Jinja config - Pelican 4
JINJA_ENVIRONMENT = {
    'extensions' :[
        'jinja2.ext.loopcontrols',
        'jinja2.ext.i18n',
        'jinja2.ext.with_',
        'jinja2.ext.do'
    ]
}

JINJA_FILTERS = {'max': max}

# AUTHORS_BIO = {
#   "arul": {
#     "name": "Arul",
#     "cover": "assets/images/avatar.png",
#     "image": "assets/images/arul_author_cover.jpg",
#     "website": "http://blog.arulraj.net",
#     "location": "Chennai",
#     "bio": "This is the place for a small biography with max 200 characters. Well, now 100 are left. Cool, hugh?"
#   }
# }

@@ -1,3 +0,0 @@
pelican
BeautifulSoup4
pytest

tests/support.py: 334 deletions
@@ -1,334 +0,0 @@
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals

import locale
import logging
import os
import re
import subprocess
import sys
import unittest
from contextlib import contextmanager
from functools import wraps
from logging.handlers import BufferingHandler
from os.path import abspath, dirname, join
from shutil import rmtree
from tempfile import mkdtemp

from six import StringIO

from pelican.contents import Article
from pelican.settings import (DEFAULT_CONFIG, read_settings)

__all__ = ['get_article', 'unittest', ]


@contextmanager
def temporary_folder():
    """creates a temporary folder, return it and delete it afterwards.

    This allows to do something like this in tests:

        >>> with temporary_folder() as d:
            # do whatever you want
    """
    tempdir = mkdtemp()
    try:
        yield tempdir
    finally:
        rmtree(tempdir)


def isplit(s, sep=None):
    """Behaves like str.split but returns a generator instead of a list.

    >>> list(isplit('\tUse the force\n')) == '\tUse the force\n'.split()
    True
    >>> list(isplit('\tUse the force\n')) == ['Use', 'the', 'force']
    True
    >>> (list(isplit('\tUse the force\n', "e"))
         == '\tUse the force\n'.split("e"))
    True
    >>> list(isplit('Use the force', "e")) == 'Use the force'.split("e")
    True
    >>> list(isplit('Use the force', "e")) == ['Us', ' th', ' forc', '']
    True

    """
    sep, hardsep = r'\s+' if sep is None else re.escape(sep), sep is not None
    exp, pos, length = re.compile(sep), 0, len(s)
    while True:
        m = exp.search(s, pos)
        if not m:
            if pos < length or hardsep:
                # ^ mimic "split()": ''.split() returns []
                yield s[pos:]
            break
        start = m.start()
        if pos < start or hardsep:
            # ^ mimic "split()": includes trailing empty string
            yield s[pos:start]
        pos = m.end()


def mute(returns_output=False):
    """Decorate a function that prints to stdout, intercepting the output.
    If "returns_output" is True, the function will return a generator
    yielding the printed lines instead of the return values.

    The decorator literally hijack sys.stdout during each function
    execution, so be careful with what you apply it to.

    >>> def numbers():
            print "42"
            print "1984"
    ...
    >>> numbers()
    42
    1984
    >>> mute()(numbers)()
    >>> list(mute(True)(numbers)())
    ['42', '1984']

    """

    def decorator(func):

        @wraps(func)
        def wrapper(*args, **kwargs):

            saved_stdout = sys.stdout
            sys.stdout = StringIO()

            try:
                out = func(*args, **kwargs)
                if returns_output:
                    out = isplit(sys.stdout.getvalue().strip())
            finally:
                sys.stdout = saved_stdout

            return out

        return wrapper

    return decorator


def get_article(title, slug, content, lang, extra_metadata=None):
    metadata = {'slug': slug, 'title': title, 'lang': lang}
    if extra_metadata is not None:
        metadata.update(extra_metadata)
    return Article(content, metadata=metadata)


def skipIfNoExecutable(executable):
    """Skip test if `executable` is not found

    Tries to run `executable` with subprocess to make sure it's in the path,
    and skips the tests if not found (if subprocess raises a `OSError`).
    """

    with open(os.devnull, 'w') as fnull:
        try:
            res = subprocess.call(executable, stdout=fnull, stderr=fnull)
        except OSError:
            res = None

    if res is None:
        return unittest.skip('{0} executable not found'.format(executable))

    return lambda func: func


def module_exists(module_name):
    """Test if a module is importable."""

    try:
        __import__(module_name)
    except ImportError:
        return False
    else:
        return True


def locale_available(locale_):
    old_locale = locale.setlocale(locale.LC_TIME)

    try:
        locale.setlocale(locale.LC_TIME, str(locale_))
    except locale.Error:
        return False
    else:
        locale.setlocale(locale.LC_TIME, old_locale)
        return True


def get_settings(**kwargs):
    """Provide tweaked setting dictionaries for testing

    Set keyword arguments to override specific settings.
    """
    settings = DEFAULT_CONFIG.copy()
    for key, value in kwargs.items():
        settings[key] = value
    return settings


def get_my_settings(**kwargs):
    PATH = abspath(dirname(__file__))
    default_conf = join(PATH, 'default_conf.py')
    settings = read_settings(default_conf)
    for key, value in kwargs.items():
        settings[key] = value
    return settings


class LogCountHandler(BufferingHandler):
    """Capturing and counting logged messages."""

    def __init__(self, capacity=1000):
        logging.handlers.BufferingHandler.__init__(self, capacity)

    def count_logs(self, msg=None, level=None):
        return len([
            l
            for l
            in self.buffer
            if (msg is None or re.match(msg, l.getMessage())) and
            (level is None or l.levelno == level)
        ])


class LoggedTestCase(unittest.TestCase):
    """A test case that captures log messages."""

    def setUp(self):
        super(LoggedTestCase, self).setUp()
        self._logcount_handler = LogCountHandler()
        logging.getLogger().addHandler(self._logcount_handler)

    def tearDown(self):
        logging.getLogger().removeHandler(self._logcount_handler)
        super(LoggedTestCase, self).tearDown()

    def assertLogCountEqual(self, count=None, msg=None, **kwargs):
        actual = self._logcount_handler.count_logs(msg=msg, **kwargs)
        self.assertEqual(
            actual, count,
            msg='expected {} occurrences of {!r}, but found {}'.format(
                count, msg, actual))


from bs4 import BeautifulSoup
from pelican.generators import (ArticlesGenerator, Generator, PagesGenerator,
                                PelicanTemplateNotFound, StaticGenerator,
                                TemplatePagesGenerator)
from pelican.readers import RstReader
from pelican.writers import Writer
from pelican.contents import (Article, Page)

CUR_DIR = os.path.dirname(__file__)
CONTENT_DIR = os.path.join(CUR_DIR, 'content')
OUTPUT_DIR = os.path.join(CUR_DIR, 'output')

class BaseTest(object):

    def __init__(self):
        self.initSettings()

    def initSettings(self):
        self.old_locale = locale.setlocale(locale.LC_ALL)
        locale.setlocale(locale.LC_ALL, str('C'))
        self.settings = get_my_settings()
        self.settings['THEME'] = "../"
        self.settings['filenames'] = {}
        self.reader = RstReader(self.settings)
        self.writer = Writer("output", self.settings)

    def gen_article_and_html_from_rst(self, rstPath):
        content, metadata = self.reader.read(rstPath)
        article = Article(content=content, metadata=metadata)
        context = self.settings.copy()
        context['generated_content'] = {}
        context['static_links'] = set()
        context['static_content'] = {}
        context['localsiteurl'] = self.settings['SITEURL']
        generator = ArticlesGenerator( context=context, settings=self.settings, path=CONTENT_DIR, theme=self.settings['THEME'], output_path=OUTPUT_DIR)
        generator.generate_context()
        f = lambda a: True if (a.slug == article.slug) else False
        result = list(filter(f, generator.context["articles"]))[0]
        self.writer.write_file(
            result.save_as, generator.get_template('article'),
            generator.context, article=result)
        soup = BeautifulSoup(open("./"+self.writer.output_path+'/'+result.save_as), "html.parser")
        return (result, soup)

    def gen_page_and_html_from_rst(self, rstPath):
        content, metadata = self.reader.read(rstPath)
        page = Page(content=content, metadata=metadata)
        context = self.settings.copy()
        context['generated_content'] = {}
        context['static_links'] = set()
        context['static_content'] = {}
        context['localsiteurl'] = self.settings['SITEURL']
        generator = PagesGenerator( context=context, settings=self.settings, path=CONTENT_DIR, theme=self.settings['THEME'], output_path=OUTPUT_DIR)
        generator.generate_context()
        f = lambda a: True if (a.slug == page.slug) else False
        result = list(filter(f, generator.context["pages"]))[0]
        self.writer.write_file(
            result.save_as, generator.get_template('page'),
            generator.context, page=result)
        soup = BeautifulSoup(open("./"+self.writer.output_path+'/'+result.save_as), "html.parser")
        return (result, soup)

    def gen_tag_and_html_from_name(self, name):
        context = self.settings.copy()
        context['generated_content'] = {}
        context['static_links'] = set()
        context['static_content'] = {}
        context['localsiteurl'] = self.settings['SITEURL']
        generator = ArticlesGenerator( context=context, settings=self.settings, path=CONTENT_DIR, theme=self.settings['THEME'], output_path=OUTPUT_DIR)
        generator.generate_context()
        generator.generate_tags(self.writer.write_file)
        selectedTag = None

        for tag, articles in generator.tags.items():
            if tag.name == name:
                selectedTag = tag

        soup = BeautifulSoup(open("./"+self.writer.output_path+'/'+selectedTag.save_as), "html.parser")
        return (selectedTag, soup)

    def gen_category_and_html_from_name(self, name):
        context = self.settings.copy()
        context['generated_content'] = {}
        context['static_links'] = set()
        context['static_content'] = {}
        context['localsiteurl'] = self.settings['SITEURL']
        generator = ArticlesGenerator( context=context, settings=self.settings, path=CONTENT_DIR, theme=self.settings['THEME'], output_path=OUTPUT_DIR)
        generator.generate_context()
        generator.generate_categories(self.writer.write_file)
        selectedCategory = None

        for category, articles in generator.categories:
            if category.name == name:
                selectedCategory = category

        soup = BeautifulSoup(open("./"+self.writer.output_path+'/'+selectedCategory.save_as), "html.parser")
        return (selectedCategory, soup)

    def gen_author_and_html_from_name(self, name):
        context = self.settings.copy()
        context['generated_content'] = {}
        context['static_links'] = set()
        context['static_content'] = {}
        context['localsiteurl'] = self.settings['SITEURL']
        generator = ArticlesGenerator( context=context, settings=self.settings, path=CONTENT_DIR, theme=self.settings['THEME'], output_path=OUTPUT_DIR)
        generator.generate_context()
        generator.generate_authors(self.writer.write_file)
        selectedAuthor = None

        for author, articles in generator.authors:
            if author.name == name:
                selectedAuthor = author

        soup = BeautifulSoup(open("./"+self.writer.output_path+'/'+selectedAuthor.save_as), "html.parser")
        return (selectedAuthor, soup)

@@ -1,44 +0,0 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import locale
from shutil import copy, rmtree

from support import (get_my_settings, unittest, BaseTest, CUR_DIR, CONTENT_DIR, OUTPUT_DIR)

def tearDownModule():
    print("teardown module")
    try:
        rmtree(OUTPUT_DIR)
    except OSError as e:
        print ("Error: %s - %s." % (e.filename,e.strerror))

class AuthorSocialLinksTest(unittest.TestCase, BaseTest):

    def setUp(self):
        self.initSettings()

    def tearDown(self):
        locale.setlocale(locale.LC_ALL, self.old_locale)

    def test_linkedin_link(self):
        authorName = "raj"
        self.settings['AUTHORS_BIO'] = {
            authorName: {
                'cover': "http://examble.com/cover.jpg",
                'linkedin': "mylinkedinname"
            }
        }
        rstPath="content/article_with_og_image.rst"
        result, soup = self.gen_article_and_html_from_rst(rstPath)
        selected = soup.find(name="span", attrs={"class": "post-author-linkedin"})
        # Assertion
        self.assertTrue("https://www.linkedin.com/in/mylinkedinname" in str(selected))

        result, soup = self.gen_author_and_html_from_name(authorName)
        selected = soup.find(name="span", attrs={"class": "post-author-linkedin"})
        # Assertion
        self.assertTrue("https://www.linkedin.com/in/mylinkedinname" in str(selected))

if __name__ == '__main__':
    unittest.main()

@@ -1,471 +0,0 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import locale
from shutil import copy, rmtree

from support import (get_my_settings, unittest, BaseTest, CUR_DIR, CONTENT_DIR, OUTPUT_DIR)

def setUpModule():
    print("setup module")

def tearDownModule():
    print("teardown module")
    try:
        rmtree(OUTPUT_DIR)
    except OSError as e:
        print ("Error: %s - %s." % (e.filename,e.strerror))

class ArticleCoverImageTest(unittest.TestCase, BaseTest):

    @classmethod
    def setUpClass(cls):
        print("setUpClass")

    @classmethod
    def tearDownClass(cls):
        print("tearDownClass")

    def setUp(self):
        self.initSettings()

    def tearDown(self):
        locale.setlocale(locale.LC_ALL, self.old_locale)

    def test_article_cover(self):
        self.settings['SITEURL']="http://www.example.com"
        rstPath="content/article_with_cover_image.rst"
        result, soup = self.gen_article_and_html_from_rst(rstPath)
        selected = soup.find(name="div", attrs={"class": "post-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+result.cover in selected["style"])

    def test_article_header_cover(self):
        self.settings['HEADER_COVER']='/assets/images/header_cover.jpg'
        rstPath="content/article_without_cover.rst"
        result, soup = self.gen_article_and_html_from_rst(rstPath)
        selected = soup.find(name="div", attrs={"class": "post-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+self.settings['HEADER_COVER'] in selected["style"])

    def test_article_header_http_cover(self):
        self.settings['HEADER_COVER']='http://example.com/cover.jpg'
        rstPath="content/article_without_cover.rst"
        result, soup = self.gen_article_and_html_from_rst(rstPath)
        selected = soup.find(name="div", attrs={"class": "post-cover cover"})
        # Assertion
        self.assertTrue(self.settings['HEADER_COVER'] in selected["style"])

    def test_article_theme_cover(self):
        rstPath="content/article_without_cover.rst"
        result, soup = self.gen_article_and_html_from_rst(rstPath)
        selected = soup.find(id="post-header")
        # Assertion
        self.assertTrue("class" not in selected)

    def test_article_header_color(self):
        self.settings['HEADER_COLOR']='blue'
        rstPath="content/article_without_cover.rst"
        result, soup = self.gen_article_and_html_from_rst(rstPath)
        selected = soup.find(name="div", attrs={"class": "post-cover cover"})
        # Assertion
        self.assertTrue(self.settings['HEADER_COLOR'] in selected["style"])

    def test_article_http_cover(self):
        rstPath="content/article_with_http_cover_image.rst"
        result, soup = self.gen_article_and_html_from_rst(rstPath)
        selected = soup.find(name="div", attrs={"class": "post-cover cover"})
        # Assertion
        self.assertTrue(result.cover in selected["style"])

    def test_article_og_cover(self):
        rstPath="content/article_with_og_image.rst"
        result, soup = self.gen_article_and_html_from_rst(rstPath)
        selected = soup.find(name="div", attrs={"class": "post-cover cover"})
        # Assertion
        self.assertTrue(result.og_image in selected["style"])

class PageCoverImageTest(unittest.TestCase, BaseTest):

    def setUp(self):
        self.initSettings()

    def tearDown(self):
        locale.setlocale(locale.LC_ALL, self.old_locale)

    def test_page_cover(self):
        self.settings['SITEURL']="http://www.example.com"
        rstPath="content/pages/page_with_cover_image.rst"
        result, soup = self.gen_page_and_html_from_rst(rstPath)
        selected = soup.find(name="div", attrs={"class": "post-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+result.cover in selected["style"])

    def test_page_header_cover(self):
        self.settings['HEADER_COVER']='/assets/images/header_cover.jpg'
        rstPath="content/pages/page_without_cover_image.rst"
        result, soup = self.gen_page_and_html_from_rst(rstPath)
        selected = soup.find(name="div", attrs={"class": "post-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+self.settings['HEADER_COVER'] in selected["style"])

    def test_page_header_http_cover(self):
        self.settings['HEADER_COVER']='http://example.com/cover.jpg'
        rstPath="content/pages/page_without_cover_image.rst"
        result, soup = self.gen_page_and_html_from_rst(rstPath)
        selected = soup.find(name="div", attrs={"class": "post-cover cover"})
        # Assertion
        self.assertTrue(self.settings['HEADER_COVER'] in selected["style"])

    def test_page_theme_cover(self):
        rstPath="content/pages/page_without_cover_image.rst"
        result, soup = self.gen_page_and_html_from_rst(rstPath)
        selected = soup.find(id="post-header")
        # Assertion
        self.assertTrue("class" not in selected)

    def test_page_header_color(self):
        self.settings['HEADER_COLOR']='blue'
        rstPath="content/pages/page_without_cover_image.rst"
        result, soup = self.gen_page_and_html_from_rst(rstPath)
        selected = soup.find(name="div", attrs={"class": "post-cover cover"})
        # Assertion
        self.assertTrue(self.settings['HEADER_COLOR'] in selected["style"])

    def test_page_http_cover(self):
        rstPath="content/pages/page_with_http_cover_image.rst"
        result, soup = self.gen_page_and_html_from_rst(rstPath)
        selected = soup.find(name="div", attrs={"class": "post-cover cover"})
        # Assertion
        self.assertTrue(result.cover in selected["style"])

    def test_page_og_cover(self):
        rstPath="content/pages/page_with_og_image.rst"
        result, soup = self.gen_page_and_html_from_rst(rstPath)
        selected = soup.find(name="div", attrs={"class": "post-cover cover"})
        # Assertion
        self.assertTrue(result.og_image in selected["style"])

class TagCoverImageTest(unittest.TestCase, BaseTest):

    def setUp(self):
        self.initSettings()

    def tearDown(self):
        locale.setlocale(locale.LC_ALL, self.old_locale)

    def test_footag_theme_cover(self):
        result, soup = self.gen_tag_and_html_from_name("footag")
        selected = soup.find(id="blog-header")
        # Assertion
        self.assertTrue("class" not in selected)

    def test_footag_cover(self):
        tagName = "footag"
        self.settings['HEADER_COVERS_BY_TAG'] = {
            tagName: "/assets/images/foo_tag_cover.jpg"
        }
        result, soup = self.gen_tag_and_html_from_name(tagName)
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+self.settings['HEADER_COVERS_BY_TAG'][tagName] in selected["style"])

    def test_footag_http_cover(self):
        tagName = "footag"
        self.settings['HEADER_COVERS_BY_TAG'] = {
            tagName: "http://examble.com/cover.jpg"
        }
        result, soup = self.gen_tag_and_html_from_name(tagName)
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['HEADER_COVERS_BY_TAG'][tagName] in selected["style"])

    def test_footag_header_cover(self):
        self.settings['SITEURL'] = "http://example.com"
        self.settings["HEADER_COVER"] = "/assets/images/header_cover.jpg"
        result, soup = self.gen_tag_and_html_from_name("footag")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+self.settings["HEADER_COVER"] in selected["style"])

    def test_footag_header_http_cover(self):
        self.settings["HEADER_COVER"] = "http://example.com/cover.jpg"
        result, soup = self.gen_tag_and_html_from_name("footag")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings["HEADER_COVER"] in selected["style"])

    def test_footag_header_color(self):
        self.settings["HEADER_COLOR"] = "red"
        result, soup = self.gen_tag_and_html_from_name("footag")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings["HEADER_COLOR"] in selected["style"])

    def test_bartag_theme_cover(self):
        result, soup = self.gen_tag_and_html_from_name("bartag")
        selected = soup.find(id="blog-header")
        # Assertion
        self.assertTrue("class" not in selected)

    def test_bartag_cover(self):
        tagName = "bartag"
        self.settings['HEADER_COVERS_BY_TAG'] = {
            tagName: "/assets/images/bar_tag_cover.jpg"
        }
        result, soup = self.gen_tag_and_html_from_name(tagName)
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+self.settings['HEADER_COVERS_BY_TAG'][tagName] in selected["style"])

    def test_bartag_http_cover(self):
        tagName = "bartag"
        self.settings['HEADER_COVERS_BY_TAG'] = {
            tagName: "http://examble.com/cover.jpg"
        }
        result, soup = self.gen_tag_and_html_from_name(tagName)
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['HEADER_COVERS_BY_TAG'][tagName] in selected["style"])

    def test_bartag_header_cover(self):
        self.settings['SITEURL'] = "http://example.com"
        self.settings["HEADER_COVER"] = "/assets/images/header_cover.jpg"
        result, soup = self.gen_tag_and_html_from_name("bartag")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+self.settings["HEADER_COVER"] in selected["style"])

    def test_bartag_header_http_cover(self):
        self.settings["HEADER_COVER"] = "http://example.com/cover.jpg"
        result, soup = self.gen_tag_and_html_from_name("bartag")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings["HEADER_COVER"] in selected["style"])

    def test_bartag_header_color(self):
        self.settings["HEADER_COLOR"] = "red"
        result, soup = self.gen_tag_and_html_from_name("bartag")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings["HEADER_COLOR"] in selected["style"])


class CategoryCoverImageTest(unittest.TestCase, BaseTest):

    def setUp(self):
        self.initSettings()

    def tearDown(self):
        locale.setlocale(locale.LC_ALL, self.old_locale)

    def test_foo_theme_cover(self):
        result, soup = self.gen_category_and_html_from_name("foo")
        selected = soup.find(id="blog-header")
        # Assertion
        self.assertTrue("class" not in selected)

    def test_foo_cover(self):
        categoryName = "foo"
        self.settings['HEADER_COVERS_BY_CATEGORY'] = {
            categoryName: "/assets/images/foo_category_cover.jpg"
        }
        result, soup = self.gen_category_and_html_from_name(categoryName)
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+self.settings['HEADER_COVERS_BY_CATEGORY'][categoryName] in selected["style"])

    def test_foo_http_cover(self):
        categoryName = "foo"
        self.settings['HEADER_COVERS_BY_CATEGORY'] = {
            categoryName: "http://examble.com/cover.jpg"
        }
        result, soup = self.gen_category_and_html_from_name(categoryName)
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['HEADER_COVERS_BY_CATEGORY'][categoryName] in selected["style"])

    def test_foo_header_cover(self):
        self.settings['SITEURL'] = "http://example.com"
        self.settings["HEADER_COVER"] = "/assets/images/header_cover.jpg"
        result, soup = self.gen_category_and_html_from_name("foo")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+self.settings["HEADER_COVER"] in selected["style"])

    def test_foo_header_http_cover(self):
        self.settings["HEADER_COVER"] = "http://example.com/cover.jpg"
        result, soup = self.gen_category_and_html_from_name("foo")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings["HEADER_COVER"] in selected["style"])

    def test_foo_header_color(self):
        self.settings["HEADER_COLOR"] = "red"
        result, soup = self.gen_category_and_html_from_name("foo")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings["HEADER_COLOR"] in selected["style"])

    def test_bar_theme_cover(self):
        result, soup = self.gen_category_and_html_from_name("bar")
        selected = soup.find(id="blog-header")
        # Assertion
        self.assertTrue("class" not in selected)

    def test_bar_cover(self):
        categoryName = "bar"
        self.settings['HEADER_COVERS_BY_CATEGORY'] = {
            categoryName: "/assets/images/bar_category_cover.jpg"
        }
        result, soup = self.gen_category_and_html_from_name(categoryName)
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+self.settings['HEADER_COVERS_BY_CATEGORY'][categoryName] in selected["style"])

    def test_bar_http_cover(self):
        categoryName = "bar"
        self.settings['HEADER_COVERS_BY_CATEGORY'] = {
            categoryName: "http://examble.com/cover.jpg"
        }
        result, soup = self.gen_category_and_html_from_name(categoryName)
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['HEADER_COVERS_BY_CATEGORY'][categoryName] in selected["style"])

    def test_bar_header_cover(self):
        self.settings['SITEURL'] = "http://example.com"
        self.settings["HEADER_COVER"] = "/assets/images/header_cover.jpg"
        result, soup = self.gen_category_and_html_from_name("bar")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+self.settings["HEADER_COVER"] in selected["style"])

    def test_bar_header_http_cover(self):
        self.settings["HEADER_COVER"] = "http://example.com/cover.jpg"
        result, soup = self.gen_category_and_html_from_name("bar")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings["HEADER_COVER"] in selected["style"])

    def test_bar_header_color(self):
        self.settings["HEADER_COLOR"] = "red"
        result, soup = self.gen_category_and_html_from_name("bar")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings["HEADER_COLOR"] in selected["style"])

class AuthorCoverImageTest(unittest.TestCase, BaseTest):

    def setUp(self):
        self.initSettings()

    def tearDown(self):
        locale.setlocale(locale.LC_ALL, self.old_locale)

    def test_arul_theme_cover(self):
        result, soup = self.gen_author_and_html_from_name("arul")
        selected = soup.find(id="blog-header")
        # Assertion
        self.assertTrue("class" not in selected)

    def test_arul_cover(self):
        authorName = "arul"
        self.settings['AUTHORS_BIO'] = {
            authorName: {
                'cover': "/assets/images/arul_author_cover.jpg"
            }
        }
        result, soup = self.gen_author_and_html_from_name(authorName)
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+self.settings['AUTHORS_BIO'][authorName]['cover'] in selected["style"])

    def test_arul_http_cover(self):
        authorName = "arul"
        self.settings['AUTHORS_BIO'] = {
            authorName: {
                'cover': "http://examble.com/cover.jpg"
            }
        }
        result, soup = self.gen_author_and_html_from_name(authorName)
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['AUTHORS_BIO'][authorName]['cover'] in selected["style"])

    def test_arul_header_cover(self):
        self.settings['SITEURL'] = "http://example.com"
        self.settings["HEADER_COVER"] = "/assets/images/header_cover.jpg"
        result, soup = self.gen_author_and_html_from_name("arul")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+self.settings["HEADER_COVER"] in selected["style"])

    def test_arul_header_http_cover(self):
        self.settings["HEADER_COVER"] = "http://example.com/cover.jpg"
        result, soup = self.gen_author_and_html_from_name("arul")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings["HEADER_COVER"] in selected["style"])

    def test_arul_header_color(self):
        self.settings["HEADER_COLOR"] = "red"
        result, soup = self.gen_author_and_html_from_name("arul")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings["HEADER_COLOR"] in selected["style"])

    def test_raj_theme_cover(self):
        result, soup = self.gen_author_and_html_from_name("raj")
        selected = soup.find(id="blog-header")
        # Assertion
        self.assertTrue("class" not in selected)

    def test_raj_cover(self):
        authorName = "raj"
        self.settings['AUTHORS_BIO'] = {
            authorName: {
                'cover': "/assets/images/raj_author_cover.jpg"
            }
        }
        result, soup = self.gen_author_and_html_from_name(authorName)
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+self.settings['AUTHORS_BIO'][authorName]['cover'] in selected["style"])

    def test_raj_http_cover(self):
        authorName = "raj"
        self.settings['AUTHORS_BIO'] = {
            authorName: {
                'cover': "http://examble.com/cover.jpg"
            }
        }
        result, soup = self.gen_author_and_html_from_name(authorName)
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['AUTHORS_BIO'][authorName]['cover'] in selected["style"])

    def test_raj_header_cover(self):
        self.settings['SITEURL'] = "http://example.com"
        self.settings["HEADER_COVER"] = "/assets/images/header_cover.jpg"
        result, soup = self.gen_author_and_html_from_name("raj")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings['SITEURL']+'/'+self.settings["HEADER_COVER"] in selected["style"])

    def test_raj_header_http_cover(self):
        self.settings["HEADER_COVER"] = "http://example.com/cover.jpg"
        result, soup = self.gen_author_and_html_from_name("raj")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings["HEADER_COVER"] in selected["style"])

    def test_raj_header_color(self):
        self.settings["HEADER_COLOR"] = "red"
        result, soup = self.gen_author_and_html_from_name("raj")
        selected = soup.find(name="div", attrs={"class": "blog-cover cover"})
        # Assertion
        self.assertTrue(self.settings["HEADER_COLOR"] in selected["style"])

if __name__ == '__main__':
    unittest.main()

@@ -1,3 +0,0 @@
[debugger]
always-on = True
errors-only = False