Columns: input (string, lengths 0 to 2.17k) · instruction (string, lengths 18 to 2.94k) · output (string, lengths 47 to 3.36k)

input:

# -*- coding: utf-8 -*-
"""Ideaxbox Cultura, France"""
from .idb import *  # noqa
from django.utils.translation import ugettext_lazy as _

IDEASCUBE_NAME = u"Cultura"
IDEASCUBE_PLACE_NAME = _("city")
COUNTRIES_FIRST = ['FR']
TIME_ZONE = None
LANGUAGE_CODE = 'fr'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
    (_('Personal informations'), ['serial', 'short_name', 'full_name', 'latin_name', 'birth_year', 'gender']),  # noqa
)
HOME_CARDS = HOME_CARDS + [
    {
        'id': 'cpassorcier',
    },
    {
        'id': 'wikisource',
    },
    {
        'id': 'software',
    },
    {
        'id': 'ted',
    },
    {
        'id': 'ubuntudoc',
    },
]

instruction:

Remove "software" card from Cultura conf

output:

# -*- coding: utf-8 -*-
"""Ideaxbox Cultura, France"""
from .idb import *  # noqa
from django.utils.translation import ugettext_lazy as _

IDEASCUBE_NAME = u"Cultura"
IDEASCUBE_PLACE_NAME = _("city")
COUNTRIES_FIRST = ['FR']
TIME_ZONE = None
LANGUAGE_CODE = 'fr'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
    (_('Personal informations'), ['serial', 'short_name', 'full_name', 'latin_name', 'birth_year', 'gender']),  # noqa
)
HOME_CARDS = HOME_CARDS + [
    {
        'id': 'cpassorcier',
    },
    {
        'id': 'wikisource',
    },
    {
        'id': 'ted',
    },
    {
        'id': 'ubuntudoc',
    },
]

input:

#!/usr/bin/python
# -*- coding: utf-8 -*-

import sys
import os
import argparse
import re

reago_dir = '/tools/rna_manipulation/reago/reago/'


def add_read_pair_num(input_filepath, output_filepath, read_pair_num):
    to_add = '.' + str(read_pair_num)
    with open(input_filepath, 'r') as input_file:
        with open(output_filepath, 'w') as output_file:
            for line in input_file:
                if line[0] == '>':
                    split_line = line.split()
                    seq_id = split_line[0]
                    if seq_id.rfind(to_add) != (len(seq_id) - len(to_add)):
                        split_line[0] = seq_id + to_add
                    output_file.write(' '.join(split_line) + '\n')
                else:
                    output_file.write(line)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--r1_sequence_file', required=True)
    parser.add_argument('--r2_sequence_file', required=True)
    args = parser.parse_args()

    add_read_pair_num(args.r1_input_sequence_file, args.r1_input_sequence_file, 1)
    add_read_pair_num(args.r2_input_sequence_file, args.r2_input_sequence_file, 2)

instruction:

Correct argument name in script to format reago input file

output:

#!/usr/bin/python
# -*- coding: utf-8 -*-

import sys
import os
import argparse
import re

reago_dir = '/tools/rna_manipulation/reago/reago/'


def add_read_pair_num(input_filepath, output_filepath, read_pair_num):
    to_add = '.' + str(read_pair_num)
    with open(input_filepath, 'r') as input_file:
        with open(output_filepath, 'w') as output_file:
            for line in input_file:
                if line[0] == '>':
                    split_line = line.split()
                    seq_id = split_line[0]
                    if seq_id.rfind(to_add) != (len(seq_id) - len(to_add)):
                        split_line[0] = seq_id + to_add
                    output_file.write(' '.join(split_line) + '\n')
                else:
                    output_file.write(line)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--r1_sequence_file', required=True)
    parser.add_argument('--r2_sequence_file', required=True)
    args = parser.parse_args()

    add_read_pair_num(args.r1_sequence_file, args.r1_sequence_file, 1)
    add_read_pair_num(args.r2_sequence_file, args.r2_sequence_file, 2)

input:

from __future__ import unicode_literals, absolute_import
from django.core.management.base import BaseCommand
from ci import models, views, TimeUtils
from datetime import timedelta


class Command(BaseCommand):
    help = 'Cancel old Civet jobs. When a specific civet client is no longer running, it can leave jobs lying around that other clients have to ignore.'

    def add_arguments(self, parser):
        parser.add_argument('--dryrun', default=False, action='store_true',
            help="Don't make any changes, just report what would have happened")
        parser.add_argument('--days', required=True, type=int,
            help="Cancel jobs older than this many days")

    def handle(self, *args, **options):
        dryrun = options["dryrun"]
        days = options["days"]
        d = TimeUtils.get_local_time() - timedelta(days=days)
        jobs = models.Job.objects.filter(active=True,
            ready=True,
            status=models.JobStatus.NOT_STARTED,
            created__lt=d)
        count = jobs.count()
        prefix = ""
        if dryrun:
            prefix = "DRY RUN: "
        for job in jobs.all():
            self.stdout.write("%sCancel job %s: %s: %s" % (prefix, job.pk, job, job.created))
            if not dryrun:
                views.set_job_canceled(job, "Civet client hasn't run this job in too long a time")
                job.event.set_complete_if_done()
        if count == 0:
            self.stdout.write("No jobs to cancel")

instruction:

Update cancel old job message

output:

from __future__ import unicode_literals, absolute_import
from django.core.management.base import BaseCommand
from ci import models, views, TimeUtils
from datetime import timedelta


class Command(BaseCommand):
    help = 'Cancel old Civet jobs. When a specific civet client is no longer running, it can leave jobs lying around that other clients have to ignore.'

    def add_arguments(self, parser):
        parser.add_argument('--dryrun', default=False, action='store_true',
            help="Don't make any changes, just report what would have happened")
        parser.add_argument('--days', required=True, type=int,
            help="Cancel jobs older than this many days")

    def handle(self, *args, **options):
        dryrun = options["dryrun"]
        days = options["days"]
        d = TimeUtils.get_local_time() - timedelta(days=days)
        jobs = models.Job.objects.filter(active=True,
            ready=True,
            status=models.JobStatus.NOT_STARTED,
            created__lt=d)
        count = jobs.count()
        prefix = ""
        if dryrun:
            prefix = "DRY RUN: "
        for job in jobs.all():
            self.stdout.write("%sCancel job %s: %s: %s" % (prefix, job.pk, job, job.created))
            if not dryrun:
                views.set_job_canceled(job, "Canceled due to civet client not running this job in too long a time")
                job.event.set_complete_if_done()
        if count == 0:
            self.stdout.write("No jobs to cancel")

input:

# Case Conductor is a Test Case Management system.
# Copyright (C) 2011 uTest Inc.
#
# This file is part of Case Conductor.
#
# Case Conductor is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Case Conductor is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Case Conductor. If not, see <http://www.gnu.org/licenses/>.
from django import template
from django.core.urlresolvers import reverse

from ..models import TestCycle, TestRun, TestRunIncludedTestCase


register = template.Library()


@register.filter
def results_detail_url(obj):
    if isinstance(obj, TestCycle):
        return reverse("results_testruns") + "?testCycle=%s" % obj.id
    elif isinstance(obj, TestRun):
        return reverse("results_testcases") + "?testRun=%s" % obj.id
    elif isinstance(obj, TestRunIncludedTestCase):
        return reverse("results_testcase_detail", kwargs={"itc_id": obj.id})
    return ""

instruction:

Fix result status chiclet links for new-style filter querystrings.

output:

# Case Conductor is a Test Case Management system.
# Copyright (C) 2011 uTest Inc.
#
# This file is part of Case Conductor.
#
# Case Conductor is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Case Conductor is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Case Conductor. If not, see <http://www.gnu.org/licenses/>.
from django import template
from django.core.urlresolvers import reverse

from ..models import TestCycle, TestRun, TestRunIncludedTestCase


register = template.Library()


@register.filter
def results_detail_url(obj):
    if isinstance(obj, TestCycle):
        return reverse("results_testruns") + "?filter-testCycle=%s" % obj.id
    elif isinstance(obj, TestRun):
        return reverse("results_testcases") + "?filter-testRun=%s" % obj.id
    elif isinstance(obj, TestRunIncludedTestCase):
        return reverse("results_testcase_detail", kwargs={"itc_id": obj.id})
    return ""

input:

#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#

"""This module exports the Hlint plugin class."""

from SublimeLinter.lint import Linter


class Hlint(Linter):
    """Provides an interface to hlint."""

    syntax = ('haskell', 'haskell-sublimehaskell', 'literate haskell')
    cmd = 'hlint'
    regex = (
        r'^.+:(?P<line>\d+):'
        '(?P<col>\d+):\s*'
        '(?:(?P<error>Error)|(?P<warning>Warning)):\s*'
        '(?P<message>.+)$'
    )
    multiline = True
    tempfile_suffix = {
        'haskell': 'hs',
        'haskell-sublimehaskell': 'hs',
        'literate haskell': 'lhs'
    }

instruction:

Update to new `defaults` configuration

output:

#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Hardy Jones
# Copyright (c) 2013
#
# License: MIT
#

"""This module exports the Hlint plugin class."""

from SublimeLinter.lint import Linter


class Hlint(Linter):
    """Provides an interface to hlint."""

    defaults = {
        'selector': 'source.haskell'
    }
    cmd = 'hlint'
    regex = (
        r'^.+:(?P<line>\d+):'
        '(?P<col>\d+):\s*'
        '(?:(?P<error>Error)|(?P<warning>Warning)):\s*'
        '(?P<message>.+)$'
    )
    multiline = True
    tempfile_suffix = 'hs'

input:

from django.db.models.signals import (
    post_save,
)
from django.dispatch import receiver

from .models import (
    Performance,
    Session,
)


@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
    """Create sentinels."""
    if not raw:
        if created:
            s = 1
            while s <= instance.round.num_songs:
                song = instance.songs.create(
                    performance=instance,
                    num=s,
                )
                s += 1
                judges = instance.round.session.judges.filter(
                    category__in=[
                        instance.round.session.judges.model.CATEGORY.music,
                        instance.round.session.judges.model.CATEGORY.presentation,
                        instance.round.session.judges.model.CATEGORY.singing,
                    ]
                )
                for judge in judges:
                    judge.scores.create(
                        judge=judge,
                        song=song,
                        category=judge.category,
                        kind=judge.kind,
                    )


@receiver(post_save, sender=Session)
def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
    """Create sentinels."""
    if not raw:
        if created:
            i = 1
            while i <= instance.num_rounds:
                instance.rounds.create(
                    num=i,
                    kind=(instance.num_rounds - i) + 1,
                )
                i += 1

instruction:

Create sentinel rounds on Session creation

output:

from django.db.models.signals import (
    post_save,
)
from django.dispatch import receiver

from .models import (
    Performance,
    Session,
)


@receiver(post_save, sender=Session)
def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
    """Create sentinels."""
    if not raw:
        if created:
            i = 1
            while i <= instance.num_rounds:
                instance.rounds.create(
                    num=i,
                    kind=(instance.num_rounds - i) + 1,
                )
                i += 1


@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
    """Create sentinels."""
    if not raw:
        if created:
            s = 1
            while s <= instance.round.num_songs:
                song = instance.songs.create(
                    performance=instance,
                    num=s,
                )
                s += 1
                judges = instance.round.session.judges.filter(
                    category__in=[
                        instance.round.session.judges.model.CATEGORY.music,
                        instance.round.session.judges.model.CATEGORY.presentation,
                        instance.round.session.judges.model.CATEGORY.singing,
                    ]
                )
                for judge in judges:
                    judge.scores.create(
                        judge=judge,
                        song=song,
                        category=judge.category,
                        kind=judge.kind,
                    )

input:

# With mod_python we'll have to delay importing trac.web.api until
# modpython_frontend.handler() has been called since the
# PYTHON_EGG_CACHE variable is set from there
#
# TODO: Remove this once the Genshi zip_safe issue has been resolved.

import os
from pkg_resources import get_distribution

if not os.path.isdir(get_distribution('genshi').location):
    try:
        import mod_python.apache
        import sys
        if 'trac.web.modpython_frontend' in sys.modules:
            from trac.web.api import *
    except ImportError:
        from trac.web.api import *
else:
    from trac.web.api import *

instruction:

Fix race condition during `mimetypes` initialization. Initial patch from Steven R. Loomis. Closes #8629.

output:

# Workaround for http://bugs.python.org/issue6763 and
# http://bugs.python.org/issue5853 thread issues
import mimetypes
mimetypes.init()

# With mod_python we'll have to delay importing trac.web.api until
# modpython_frontend.handler() has been called since the
# PYTHON_EGG_CACHE variable is set from there
#
# TODO: Remove this once the Genshi zip_safe issue has been resolved.

import os
from pkg_resources import get_distribution

if not os.path.isdir(get_distribution('genshi').location):
    try:
        import mod_python.apache
        import sys
        if 'trac.web.modpython_frontend' in sys.modules:
            from trac.web.api import *
    except ImportError:
        from trac.web.api import *
else:
    from trac.web.api import *

input:

# -*- coding: utf-8 -*-
'''
ZeroMQ transport for napalm-logs.
'''
from __future__ import absolute_import
from __future__ import unicode_literals

# Import stdlib
import json

# Import third party libs
import zmq

# Import napalm-logs pkgs
from napalm_logs.transport.base import TransportBase


class ZMQTransport(TransportBase):
    '''
    ZMQ transport class.
    '''
    def __init__(self, addr, port):
        self.addr = addr
        self.port = port

    def start(self):
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.PUB)
        self.socket.bind('tcp://{addr}:{port}'.format(
            addr=self.addr,
            port=self.port)
        )

    def serialise(self, obj):
        return json.dumps(obj)

    def publish(self, obj):
        self.socket.send(
            self.serialise(obj)
        )

    def tear_down(self):
        if hasattr(self, 'socket'):
            self.socket.close()
        if hasattr(self, 'context'):
            self.context.term()

instruction:

Raise bind exception and log

output:

# -*- coding: utf-8 -*-
'''
ZeroMQ transport for napalm-logs.
'''
from __future__ import absolute_import
from __future__ import unicode_literals

# Import stdlib
import json
import logging

# Import third party libs
import zmq

# Import napalm-logs pkgs
from napalm_logs.exceptions import BindException
from napalm_logs.transport.base import TransportBase

log = logging.getLogger(__name__)


class ZMQTransport(TransportBase):
    '''
    ZMQ transport class.
    '''
    def __init__(self, addr, port):
        self.addr = addr
        self.port = port

    def start(self):
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.PUB)
        try:
            self.socket.bind('tcp://{addr}:{port}'.format(
                addr=self.addr,
                port=self.port)
            )
        except zmq.error.ZMQError as err:
            log.error(err, exc_info=True)
            raise BindException(err)

    def serialise(self, obj):
        return json.dumps(obj)

    def publish(self, obj):
        self.socket.send(
            self.serialise(obj)
        )

    def tear_down(self):
        if hasattr(self, 'socket'):
            self.socket.close()
        if hasattr(self, 'context'):
            self.context.term()

input:

from django.db import models
from django.utils.translation import ugettext_lazy as _

{%% for model_name, props in all_models.iteritems() %%}
{%% set model_name = model_name|capitalize %%}
class {{{ model_name }}}(models.Model):
    {%% for prop, value in props.iteritems() %%}
    {{{ prop }}} = {{{ value|model_field }}}{%% endfor %%}

    def __unicode__(self):
        fields = [{%% for prop, value in props.iteritems() %%}'{{{ prop }}}', {%% endfor %%}]
        return unicode('{%% for prop, value in props.iteritems() %%}<{}>, {%% endfor %%}'.format(*fields))
{%% endfor %%}

instruction:

Change string representation for django model

output:

from django.db import models
from django.utils.translation import ugettext_lazy as _

{%% for model_name, props in all_models.iteritems() %%}
{%% set model_name = model_name|capitalize %%}
class {{{ model_name }}}(models.Model):
    {%% for prop, value in props.iteritems() %%}
    {{{ prop }}} = {{{ value|model_field }}}{%% endfor %%}

    def __unicode__(self):
        fields = [{%% for prop, value in props.iteritems() %%}'{{{ prop }}}', {%% endfor %%}]
        return unicode('<{%% for prop, value in props.iteritems() %%}{}.{%% endfor %%}>'.format(*fields))
{%% endfor %%}

input:

'''
Database module. Get the database, convert it to the built-in data structure
and hold a link to it. The module should be initialized before any other
modules except mailer and log.

Design: Heranort
'''

'''
Connect to the database.
'''
def connect_to_datab():
    pass

'''
Get raw data of the database.
'''
def datab_get_raw_data():
    pass

'''
Process the raw data.
'''
def datab_process_data():
    pass

'''
Preserve the processed data into somewhere.
'''
def datab_preserve_data():
    pass

'''
Check whether the history is modified. If so, emit warning.
'''
def check_health():
    pass

instruction:

Add function of data connection

output:

'''
Database module. Get the database, convert it to the built-in data structure
and hold a link to it. The module should be initialized before any other
modules except mailer and log.

Design: Heranort
'''
import sqlite3, os

'''
Connect to the database.
'''
def connect_to_datab():
    path = os.getcwd()
    pparent_path = os.path.dirname(os.path.dirname(path))  # get the root dir
    # print(pparent_path)
    sql = sqlite3.connect(pparent_path + '\data\data.db')
    return sql

'''
Get raw data of the database.
'''
def datab_get_raw_data(sql):
    cur = sql.cursor()
    cur.execute('select * from flight')   # fetch the raw data of flight
    raw_data_flight = cur.fetchall()
    cur.execute('select * from train')    # fetch the raw data of train
    raw_data_train = cur.fetchall()
    cur.execute('select * from highway')  # fetch the raw data of highway
    raw_data_bus = cur.fetchall()
    return (raw_data_flight, raw_data_train, raw_data_bus)

'''
Process the raw data.
'''
def datab_process_data(raw_data_flight, raw_data_train, raw_data_bus):
    data_price = [[-1 for i in range(10)] for i in range(10)]
    data_instance = [[-1 for i in range(10)] for i in range(10)]
    data_time = [[-1 for i in range(10)] for i in range(10)]
    for element in raw_data_bus:
        pass

'''
Preserve the processed data into somewhere.
'''
def datab_preserve_data():
    pass

'''
Check whether the history is modified. If so, emit warning.
'''
def check_health():
    pass

if(__name__ == '__main__'):
    sql = connect_to_datab()
    (raw_data_flight, raw_data_train, raw_data_bus) = datab_get_raw_data(sql)
    datab_process_data(raw_data_flight, raw_data_train, raw_data_bus)

input:

# File: prime_factors.py
# Purpose: Compute the prime factors of a given natural number.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 26 September 2016, 12:05 AM

instruction:

Set condition [1 is not a prime]

output:

# File: prime_factors.py
# Purpose: Compute the prime factors of a given natural number.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 26 September 2016, 12:05 AM


def prime(number):
    # 1 (and anything smaller) is not a prime
    if number <= 1:
        return False
    # a prime has no divisor in the range [2, number)
    return all(number % divisor != 0 for divisor in range(2, number))

input:

from django.conf import settings
from django.core.management import BaseCommand, CommandError

from frontend.models import Measure


class Command(BaseCommand):
    def handle(self, measure_id, **options):
        if not measure_id.startswith(settings.MEASURE_PREVIEW_PREFIX):
            raise CommandError(
                f"Not deleting '{measure_id}' because it doesn't look like a preview "
                f"measure (it doesn't start with '{settings.MEASURE_PREVIEW_PREFIX}')"
            )

        try:
            measure = Measure.objects.get(id=measure_id)
        except Measure.DoesNotExist:
            raise CommandError(f"No measure with ID '{measure_id}'")

        # The ON DELETE CASCADE configuration ensures that all MeasureValues are deleted
        # as well
        measure.delete()

        self.stdout.write(f"Deleted measure '{measure_id}'")

    def add_arguments(self, parser):
        parser.add_argument("measure_id")

instruction:

Delete measures from BigQuery as well

output:

from django.conf import settings
from django.core.management import BaseCommand, CommandError

from frontend.models import Measure
from gcutils.bigquery import Client


class Command(BaseCommand):
    def handle(self, measure_id, **options):
        if not measure_id.startswith(settings.MEASURE_PREVIEW_PREFIX):
            raise CommandError(
                f"Not deleting '{measure_id}' because it doesn't look like a preview "
                f"measure (it doesn't start with '{settings.MEASURE_PREVIEW_PREFIX}')"
            )

        try:
            measure = Measure.objects.get(id=measure_id)
        except Measure.DoesNotExist:
            raise CommandError(f"No measure with ID '{measure_id}'")

        delete_from_bigquery(measure_id)

        # The ON DELETE CASCADE configuration ensures that all MeasureValues are deleted
        # as well
        measure.delete()

        self.stdout.write(f"Deleted measure '{measure_id}'")

    def add_arguments(self, parser):
        parser.add_argument("measure_id")


def delete_from_bigquery(measure_id):
    # Dataset name from `import_measures.MeasureCalculation.get_table()`
    client = Client("measures")

    # Table naming convention from `import_measures.MeasureCalculation.table_name()`
    table_suffix = f"_data_{measure_id}"

    tables_to_delete = [
        table for table in client.list_tables() if table.table_id.endswith(table_suffix)
    ]
    for table in tables_to_delete:
        client.delete_table(table.table_id)

input:

# -*- coding: utf-8 -*-
"""Get public registrations for staff members.

    python -m scripts.staff_public_regs
"""
from collections import defaultdict
import logging

from modularodm import Q

from website.models import Node, User
from website.app import init_app

logger = logging.getLogger('staff_public_regs')

STAFF_GUIDS = [
    'jk5cv',  # Jeff
    'cdi38',  # Brian
    'edb8y',  # Johanna
    'hsey5',  # Courtney
    '5hdme',  # Melissa
]

def main():
    init_app(set_backends=True, routes=False)
    staff_registrations = defaultdict(list)
    users = [User.load(each) for each in STAFF_GUIDS]
    for registration in Node.find(Q('is_registration', 'eq', True) & Q('is_public', 'eq', True)):
        for user in users:
            if registration in user.node__contributed:
                staff_registrations[user._id].append(registration)

    for uid in staff_registrations:
        user = User.load(uid)
        user_regs = staff_registrations[uid]
        logger.info('{} ({}) on {} Public Registrations:'.format(
            user.fullname,
            user._id,
            len(user_regs))
        )
        for registration in user_regs:
            logger.info('\t{} ({}): {}'.format(registration.title,
                registration._id,
                registration.absolute_url)
            )

if __name__ == '__main__':
    main()

instruction:

Remove backref in main migration

output:

# -*- coding: utf-8 -*-
"""Get public registrations for staff members.

    python -m scripts.staff_public_regs
"""
from collections import defaultdict
import logging

from modularodm import Q

from website.models import Node, User
from website.app import init_app

logger = logging.getLogger('staff_public_regs')

STAFF_GUIDS = [
    'jk5cv',  # Jeff
    'cdi38',  # Brian
    'edb8y',  # Johanna
    'hsey5',  # Courtney
    '5hdme',  # Melissa
]

def main():
    init_app(set_backends=True, routes=False)
    staff_registrations = defaultdict(list)
    users = [User.load(each) for each in STAFF_GUIDS]
    for registration in Node.find(Q('is_registration', 'eq', True) & Q('is_public', 'eq', True)):
        for user in users:
            if registration in user.contributed:
                staff_registrations[user._id].append(registration)

    for uid in staff_registrations:
        user = User.load(uid)
        user_regs = staff_registrations[uid]
        logger.info('{} ({}) on {} Public Registrations:'.format(
            user.fullname,
            user._id,
            len(user_regs))
        )
        for registration in user_regs:
            logger.info('\t{} ({}): {}'.format(registration.title,
                registration._id,
                registration.absolute_url)
            )

if __name__ == '__main__':
    main()

input:

def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('fparser', parent_package, top_path)
    return config

instruction:

Add log.config to data files to fix installed fparser.

output:

def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('fparser', parent_package, top_path)
    config.add_data_files('log.config')
    return config

input:

#!/usr/bin/env python

import sys
import subprocess
import os
import os.path

from ebooklib import epub

# This is just a basic example which can easily break in real world.

if __name__ == '__main__':
    # read epub
    book = epub.read_epub(sys.argv[1])

    # get base filename from the epub
    base_name = os.path.basename(os.path.splitext(sys.argv[1])[0])

    for item in book.items:
        # convert into markdown if this is html
        if isinstance(item, epub.EpubHtml):
            proc = subprocess.Popen(['pandoc', '-f', 'html', '-t', 'markdown', '-'],
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE
                                    )
            content, error = proc.communicate(item.content)
            file_name = os.path.splitext(item.file_name)[0]+'.md'
        else:
            file_name = item.file_name
            content = item.content

        # create needed directories
        dir_name = '%s/%s' % (base_name, os.path.dirname(file_name))
        if not os.path.exists(dir_name):
            os.makedirs(dir_name)

        print '>> ', file_name

        # write content to file
        f = open('%s/%s' % (base_name, file_name), 'w')
        f.write(content)
        f.close()

instruction:

Make `samples/04_markdown_parse` Python 2+3 compatible

output:

#!/usr/bin/env python

import os.path
import subprocess
import sys

from ebooklib import epub

# This is just a basic example which can easily break in real world.

if __name__ == '__main__':
    # read epub
    book = epub.read_epub(sys.argv[1])

    # get base filename from the epub
    base_name = os.path.basename(os.path.splitext(sys.argv[1])[0])

    for item in book.items:
        # convert into markdown if this is html
        if isinstance(item, epub.EpubHtml):
            proc = subprocess.Popen(['pandoc', '-f', 'html', '-t', 'markdown', '-'],
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE)
            content, error = proc.communicate(item.content)
            file_name = os.path.splitext(item.file_name)[0] + '.md'
        else:
            file_name = item.file_name
            content = item.content

        # create needed directories
        dir_name = '{0}/{1}'.format(base_name, os.path.dirname(file_name))
        if not os.path.exists(dir_name):
            os.makedirs(dir_name)

        print('>> {0}'.format(file_name))

        # write content to file
        with open('{0}/{1}'.format(base_name, file_name), 'w') as f:
            f.write(content)

input:

#!/usr/bin/env python
#coding=utf8

"""
Error dialog interface.
"""

from whacked4.ui import windows
import wx


class ErrorDialog(windows.ErrorDialogBase):

    def __init__(self, parent):
        windows.ErrorDialogBase.__init__(self, parent)
        wx.EndBusyCursor()

    def set_log(self, log_file):
        """
        Shows the log file's contents in the report field.
        """

        log_file.flush()
        log_file.seek(0)

        self.Report.ChangeValue(log_file.read())

    def copy(self, event):
        self.Report.SelectAll()
        self.Report.Copy()
        self.Report.SetSelection(-1, -1)

    def close(self, event):
        self.Hide()

instruction:

Fix exceptions not displaying if a busy cursor was set.

output:

#!/usr/bin/env python
#coding=utf8

"""
Error dialog interface.
"""

from whacked4.ui import windows
import wx


class ErrorDialog(windows.ErrorDialogBase):

    def __init__(self, parent):
        windows.ErrorDialogBase.__init__(self, parent)

        if wx.IsBusy() == True:
            wx.EndBusyCursor()

    def set_log(self, log_file):
        """
        Shows the log file's contents in the report field.
        """

        log_file.flush()
        log_file.seek(0)

        self.Report.ChangeValue(log_file.read())

    def copy(self, event):
        self.Report.SelectAll()
        self.Report.Copy()
        self.Report.SetSelection(-1, -1)

    def close(self, event):
        self.Hide()

input:

from django.test import TestCase

instruction:

Write tests for swappable model. Resolves #28, #36.

--HG--
branch : fix-swappable-model

output:

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, modify_settings

from boardinghouse.schema import get_schema_model


class TestSwappableModel(TestCase):
    @modify_settings()
    def test_schema_model_app_not_found(self):
        settings.BOARDINGHOUSE_SCHEMA_MODEL = 'foo.bar'
        with self.assertRaises(ImproperlyConfigured):
            get_schema_model()

    @modify_settings()
    def test_schema_model_model_not_found(self):
        settings.BOARDINGHOUSE_SCHEMA_MODEL = 'boardinghouse.NotSchemaModel'
        with self.assertRaises(ImproperlyConfigured):
            get_schema_model()

    @modify_settings()
    def test_invalid_schema_model_string(self):
        settings.BOARDINGHOUSE_SCHEMA_MODEL = 'foo__bar'
        with self.assertRaises(ImproperlyConfigured):
            get_schema_model()

"""Basic views with no home""" import logging from pyramid.view import view_config from bookie.bcelery import tasks from bookie.models import BmarkMgr from bookie.models.auth import ActivationMgr from bookie.models.auth import UserMgr LOG = logging.getLogger(__name__) @view_config( route_name="dashboard", renderer="/stats/dashboard.mako") def dashboard(request): """A public dashboard of the system """ res = tasks.count_total.delay() # Generate some user data and stats user_count = UserMgr.count() pending_activations = ActivationMgr.count() # Generate some bookmark data. bookmark_count = BmarkMgr.count() unique_url_count = BmarkMgr.count(distinct=True) users_with_bookmarks = BmarkMgr.count(distinct_users=True) return { 'bookmark_data': { 'count': bookmark_count, 'unique_count': unique_url_count, }, 'user_data': { 'count': user_count, 'activations': pending_activations, 'with_bookmarks': users_with_bookmarks, } }
Clean up old code no longer used
"""Basic views with no home""" import logging from pyramid.view import view_config from bookie.models import BmarkMgr from bookie.models.auth import ActivationMgr from bookie.models.auth import UserMgr LOG = logging.getLogger(__name__) @view_config( route_name="dashboard", renderer="/stats/dashboard.mako") def dashboard(request): """A public dashboard of the system """ # Generate some user data and stats user_count = UserMgr.count() pending_activations = ActivationMgr.count() # Generate some bookmark data. bookmark_count = BmarkMgr.count() unique_url_count = BmarkMgr.count(distinct=True) users_with_bookmarks = BmarkMgr.count(distinct_users=True) return { 'bookmark_data': { 'count': bookmark_count, 'unique_count': unique_url_count, }, 'user_data': { 'count': user_count, 'activations': pending_activations, 'with_bookmarks': users_with_bookmarks, } }
input:

from flask import Flask

app = Flask(__name__)
app.config.from_object("config")

instruction:

Use an env var to get config

output:

import os

from flask import Flask

app = Flask(__name__)
app.config.from_object(os.environ.get("WCONTROL_CONF"))

input:

import foauth.providers


class Vimeo(foauth.providers.OAuth1):
    # General info about the provider
    provider_url = 'http://vimeo.com/'
    docs_url = 'http://developer.vimeo.com/apis/advanced'
    category = 'Videos'

    # URLs to interact with the API
    request_token_url = 'https://vimeo.com/oauth/request_token'
    authorize_url = 'https://vimeo.com/oauth/authorize?permission=delete'
    access_token_url = 'https://vimeo.com/oauth/access_token'

    api_domain = 'vimeo.com'

    available_permissions = [
        ('read', 'access information about videos'),
        ('write', 'update and like videos'),
        ('delete', 'delete videos'),
    ]

    def get_user_id(self, key):
        r = self.api(key, self.api_domain, u'/api/rest/v2?method=vimeo.people.getInfo&format=json')
        return r.json[u'person'][u'id']

instruction:

Rewrite Vimeo to use the new scope selection system

output:

import foauth.providers


class Vimeo(foauth.providers.OAuth1):
    # General info about the provider
    provider_url = 'http://vimeo.com/'
    docs_url = 'http://developer.vimeo.com/apis/advanced'
    category = 'Videos'

    # URLs to interact with the API
    request_token_url = 'https://vimeo.com/oauth/request_token'
    authorize_url = 'https://vimeo.com/oauth/authorize'
    access_token_url = 'https://vimeo.com/oauth/access_token'

    api_domain = 'vimeo.com'

    available_permissions = [
        (None, 'access your videos'),
        ('write', 'access, update and like videos'),
        ('delete', 'access, update, like and delete videos'),
    ]
    permissions_widget = 'radio'

    def get_authorize_params(self, redirect_uri, scopes):
        params = super(Vimeo, self).get_authorize_params(redirect_uri, scopes)
        if any(scopes):
            params['permission'] = scopes[0]
        return params

    def get_user_id(self, key):
        r = self.api(key, self.api_domain, u'/api/rest/v2?method=vimeo.people.getInfo&format=json')
        return r.json[u'person'][u'id']

""" :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import pytest from byceps.services.shop.cart.models import Cart from byceps.services.shop.sequence import service as sequence_service from byceps.services.shop.shop import service as shop_service from testfixtures.shop_order import create_orderer from tests.helpers import create_user_with_detail @pytest.fixture def shop(email_config): return shop_service.create_shop('shop-01', 'Some Shop', email_config.id) @pytest.fixture def orderer(normal_user): user = create_user_with_detail('Besteller') return create_orderer(user) @pytest.fixture def empty_cart() -> Cart: return Cart() @pytest.fixture def order_number_sequence(shop) -> None: sequence_service.create_order_number_sequence(shop.id, 'order-')
Remove unused fixture from orderer
""" :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import pytest from byceps.services.shop.cart.models import Cart from byceps.services.shop.sequence import service as sequence_service from byceps.services.shop.shop import service as shop_service from testfixtures.shop_order import create_orderer from tests.helpers import create_user_with_detail @pytest.fixture def shop(email_config): return shop_service.create_shop('shop-01', 'Some Shop', email_config.id) @pytest.fixture def orderer(): user = create_user_with_detail('Besteller') return create_orderer(user) @pytest.fixture def empty_cart() -> Cart: return Cart() @pytest.fixture def order_number_sequence(shop) -> None: sequence_service.create_order_number_sequence(shop.id, 'order-')
input:

import unittest
import os

from approvaltests.Namer import Namer


class NamerTests(unittest.TestCase):
    def test_class(self):
        n = Namer()
        self.assertEqual("NamerTests", n.getClassName())

    def test_method(self):
        n = Namer()
        self.assertEqual("test_method", n.getMethodName())

    def test_file(self):
        n = Namer()
        self.assertTrue(os.path.exists(n.getDirectory() + "/NamerTests.py"))

    def test_basename(self):
        n = Namer()
        self.assertTrue(n.get_basename().endswith("\\NamerTests.test_basename"), n.get_basename())


if __name__ == '__main__':
    unittest.main()

instruction:

Remove '\' condition from test as it will not be in the file path when used on Linux.

output:

import unittest
import os

from approvaltests.Namer import Namer


class NamerTests(unittest.TestCase):
    def test_class(self):
        n = Namer()
        self.assertEqual("NamerTests", n.getClassName())

    def test_method(self):
        n = Namer()
        self.assertEqual("test_method", n.getMethodName())

    def test_file(self):
        n = Namer()
        self.assertTrue(os.path.exists(n.getDirectory() + "/NamerTests.py"))

    def test_basename(self):
        n = Namer()
        self.assertTrue(n.get_basename().endswith("NamerTests.test_basename"), n.get_basename())


if __name__ == '__main__':
    unittest.main()

input:

# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-14 19:02
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('tokens', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='token',
            name='token_type',
            field=models.CharField(choices=[('MintableToken', 'Mintable Token')], default='MintableToken', max_length=12),
        ),
    ]

instruction:

Fix psycopg2 DataError due to bad varchar length

output:

# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-14 19:02
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('tokens', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='token',
            name='token_type',
            field=models.CharField(choices=[('MintableToken', 'Mintable Token')], default='MintableToken', max_length=20),
        ),
    ]

input:

'''
Creates an admin user if there aren't any existing superusers
'''

from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from optparse import make_option


class Command(BaseCommand):
    help = 'Creates/Updates an Admin user'

    def add_arguments(self, parser):
        parser.add_argument('--username',
            action='store',
            dest='username',
            default=None,
            help='Admin username')
        parser.add_argument('--password',
            action='store',
            dest='password',
            default=None,
            help='Admin password')

    def handle(self, *args, **options):
        username = options.get('username')
        password = options.get('password')

        if not username or not password:
            raise StandardError('You must specify a username and password')

        # Get the current superusers
        su_count = User.objects.filter(is_superuser=True).count()

        if su_count == 0:
            # there aren't any superusers, create one
            user, created = User.objects.get_or_create(username=username)
            user.set_password(password)
            user.is_staff = True
            user.is_superuser = True
            user.save()
            print('{0} updated'.format(username))
        else:
            print('There are already {0} superusers'.format(su_count))

instruction:

Fix exception handling in management command. Clean up.

output:

"""Creates an admin user if there aren't any existing superusers."""
from optparse import make_option

from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError


class Command(BaseCommand):
    help = 'Creates/Updates an Admin user'

    def add_arguments(self, parser):
        parser.add_argument('--username',
            action='store',
            dest='username',
            default=None,
            help='Admin username')
        parser.add_argument('--password',
            action='store',
            dest='password',
            default=None,
            help='Admin password')

    def handle(self, *args, **options):
        username = options.get('username')
        password = options.get('password')

        if not username or not password:
            raise CommandError('You must specify a username and password')

        # Get the current superusers
        su_count = User.objects.filter(is_superuser=True).count()

        if su_count == 0:
            # there aren't any superusers, create one
            user, created = User.objects.get_or_create(username=username)
            user.set_password(password)
            user.is_staff = True
            user.is_superuser = True
            user.save()
            print(f'{username} updated')
        else:
            print(f'There are already {su_count} superusers')

input:

import os

from .bmi_ilamb import BmiIlamb

__all__ = ['BmiIlamb']
__version__ = 0.1

package_dir = os.path.dirname(__file__)
data_dir = os.path.join(package_dir, 'data')

instruction:

Add Configuration to package definition

output:

import os

from .bmi_ilamb import BmiIlamb
from .config import Configuration

__all__ = ['BmiIlamb', 'Configuration']
__version__ = 0.1

package_dir = os.path.dirname(__file__)
data_dir = os.path.join(package_dir, 'data')

input:

#!/usr/bin/env python
# -*- encoding: utf-8 -*-

import json
import os
import subprocess
import sys


def find_json_files():
    for root, _, filenames in os.walk('.'):
        if any(
            d in root
            for d in ['/WIP', '/.terraform', '/target']
        ):
            continue
        for f in filenames:
            if f.lower().endswith('.json'):
                yield os.path.join(root, f)


if __name__ == '__main__':
    bad_files = []

    for f in find_json_files():
        f_contents = open(f).read()
        try:
            data = json.loads(f_contents)
        except ValueError as err:
            print(f'[ERROR] {f} - Invalid JSON? {err}')
            bad_files.append(f)
            continue

        json_str = json.dumps(f_contents, indent=2, sort_keys=True)
        if json_str == f_contents:
            print(f'[OK] {f}')
        else:
            open(f, 'w').write(json_str)
            print(f'[FIXED] {f}')

    if bad_files:
        print('')
        print('Errors in the following files:')
        for f in bad_files:
            print(f'- {f}')
        sys.exit(1)
    else:
        sys.exit(0)

instruction:

Tweak the JSON we export

output:

#!/usr/bin/env python
# -*- encoding: utf-8 -*-

import json
import os
import subprocess
import sys


def find_json_files():
    for root, _, filenames in os.walk('.'):
        if any(
            d in root
            for d in ['/WIP', '/.terraform', '/target']
        ):
            continue
        for f in filenames:
            if f.lower().endswith('.json'):
                yield os.path.join(root, f)


if __name__ == '__main__':
    bad_files = []

    for f in find_json_files():
        f_contents = open(f).read()
        try:
            data = json.loads(f_contents)
        except ValueError as err:
            print(f'[ERROR] {f} - Invalid JSON? {err}')
            bad_files.append(f)
            continue

        json_str = json.dumps(data, indent=2) + '\n'
        if json_str == f_contents:
            print(f'[OK] {f}')
        else:
            open(f, 'w').write(json_str)
            print(f'[FIXED] {f}')

    if bad_files:
        print('')
        print('Errors in the following files:')
        for f in bad_files:
            print(f'- {f}')
        sys.exit(1)
    else:
        sys.exit(0)

input:

import os
import random
import string


def secure_uuid():
    """
    Strength: 6*3 random characters from a list of 62, approx. 64^18 possible
    strings, or 2^100. Should be enough to prevent a successful bruteforce,
    as download links are only valid for 3 hours

    :return:
    """
    return id_generator() + "-" + id_generator() + "-" + id_generator()


def id_generator(size=6, chars=string.ascii_letters + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))


def static(*args):
    return os.path.join('static', *args)

instruction:

Use SystemRandom to generate security-viable randomness

output:

import os
import string
from random import SystemRandom

cryptogen = SystemRandom()


def secure_uuid():
    """
    Strength: 6*3 random characters from a list of 62, approx. 64^18 possible
    strings, or 2^100. Should be enough to prevent a successful bruteforce,
    as download links are only valid for 3 hours

    :return:
    """
    return id_generator() + "-" + id_generator() + "-" + id_generator()


def id_generator(size=6, chars=string.ascii_letters + string.digits):
    return ''.join(cryptogen.choice(chars) for _ in range(size))


def static(*args):
    return os.path.join('static', *args)

input:

from flask import render_template

from app.main import main


@main.app_errorhandler(400)
def page_not_found(e):
    return _render_error_page(500)


@main.app_errorhandler(404)
def page_not_found(e):
    return _render_error_page(404)


@main.app_errorhandler(500)
def exception(e):
    return _render_error_page(500)


@main.app_errorhandler(503)
def service_unavailable(e):
    return _render_error_page(503)


def _render_error_page(status_code):
    template_map = {
        404: "errors/404.html",
        500: "errors/500.html",
        503: "errors/500.html",
    }
    if status_code not in template_map:
        status_code = 500
    template_data = main.config['BASE_TEMPLATE_DATA']
    return render_template(template_map[status_code], **template_data), status_code

instruction:

Add print statements for all error types

output:

from flask import render_template

from app.main import main


@main.app_errorhandler(400)
def page_not_found(e):
    print(e.message)
    return _render_error_page(500)


@main.app_errorhandler(404)
def page_not_found(e):
    print(e.message)
    return _render_error_page(404)


@main.app_errorhandler(500)
def exception(e):
    print(e.message)
    return _render_error_page(500)


@main.app_errorhandler(503)
def service_unavailable(e):
    print(e.message)
    return _render_error_page(503)


def _render_error_page(status_code):
    template_map = {
        404: "errors/404.html",
        500: "errors/500.html",
        503: "errors/500.html",
    }
    if status_code not in template_map:
        status_code = 500
    template_data = main.config['BASE_TEMPLATE_DATA']
    return render_template(template_map[status_code], **template_data), status_code

input:

# -*- encoding: utf-8 -*-
# Odoo, Open Source Management Solution
# Copyright (C) 2014-2015 Grupo ESOC <www.grupoesoc.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from openerp import _, exceptions


class PartnerNameError(exceptions.ValidationError):
    def __init__(self, record, value=None):
        self.record = record
        self._value = value
        self.name = _("Error(s) with partner %d's name.") % record.id

    @property
    def value(self):
        raise NotImplementedError()


class EmptyNames(PartnerNameError):
    @property
    def value(self):
        return _("No name is set.")

instruction:

Remove subclassing of exception, since there is only one.

output:

# -*- encoding: utf-8 -*-
# Odoo, Open Source Management Solution
# Copyright (C) 2014-2015 Grupo ESOC <www.grupoesoc.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from openerp import _, exceptions


class EmptyNames(exceptions.ValidationError):
    def __init__(self, record, value=_("No name is set.")):
        self.record = record
        self._value = value
        self.name = _("Error(s) with partner %d's name.") % record.id

input:

# -*- coding: utf-8 -*-
from flask import Flask
import os
import psycopg2
from contextlib import closing

DB_SCHEMA = """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
    id serial PRIMARY KEY,
    title VARCHAR (127) NOT NULL,
    text TEXT NOT NULL,
    created TIMESTAMP NOT NULL
)
"""

app = Flask(__name__)


@app.route('/')
def hello():
    return u'Hello world!'

app.config['DATABASE'] = os.environ.get(
    'DATABASE_URL', 'dbname=learning_journal user=elizabethrives'
)


def connect_db():
    """Return a connection to the configured database"""
    return psycopg2.connect(app.config['DATABASE'])


def init_db():
    """Initialize the database using DB_SCHEMA

    WARNING: executing this function will drop existing tables.
    """
    with closing(connect_db()) as db:
        db.cursor().execute(DB_SCHEMA)
        db.commit()


if __name__ == '__main__':
    app.run(debug=True)

instruction:

Add code for getting and releasing a database connection

output:

# -*- coding: utf-8 -*-
from flask import Flask
import os
import psycopg2
from contextlib import closing
from flask import g

DB_SCHEMA = """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
    id serial PRIMARY KEY,
    title VARCHAR (127) NOT NULL,
    text TEXT NOT NULL,
    created TIMESTAMP NOT NULL
)
"""

app = Flask(__name__)


@app.route('/')
def hello():
    return u'Hello world!'

app.config['DATABASE'] = os.environ.get(
    'DATABASE_URL', 'dbname=learning_journal user=elizabethrives'
)


def connect_db():
    """Return a connection to the configured database"""
    return psycopg2.connect(app.config['DATABASE'])


def init_db():
    """Initialize the database using DB_SCHEMA

    WARNING: executing this function will drop existing tables.
    """
    with closing(connect_db()) as db:
        db.cursor().execute(DB_SCHEMA)
        db.commit()


def get_database_connection():
    db = getattr(g, 'db', None)
    if db is None:
        g.db = db = connect_db()
    return db


@app.teardown_request
def teardown_request(exception):
    db = getattr(g, 'db', None)
    if db is not None:
        if exception and isinstance(exception, psycopg2.Error):
            db.rollback()
        else:
            db.commit()
        db.close()


if __name__ == '__main__':
    app.run(debug=True)

input:

import os
import logging
from celery import Celery
from temp_config.set_environment import DeployEnv

runtime_env = DeployEnv()
runtime_env.load_deployment_environment()

redis_server = os.environ.get('REDIS_HOSTNAME')
redis_port = os.environ.get('REDIS_PORT')

celery_tasks = [
    'hms_flask.modules.hms_controller',
    'pram_flask.tasks'
]

redis = 'redis://' + redis_server + ':' + redis_port + '/0'
logging.info("Celery connecting to redis server: " + redis)

celery = Celery('flask_qed', broker=redis, backend=redis, include=celery_tasks)
celery.conf.update(
    CELERY_ACCEPT_CONTENT=['json'],
    CELERY_TASK_SERIALIZER='json',
    CELERY_RESULT_SERIALIZER='json',
    CELERY_IGNORE_RESULT=False,
    CELERY_TRACK_STARTED=True,
)

instruction:

Set celery to ignore results

output:

import os
import logging
from celery import Celery
from temp_config.set_environment import DeployEnv

runtime_env = DeployEnv()
runtime_env.load_deployment_environment()

redis_server = os.environ.get('REDIS_HOSTNAME')
redis_port = os.environ.get('REDIS_PORT')

celery_tasks = [
    'hms_flask.modules.hms_controller',
    'pram_flask.tasks'
]

redis = 'redis://' + redis_server + ':' + redis_port + '/0'
logging.info("Celery connecting to redis server: " + redis)

celery = Celery('flask_qed', broker=redis, backend=redis, include=celery_tasks)
celery.conf.update(
    CELERY_ACCEPT_CONTENT=['json'],
    CELERY_TASK_SERIALIZER='json',
    CELERY_RESULT_SERIALIZER='json',
    CELERY_IGNORE_RESULT=True,
    CELERY_TRACK_STARTED=True,
)

input:

#!/usr/bin/env python
import argparse
import logging
import sys

import gym
import universe  # register the universe environments
from universe import wrappers

logger = logging.getLogger()


def main():
    parser = argparse.ArgumentParser(description=None)
    parser.add_argument('-v', '--verbose', action='count', dest='verbosity', default=0, help='Set verbosity.')
    args = parser.parse_args()

    if args.verbosity == 0:
        logger.setLevel(logging.INFO)
    elif args.verbosity >= 1:
        logger.setLevel(logging.DEBUG)

    env = gym.make('flashgames.NeonRace-v0')

    # Restrict the valid random actions. (Try removing this and see
    # what happens when the agent is given full control of the
    # keyboard/mouse.)
    env = wrappers.SafeActionSpace(env)
    observation_n = env.reset()

    while True:
        # your agent here
        #
        # Try sending this instead of a random action: ('KeyEvent', 'ArrowUp', True)
        action_n = [env.action_space.sample() for ob in observation_n]
        observation_n, reward_n, done_n, info = env.step(action_n)
        env.render()

    return 0


if __name__ == '__main__':
    sys.exit(main())

instruction:

Add configure call to random_agent

output:

#!/usr/bin/env python
import argparse
import logging
import sys

import gym
import universe  # register the universe environments
from universe import wrappers

logger = logging.getLogger()


def main():
    parser = argparse.ArgumentParser(description=None)
    parser.add_argument('-v', '--verbose', action='count', dest='verbosity', default=0, help='Set verbosity.')
    args = parser.parse_args()

    if args.verbosity == 0:
        logger.setLevel(logging.INFO)
    elif args.verbosity >= 1:
        logger.setLevel(logging.DEBUG)

    env = gym.make('flashgames.NeonRace-v0')
    env.configure(remotes=1)  # automatically creates a local docker container

    # Restrict the valid random actions. (Try removing this and see
    # what happens when the agent is given full control of the
    # keyboard/mouse.)
    env = wrappers.SafeActionSpace(env)
    observation_n = env.reset()

    while True:
        # your agent here
        #
        # Try sending this instead of a random action: ('KeyEvent', 'ArrowUp', True)
        action_n = [env.action_space.sample() for ob in observation_n]
        observation_n, reward_n, done_n, info = env.step(action_n)
        env.render()

    return 0


if __name__ == '__main__':
    sys.exit(main())

input:

from celery import Celery

from cref.app.terminal import run_cref

app = Celery(
    'tasks',
    backend='db+sqlite:///results.sqlite',
    broker='amqp://guest@localhost//'
)


@app.task
def predict_structure(sequence, params={}):
    return run_cref(sequence)

instruction:

Move task results database to data dir

output:

from celery import Celery

from cref.app.terminal import run_cref

app = Celery(
    'tasks',
    backend='db+sqlite:///data/results.sqlite',
    broker='amqp://guest@localhost//'
)


@app.task
def predict_structure(sequence, params={}):
    return run_cref(sequence)

input:

import sys

from .config import RawConfig, RunConfig
from .exc import RunCommandsError
from .run import run, partition_argv, read_run_args
from .util import printer


def main(argv=None):
    try:
        all_argv, run_argv, command_argv = partition_argv(argv)
        cli_args = run.parse_args(RawConfig(run=RunConfig()), run_argv)
        run_args = read_run_args(run)
        run_args.update(cli_args)
        run.implementation(
            None, all_argv=all_argv, run_argv=run_argv, command_argv=command_argv,
            cli_args=cli_args, **run_args)
    except RunCommandsError as exc:
        printer.error(exc, file=sys.stderr)
        return 1
    return 0


if __name__ == '__main__':
    sys.exit(main())

instruction:

Raise exception when --debug is specified to main script

I.e., instead of printing the exception and then exiting.

output:

import sys

from .config import RawConfig, RunConfig
from .exc import RunCommandsError
from .run import run, partition_argv, read_run_args
from .util import printer


def main(argv=None):
    debug = None
    try:
        all_argv, run_argv, command_argv = partition_argv(argv)
        cli_args = run.parse_args(RawConfig(run=RunConfig()), run_argv)
        run_args = read_run_args(run)
        run_args.update(cli_args)
        debug = run_args.get('debug', run.parameters['debug'].default)
        run.implementation(
            None, all_argv=all_argv, run_argv=run_argv, command_argv=command_argv,
            cli_args=cli_args, **run_args)
    except RunCommandsError as exc:
        if debug or debug is None:
            # User specified --debug OR processing didn't get far enough
            # to determine whether user specified --debug.
            raise
        printer.error(exc, file=sys.stderr)
        return 1
    return 0


if __name__ == '__main__':
    sys.exit(main())

input:

from tastypie.resources import ModelResource, ALL

from .models import Filer, Filing
from .utils.serializer import CIRCustomSerializer


class FilerResource(ModelResource):
    class Meta:
        queryset = Filer.objects.all()
        serializer = CIRCustomSerializer()
        filtering = {
            'filer_id_raw': ALL
        }
        excludes = [
            'id'
        ]


class FilingResource(ModelResource):
    class Meta:
        queryset = Filing.objects.all()
        serializer = CIRCustomSerializer()
        filtering = {
            'filing_id_raw': ALL
        }
        excludes = [
            'id'
        ]

instruction:

Fix style issues raised by pep8.

output:

from tastypie.resources import ModelResource, ALL

from .models import Filer, Filing
from .utils.serializer import CIRCustomSerializer


class FilerResource(ModelResource):
    class Meta:
        queryset = Filer.objects.all()
        serializer = CIRCustomSerializer()
        filtering = {'filer_id_raw': ALL}
        excludes = ['id']


class FilingResource(ModelResource):
    class Meta:
        queryset = Filing.objects.all()
        serializer = CIRCustomSerializer()
        filtering = {'filing_id_raw': ALL}
        excludes = ['id']

input:

from . import formula


def direction(lat, lng):
    return formula.qibla(lat, lng)


def direction_dms(lat, lng):
    return _dms(formula.qibla(lat, lng))


def direction_str(lat, lng, prec=0):
    # negative input might return wrong result
    d, m, s = direction_dms(lat, lng)
    return '{}° {}\' {:.{}f}"'.format(d, m, s, prec)


def _dms(deg):
    seconds = deg * 3600
    m, s = divmod(seconds, 60)
    d, m = divmod(m, 60)
    return (int(d), int(m), s)

instruction:

Add encoding spec to comply with Python 2

output:

# -*- coding: utf-8 -*-
from . import formula


def direction(lat, lng):
    return formula.qibla(lat, lng)


def direction_dms(lat, lng):
    return _dms(formula.qibla(lat, lng))


def direction_str(lat, lng, prec=0):
    # negative input might return wrong result
    d, m, s = direction_dms(lat, lng)
    return '{}° {}\' {:.{}f}"'.format(d, m, s, prec)


def _dms(deg):
    seconds = deg * 3600
    m, s = divmod(seconds, 60)
    d, m = divmod(m, 60)
    return (int(d), int(m), s)

input:

#
# Copyright 2017-2021 European Centre for Medium-Range Weather Forecasts (ECMWF).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__version__ = "0.9.9.2.dev0"

# cfgrib core API depends on the ECMWF ecCodes C-library only
from .cfmessage import CfMessage
from .dataset import (
    Dataset,
    DatasetBuildError,
    open_container,
    open_file,
    open_fileindex,
    open_from_index,
)
from .messages import FileStream, Message

# NOTE: xarray is not a hard dependency, but let's provide helpers if it is available.
try:
    from .xarray_store import open_dataset, open_datasets
except ImportError:
    pass

instruction:

Drop unused and dangerous entrypoint `open_fileindex`

output:

#
# Copyright 2017-2021 European Centre for Medium-Range Weather Forecasts (ECMWF).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__version__ = "0.9.9.2.dev0"

# cfgrib core API depends on the ECMWF ecCodes C-library only
from .cfmessage import CfMessage
from .dataset import Dataset, DatasetBuildError, open_container, open_file, open_from_index
from .messages import FileStream, Message

# NOTE: xarray is not a hard dependency, but let's provide helpers if it is available.
try:
    from .xarray_store import open_dataset, open_datasets
except ImportError:
    pass

#!/usr/bin/python import sys, fileinput, subprocess err=0 cols=78 config_proc=subprocess.Popen([ "git", "config", "core.autocrlf" ], stdout=subprocess.PIPE) result=config_proc.communicate()[0] autocrlf=result.strip() == b"true" if result is not None else False def report_err(s): global err print("%s:%d: %s" % (fileinput.filename(), fileinput.filelineno(), s)) err=1 for line in fileinput.input(openhook=fileinput.hook_encoded("utf-8")): if line.find('\t') != -1 and fileinput.filename().find("Makefile") == -1: report_err("tab character") if not autocrlf and line.find('\r') != -1: report_err("CR character") line_len = len(line)-2 if autocrlf else len(line)-1 if line_len > cols: report_err("line longer than %d chars" % cols) sys.exit(err)
Revert "Don't complain about \r when core.autocrlf is on in Git" This reverts commit 828afaa2fa4cc9e3e53bda0ae3073abfcfa151ca.
#!/usr/bin/python import sys, fileinput err=0 cols=78 def report_err(s): global err print("%s:%d: %s" % (fileinput.filename(), fileinput.filelineno(), s)) err=1 for line in fileinput.input(openhook=fileinput.hook_encoded("utf-8")): if line.find('\t') != -1 and fileinput.filename().find("Makefile") == -1: report_err("tab character") if line.find('\r') != -1: report_err("CR character") if len(line)-1 > cols: report_err("line longer than %d chars" % cols) sys.exit(err)
from ..value import ForeignCloVal def plus(a: int, b: int) -> int: return a + b def minus(a: int, b: int) -> int: return a - b def times(a: int, b: int) -> int: return a * b def divide(a: int, b: int) -> float: return a / b env = { '+': ForeignCloVal(plus, {}), '-': ForeignCloVal(minus, {}), '*': ForeignCloVal(times, {}), '//': ForeignCloVal(divide, {}), }
Fix typo in division operator
from ..value import ForeignCloVal def plus(a: int, b: int) -> int: return a + b def minus(a: int, b: int) -> int: return a - b def times(a: int, b: int) -> int: return a * b def divide(a: int, b: int) -> float: return a / b env = { '+': ForeignCloVal(plus, {}), '-': ForeignCloVal(minus, {}), '*': ForeignCloVal(times, {}), '/': ForeignCloVal(divide, {}), }
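The fix matters because Python distinguishes true division from floor division; a minimal check on the plain function, independent of the ForeignCloVal wrapper:

def divide(a: int, b: int) -> float:
    return a / b

assert divide(7, 2) == 3.5  # '/' is true division
assert 7 // 2 == 3          # '//' floors, which is why it was a typo here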
import re import subprocess from datetime import datetime def test_last_review_date(): statement_file_path = "app/templates/views/accessibility_statement.html" # test local changes against master for a full diff of what will be merged statement_diff = subprocess.run( [f"git diff --exit-code origin/master -- {statement_file_path}"], stdout=subprocess.PIPE, shell=True ) # if statement has changed, test the review date was part of those changes if statement_diff.returncode == 1: raw_diff = statement_diff.stdout.decode("utf-8") today = datetime.now().strftime("%d %B %Y") with open(statement_file_path, "r") as statement_file: current_review_date = re.search( (r'"Last updated": "(\d{1,2} [A-Z]{1}[a-z]+ \d{4})"'), statement_file.read() ).group(1) # guard against changes that don't need to update the review date if current_review_date != today: assert '"Last updated": "' in raw_diff
Rename master branch to main
import re import subprocess from datetime import datetime def test_last_review_date(): statement_file_path = "app/templates/views/accessibility_statement.html" # test local changes against main for a full diff of what will be merged statement_diff = subprocess.run( [f"git diff --exit-code origin/main -- {statement_file_path}"], stdout=subprocess.PIPE, shell=True ) # if statement has changed, test the review date was part of those changes if statement_diff.returncode == 1: raw_diff = statement_diff.stdout.decode("utf-8") today = datetime.now().strftime("%d %B %Y") with open(statement_file_path, "r") as statement_file: current_review_date = re.search( (r'"Last updated": "(\d{1,2} [A-Z]{1}[a-z]+ \d{4})"'), statement_file.read() ).group(1) # guard against changes that don't need to update the review date if current_review_date != today: assert '"Last updated": "' in raw_diff
from django.shortcuts import render from django.views.generic import ListView from longclaw.longclawbasket.models import BasketItem from longclaw.longclawbasket import utils class BasketView(ListView): model = BasketItem template_name = "longclawbasket/basket.html" def get_context_data(self, **kwargs): items, _ = utils.get_basket_items(self.request) return {"basket": items}
Add basket total to context
from django.shortcuts import render from django.views.generic import ListView from longclaw.longclawbasket.models import BasketItem from longclaw.longclawbasket import utils class BasketView(ListView): model = BasketItem template_name = "longclawbasket/basket.html" def get_context_data(self, **kwargs): items, _ = utils.get_basket_items(self.request) total_price = sum(item.total() for item in items) return {"basket": items, "total_price": total_price}
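The view above assumes each BasketItem exposes a total() method; the model itself is not shown in this record, so the following is only a sketch with hypothetical attribute names, not the real longclaw model:

class BasketItem(object):
    # Illustrative stand-in; the real longclaw model is a Django model
    # with its own fields.
    def __init__(self, price, quantity=1):
        self.price = price
        self.quantity = quantity

    def total(self):
        # Line total consumed by BasketView.get_context_data()
        return self.price * self.quantity

assert BasketItem(price=2.5, quantity=4).total() == 10.0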
from menpodetect.opencv import (load_opencv_frontal_face_detector, load_opencv_eye_detector) import menpo.io as mio takeo = mio.import_builtin_asset.takeo_ppm() def test_frontal_face_detector(): takeo_copy = takeo.copy() opencv_detector = load_opencv_frontal_face_detector() pcs = opencv_detector(takeo_copy) assert len(pcs) == 1 assert takeo_copy.n_channels == 3 assert takeo_copy.landmarks['opencv_0'][None].n_points == 4 def test_frontal_face_detector_min_neighbors(): takeo_copy = takeo.copy() opencv_detector = load_opencv_frontal_face_detector() pcs = opencv_detector(takeo_copy, min_neighbours=100) assert len(pcs) == 0 assert takeo_copy.n_channels == 3 def test_eye_detector(): takeo_copy = takeo.copy() opencv_detector = load_opencv_eye_detector() pcs = opencv_detector(takeo_copy, min_size=(5, 5)) assert len(pcs) == 1 assert takeo_copy.n_channels == 3 assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
Use assert_allclose so we can see the appveyor failure
from numpy.testing import assert_allclose from menpodetect.opencv import (load_opencv_frontal_face_detector, load_opencv_eye_detector) import menpo.io as mio takeo = mio.import_builtin_asset.takeo_ppm() def test_frontal_face_detector(): takeo_copy = takeo.copy() opencv_detector = load_opencv_frontal_face_detector() pcs = opencv_detector(takeo_copy) assert len(pcs) == 1 assert takeo_copy.n_channels == 3 assert takeo_copy.landmarks['opencv_0'][None].n_points == 4 def test_frontal_face_detector_min_neighbors(): takeo_copy = takeo.copy() opencv_detector = load_opencv_frontal_face_detector() pcs = opencv_detector(takeo_copy, min_neighbours=100) assert len(pcs) == 0 assert takeo_copy.n_channels == 3 def test_eye_detector(): takeo_copy = takeo.copy() opencv_detector = load_opencv_eye_detector() pcs = opencv_detector(takeo_copy, min_size=(5, 5)) assert_allclose(len(pcs), 1) assert takeo_copy.n_channels == 3 assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.contrib import messages from django.core.urlresolvers import reverse_lazy from django.utils.translation import ugettext_lazy as _ from django.views.generic.edit import FormView from braces.views import LoginRequiredMixin from .forms import ObservationForm, BatchUploadForm class AddObservationView(FormView): """ Add a single observation. """ form_class = ObservationForm template_name = "observations/add_observation.html" success_url = reverse_lazy('observations:add_observation') class UploadObservationsView(LoginRequiredMixin, FormView): """ Upload a file of observations. """ form_class = BatchUploadForm template_name = "observations/upload_observations.html" success_url = reverse_lazy('observations:upload_observations') def form_valid(self, form): form.process_file() messages.success(self.request, _("File uploaded successfully!")) return super(UploadObservationsView, self).form_valid(form)
Save the observation if the form was valid.
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.contrib import messages from django.core.urlresolvers import reverse_lazy from django.utils.translation import ugettext_lazy as _ from django.views.generic.edit import FormView from braces.views import LoginRequiredMixin from .forms import ObservationForm, BatchUploadForm class AddObservationView(FormView): """ Add a single observation. """ form_class = ObservationForm template_name = "observations/add_observation.html" success_url = reverse_lazy('observations:add_observation') def form_valid(self, form): observation = form.save(commit=False) observation.observer = self.request.observer observation.save() return super(AddObservationView, self).form_valid(form) class UploadObservationsView(LoginRequiredMixin, FormView): """ Upload a file of observations. """ form_class = BatchUploadForm template_name = "observations/upload_observations.html" success_url = reverse_lazy('observations:upload_observations') def form_valid(self, form): form.process_file() messages.success(self.request, _("File uploaded successfully!")) return super(UploadObservationsView, self).form_valid(form)
#!/usr/bin/env python # -*- coding: utf-8 -*- __doc__ = ''' Crawl comment from nicovideo.jp Usage: main_crawl.py [--sqlite <sqlite>] [--csv <csv>] Options: --sqlite <sqlite> (optional) path of comment DB [default: comments.sqlite3] --csv <csv> (optional) path of csv file contains urls of videos [default: crawled.csv] ''' from docopt import docopt from nicocrawler.nicocrawler import NicoCrawler if __name__ == '__main__': # コマンドライン引数の取得 args = docopt(__doc__) sqlite_path = args['--sqlite'] csv_path = args['--csv'] ncrawler = NicoCrawler() ncrawler.connect_sqlite(sqlite_path) url = 'http://ch.nicovideo.jp/2016winter_anime' df = ncrawler.get_all_video_url_of_season(url) ncrawler.initialize_csv_from_db(csv_path) # # デイリーランキング1~300位の動画を取得する # url = 'http://www.nicovideo.jp/ranking/fav/daily/all' # ncrawler.initialize_csv_from_url(url, csv_path, max_page=3) # ncrawler.get_all_comments_of_csv(csv_path, max_n_iter=1)
Apply the file name change (main_crawl.py to crawl_comments.py) in the usage string
#!/usr/bin/env python # -*- coding: utf-8 -*- __doc__ = ''' Crawl comment from nicovideo.jp Usage: crawl_comments.py [--sqlite <sqlite>] [--csv <csv>] Options: --sqlite <sqlite> (optional) path of comment DB [default: comments.sqlite3] --csv <csv> (optional) path of csv file contains urls of videos [default: crawled.csv] ''' from docopt import docopt from nicocrawler.nicocrawler import NicoCrawler if __name__ == '__main__': # コマンドライン引数の取得 args = docopt(__doc__) sqlite_path = args['--sqlite'] csv_path = args['--csv'] ncrawler = NicoCrawler() ncrawler.connect_sqlite(sqlite_path) url = 'http://ch.nicovideo.jp/2016winter_anime' df = ncrawler.get_all_video_url_of_season(url) ncrawler.initialize_csv_from_db(csv_path) # # デイリーランキング1~300位の動画を取得する # url = 'http://www.nicovideo.jp/ranking/fav/daily/all' # ncrawler.initialize_csv_from_url(url, csv_path, max_page=3) # ncrawler.get_all_comments_of_csv(csv_path, max_n_iter=1)
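With docopt, the module docstring is the CLI spec, so renaming the program in the Usage line is the whole change; a hypothetical invocation shows how defaults and overrides are parsed:

from docopt import docopt

doc = '''Usage: crawl_comments.py [--sqlite <sqlite>] [--csv <csv>]

Options:
    --sqlite <sqlite>   [default: comments.sqlite3]
    --csv <csv>         [default: crawled.csv]
'''
args = docopt(doc, argv=['--csv', 'my_urls.csv'])
assert args['--csv'] == 'my_urls.csv'
assert args['--sqlite'] == 'comments.sqlite3'  # default applied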
import web from modules.base import renderer from modules.login import loginInstance from modules.courses import Course #Index page class IndexPage: #Simply display the page def GET(self): if loginInstance.isLoggedIn(): userInput = web.input(); if "logoff" in userInput: loginInstance.disconnect(); return renderer.index(False) else: courses = Course.GetAllCoursesIds() return renderer.main(courses) else: return renderer.index(False) #Try to log in def POST(self): userInput = web.input(); if "login" in userInput and "password" in userInput and loginInstance.connect(userInput.login,userInput.password): return renderer.main() else: return renderer.index(True)
Fix another bug in the authentication
import web from modules.base import renderer from modules.login import loginInstance from modules.courses import Course #Index page class IndexPage: #Simply display the page def GET(self): if loginInstance.isLoggedIn(): userInput = web.input(); if "logoff" in userInput: loginInstance.disconnect(); return renderer.index(False) else: return renderer.main(Course.GetAllCoursesIds()) else: return renderer.index(False) #Try to log in def POST(self): userInput = web.input(); if "login" in userInput and "password" in userInput and loginInstance.connect(userInput.login,userInput.password): return renderer.main(Course.GetAllCoursesIds()) else: return renderer.index(True)
Make sure test setup is run for subdirectories
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2011 Openstack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # NOTE(vish): this forces the fixtures from tests/__init.py:setup() to work from nova.tests import *
import logging
from json.encoder import JSONEncoder
import datetime

logger = logging.getLogger('moulinette.utils.serialize')


# JSON utilities -------------------------------------------------------

class JSONExtendedEncoder(JSONEncoder):

    """Extended JSON encoder

    Extend default JSON encoder to recognize more types and classes. It
    will never raise if the object can't be encoded and return its repr
    instead.

    The following objects and types are supported:
        - set: converted into list

    """
    def default(self, o):
        """Return a serializable object"""
        # Convert compatible containers into list
        if isinstance(o, set) or (
                hasattr(o, '__iter__') and hasattr(o, 'next')):
            return list(o)

        # Convert dates and datetimes into string
        if isinstance(o, datetime.datetime) or isinstance(o, datetime.date):
            return str(o)

        # Return the repr for object that json can't encode
        logger.warning('cannot properly encode in JSON the object %s, '
                       'returned repr is: %r', type(o), o)
        return repr(o)
[enh] Use isoformat date RFC 3339
import logging
from json.encoder import JSONEncoder
import datetime

logger = logging.getLogger('moulinette.utils.serialize')


# JSON utilities -------------------------------------------------------

class JSONExtendedEncoder(JSONEncoder):

    """Extended JSON encoder

    Extend default JSON encoder to recognize more types and classes. It
    will never raise if the object can't be encoded and return its repr
    instead.

    The following objects and types are supported:
        - set: converted into list

    """
    def default(self, o):
        """Return a serializable object"""
        # Convert compatible containers into list
        if isinstance(o, set) or (
                hasattr(o, '__iter__') and hasattr(o, 'next')):
            return list(o)

        # Convert dates and datetimes into ISO format string
        if isinstance(o, datetime.datetime) or isinstance(o, datetime.date):
            return o.isoformat()

        # Return the repr for object that json can't encode
        logger.warning('cannot properly encode in JSON the object %s, '
                       'returned repr is: %r', type(o), o)
        return repr(o)
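For reference, the practical difference between str() and isoformat() on a datetime, with isoformat() yielding the RFC 3339 / ISO 8601 'T'-separated form:

import datetime

d = datetime.datetime(2017, 3, 14, 15, 9, 26)
assert str(d) == '2017-03-14 15:09:26'         # previous behaviour: space separator
assert d.isoformat() == '2017-03-14T15:09:26'  # RFC 3339 / ISO 8601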
import json def test_govuk_frontend_jinja_overrides_on_design_system_v3(): with open("package.json") as package_file: package_json = json.load(package_file) assert package_json["dependencies"]["govuk-frontend"].startswith("3."), ( "After upgrading the Design System, manually validate that " "`app/templates/govuk_frontend_jinja_overrides/templates/components/*/template.html`" "are all structurally-correct and up-to-date macros. If not, update the macros or retire them and update the " "rendering process." )
Update GOV.UK Frontend/Jinja lib test. Check both the JavaScript and Python packages, and make sure they're both on our expected versions. If not, prompt the developer to check macros.
import json from importlib import metadata from packaging.version import Version def test_govuk_frontend_jinja_overrides_on_design_system_v3(): with open("package.json") as package_file: package_json = json.load(package_file) govuk_frontend_version = Version(package_json["dependencies"]["govuk-frontend"]) govuk_frontend_jinja_version = Version(metadata.version("govuk-frontend-jinja")) # This should be checking govuk_frontend_version == 3.14.x, but we're not there yet. Update this when we are. # Compatibility between these two libs is defined at https://github.com/LandRegistry/govuk-frontend-jinja/ correct_govuk_frontend_version = Version("3.0.0") <= govuk_frontend_version < Version("4.0.0") correct_govuk_frontend_jinja_version = Version("1.5.0") <= govuk_frontend_jinja_version < Version("1.6.0") assert correct_govuk_frontend_version and correct_govuk_frontend_jinja_version, ( "After upgrading either of the Design System packages, you must validate that " "`app/templates/govuk_frontend_jinja_overrides/templates/components/*/template.html`" "are all structurally-correct and up-to-date macros. If not, update the macros or retire them and update the " "rendering process." )
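packaging.version compares release segments numerically rather than lexically, which is what makes the range checks above reliable; for example:

from packaging.version import Version

assert "3.9" > "3.10"                    # lexical string comparison misleads
assert Version("3.9") < Version("3.10")  # numeric comparison gets it right
assert Version("1.5.0") <= Version("1.5.2") < Version("1.6.0")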
import pytest from kafka import KafkaConsumer, KafkaProducer from test.conftest import version from test.testutil import random_string @pytest.mark.skipif(not version(), reason="No KAFKA_VERSION set") def test_end_to_end(kafka_broker): connect_str = 'localhost:' + str(kafka_broker.port) producer = KafkaProducer(bootstrap_servers=connect_str, max_block_ms=10000, value_serializer=str.encode) consumer = KafkaConsumer(bootstrap_servers=connect_str, group_id=None, consumer_timeout_ms=10000, auto_offset_reset='earliest', value_deserializer=bytes.decode) topic = random_string(5) for i in range(1000): producer.send(topic, 'msg %d' % i) producer.flush() producer.close() consumer.subscribe([topic]) msgs = set() for i in range(1000): try: msgs.add(next(consumer).value) except StopIteration: break assert msgs == set(['msg %d' % i for i in range(1000)])
Add end-to-end integration testing for all compression types
import pytest from kafka import KafkaConsumer, KafkaProducer from test.conftest import version from test.testutil import random_string @pytest.mark.skipif(not version(), reason="No KAFKA_VERSION set") @pytest.mark.parametrize("compression", [None, 'gzip', 'snappy', 'lz4']) def test_end_to_end(kafka_broker, compression): # LZ4 requires 0.8.2 if compression == 'lz4' and version() < (0, 8, 2): return connect_str = 'localhost:' + str(kafka_broker.port) producer = KafkaProducer(bootstrap_servers=connect_str, max_block_ms=10000, compression_type=compression, value_serializer=str.encode) consumer = KafkaConsumer(bootstrap_servers=connect_str, group_id=None, consumer_timeout_ms=10000, auto_offset_reset='earliest', value_deserializer=bytes.decode) topic = random_string(5) for i in range(1000): producer.send(topic, 'msg %d' % i) producer.flush() producer.close() consumer.subscribe([topic]) msgs = set() for i in range(1000): try: msgs.add(next(consumer).value) except StopIteration: break assert msgs == set(['msg %d' % i for i in range(1000)])
#!/usr/bin/env python2 ## # fsplit # https://github.com/leosartaj/fsplit.git # # Copyright (c) 2014 Sartaj Singh # Licensed under the MIT license. ## from info import __version__ # define __version__ variable from info import __desc__ # define __desc__ variable for description
Upgrade to a better version. `from info import __version__  # define __version__ variable` gave `ModuleNotFoundError: No module named 'info'` while building, because the leading dot was missing from the relative import.
#!/usr/bin/env python2 ## # fsplit # https://github.com/leosartaj/fsplit.git # # Copyright (c) 2014 Sartaj Singh # Licensed under the MIT license. ## from .info import __version__ # define __version__ variable from .info import __desc__ # define __desc__ variable for description
import io import os from textwrap import dedent from pg_bawler import bawlerd class TestBawlerdConfig: def test_build_config_location_list(self): assert not bawlerd.conf.build_config_location_list(locations=()) user_conf = os.path.join( os.path.expanduser('~'), bawlerd.conf.DEFAULT_CONFIG_FILENAME) system_conf = os.path.join( '/etc/pg_bawler', bawlerd.conf.DEFAULT_CONFIG_FILENAME) assert user_conf in bawlerd.conf.build_config_location_list() assert system_conf in bawlerd.conf.build_config_location_list() def test__load_file(self): config = bawlerd.conf._load_file(io.StringIO(dedent("""\ logging: formatters: standard: format: \"%(asctime)s %(levelname)s] %(name)s: %(message)s\" handlers: default: level: "INFO" formatter: standard class: logging.StreamHandler loggers: "": handlers: ["default"] level: INFO propagate: True """))) assert 'logging' in config
Add simple test for config builder. Signed-off-by: Michal Kuffa <005ee1c97edba97d164343c993afee612ac25a0c@gmail.com>
import io import os from textwrap import dedent from pg_bawler import bawlerd class TestBawlerdConfig: def test_build_config_location_list(self): assert not bawlerd.conf.build_config_location_list(locations=()) user_conf = os.path.join( os.path.expanduser('~'), bawlerd.conf.DEFAULT_CONFIG_FILENAME) system_conf = os.path.join( '/etc/pg_bawler', bawlerd.conf.DEFAULT_CONFIG_FILENAME) assert user_conf in bawlerd.conf.build_config_location_list() assert system_conf in bawlerd.conf.build_config_location_list() def test__load_file(self): config = bawlerd.conf._load_file(io.StringIO(dedent("""\ logging: formatters: standard: format: \"%(asctime)s %(levelname)s] %(name)s: %(message)s\" handlers: default: level: "INFO" formatter: standard class: logging.StreamHandler loggers: "": handlers: ["default"] level: INFO propagate: True """))) assert 'logging' in config def test_read_config_files(self): config_base = os.path.join( os.path.abspath(os.path.dirname(__file__)), 'configs') locations = [ os.path.join(config_base, 'etc'), os.path.join(config_base, 'home'), ] config = bawlerd.conf.read_config_files( bawlerd.conf.build_config_location_list(locations=locations)) assert config['common']['listen_timeout'] == 40 assert 'logging' in config
import serial import threading print('Starting server...') temperature_usb = '/dev/ttyAMA0' BAUD_RATE = 9600 temperature_ser = ser.Serial(temperature_usb, BAUD_RATE) def process_line(line): print('Need to process line: {}'.format(line)) def temperature_loop(): line = "" while True: data = temperature_ser.read() if(data == "\r"): process_line(line) line = "" else: line = line + data temperature_thread = threading.Thread(target=temperature_loop) temperature_thread.start()
Use serial instead of ser, DUH
import serial import threading print('Starting server...') temperature_usb = '/dev/ttyAMA0' BAUD_RATE = 9600 temperature_ser = serial.Serial(temperature_usb, BAUD_RATE) def process_line(line): print('Need to process line: {}'.format(line)) def temperature_loop(): line = "" while True: data = temperature_ser.read() if(data == "\r"): process_line(line) line = "" else: line = line + data temperature_thread = threading.Thread(target=temperature_loop) temperature_thread.start()
import os from thinglang import utils from thinglang.execution.execution import ExecutionEngine from thinglang.lexer.lexer import lexer from thinglang.parser.analyzer import Analyzer from thinglang.parser.parser import parse from thinglang.parser.simplifier import Simplifier BASE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'include') def collect_includes(): files = [os.path.join(BASE_DIR, path) for path in os.listdir(BASE_DIR)] return '\n' + '\n'.join(open(f).read() for f in files) def run(source): if not source: raise ValueError('Source cannot be empty') source = (source + collect_includes()).strip().replace(' ' * 4, '\t') utils.print_header('Source', source) lexical_groups = list(lexer(source)) ast = parse(lexical_groups) Simplifier(ast).run() utils.print_header('Parsed AST', ast.tree()) Analyzer(ast).run() with ExecutionEngine(ast) as engine: engine.execute() return engine.results()
Print C++ code during parsing
import os from thinglang import utils from thinglang.execution.execution import ExecutionEngine from thinglang.lexer.lexer import lexer from thinglang.parser.analyzer import Analyzer from thinglang.parser.parser import parse from thinglang.parser.simplifier import Simplifier BASE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'include') def collect_includes(): files = [os.path.join(BASE_DIR, path) for path in os.listdir(BASE_DIR)] return '\n' + '\n'.join(open(f).read() for f in files) def run(source): if not source: raise ValueError('Source cannot be empty') source = (source + collect_includes()).strip().replace(' ' * 4, '\t') utils.print_header('Source', source) lexical_groups = list(lexer(source)) ast = parse(lexical_groups) Simplifier(ast).run() utils.print_header('C++ Transpilation', ast.transpile_children()) utils.print_header('Parsed AST', ast.tree()) Analyzer(ast).run() with ExecutionEngine(ast) as engine: engine.execute() return engine.results()
# encoding: utf-8 ''' The main entry point for salt-api ''' # Import python libs import logging import multiprocessing # Import salt-api libs import salt.loader logger = logging.getLogger(__name__) class NetapiClient(object): ''' Start each netapi module that is configured to run ''' def __init__(self, opts): self.opts = opts def run(self): ''' Load and start all available api modules ''' netapi = salt.loader.netapi(self.opts) for fun in netapi: if fun.endswith('.start'): logger.info("Starting '{0}' api module".format(fun)) multiprocessing.Process(target=netapi[fun]).start()
Make sure not to leave hanging child processes if the parent is killed
# encoding: utf-8 ''' The main entry point for salt-api ''' # Import python libs import logging import multiprocessing import signal # Import salt-api libs import salt.loader logger = logging.getLogger(__name__) class NetapiClient(object): ''' Start each netapi module that is configured to run ''' def __init__(self, opts): self.opts = opts self.processes = [] def run(self): ''' Load and start all available api modules ''' netapi = salt.loader.netapi(self.opts) for fun in netapi: if fun.endswith('.start'): logger.info("Starting '{0}' api module".format(fun)) p = multiprocessing.Process(target=netapi[fun]) p.start() self.processes.append(p) # make sure to kill the subprocesses if the parent is killed signal.signal(signal.SIGTERM, self.kill_children) def kill_children(self, *args): ''' Kill all of the children ''' for p in self.processes: p.terminate() p.join()
import sys import time import logging logging.basicConfig(level=logging.DEBUG) from redis import StrictRedis from rq import Queue from apscheduler.schedulers.blocking import BlockingScheduler from d1lod import jobs conn = StrictRedis(host='redis', port='6379') q = Queue(connection=conn) sched = BlockingScheduler() @sched.scheduled_job('interval', minutes=1) def queue_update_job(): q.enqueue(jobs.update_graph, timeout=604800) # 7 day timeout @sched.scheduled_job('interval', minutes=1) def queue_stats_job(): q.enqueue(jobs.calculate_stats) @sched.scheduled_job('interval', minutes=1) def queue_export_job(): q.enqueue(jobs.export_graph) @sched.scheduled_job('interval', minutes=1) def print_jobs_job(): sched.print_jobs() # Wait a bit for Sesame to start time.sleep(10) # Queue the stats job first. This creates the repository before any other # jobs are run. q.enqueue(jobs.calculate_stats) # Start the scheduler sched.start()
Set update job timeout back to a more reasonable value
import sys import time import logging logging.basicConfig(level=logging.DEBUG) from redis import StrictRedis from rq import Queue from apscheduler.schedulers.blocking import BlockingScheduler from d1lod import jobs conn = StrictRedis(host='redis', port='6379') q = Queue(connection=conn) sched = BlockingScheduler() @sched.scheduled_job('interval', minutes=1) def queue_update_job(): q.enqueue(jobs.update_graph, timeout=3600) # 1 hour timeout @sched.scheduled_job('interval', minutes=1) def queue_stats_job(): q.enqueue(jobs.calculate_stats) @sched.scheduled_job('interval', minutes=1) def queue_export_job(): q.enqueue(jobs.export_graph) @sched.scheduled_job('interval', minutes=1) def print_jobs_job(): sched.print_jobs() # Wait a bit for Sesame to start time.sleep(10) # Queue the stats job first. This creates the repository before any other # jobs are run. q.enqueue(jobs.calculate_stats) # Start the scheduler sched.start()
""" Django Settings that more closely resemble SAML Metadata. Detailed discussion is in doc/SETTINGS_AND_METADATA.txt. """ from django.conf import settings from django.core.exceptions import ImproperlyConfigured CERTIFICATE_DATA = 'certificate_data' CERTIFICATE_FILENAME = 'certificate_file' PRIVATE_KEY_DATA = 'private_key_data' PRIVATE_KEY_FILENAME = 'private_key_file' def check_configuration_contains(config, keys): available_keys = set(keys).intersection(set(config.keys())) if not available_keys: raise ImproperlyConfigured( 'one of the followin keys is required but none was ' 'specified: {}'.format(keys)) if len(available_keys) > 1: raise ImproperlyConfigured( 'found conflicting configuration: {}. Only one key can be used at' 'a time.'.format(available_keys)) def validate_configuration(config): check_configuration_contains(config=config, keys=[PRIVATE_KEY_DATA, PRIVATE_KEY_FILENAME]) check_configuration_contains(config=config, keys=[CERTIFICATE_DATA, CERTIFICATE_FILENAME]) try: SAML2IDP_CONFIG = settings.SAML2IDP_CONFIG except: raise ImproperlyConfigured('SAML2IDP_CONFIG setting is missing.') else: validate_configuration(SAML2IDP_CONFIG) try: SAML2IDP_REMOTES = settings.SAML2IDP_REMOTES except: raise ImproperlyConfigured('SAML2IDP_REMOTES setting is missing.')
Implement suggested changes in PR review
""" Django Settings that more closely resemble SAML Metadata. Detailed discussion is in doc/SETTINGS_AND_METADATA.txt. """ from django.conf import settings from django.core.exceptions import ImproperlyConfigured CERTIFICATE_DATA = 'certificate_data' CERTIFICATE_FILENAME = 'certificate_file' PRIVATE_KEY_DATA = 'private_key_data' PRIVATE_KEY_FILENAME = 'private_key_file' def check_configuration_contains(config, keys): available_keys = frozenset(keys).intersection(frozenset(config.keys())) if not available_keys: raise ImproperlyConfigured( 'one of the following keys is required but none was ' 'specified: {}'.format(keys)) if len(available_keys) > 1: raise ImproperlyConfigured( 'found conflicting configuration: {}. Only one key can be used at' 'a time.'.format(available_keys)) def validate_configuration(config): check_configuration_contains(config=config, keys=(PRIVATE_KEY_DATA, PRIVATE_KEY_FILENAME)) check_configuration_contains(config=config, keys=(CERTIFICATE_DATA, CERTIFICATE_FILENAME)) try: SAML2IDP_CONFIG = settings.SAML2IDP_CONFIG except: raise ImproperlyConfigured('SAML2IDP_CONFIG setting is missing.') else: validate_configuration(SAML2IDP_CONFIG) try: SAML2IDP_REMOTES = settings.SAML2IDP_REMOTES except: raise ImproperlyConfigured('SAML2IDP_REMOTES setting is missing.')
# -*- coding: utf-8 -*-


def test_bar_fixture(testdir):
    testdir.tmpdir.join('test_foo.py').write('''
def test_a(bar):
    assert bar == "something"
''')
    result = testdir.runpytest('--foo=something')


def test_foo_option():
    pass
Implement test for help and cli args
# -*- coding: utf-8 -*-


def test_bar_fixture(testdir):
    """Make sure that pytest accepts our fixture."""

    # create a temporary pytest test module
    testdir.makepyfile("""
        def test_sth(bar):
            assert bar == "europython2015"
    """)

    # run pytest with the following cmd args
    result = testdir.runpytest(
        '--foo=europython2015',
        '-v'
    )

    # fnmatch_lines does an assertion internally
    result.stdout.fnmatch_lines([
        '*::test_sth PASSED',
    ])

    # make sure that we get a '0' exit code for the testsuite
    assert result.ret == 0


def test_help_message(testdir):
    result = testdir.runpytest(
        '--help',
    )
    # fnmatch_lines does an assertion internally
    result.stdout.fnmatch_lines([
        'cat:',
        '*--foo=DEST_FOO*Set the value for the fixture "bar".',
    ])
""" ===================== CollectionViewer demo ===================== Demo of CollectionViewer for viewing collections of images. This demo uses successively darker versions of the same image to fake an image collection. You can scroll through images with the slider, or you can interact with the viewer using your keyboard: left/right arrows Previous/next image in collection. number keys, 0--9 0% to 90% of collection. For example, "5" goes to the image in the middle (i.e. 50%) of the collection. home/end keys First/last image in collection. """ import numpy as np from skimage import data from skimage.viewer import CollectionViewer img = data.lena() img_collection = [np.uint8(img * 0.9**i) for i in range(20)] view = CollectionViewer(img_collection) view.show()
Use gaussian pyramid function for collection viewer example
""" ===================== CollectionViewer demo ===================== Demo of CollectionViewer for viewing collections of images. This demo uses successively darker versions of the same image to fake an image collection. You can scroll through images with the slider, or you can interact with the viewer using your keyboard: left/right arrows Previous/next image in collection. number keys, 0--9 0% to 90% of collection. For example, "5" goes to the image in the middle (i.e. 50%) of the collection. home/end keys First/last image in collection. """ import numpy as np from skimage import data from skimage.viewer import CollectionViewer from skimage.transform import build_gaussian_pyramid img = data.lena() img_collection = tuple(build_gaussian_pyramid(img)) view = CollectionViewer(img_collection) view.show()
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models from milestones.data import fetch_milestone_relationship_types def seed_relationship_types(apps, schema_editor): """Seed the relationship types.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") db_alias = schema_editor.connection.alias for name in fetch_milestone_relationship_types().values(): MilestoneRelationshipType.objects.using(db_alias).get_or_create( name=name, description='Autogenerated milestone relationship type "{}"'.format(name), ) def delete_relationship_types(apps, schema_editor): """Clean up any relationships we made.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") db_alias = schema_editor.connection.alias for name in fetch_milestone_relationship_types().values(): MilestoneRelationshipType.objects.using(db_alias).filter(name=name).delete() class Migration(migrations.Migration): dependencies = [ ('milestones', '0001_initial'), ] operations = [ migrations.RunPython(seed_relationship_types, delete_relationship_types), ]
Remove uses of using() from migrations. This hardcoded the db_alias fetched from schema_editor and forced Django to try to migrate any second database you use, rather than routing to the default database. In testing a build from scratch, these do not appear needed. Using using() prevents us from using multiple databases behind edxapp.
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models from milestones.data import fetch_milestone_relationship_types def seed_relationship_types(apps, schema_editor): """Seed the relationship types.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") for name in fetch_milestone_relationship_types().values(): MilestoneRelationshipType.objects.get_or_create( name=name, description='Autogenerated milestone relationship type "{}"'.format(name), ) def delete_relationship_types(apps, schema_editor): """Clean up any relationships we made.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") for name in fetch_milestone_relationship_types().values(): MilestoneRelationshipType.objects.filter(name=name).delete() class Migration(migrations.Migration): dependencies = [ ('milestones', '0001_initial'), ] operations = [ migrations.RunPython(seed_relationship_types, delete_relationship_types), ]
# Copyright (C) 2017 Google Inc. # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> from ggrc.converters import errors from integration.ggrc import TestCase class TestBasicCsvImport(TestCase): def setUp(self): TestCase.setUp(self) self.client.get("/login") def test_policy_basic_import(self): filename = "ca_setup_for_deletion.csv" self.import_file(filename) filename = "ca_deletion.csv" response_data_dry = self.import_file(filename, dry_run=True) response_data = self.import_file(filename) self.assertEqual(response_data_dry, response_data) self.assertEqual(response_data[0]["deleted"], 2) self.assertEqual(response_data[0]["ignored"], 0)
Optimize basic delete import tests. The dry-run check is now automatically performed on each import, so we do not need to duplicate the work in the delete test.
# Copyright (C) 2017 Google Inc. # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> from integration.ggrc import TestCase class TestBasicCsvImport(TestCase): def setUp(self): TestCase.setUp(self) self.client.get("/login") def test_policy_basic_import(self): filename = "ca_setup_for_deletion.csv" self.import_file(filename) filename = "ca_deletion.csv" response_data = self.import_file(filename) self.assertEqual(response_data[0]["deleted"], 2) self.assertEqual(response_data[0]["ignored"], 0)
""" CartoDB Services Python Library See: https://github.com/CartoDB/geocoder-api """ from setuptools import setup, find_packages setup( name='cartodb_services', version='0.13.0', description='CartoDB Services API Python Library', url='https://github.com/CartoDB/dataservices-api', author='Data Services Team - CartoDB', author_email='dataservices@cartodb.com', license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Mapping comunity', 'Topic :: Maps :: Mapping Tools', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', ], keywords='maps api mapping tools geocoder routing', packages=find_packages(exclude=['contrib', 'docs', 'tests']), extras_require={ 'dev': ['unittest'], 'test': ['unittest', 'nose', 'mockredispy', 'mock'], } )
Bump version of python library to 0.14.0
""" CartoDB Services Python Library See: https://github.com/CartoDB/geocoder-api """ from setuptools import setup, find_packages setup( name='cartodb_services', version='0.14.0', description='CartoDB Services API Python Library', url='https://github.com/CartoDB/dataservices-api', author='Data Services Team - CartoDB', author_email='dataservices@cartodb.com', license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Mapping comunity', 'Topic :: Maps :: Mapping Tools', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', ], keywords='maps api mapping tools geocoder routing', packages=find_packages(exclude=['contrib', 'docs', 'tests']), extras_require={ 'dev': ['unittest'], 'test': ['unittest', 'nose', 'mockredispy', 'mock'], } )
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. class Channel(object): def __init__(self, name, branch, position, category_postfix, priority): self.branch = branch self.builder_postfix = '-' + name self.category_postfix = category_postfix self.name = name self.position = position self.priority = priority self.all_deps_path = '/' + branch + '/deps/all.deps' self.standalone_deps_path = '/' + branch + '/deps/standalone.deps' self.dartium_deps_path = '/' + branch + '/deps/dartium.deps' # The channel names are replicated in the slave.cfg files for all # dart waterfalls. If you change anything here please also change it there. CHANNELS = [ Channel('be', 'branches/bleeding_edge', 0, '', 4), Channel('dev', 'trunk', 1, '-dev', 2), Channel('stable', 'branches/1.7', 2, '-stable', 1), Channel('integration', 'branches/dartium_integration', 3, '-integration', 3), ] CHANNELS_BY_NAME = {} for c in CHANNELS: CHANNELS_BY_NAME[c.name] = c
Make stable builders pull from 1.8. Review URL: https://codereview.chromium.org/760053002 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@293121 0039d316-1c4b-4281-b951-d872f2087c98
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. class Channel(object): def __init__(self, name, branch, position, category_postfix, priority): self.branch = branch self.builder_postfix = '-' + name self.category_postfix = category_postfix self.name = name self.position = position self.priority = priority self.all_deps_path = '/' + branch + '/deps/all.deps' self.standalone_deps_path = '/' + branch + '/deps/standalone.deps' self.dartium_deps_path = '/' + branch + '/deps/dartium.deps' # The channel names are replicated in the slave.cfg files for all # dart waterfalls. If you change anything here please also change it there. CHANNELS = [ Channel('be', 'branches/bleeding_edge', 0, '', 4), Channel('dev', 'trunk', 1, '-dev', 2), Channel('stable', 'branches/1.8', 2, '-stable', 1), Channel('integration', 'branches/dartium_integration', 3, '-integration', 3), ] CHANNELS_BY_NAME = {} for c in CHANNELS: CHANNELS_BY_NAME[c.name] = c
# This file is part of e-Giełda. # Copyright (C) 2014-2015 Mateusz Maćkowski and Tomasz Zieliński # # e-Giełda is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # You should have received a copy of the GNU Affero General Public License # along with e-Giełda. If not, see <http://www.gnu.org/licenses/>. from datetime import datetime, timedelta DT_FORMAT = '%Y-%m-%d %H:%M:%S%z' def datetime_html_format(date): return date.strftime("%Y-%m-%dT%H:%M") def datetime_to_string(datetime): return datetime.strftime(datetime, DT_FORMAT) def string_to_datetime(date): return datetime.strptime(date, DT_FORMAT) def date_range(start_date, end_date): return list(start_date + timedelta(x) for x in range((end_date - start_date).days + 1))
Fix error on date save
# This file is part of e-Giełda. # Copyright (C) 2014-2015 Mateusz Maćkowski and Tomasz Zieliński # # e-Giełda is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # You should have received a copy of the GNU Affero General Public License # along with e-Giełda. If not, see <http://www.gnu.org/licenses/>. from datetime import datetime, timedelta DT_FORMAT = '%Y-%m-%d %H:%M:%S%z' def datetime_html_format(date): return date.strftime("%Y-%m-%dT%H:%M") def datetime_to_string(date): return date.strftime(DT_FORMAT) def string_to_datetime(date): return datetime.strptime(date, DT_FORMAT) def date_range(start_date, end_date): return list(start_date + timedelta(x) for x in range((end_date - start_date).days + 1))
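The underlying bug is parameter shadowing: naming the argument datetime hides the datetime class, so datetime.strftime(datetime, DT_FORMAT) calls the bound method with an extra positional argument. A minimal reproduction:

from datetime import datetime

def buggy_to_string(datetime):  # parameter shadows the class
    return datetime.strftime(datetime, '%Y-%m-%d')

try:
    buggy_to_string(datetime(2015, 1, 2))
except TypeError:
    pass  # strftime() got the instance twice: once bound, once positional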
# -*- coding: utf-8 -*- import numpy as np from numpy.testing import assert_array_almost_equal from scipy import stats import pytest from pmdarima.preprocessing import LogEndogTransformer from pmdarima.preprocessing import BoxCoxEndogTransformer def test_same(): y = [1, 2, 3] trans = BoxCoxEndogTransformer() log_trans = LogEndogTransformer() y_t, _ = trans.fit_transform(y) log_y_t, _ = log_trans.fit_transform(y) assert_array_almost_equal(log_y_t, y_t)
Add test_invertible to log transformer test
# -*- coding: utf-8 -*- import numpy as np from numpy.testing import assert_array_almost_equal from scipy import stats import pytest from pmdarima.preprocessing import LogEndogTransformer from pmdarima.preprocessing import BoxCoxEndogTransformer def test_same(): y = [1, 2, 3] trans = BoxCoxEndogTransformer(lmbda=0) log_trans = LogEndogTransformer() y_t, _ = trans.fit_transform(y) log_y_t, _ = log_trans.fit_transform(y) assert_array_almost_equal(log_y_t, y_t) def test_invertible(): y = [1, 2, 3] trans = LogEndogTransformer() y_t, _ = trans.fit_transform(y) y_prime, _ = trans.inverse_transform(y_t) assert_array_almost_equal(y, y_prime)
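The lmbda=0 in test_same is what makes the comparison meaningful: the Box-Cox transform with lambda = 0 is defined as log(y), as a direct SciPy check confirms:

import numpy as np
from scipy import stats

y = np.array([1.0, 2.0, 3.0])
assert np.allclose(stats.boxcox(y, lmbda=0), np.log(y))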
from __future__ import absolute_import, unicode_literals from .base import * # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'l33th4x0rs' EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' try: from .local import * except ImportError: pass
Change database connections for data migration tool
from __future__ import absolute_import, unicode_literals from .base import * # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'l33th4x0rs' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'uwcs_zarya', 'USER': 'uwcs_zarya', 'PASSWORD': 'doyouevenlift', 'HOST': '127.0.0.1', 'PORT': '5432', }, 'old_data': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' try: from .local import * except ImportError: pass
import tempfile, os import nose.tools as nt ip = get_ipython() ip.magic('load_ext storemagic') def test_store_restore(): ip.user_ns['foo'] = 78 ip.magic('alias bar echo "hello"') tmpd = tempfile.mkdtemp() ip.magic('cd ' + tmpd) ip.magic('store foo') ip.magic('store bar') # Check storing nt.assert_equal(ip.db['autorestore/foo'], 78) nt.assert_in('bar', ip.db['stored_aliases']) # Remove those items ip.user_ns.pop('foo', None) ip.alias_manager.undefine_alias('bar') ip.magic('cd -') ip.user_ns['_dh'][:] = [] # Check restoring ip.magic('store -r') nt.assert_equal(ip.user_ns['foo'], 78) nt.assert_in('bar', ip.alias_manager.alias_table) nt.assert_in(os.path.realpath(tmpd), ip.user_ns['_dh']) os.rmdir(tmpd)
Add test for StoreMagics.autorestore option
import tempfile, os from IPython.config.loader import Config import nose.tools as nt ip = get_ipython() ip.magic('load_ext storemagic') def test_store_restore(): ip.user_ns['foo'] = 78 ip.magic('alias bar echo "hello"') tmpd = tempfile.mkdtemp() ip.magic('cd ' + tmpd) ip.magic('store foo') ip.magic('store bar') # Check storing nt.assert_equal(ip.db['autorestore/foo'], 78) nt.assert_in('bar', ip.db['stored_aliases']) # Remove those items ip.user_ns.pop('foo', None) ip.alias_manager.undefine_alias('bar') ip.magic('cd -') ip.user_ns['_dh'][:] = [] # Check restoring ip.magic('store -r') nt.assert_equal(ip.user_ns['foo'], 78) nt.assert_in('bar', ip.alias_manager.alias_table) nt.assert_in(os.path.realpath(tmpd), ip.user_ns['_dh']) os.rmdir(tmpd) def test_autorestore(): ip.user_ns['foo'] = 95 ip.magic('store foo') del ip.user_ns['foo'] c = Config() c.StoreMagics.autorestore = False orig_config = ip.config try: ip.config = c ip.extension_manager.reload_extension('storemagic') nt.assert_not_in('foo', ip.user_ns) c.StoreMagics.autorestore = True ip.extension_manager.reload_extension('storemagic') nt.assert_equal(ip.user_ns['foo'], 95) finally: ip.config = orig_config
# test1.py # Ronald L. Rivest and Karim Husayn Karimi # August 17, 2017 # Routine to experiment with scipy.optimize.minimize import scipy.optimize from scipy.stats import norm # function to minimize: def g(xy): (x,y) = xy print("g({},{})".format(x,y)) return x + y # constraints noise_level = 0.0000005 # constraint 1: y <= x/2 def f1(xy): (x,y) = xy return x/2 - y + noise_level * norm.rvs(0) # constraint 2: y >= 1/x def f2(xy): (x,y) = xy return y - 1.0/x + noise_level * norm.rvs(0) constraints = [ { "type": "ineq", "fun": f1 }, { "type": "ineq", "fun": f2 } ] print(scipy.optimize.minimize(g, (11, 5), constraints=constraints))
Switch to COBYLA optimization method. Works much better.
# test1.py # Ronald L. Rivest and Karim Husayn Karimi # August 17, 2017 # Routine to experiment with scipy.optimize.minimize import scipy.optimize from scipy.stats import norm # function to minimize: def g(xy): (x,y) = xy print("g({},{})".format(x,y)) return x + y # constraints noise_level = 0.05 # constraint 1: y <= x/2 def f1(xy): (x,y) = xy return x/2 - y + noise_level * norm.rvs(0) # constraint 2: y >= 1/x def f2(xy): (x,y) = xy return y - 1.0/x + noise_level * norm.rvs(0) constraints = [ { "type": "ineq", "fun": f1 }, { "type": "ineq", "fun": f2 } ] print(scipy.optimize.minimize(g, (11, 5), method = "COBYLA", tol = 0.01, constraints=constraints))
# Copyright (C) 2015-2016 Regents of the University of California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os def getAzureZone(defaultZone=None): """ Find an appropriate azure zone. Look for an environment variable or return a default as provided. :param defaultZone: The zone specified in the leader metadata. :return zone: The zone. """ return os.environ.get('TOIL_AZURE_ZONE') or defaultZone
Remove getAzureZone from init, no longer needed.
# Copyright (C) 2015-2016 Regents of the University of California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
from django.contrib import admin from apps.organizations.models import ( Organization, OrganizationAddress, OrganizationMember ) class OrganizationAddressAdmin(admin.StackedInline): model = OrganizationAddress extra = 0 class OrganizationAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} inlines = (OrganizationAddressAdmin,) search_fields = ('name', 'description') admin.site.register(Organization, OrganizationAdmin) admin.site.register(OrganizationMember)
Add a custom Admin page for organization members. This is a partial fix for BB-66.
from django.contrib import admin from apps.organizations.models import ( Organization, OrganizationAddress, OrganizationMember ) class OrganizationAddressAdmin(admin.StackedInline): model = OrganizationAddress extra = 0 class OrganizationAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} inlines = (OrganizationAddressAdmin,) search_fields = ('name', 'description') admin.site.register(Organization, OrganizationAdmin) class OrganizationMemberAdmin(admin.ModelAdmin): list_display = ('user', 'function', 'organization') list_filter = ('function',) search_fields = ('user__first_name', 'user__last_name', 'user__username', 'organization__name') admin.site.register(OrganizationMember, OrganizationMemberAdmin)
# -*- coding: utf-8 -*- import pytest def pytest_addoption(parser): group = parser.getgroup('{{cookiecutter.plugin_name}}') group.addoption( '--foo', action='store', dest='foo', help='alias for --foo' ) @pytest.fixture def bar(request): return request.config.option.foo
Optimize the help message for the option arg
# -*- coding: utf-8 -*- import pytest def pytest_addoption(parser): group = parser.getgroup('{{cookiecutter.plugin_name}}') group.addoption( '--foo', action='store', dest='foo', help='Set the value for the fixture "bar".' ) @pytest.fixture def bar(request): return request.config.option.foo
from celery.task import task from corehq.apps.export.export import get_export_file, rebuild_export from couchexport.models import Format from couchexport.tasks import escape_quotes from soil.util import expose_cached_download @task def populate_export_download_task(export_instances, filters, download_id, filename=None, expiry=10 * 60 * 60): export_file = get_export_file(export_instances, filters) file_format = Format.from_format(export_file.format) filename = filename or export_instances[0].name escaped_filename = escape_quotes('%s.%s' % (filename, file_format.extension)) payload = export_file.file.payload expose_cached_download( payload, expiry, ".{}".format(file_format.extension), mimetype=file_format.mimetype, content_disposition='attachment; filename="%s"' % escaped_filename, download_id=download_id, ) export_file.file.delete() @task(queue='background_queue', ignore_result=True, last_access_cutoff=None, filter=None) def rebuild_export_task(export_instance): rebuild_export(export_instance)
Fix botched keyword args in rebuild_export_task()
from celery.task import task from corehq.apps.export.export import get_export_file, rebuild_export from couchexport.models import Format from couchexport.tasks import escape_quotes from soil.util import expose_cached_download @task def populate_export_download_task(export_instances, filters, download_id, filename=None, expiry=10 * 60 * 60): export_file = get_export_file(export_instances, filters) file_format = Format.from_format(export_file.format) filename = filename or export_instances[0].name escaped_filename = escape_quotes('%s.%s' % (filename, file_format.extension)) payload = export_file.file.payload expose_cached_download( payload, expiry, ".{}".format(file_format.extension), mimetype=file_format.mimetype, content_disposition='attachment; filename="%s"' % escaped_filename, download_id=download_id, ) export_file.file.delete() @task(queue='background_queue', ignore_result=True) def rebuild_export_task(export_instance, last_access_cutoff=None, filter=None): rebuild_export(export_instance, last_access_cutoff, filter)
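The root cause is that keyword arguments placed in the @task decorator configure the task object, while call-time arguments must live in the function signature. A schematic illustration with a stand-in decorator (not the real Celery API):

def task(**task_options):          # stand-in for celery.task
    def wrap(fn):
        fn.options = task_options  # decorator kwargs configure the task
        return fn
    return wrap

@task(queue='background_queue', ignore_result=True)
def rebuild(export_instance, last_access_cutoff=None, filter=None):
    return (export_instance, last_access_cutoff, filter)

assert rebuild('inst', filter='f') == ('inst', None, 'f')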
#!/usr/bin/env python # -*- coding: utf-8 -*- """ :copyright: (c) 2012-2013 by PN. :license: MIT, see LICENSE for more details. """ from __future__ import print_function from __future__ import unicode_literals from __future__ import division import signal from functools import wraps ############################################################ # Timeout ############################################################ #http://www.saltycrane.com/blog/2010/04/using-python-timeout-decorator-uploading-s3/ class TimeoutError(Exception): def __init__(self, value="Timed Out"): self.value = value def __str__(self): return repr(self.value) def timeout(seconds=None): def decorate(f): def handler(signum, frame): raise TimeoutError() @wraps(f) def new_f(*args, **kwargs): old = signal.signal(signal.SIGALRM, handler) new_seconds = kwargs['timeout'] if 'timeout' in kwargs else seconds if new_seconds is None: raise ValueError("You must provide a timeout value") signal.alarm(new_seconds) try: result = f(*args, **kwargs) finally: signal.signal(signal.SIGALRM, old) signal.alarm(0) return result return new_f return decorate
Make timeout error an assertion error, not just any old exception. This means that timeout failures are considered to be test failures, where a specific assertion (i.e. 'this function takes less than N seconds') has failed, rather than being a random error in the test that may indicate a bug.
#!/usr/bin/env python # -*- coding: utf-8 -*- """ :copyright: (c) 2012-2013 by PN. :license: MIT, see LICENSE for more details. """ from __future__ import print_function from __future__ import unicode_literals from __future__ import division import signal from functools import wraps ############################################################ # Timeout ############################################################ #http://www.saltycrane.com/blog/2010/04/using-python-timeout-decorator-uploading-s3/ class TimeoutError(AssertionError): def __init__(self, value="Timed Out"): self.value = value def __str__(self): return repr(self.value) def timeout(seconds=None): def decorate(f): def handler(signum, frame): raise TimeoutError() @wraps(f) def new_f(*args, **kwargs): old = signal.signal(signal.SIGALRM, handler) new_seconds = kwargs['timeout'] if 'timeout' in kwargs else seconds if new_seconds is None: raise ValueError("You must provide a timeout value") signal.alarm(new_seconds) try: result = f(*args, **kwargs) finally: signal.signal(signal.SIGALRM, old) signal.alarm(0) return result return new_f return decorate
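Because the new TimeoutError derives from AssertionError, any runner that catches AssertionError as a failure now classifies timeouts the same way; the subclass relationship is all it takes:

class TimeoutError(AssertionError):
    pass

try:
    raise TimeoutError("Timed Out")
except AssertionError:
    pass  # caught as a test failure, not an unexpected error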
from django.conf.urls import patterns, include, url from django.conf import settings from django.conf.urls.static import static # Uncomment the next two lines to enable the admin: from django.contrib import admin # from mezzanine.core.views import direct_to_template admin.autodiscover() # Must be defined before auto discover and urlpatterns var. So when there is root url # injection, we first insert root url to this, then the last line will insert it to real urlpatterns default_app_url_patterns = [] from djangoautoconf import auto_conf_urls auto_conf_urls.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^default_django_15_and_below/', include('default_django_15_and_below.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), # url(r'^', include('demo.urls')), # url(r'^obj_sys/', include('obj_sys.urls')), # url("^$", direct_to_template, {"template": "index.html"}, name="home"), ) urlpatterns = [ # ... the rest of your URLconf goes here ... ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) urlpatterns += default_app_url_patterns
Fix the issue of urlpatterns being overridden by mistake.
from django.conf.urls import patterns, include, url from django.conf import settings from django.conf.urls.static import static # Uncomment the next two lines to enable the admin: from django.contrib import admin # from mezzanine.core.views import direct_to_template admin.autodiscover() # Must be defined before auto discover and urlpatterns var. So when there is root url # injection, we first insert root url to this, then the last line will insert it to real urlpatterns default_app_url_patterns = [] from djangoautoconf import auto_conf_urls auto_conf_urls.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^default_django_15_and_below/', include('default_django_15_and_below.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), # url(r'^', include('demo.urls')), # url(r'^obj_sys/', include('obj_sys.urls')), # url("^$", direct_to_template, {"template": "index.html"}, name="home"), ) urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) urlpatterns += default_app_url_patterns
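The bug in miniature, independent of Django: rebinding the urlpatterns name throws away every route added so far, while += extends the existing list in place.

urlpatterns = ['admin', 'publication']
urlpatterns = ['media']          # rebinding: previous routes are lost
print(urlpatterns)               # ['media']

urlpatterns = ['admin', 'publication']
urlpatterns += ['media']         # extending: everything is kept
print(urlpatterns)               # ['admin', 'publication', 'media']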
from __future__ import absolute_import from ..plot_object import PlotObject from ..properties import Bool class Widget(PlotObject): disabled = Bool(False)
Implement display protocol for Widget (_repr_html_)

This effectively allows us to automatically display plots and widgets.
from __future__ import absolute_import from ..plot_object import PlotObject from ..properties import Bool from ..embed import notebook_div class Widget(PlotObject): disabled = Bool(False) def _repr_html_(self): return notebook_div(self) @property def html(self): from IPython.core.display import HTML return HTML(self._repr_html_())
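The display hook is IPython's generic rich-repr protocol, not anything Bokeh-specific. A minimal self-contained sketch (the Badge class is hypothetical):

class Badge(object):
    def __init__(self, text):
        self.text = text

    def _repr_html_(self):
        # IPython/Jupyter calls this automatically when the object is the
        # result of a cell, and renders the returned HTML string.
        return '<b>%s</b>' % self.text

print(Badge('ok')._repr_html_())   # outside a notebook, call the hook directly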
DATABASES = { 'default': { 'ENGINE': '{{ db_engine }}', 'NAME': '{{ db_name }}', 'USER': '{{ db_user}}', 'PASSWORD': '{{ db_pass }}', }, 'other': { 'ENGINE': 'django.db.backends.sqlite3', 'TEST_NAME': 'other_db' } }
Append PID to Django test database name to avoid conflicts.
import os pid = os.getpid() DATABASES = { 'default': { 'ENGINE': '{{ db_engine }}', 'NAME': '{{ db_name }}', 'USER': '{{ db_user}}', 'PASSWORD': '{{ db_pass }}', }, 'other': { 'ENGINE': 'django.db.backends.sqlite3', 'TEST_NAME': 'other_db_%s' % pid, } }
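Why the PID helps, in two lines (hypothetical parallel runs): each test process computes its own suffix, so two runners started at once write to different SQLite test databases instead of clobbering the same file.

import os
print('other_db_%s' % os.getpid())   # e.g. other_db_4711, unique per process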
DEFAULT_NUM_MONTHS = 60 def generate_dates(end_str, months=None): """ Given an end date as a string in YYYY-MM form (or the underscore separated equivalent), return a list of N consecutive months as strings in YYYY-MM-01 form, with that month as the final member """ if months is None: months = DEFAULT_NUM_MONTHS end_date = parse_date(end_str) assert months > 0 dates = [] for offset in range(1-months, 1): date = increment_months(end_date, offset) dates.append('{:04d}-{:02d}-01'.format(date[0], date[1])) return dates def parse_date(date_str): """ Given a date string in YYYY-MM form (or the underscore separated equivalent), return a pair of (year, month) integers """ year_str, month_str = date_str.replace('_', '-').split('-')[:2] assert len(year_str) == 4 assert len(month_str) == 2 return int(year_str), int(month_str) def increment_months((year, month), months): """ Given a pair of (year, month) integers return the (year, month) pair N months in the future """ i = (year*12) + (month - 1) i += months return int(i/12), (i % 12) + 1
Fix another py27-ism which Black can't handle

Not sure how I missed this one last time.
DEFAULT_NUM_MONTHS = 60 def generate_dates(end_str, months=None): """ Given an end date as a string in YYYY-MM form (or the underscore separated equivalent), return a list of N consecutive months as strings in YYYY-MM-01 form, with that month as the final member """ if months is None: months = DEFAULT_NUM_MONTHS end_date = parse_date(end_str) assert months > 0 dates = [] for offset in range(1-months, 1): date = increment_months(end_date, offset) dates.append('{:04d}-{:02d}-01'.format(date[0], date[1])) return dates def parse_date(date_str): """ Given a date string in YYYY-MM form (or the underscore separated equivalent), return a pair of (year, month) integers """ year_str, month_str = date_str.replace('_', '-').split('-')[:2] assert len(year_str) == 4 assert len(month_str) == 2 return int(year_str), int(month_str) def increment_months(year_month, months): """ Given a pair of (year, month) integers return the (year, month) pair N months in the future """ year, month = year_month i = (year*12) + (month - 1) i += months return int(i/12), (i % 12) + 1
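A worked example of the month arithmetic, assuming the two functions above are in scope; the dates are arbitrary:

print(increment_months((2019, 11), 3))
# -> (2020, 2)
print(generate_dates('2020-03', months=3))
# -> ['2020-01-01', '2020-02-01', '2020-03-01']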
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import json import boto3 table_name = os.environ.get("TABLE_NAME") table = boto3.resource("dynamodb").Table(table_name) def _log_dynamo(response): print "HTTPStatusCode:{}, RetryAttempts:{}, ScannedCount:{}, Count:{}".format( response.get("ResponseMetadata").get("HTTPStatusCode"), response.get("ResponseMetadata").get("RetryAttempts"), response.get("ScannedCount"), response.get("Count") ) def get_items(event, context): response = table.scan(Limit=10) _log_dynamo(response) return { "statusCode": 200, "body": json.dumps(response["Items"], indent=1) } def get_item(event, context): response = table.get_item(Key={"id": event.get("pathParameters").get("id")}) _log_dynamo(response) return { "statusCode": 200, "body": json.dumps(response["Item"], indent=1) }
Add CORS header to the get_items function
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import json import boto3 table_name = os.environ.get("TABLE_NAME") table = boto3.resource("dynamodb").Table(table_name) def _log_dynamo(response): print "HTTPStatusCode:{}, RetryAttempts:{}, ScannedCount:{}, Count:{}".format( response.get("ResponseMetadata").get("HTTPStatusCode"), response.get("ResponseMetadata").get("RetryAttempts"), response.get("ScannedCount"), response.get("Count") ) def get_items(event, context): response = table.scan(Limit=10) _log_dynamo(response) return { "statusCode": 200, "body": json.dumps(response["Items"], indent=1), "headers": {"Access-Control-Allow-Origin": "*"} } def get_item(event, context): response = table.get_item(Key={"id": event.get("pathParameters").get("id")}) _log_dynamo(response) return { "statusCode": 200, "body": json.dumps(response["Item"], indent=1) }
from django.http import HttpResponseRedirect from django.shortcuts import render from django.core.urlresolvers import reverse from django.contrib.auth import authenticate, login as auth_login, logout as auth_logout from django.contrib.auth.decorators import login_required from cloudfish.models import Cloud def login(r): if r.POST: username = r.POST['username'] password = r.POST['password'] user = authenticate(username=username, password=password) if user is not None: auth_login(r, user) if not Cloud.objects.filter(account=user).exists(): return HttpResponseRedirect(reverse('connect-view')) return HttpResponseRedirect(reverse('myservers-view')) return render(r, 'auth.html') def logout(request): auth_logout(request) return HttpResponseRedirect(reverse('index-view')) @login_required def connect(request): return render(request, 'connect.html')
Add login failed flash message
from django.http import HttpResponseRedirect from django.shortcuts import render from django.core.urlresolvers import reverse from django.contrib.auth import authenticate, login as auth_login, logout as auth_logout from django.contrib.auth.decorators import login_required from cloudfish.models import Cloud def login(r): c = {} if r.POST: username = r.POST['username'] password = r.POST['password'] user = authenticate(username=username, password=password) if user is not None: auth_login(r, user) if not Cloud.objects.filter(account=user).exists(): return HttpResponseRedirect(reverse('connect-view')) return HttpResponseRedirect(reverse('myservers-view')) c['errors'] = "Login failed, please try again" return render(r, 'auth.html', c) def logout(request): auth_logout(request) return HttpResponseRedirect(reverse('index-view')) @login_required def connect(request): return render(request, 'connect.html')
import random from discord.ext import commands from lxml import etree class NSFW: def __init__(self, bot): self.bot = bot @commands.command(aliases=['gel']) async def gelbooru(self, ctx, *, tags): async with ctx.typing(): entries = [] url = 'http://gelbooru.com/index.php' params = {'page': 'dapi', 's': 'post', 'q': 'index', 'tags': tags} async with self.bot.session.get(url, params=params) as resp: root = etree.fromstring((await resp.text()).encode(), etree.HTMLParser()) search_nodes = root.findall(".//post") for node in search_nodes: image = next((item[1] for item in node.items() if item[0] == 'file_url'), None) if image is not None: entries.append(image) try: message = f'http:{random.choice(entries)}' except IndexError: message = 'No images found.' await ctx.send(message) @commands.command(hidden=True) async def massage(self, ctx, *, tags=''): await ctx.invoke(self.gelbooru, tags='massage ' + tags) def setup(bot): bot.add_cog(NSFW(bot))
Make command invisible by default
import random from discord.ext import commands from lxml import etree class NSFW: def __init__(self, bot): self.bot = bot @commands.command(aliases=['gel'], hidden=True) async def gelbooru(self, ctx, *, tags): async with ctx.typing(): entries = [] url = 'http://gelbooru.com/index.php' params = {'page': 'dapi', 's': 'post', 'q': 'index', 'tags': tags} async with self.bot.session.get(url, params=params) as resp: root = etree.fromstring((await resp.text()).encode(), etree.HTMLParser()) search_nodes = root.findall(".//post") for node in search_nodes: image = next((item[1] for item in node.items() if item[0] == 'file_url'), None) if image is not None: entries.append(image) try: message = f'http:{random.choice(entries)}' except IndexError: message = 'No images found.' await ctx.send(message) @commands.command(hidden=True) async def massage(self, ctx, *, tags=''): await ctx.invoke(self.gelbooru, tags='massage ' + tags) def setup(bot): bot.add_cog(NSFW(bot))
from django.core.mail import EmailMessage from django.template.loader import render_to_string def notify_existing_user(user, event): """ Sends e-mail to existing organizer, that they're added to the new Event. """ content = render_to_string('emails/existing_user.html', { 'user': user, 'event': event }) subject = 'You have been granted access to new Django Girls event' send_email(content, subject, user) def notify_new_user(user, event, password): """ Sends e-mail to newly created organizer that their account was created and that they were added to the Event. """ content = render_to_string('emails/new_user.html', { 'user': user, 'event': event, 'password': password, }) subject = 'Access to Django Girls website' send_email(content, subject, user) def send_email(user, content, subject): msg = EmailMessage(subject, content, "Django Girls <hello@djangogirls.org>", [user.email]) msg.content_subtype = "html" msg.send()
Fix broken order of arguments in send_email

Ticket #342
from django.core.mail import EmailMessage from django.template.loader import render_to_string def notify_existing_user(user, event): """ Sends e-mail to existing organizer, that they're added to the new Event. """ content = render_to_string('emails/existing_user.html', { 'user': user, 'event': event }) subject = 'You have been granted access to new Django Girls event' send_email(content, subject, user) def notify_new_user(user, event, password): """ Sends e-mail to newly created organizer that their account was created and that they were added to the Event. """ content = render_to_string('emails/new_user.html', { 'user': user, 'event': event, 'password': password, }) subject = 'Access to Django Girls website' send_email(content, subject, user) def send_email(content, subject, user): msg = EmailMessage(subject, content, "Django Girls <hello@djangogirls.org>", [user.email]) msg.content_subtype = "html" msg.send()
from django.core.checks import run_checks, Error from django.test import override_settings from axes.checks import Messages, Hints, Codes from axes.conf import settings from axes.tests.base import AxesTestCase @override_settings(AXES_HANDLER='axes.handlers.cache.AxesCacheHandler') class CacheCheckTestCase(AxesTestCase): @override_settings(CACHES={'default': {'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache'}}) def test_cache_check(self): errors = run_checks() self.assertEqual([], errors) @override_settings(CACHES={'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}}) def test_cache_check_errors(self): errors = run_checks() error = Error( msg=Messages.CACHE_INVALID, hint=Hints.CACHE_INVALID, obj=settings.CACHES, id=Codes.CACHE_INVALID, ) self.assertEqual([error], errors)
Add check test for missing case branch

Signed-off-by: Aleksi Häkli <44cb6a94c0d20644d531e2be44779b52833cdcd2@iki.fi>
from django.core.checks import run_checks, Error from django.test import override_settings from axes.checks import Messages, Hints, Codes from axes.conf import settings from axes.tests.base import AxesTestCase class CacheCheckTestCase(AxesTestCase): @override_settings( AXES_HANDLER='axes.handlers.cache.AxesCacheHandler', CACHES={'default': {'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache'}}, ) def test_cache_check(self): errors = run_checks() self.assertEqual([], errors) @override_settings( AXES_HANDLER='axes.handlers.cache.AxesCacheHandler', CACHES={'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}}, ) def test_cache_check_errors(self): errors = run_checks() error = Error( msg=Messages.CACHE_INVALID, hint=Hints.CACHE_INVALID, obj=settings.CACHES, id=Codes.CACHE_INVALID, ) self.assertEqual([error], errors) @override_settings( AXES_HANDLER='axes.handlers.database.AxesDatabaseHandler', CACHES={'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}}, ) def test_cache_check_does_not_produce_check_errors_with_database_handler(self): errors = run_checks() self.assertEqual([], errors)
from base import SubLayout, Rect from sublayouts import HorizontalStack from subtile import SubTile class SubVertTile(SubTile): arrangements = ["top", "bottom"] def _init_sublayouts(self): ratio = self.ratio expand = self.expand master_windows = self.master_windows arrangement = self.arrangement class MasterWindows(HorizontalStack): def filter(self, client): return self.index_of(client) < master_windows def request_rectangle(self, r, windows): return (r, Rect()) class SlaveWindows(HorizontalStack): def filter(self, client): return self.index_of(client) >= master_windows def request_rectangle(self, r, windows): if self.autohide and not windows: return (Rect(), r) else: if arrangement == "top": rmaster, rslave = r.split_horizontal(ratio=ratio) else: rslave, rmaster = r.split_horizontal(ratio=(1-ratio)) return (rslave, rmaster) self.sublayouts.append(SlaveWindows(self.clientStack, self.theme, parent=self, autohide=self.expand ) ) self.sublayouts.append(MasterWindows(self.clientStack, self.theme, parent=self, autohide=self.expand ) )
Refactor SubVertTile - make sublayouts use the parent's variables
from base import SubLayout, Rect from sublayouts import HorizontalStack from subtile import SubTile class SubVertTile(SubTile): arrangements = ["top", "bottom"] def _init_sublayouts(self): class MasterWindows(HorizontalStack): def filter(self, client): return self.index_of(client) < self.parent.master_windows def request_rectangle(self, r, windows): return (r, Rect()) class SlaveWindows(HorizontalStack): def filter(self, client): return self.index_of(client) >= self.parent.master_windows def request_rectangle(self, r, windows): if self.autohide and not windows: return (Rect(), r) else: if self.parent.arrangement == "top": rmaster, rslave = r.split_horizontal(ratio=self.parent.ratio) else: rslave, rmaster = r.split_horizontal(ratio=(1-self.parent.ratio)) return (rslave, rmaster) self.sublayouts.append(SlaveWindows(self.clientStack, self.theme, parent=self, autohide=self.expand ) ) self.sublayouts.append(MasterWindows(self.clientStack, self.theme, parent=self, autohide=self.expand ) )
import unittest from contextlib import redirect_stdout from conllu import print_tree from conllu.tree_helpers import TreeNode from io import StringIO class TestPrintTree(unittest.TestCase): def test_print_empty_list(self): result = self._capture_print(print_tree, []) self.assertEqual(result, "") def test_print_simple_treenode(self): node = TreeNode(data={"id": "X", "deprel": "Y"}, children={}) result = self._capture_print(print_tree, node) self.assertEqual(result, "(deprel:Y) id:X deprel:Y [X]\n") def test_print_list_of_nodes(self): node = TreeNode(data={"id": "X", "deprel": "Y"}, children={}) nodes = [node, node] result = self._capture_print(print_tree, nodes) self.assertEqual(result, "(deprel:Y) id:X deprel:Y [X]\n" * 2) def _capture_print(self, func, args): f = StringIO() with redirect_stdout(f): func(args) return f.getvalue()
Fix redirect_stdout not being available in Python 2.
import unittest from conllu import print_tree from conllu.tree_helpers import TreeNode from io import StringIO try: from contextlib import redirect_stdout except ImportError: import sys import contextlib @contextlib.contextmanager def redirect_stdout(target): original = sys.stdout sys.stdout = target yield sys.stdout = original class TestPrintTree(unittest.TestCase): def test_print_empty_list(self): result = self._capture_print(print_tree, []) self.assertEqual(result, "") def test_print_simple_treenode(self): node = TreeNode(data={"id": "X", "deprel": "Y"}, children={}) result = self._capture_print(print_tree, node) self.assertEqual(result, "(deprel:Y) id:X deprel:Y [X]\n") def test_print_list_of_nodes(self): node = TreeNode(data={"id": "X", "deprel": "Y"}, children={}) nodes = [node, node] result = self._capture_print(print_tree, nodes) self.assertEqual(result, "(deprel:Y) id:X deprel:Y [X]\n" * 2) def _capture_print(self, func, args): f = StringIO() with redirect_stdout(f): func(args) return f.getvalue()
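One observation on the fallback, not part of the record: it does not restore sys.stdout if the block raises. A slightly hardened variant of the same shim wraps the yield in try/finally:

import sys
import contextlib

@contextlib.contextmanager
def redirect_stdout(target):
    original = sys.stdout
    sys.stdout = target
    try:
        yield
    finally:
        # runs even when the body raises, so stdout is always restored
        sys.stdout = original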
#!/bin/env python # -*- coding: utf8 -*- """ Triggers an upload process with the specified raw.xz URL. """ import argparse import logging import logging.config import multiprocessing.pool import fedmsg.config import fedimg.uploader logging.config.dictConfig(fedmsg.config.load_config()['logging']) log = logging.getLogger('fedmsg') def trigger_upload(compose_id, url, push_notifications): upload_pool = multiprocessing.pool.ThreadPool(processes=4) compose_meta = {'compose_id': compose_id} fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta) def get_args(): parser = argparse.ArgumentParser( description="Trigger a manual upload process with the " "specified raw.xz URL") parser.add_argument( "-u", "--url", type=str, help=".raw.xz URL", required=True) parser.add_argument( "-c", "--compose-id", type=str, help="compose id of the .raw.xz file", required=True) parser.add_argument( "-p", "--push-notifications", help="Bool to check if we need to push fedmsg notifications", action="store_true", required=False) args = parser.parse_args() return args.url, args.compose_id, args.push_notifications def main(): url, compose_id, push_notifications = get_args() trigger_upload(url, compose_id, push_notifications) if __name__ == '__main__': main()
services.ec2: Add the missing push_notifications arg

Signed-off-by: Sayan Chowdhury <5f0367a2b3b757615b57f51d912cf16f2c0ad827@gmail.com>
#!/bin/env python # -*- coding: utf8 -*- """ Triggers an upload process with the specified raw.xz URL. """ import argparse import logging import logging.config import multiprocessing.pool import fedmsg.config import fedimg.uploader logging.config.dictConfig(fedmsg.config.load_config()['logging']) log = logging.getLogger('fedmsg') def trigger_upload(compose_id, url, push_notifications): upload_pool = multiprocessing.pool.ThreadPool(processes=4) compose_meta = {'compose_id': compose_id} fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta, push_notifications=push_notifications) def get_args(): parser = argparse.ArgumentParser( description="Trigger a manual upload process with the " "specified raw.xz URL") parser.add_argument( "-u", "--url", type=str, help=".raw.xz URL", required=True) parser.add_argument( "-c", "--compose-id", type=str, help="compose id of the .raw.xz file", required=True) parser.add_argument( "-p", "--push-notifications", help="Bool to check if we need to push fedmsg notifications", action="store_true", required=False) args = parser.parse_args() return args.url, args.compose_id, args.push_notifications def main(): url, compose_id, push_notifications = get_args() trigger_upload(url, compose_id, push_notifications) if __name__ == '__main__': main()
# -*- coding: utf-8 -*- """ dynmen - A simple python interface to dynamic menus like dmenu or rofi import dynmen menu = dynmen.Menu(['dmenu', '-fn', 'Sans-30']) output = menu({'a': 1, 'b': 2, 'c': 3}) You can make the menu non-blocking by setting: menu.process_mode = 'futures' Please see the repository for more examples: https://github.com/frostidaho/dynmen """ from .menu import Menu, MenuError del menu def new_dmenu(**kwargs): from .dmenu import DMenu return DMenu(**kwargs) def new_rofi(**kwargs): from .rofi import Rofi return Rofi(**kwargs)
Add MenuResult to the top-level namespace
# -*- coding: utf-8 -*- """ dynmen - A simple python interface to dynamic menus like dmenu or rofi import dynmen menu = dynmen.Menu(['dmenu', '-fn', 'Sans-30']) output = menu({'a': 1, 'b': 2, 'c': 3}) You can make the menu non-blocking by setting: menu.process_mode = 'futures' Please see the repository for more examples: https://github.com/frostidaho/dynmen """ from .menu import Menu, MenuError, MenuResult del menu def new_dmenu(**kwargs): from .dmenu import DMenu return DMenu(**kwargs) def new_rofi(**kwargs): from .rofi import Rofi return Rofi(**kwargs)
from importlib import import_module from django.core.management.base import BaseCommand from django.utils import translation class Command(BaseCommand): """ Upgrades Janeway """ help = "Upgrades an install from one version to another." def add_arguments(self, parser): """Adds arguments to Django's management command-line parser. :param parser: the parser to which the required arguments will be added :return: None """ parser.add_argument('upgrade_module') def handle(self, *args, **options): translation.activate('en') upgrade_module_name = options.get('upgrade_module') upgrade_module_path = 'utils.upgrade.{module_name}'.format(module_name=upgrade_module_name) try: upgrade_module = import_module(upgrade_module_path) upgrade_module.execute() except ImportError as e: print('There was an error running the requested upgrade: ') print(e)
Upgrade path is now not required; help text is output if no path is supplied.
import os from importlib import import_module from django.core.management.base import BaseCommand from django.utils import translation from django.conf import settings def get_modules(): path = os.path.join(settings.BASE_DIR, 'utils', 'upgrade') root, dirs, files = next(os.walk(path)) return files class Command(BaseCommand): """ Upgrades Janeway """ help = "Upgrades an install from one version to another." def add_arguments(self, parser): """Adds arguments to Django's management command-line parser. :param parser: the parser to which the required arguments will be added :return: None """ parser.add_argument('--path', required=False) def handle(self, *args, **options): if not options.get('path'): print('No upgrade selected. Available upgrade paths: ') for file in get_modules(): module_name = file.split('.')[0] print('- {module_name}'.format(module_name=module_name)) print('To run an upgrade use the following: `python3 manage.py run_upgrade --script 12_13`') else: translation.activate('en') upgrade_module_name = options.get('path') upgrade_module_path = 'utils.upgrade.{module_name}'.format(module_name=upgrade_module_name) try: upgrade_module = import_module(upgrade_module_path) upgrade_module.execute() except ImportError as e: print('There was an error running the requested upgrade: ') print(e)
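The next(os.walk(...)) idiom used above returns only the top level of the tree. A tiny self-contained check (the temp directory and file name are throwaway):

import os
import tempfile

d = tempfile.mkdtemp()
open(os.path.join(d, '12_13.py'), 'w').close()
root, dirs, files = next(os.walk(d))
print(files)   # ['12_13.py'] -- files directly inside d, nothing recursive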
from django.conf.urls.defaults import * from django.contrib import admin from django.conf import settings admin.autodiscover() urlpatterns = patterns('', url(r'^admin/', admin.site.urls), url(r'^publication/', include('project.publications.urls')), #url(r'^google/', include('project.google.urls')), url(r'^project/', include('project.projects.urls')), url(r'^i18n/', include('django.conf.urls.i18n')), url(r'^jsi18n/(?P<packages>\S+?)/$', 'django.views.i18n.javascript_catalog'), url(r'^jsi18n/$', 'django.views.i18n.javascript_catalog'), url(r'^$', 'project.views.index'), # Media serving url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}, name='media', ), )
Make photos and videos accessible
from django.conf.urls.defaults import * from django.contrib import admin from django.conf import settings admin.autodiscover() urlpatterns = patterns('', url(r'^admin/', admin.site.urls), url(r'^publication/', include('project.publications.urls')), url(r'^google/', include('djangoogle.urls')), url(r'^project/', include('project.projects.urls')), url(r'^i18n/', include('django.conf.urls.i18n')), url(r'^jsi18n/(?P<packages>\S+?)/$', 'django.views.i18n.javascript_catalog'), url(r'^jsi18n/$', 'django.views.i18n.javascript_catalog'), url(r'^$', 'project.views.index'), # Media serving url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}, name='media', ), )
import io import random import numpy as np import pandas as pd import sklearn import requests def gettingData(): url = "https://www.coingecko.com/price_charts/export/279/eur.csv" content = requests.get(url).content data = pd.read_csv(io.StringIO(content.decode('utf-8'))) return data def preprocessing(data): #customize index data.snapped_at[0].split()[0] data.snapped_at = data.snapped_at.apply(lambda x: x.split()[0]) data.set_index('snapped_at', inplace=True) data.index = pd.to_datetime(data.index) def main(): data = gettingData() print("Retrieved data:") print(data.tail()) if __name__ == "__main__": main()
Index is completed if there is no sample for a date
import io import random import numpy as np import pandas as pd import sklearn import requests def gettingData(): url = "https://www.coingecko.com/price_charts/export/279/eur.csv" content = requests.get(url).content data = pd.read_csv(io.StringIO(content.decode('utf-8'))) return data def preprocessing(data): #customize index data.snapped_at[0].split()[0] data.snapped_at = data.snapped_at.apply(lambda x: x.split()[0]) data.set_index('snapped_at', inplace=True) data.index = pd.to_datetime(data.index) ''' In some cases there is no sample for a certain date. ''' #Generate all the possible days and use them to reindex start = data.index[data.index.argmin()] end = data.index[data.index.argmax()] index_complete = pd.date_range(start, end) data = data.reindex(index_complete) #Fill the blanks with the mean between the previous and the day after print("\nLooking if the index is complete...") for idx in data.index: dayloc = data.index.get_loc(idx) day = data.loc[idx] if day.hasnans: #updating rg = slice(dayloc-1, dayloc+2) data.loc[idx] = data.iloc[rg].mean() print("Day <{}> updated with the mean".format(idx)) def main(): data = gettingData() print("\nRetrieved data:") print(data.tail()) preprocessing(data) if __name__ == "__main__": main()
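The gap-filling logic in isolation, on toy data rather than the CoinGecko CSV: reindex over the full daily range, then overwrite each all-NaN row with the mean of its neighbours.

import pandas as pd

df = pd.DataFrame({'price': [1.0, 3.0]},
                  index=pd.to_datetime(['2021-01-01', '2021-01-03']))
df = df.reindex(pd.date_range(df.index.min(), df.index.max()))
for idx in df.index:
    dayloc = df.index.get_loc(idx)
    if df.loc[idx].hasnans:
        # mean over the previous, current (NaN) and next day
        df.loc[idx] = df.iloc[dayloc - 1:dayloc + 2].mean()
print(df)   # 2021-01-02 is filled with 2.0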
import morepath import chameleon class ChameleonApp(morepath.App): pass @ChameleonApp.setting_section(section='chameleon') def get_setting_section(): return { 'auto_reload': False } @ChameleonApp.template_engine(extension='.pt') def get_chameleon_render(path, original_render, settings): config = {'auto_reload': settings.chameleon.auto_reload} template = chameleon.PageTemplateFile(path, **config) def render(content, request): variables = {'request': request} variables.update(content) return original_render(template.render(**variables), request) return render
Make the way chameleon settings are defined more generic; any Chameleon setting can now be in the chameleon config section.
import morepath import chameleon class ChameleonApp(morepath.App): pass @ChameleonApp.setting_section(section='chameleon') def get_setting_section(): return {'auto_reload': False} @ChameleonApp.template_engine(extension='.pt') def get_chameleon_render(path, original_render, settings): config = settings.chameleon.__dict__ template = chameleon.PageTemplateFile(path, **config) def render(content, request): variables = {'request': request} variables.update(content) return original_render(template.render(**variables), request) return render
""" Flask server app. """ import datetime as dt import sys import flask import sqlalchemy as sa import coils import tables import mapping app = flask.Flask(__name__) # Load configuration file. CONFIG = sys.argv[1] if len(sys.argv)>=2 else 'wabbit.cfg' config = coils.Config(CONFIG) @app.route('/') def index(): """Render the index page.""" return flask.render_template('index.html') @app.route('/info') def info(): """Return JSON of server info.""" # Connect to database engine. engine = sa.create_engine( 'mysql://{}:{}@{}/{}'.format( config['username'], config['password'], config['host'], config['db_name'])) Session = sa.orm.sessionmaker(bind=engine) session = Session() now = dt.datetime.now() datum = session.query(mapping.Datum).\ filter(mapping.Datum.name=='size')[0] return flask.jsonify(server_time=now, db_size=datum.value) if __name__ == '__main__': app.run()
Use SQLAlchemy extension in Flask app.
""" Flask server app. """ import datetime as dt import sys import flask from flask.ext.sqlalchemy import SQLAlchemy import coils import mapping # Load configuration file. CONFIG = sys.argv[1] if len(sys.argv)>=2 else 'wabbit.cfg' config = coils.Config(CONFIG) # Initialize Flask and SQLAlchemy. app = flask.Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://{}:{}@{}/{}'.format( config['username'], config['password'], config['host'], config['db_name']) db = SQLAlchemy(app) @app.route('/') def index(): """Render the index page.""" return flask.render_template('index.html') @app.route('/info') def info(): """Return JSON of server info.""" now = dt.datetime.now() datum = db.session.query(mapping.Datum).\ filter(mapping.Datum.name=='size')[0] return flask.jsonify(server_time=now, db_size=datum.value) if __name__ == '__main__': app.run()
from setuptools import setup VERSION = '0.2.9' setup( name='jinja2_standalone_compiler', packages=['jinja2_standalone_compiler', ], version=VERSION, author='Filipe Waitman', author_email='filwaitman@gmail.com', install_requires=[x.strip() for x in open('requirements.txt').readlines()], url='https://github.com/filwaitman/jinja2-standalone-compiler', download_url='https://github.com/filwaitman/jinja2-standalone-compiler/tarball/{}'.format(VERSION), test_suite='tests', keywords=['Jinja2', 'Jinja', 'renderer', 'compiler', 'HTML'], classifiers=[ "Development Status :: 1 - Planning", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Operating System :: OS Independent", ], entry_points="""\ [console_scripts] jinja2_standalone_compiler = jinja2_standalone_compiler:main_command """, )
Change project maturity level and bump to 0.3
from setuptools import setup VERSION = '0.3' setup( name='jinja2_standalone_compiler', packages=['jinja2_standalone_compiler', ], version=VERSION, author='Filipe Waitman', author_email='filwaitman@gmail.com', install_requires=[x.strip() for x in open('requirements.txt').readlines()], url='https://github.com/filwaitman/jinja2-standalone-compiler', download_url='https://github.com/filwaitman/jinja2-standalone-compiler/tarball/{}'.format(VERSION), test_suite='tests', keywords=['Jinja2', 'Jinja', 'renderer', 'compiler', 'HTML'], classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Operating System :: OS Independent", ], entry_points="""\ [console_scripts] jinja2_standalone_compiler = jinja2_standalone_compiler:main_command """, )
import setuptools import pathlib setuptools.setup( name='crafter', version='0.17.0', description='Open world survival game for reinforcement learning.', url='http://github.com/danijar/crafter', long_description=pathlib.Path('README.md').read_text(), long_description_content_type='text/markdown', packages=['crafter'], package_data={'crafter': ['assets/*']}, entry_points={'console_scripts': ['crafter=crafter.run_gui:main']}, install_requires=[ 'numpy', 'imageio', 'pillow', 'opensimplex', 'ruamel.yaml'], extras_require={'gui': ['pygame']}, classifiers=[ 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Topic :: Games/Entertainment', 'Topic :: Scientific/Engineering :: Artificial Intelligence', ], )
Include data file in package.
import setuptools import pathlib setuptools.setup( name='crafter', version='0.18.0', description='Open world survival game for reinforcement learning.', url='http://github.com/danijar/crafter', long_description=pathlib.Path('README.md').read_text(), long_description_content_type='text/markdown', packages=['crafter'], package_data={'crafter': ['data.yaml', 'assets/*']}, entry_points={'console_scripts': ['crafter=crafter.run_gui:main']}, install_requires=[ 'numpy', 'imageio', 'pillow', 'opensimplex', 'ruamel.yaml'], extras_require={'gui': ['pygame']}, classifiers=[ 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Topic :: Games/Entertainment', 'Topic :: Scientific/Engineering :: Artificial Intelligence', ], )
# -*- coding: utf-8 -*- # # Copyright (C) 2009-2010 Sebastian Krysmanski # Copyright (C) 2012 Greg Lavallee # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # from setuptools import setup PACKAGE = 'TicketGuidelinesPlugin' VERSION = '1.0.0' setup( name=PACKAGE, version=VERSION, author='Sebastian Krysmanski', url='https://trac-hacks.org/wiki/TicketGuidelinesPlugin', description="Adds your ticket guidelines to the ticket view. The " "guidelines are specified in the wiki pages " "'TicketGuidelines/NewShort' and " "'TicketGuidelines/ModifyShort'.", keywords='trac plugin', license='Modified BSD', install_requires=['Trac'], packages=['ticketguidelines'], package_data={'ticketguidelines': ['htdocs/*']}, entry_points={'trac.plugins': '%s = ticketguidelines.web_ui' % PACKAGE}, )
Change package name before publishing to PyPI
# -*- coding: utf-8 -*- # # Copyright (C) 2009-2010 Sebastian Krysmanski # Copyright (C) 2012 Greg Lavallee # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # from setuptools import setup PACKAGE = 'TracTicketGuidelines' VERSION = '1.0.0' setup( name=PACKAGE, version=VERSION, author='Sebastian Krysmanski', url='https://trac-hacks.org/wiki/TicketGuidelinesPlugin', description="Adds your ticket guidelines to the ticket view. The " "guidelines are specified in the wiki pages " "'TicketGuidelines/NewShort' and " "'TicketGuidelines/ModifyShort'.", keywords='trac plugin', license='Modified BSD', install_requires=['Trac'], packages=['ticketguidelines'], package_data={'ticketguidelines': ['htdocs/*']}, entry_points={'trac.plugins': '%s = ticketguidelines.web_ui' % PACKAGE}, )
import os from distutils.core import setup from sphinx.setup_command import BuildDoc import django_assets def find_packages(root): # so we don't depend on setuptools; from the Storm ORM setup.py packages = [] for directory, subdirectories, files in os.walk(root): if '__init__.py' in files: packages.append(directory.replace(os.sep, '.')) return packages setup( name = 'django-assets', version=".".join(map(str, django_assets.__version__)), description = 'Media asset management for the Django web framework.', long_description = 'Merges, minifies and compresses Javascript and ' 'CSS files, supporting a variety of different filters, including ' 'YUI, jsmin, jspacker or CSS tidy. Also supports URL rewriting ' 'in CSS files.', author = 'Michael Elsdoerfer', author_email = 'michael@elsdoerfer.info', license = 'BSD', url = 'http://launchpad.net/django-assets', classifiers = [ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries', ], packages = find_packages('django_assets'), cmdclass={'build_sphinx': BuildDoc}, )
Allow the package to be built without Sphinx being required.
import os from distutils.core import setup try: from sphinx.setup_command import BuildDoc cmdclass = {'build_sphinx': BuildDoc} except ImportError: print "Sphinx not installed--needed to build documentation" # default cmdclass to None to avoid cmdclass = {} import django_assets def find_packages(root): # so we don't depend on setuptools; from the Storm ORM setup.py packages = [] for directory, subdirectories, files in os.walk(root): if '__init__.py' in files: packages.append(directory.replace(os.sep, '.')) return packages setup( name = 'django-assets', version=".".join(map(str, django_assets.__version__)), description = 'Media asset management for the Django web framework.', long_description = 'Merges, minifies and compresses Javascript and ' 'CSS files, supporting a variety of different filters, including ' 'YUI, jsmin, jspacker or CSS tidy. Also supports URL rewriting ' 'in CSS files.', author = 'Michael Elsdoerfer', author_email = 'michael@elsdoerfer.info', license = 'BSD', url = 'http://launchpad.net/django-assets', classifiers = [ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries', ], packages = find_packages('django_assets'), cmdclass=cmdclass, )
# -*- coding: utf-8 -*- from setuptools import setup setup( name='pusher', version='1.2.0', description='A Python library to interract with the Pusher API', url='https://github.com/pusher/pusher-http-python', author='Pusher', author_email='support@pusher.com', classifiers=[ "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Development Status :: 4 - Beta", "Intended Audience :: Developers", "Topic :: Internet :: WWW/HTTP", ], keywords='pusher rest realtime websockets service', license='MIT', packages=[ 'pusher' ], install_requires=['six', 'requests>=2.3.0', 'urllib3', 'pyopenssl', 'ndg-httpsclient', 'pyasn1'], tests_require=['nose', 'mock', 'HTTPretty'], extras_require={ 'aiohttp': ["aiohttp>=0.9.0"], 'tornado': ['tornado>=4.0.0'] }, test_suite='pusher_tests', )
Include cacert.pem as part of the package
# -*- coding: utf-8 -*- from setuptools import setup setup( name='pusher', version='1.2.0', description='A Python library to interract with the Pusher API', url='https://github.com/pusher/pusher-http-python', author='Pusher', author_email='support@pusher.com', classifiers=[ "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Development Status :: 4 - Beta", "Intended Audience :: Developers", "Topic :: Internet :: WWW/HTTP", ], keywords='pusher rest realtime websockets service', license='MIT', packages=[ 'pusher' ], install_requires=['six', 'requests>=2.3.0', 'urllib3', 'pyopenssl', 'ndg-httpsclient', 'pyasn1'], tests_require=['nose', 'mock', 'HTTPretty'], extras_require={ 'aiohttp': ["aiohttp>=0.9.0"], 'tornado': ['tornado>=4.0.0'] }, package_data={ 'pusher': ['cacert.pem'] }, test_suite='pusher_tests', )
import logging import subprocess from hotness.cache import cache log = logging.getLogger('fedmsg') def get_version(package_name, yumconfig): nvr_dict = build_nvr_dict(yumconfig) return nvr_dict[package_name] @cache.cache_on_arguments() def build_nvr_dict(yumconfig): cmdline = ["/usr/bin/repoquery", "--config", yumconfig, "--quiet", "--archlist=src", "--all", "--qf", "%{name}\t%{version}\t%{release}"] log.info("Running %r" % ' '.join(cmdline)) repoquery = subprocess.Popen(cmdline, stdout=subprocess.PIPE) (stdout, stderr) = repoquery.communicate() log.debug("Done with repoquery.") if stderr: log.warn(stderr) new_nvr_dict = {} for line in stdout.split("\n"): line = line.strip() if line: name, version, release = line.split("\t") new_nvr_dict[name] = (version, release) return new_nvr_dict
Drop explicit archlist for now.
import logging import subprocess from hotness.cache import cache log = logging.getLogger('fedmsg') def get_version(package_name, yumconfig): nvr_dict = build_nvr_dict(yumconfig) return nvr_dict[package_name] @cache.cache_on_arguments() def build_nvr_dict(yumconfig): cmdline = ["/usr/bin/repoquery", "--config", yumconfig, "--quiet", #"--archlist=src", "--all", "--qf", "%{name}\t%{version}\t%{release}"] log.info("Running %r" % ' '.join(cmdline)) repoquery = subprocess.Popen(cmdline, stdout=subprocess.PIPE) (stdout, stderr) = repoquery.communicate() log.debug("Done with repoquery.") if stderr: log.warn(stderr) new_nvr_dict = {} for line in stdout.split("\n"): line = line.strip() if line: name, version, release = line.split("\t") new_nvr_dict[name] = (version, release) return new_nvr_dict
from os.path import isdir import pytest from filesystem_tree import FilesystemTree @pytest.yield_fixture def fs(): fs = FilesystemTree() yield fs fs.remove() def test_it_can_be_instantiated(): assert FilesystemTree().__class__.__name__ == 'FilesystemTree' def test_args_go_to_mk_not_root(): fs = FilesystemTree('foo', 'bar') assert fs.root != 'foo' def test_it_makes_a_directory(fs): assert isdir(fs.root) def test_remove_removes(fs): assert isdir(fs.root) fs.remove() assert not isdir(fs.root)
Add an initial test each for resolve and mk
import os from os.path import isdir import pytest from filesystem_tree import FilesystemTree @pytest.yield_fixture def fs(): fs = FilesystemTree() yield fs fs.remove() def test_it_can_be_instantiated(): assert FilesystemTree().__class__.__name__ == 'FilesystemTree' def test_args_go_to_mk_not_root(): fs = FilesystemTree('foo', 'bar') assert fs.root != 'foo' def test_it_makes_a_directory(fs): assert isdir(fs.root) def test_resolve_resolves(fs): path = fs.resolve('some/dir') assert path == os.path.realpath(os.sep.join([fs.root, 'some', 'dir'])) def test_mk_makes_a_dir(fs): fs.mk('some/dir') assert isdir(fs.resolve('some/dir')) def test_remove_removes(fs): assert isdir(fs.root) fs.remove() assert not isdir(fs.root)
""" Views for the rdap_explorer project, query app. """ import ipwhois from django.http import HttpResponseRedirect from django.shortcuts import render from django.urls import reverse from django.views.decorators.cache import cache_page from json import dumps from .forms import QueryForm def index(request): if request.method == 'POST': form = QueryForm(request.POST) if form.is_valid(): return HttpResponseRedirect(reverse( 'query:results', args=(form['query'].value(),) )) else: form = QueryForm() return render(request, 'query/index.html', { 'title': 'Query', 'form': form }) @cache_page(86400) def results(request, query): error = None result = {} form = QueryForm(initial={"query": query}) try: ip = ipwhois.IPWhois(query) result = ip.lookup_rdap(retry_count=1, depth=2, inc_raw=True) except (ValueError, ipwhois.exceptions.IPDefinedError) as e: error = e return render(request, 'query/index.html', { 'title': 'Results', 'error': error, 'form': form, 'result': dumps(result) })
Change results page title to include query (or "Error" on error).
""" Views for the rdap_explorer project, query app. """ import ipwhois from django.http import HttpResponseRedirect from django.shortcuts import render from django.urls import reverse from django.views.decorators.cache import cache_page from json import dumps from .forms import QueryForm def index(request): if request.method == 'POST': form = QueryForm(request.POST) if form.is_valid(): return HttpResponseRedirect(reverse( 'query:results', args=(form['query'].value(),) )) else: form = QueryForm() return render(request, 'query/index.html', { 'title': 'Query', 'form': form }) @cache_page(86400) def results(request, query): title = 'Results' error = None result = {} form = QueryForm(initial={"query": query}) try: ip = ipwhois.IPWhois(query) result = ip.lookup_rdap(retry_count=1, depth=2, inc_raw=True) title = ip.address_str except (ValueError, ipwhois.exceptions.IPDefinedError) as e: error = e title = 'Error' return render(request, 'query/index.html', { 'title': title, 'error': error, 'form': form, 'result': dumps(result) })
class DashboardContents: _registry = {} content = DashboardContents()
Store project and module components separately
class DashboardContents: _registry = {'project': {}, 'module': {}} def __getitem__(self, identifier): component = self._registry['project'].get(identifier, None) if not component: component = self._registry['module'].get(identifier) return component def __contains__(self, identifier): return (identifier in self._registry['project'] or identifier in self._registry['module']) def register_project(self, component): self._registry['project'][component.identifier] = component def register_module(self, component): self._registry['module'][component.identifier] = component def get_project_components(self): return self._registry['project'].items() def get_module_components(self): return self._registry['module'].items() content = DashboardContents()
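Hypothetical usage of the registry above (FooComponent is invented; content is the instance created at the bottom of the record):

class FooComponent(object):
    identifier = 'foo'

content.register_module(FooComponent())
assert 'foo' in content                     # checks both registries
print(content['foo'])                       # project lookup misses, module hits
print(dict(content.get_module_components()))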
# -*- coding: utf8 -*- # This file is part of PyBossa. # # Copyright (C) 2013 SF Isle of Man Limited # # PyBossa is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # PyBossa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with PyBossa. If not, see <http://www.gnu.org/licenses/>. from flask.ext.login import current_user def create(token=None): return False def read(token=None): return not current_user.is_anonymous() def update(token=None): return False def delete(token=None): return False
Change signature to match other resources' auth functions
# -*- coding: utf8 -*- # This file is part of PyBossa. # # Copyright (C) 2013 SF Isle of Man Limited # # PyBossa is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # PyBossa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with PyBossa. If not, see <http://www.gnu.org/licenses/>. from flask.ext.login import current_user def create(token=None): return False def read(token=None): return not current_user.is_anonymous() def update(token): return False def delete(token): return False