Datasets:

Columns (name: type, min-max string length):

- commit: string, 40-40
- old_file: string, 4-118
- new_file: string, 4-118
- old_contents: string, 10-2.94k
- new_contents: string, 21-3.18k
- subject: string, 16-444
- message: string, 17-2.63k
- lang: string, 1 distinct value
- license: string, 13 distinct values
- repos: string, 5-43k
- ndiff: string, 51-3.32k
- instruction: string, 16-444
- content: string, 133-4.32k
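Each row pairs a commit's before/after file contents with its commit subject as a natural-language edit instruction, so data in this shape is typically consumed for instruction-tuned code editing. As a minimal sketch of how such rows can be iterated with the Hugging Face `datasets` library, the snippet below loads a few examples; the dataset identifier is a placeholder, not this dataset's real repo id.

```python
# Minimal sketch: iterate a few rows of a commit-edit dataset like this one.
# The dataset id below is a placeholder -- substitute the actual repo id.
from datasets import load_dataset

ds = load_dataset("some-org/commit-edits-python", split="train")  # hypothetical id

for row in ds.select(range(3)):
    # `instruction` mirrors `subject`; `content` bundles the before/after code.
    print(row["commit"], row["old_file"], "-", row["instruction"])
    print(row["old_contents"][:60].replace("\n", " "), "->",
          row["new_contents"][:60].replace("\n", " "))
```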

commit: 96fd8b71fd425d251e9cc07e8cc65b4fc040d857
old_file: samples/nanomsg/hello_world.py
new_file: samples/nanomsg/hello_world.py
old_contents:

```python
import os.path
import shutil
import tempfile
import threading
import sys

import nanomsg as nn


def ping(url, event):
    with nn.Socket(protocol=nn.Protocol.NN_PUSH) as sock, sock.connect(url):
        event.wait()
        sock.send(b'Hello, World!')


def pong(url, event):
    with nn.Socket(protocol=nn.Protocol.NN_PULL) as sock, sock.bind(url):
        event.set()
        message = sock.recv()
        print(bytes(message.as_memoryview()).decode('ascii'))


def main():
    path = tempfile.mkdtemp()
    try:
        event = threading.Event()
        url = 'ipc://' + os.path.join(path, 'reqrep.ipc')
        print('Play ping-pong on %s' % url)
        threads = [
            threading.Thread(target=ping, args=(url, event)),
            threading.Thread(target=pong, args=(url, event)),
        ]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
    finally:
        shutil.rmtree(path)
    return 0


if __name__ == '__main__':
    sys.exit(main())
```

new_contents:

```python
import threading
import sys

import nanomsg as nn


def ping(url, barrier):
    with nn.Socket(protocol=nn.Protocol.NN_PUSH) as sock, sock.connect(url):
        sock.send(b'Hello, World!')
        # Shutdown the endpoint after the other side ack'ed; otherwise
        # the message could be lost.
        barrier.wait()


def pong(url, barrier):
    with nn.Socket(protocol=nn.Protocol.NN_PULL) as sock, sock.bind(url):
        message = sock.recv()
        print(bytes(message.as_memoryview()).decode('ascii'))
        barrier.wait()


def main():
    barrier = threading.Barrier(2)
    url = 'inproc://test'
    print('Play ping-pong on %s' % url)
    threads = [
        threading.Thread(target=ping, args=(url, barrier)),
        threading.Thread(target=pong, args=(url, barrier)),
    ]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    return 0


if __name__ == '__main__':
    sys.exit(main())
```

subject: Fix message lost issue in samples
message: Fix message lost issue in samples
lang: Python
license: mit
repos:
clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage
instruction: Fix message lost issue in samples

commit: 8e3abcd310b7e932d769f05fa0a7135cc1a53b76
old_file: setup.py
new_file: setup.py
old_contents:

```python
from cx_Freeze import setup, Executable

# Dependencies are automatically detected, but it might need
# fine tuning.
build_exe_options = {
    "excludes": [
        "numpy"
    ],
    "bin_includes": [
        "libcrypto.so.1.0.0",
        "libssl.so.1.0.0"
    ],
    "packages": [
        "_cffi_backend",
        "appdirs",
        "asyncio",
        "bcrypt",
        "encodings",
        "idna",
        "motor",
        "packaging",
        "raven",
        "uvloop"
    ]
}

options = {
    "build_exe": build_exe_options
}

executables = [
    Executable('run.py', base="Console")
]

setup(name='virtool', executables=executables, options=options)
```

new_contents:

```python
from cx_Freeze import setup, Executable

# Dependencies are automatically detected, but it might need
# fine tuning.
build_exe_options = {
    "bin_includes": [
        "libcrypto.so.1.0.0",
        "libssl.so.1.0.0"
    ],
    "includes": [
        "numpy",
        "numpy.core._methods",
        "numpy.lib",
        "numpy.lib.format"
    ],
    "packages": [
        "_cffi_backend",
        "appdirs",
        "asyncio",
        "bcrypt",
        "encodings",
        "idna",
        "motor",
        "packaging",
        "raven",
        "uvloop"
    ]
}

options = {
    "build_exe": build_exe_options
}

executables = [
    Executable('run.py', base="Console")
]

setup(name='virtool', executables=executables, options=options)
```

subject: Include missing numpy modules in build
message: Include missing numpy modules in build
lang: Python
license: mit
repos:
igboyes/virtool,virtool/virtool,igboyes/virtool,virtool/virtool
instruction: Include missing numpy modules in build

commit: 49f1715067df0208c79a1af2e73d6aa314b96bef
old_file: django_su/utils.py
new_file: django_su/utils.py
old_contents:

```python
import warnings
import collections

from django.conf import settings
from django.utils.module_loading import import_string


def su_login_callback(user):
    if hasattr(settings, 'SU_LOGIN'):
        warnings.warn(
            "SU_LOGIN is deprecated, use SU_LOGIN_CALLBACK",
            DeprecationWarning,
        )

    func = getattr(settings, 'SU_LOGIN_CALLBACK', None)
    if func is not None:
        if not isinstance(func, collections.Callable):
            func = import_string(func)
        return func(user)
    return user.has_perm('auth.change_user')


def custom_login_action(request, user):
    func = getattr(settings, 'SU_CUSTOM_LOGIN_ACTION', None)
    if func is None:
        return False

    if not isinstance(func, collections.Callable):
        func = import_string(func)
    func(request, user)

    return True
```

new_contents:

```python
import warnings
from collections.abc import Callable

from django.conf import settings
from django.utils.module_loading import import_string


def su_login_callback(user):
    if hasattr(settings, 'SU_LOGIN'):
        warnings.warn(
            "SU_LOGIN is deprecated, use SU_LOGIN_CALLBACK",
            DeprecationWarning,
        )

    func = getattr(settings, 'SU_LOGIN_CALLBACK', None)
    if func is not None:
        if not isinstance(func, Callable):
            func = import_string(func)
        return func(user)
    return user.has_perm('auth.change_user')


def custom_login_action(request, user):
    func = getattr(settings, 'SU_CUSTOM_LOGIN_ACTION', None)
    if func is None:
        return False

    if not isinstance(func, Callable):
        func = import_string(func)
    func(request, user)

    return True
```

subject: Update collections.Callable typecheck to collections.abc.Callable
message: Update collections.Callable typecheck to collections.abc.Callable
lang: Python
license: mit
repos:
adamcharnock/django-su,PetrDlouhy/django-su,PetrDlouhy/django-su,adamcharnock/django-su
instruction: Update collections.Callable typecheck to collections.abc.Callable

commit: a45c79b10ef5ca6eb4b4e792f2229b2f9b0a7bbf
old_file: thinglang/foundation/definitions.py
new_file: thinglang/foundation/definitions.py
old_contents:

```python
import itertools

from thinglang.lexer.values.identifier import Identifier

"""
The internal ordering of core types used by the compiler and runtime
"""

INTERNAL_TYPE_COUNTER = itertools.count(1)

# TODO: map dynamically at runtime

INTERNAL_TYPE_ORDERING = {
    Identifier("text"): next(INTERNAL_TYPE_COUNTER),
    Identifier("number"): next(INTERNAL_TYPE_COUNTER),
    Identifier("bool"): next(INTERNAL_TYPE_COUNTER),
    Identifier("list"): next(INTERNAL_TYPE_COUNTER),
    Identifier("map"): next(INTERNAL_TYPE_COUNTER),
    Identifier("iterator"): next(INTERNAL_TYPE_COUNTER),
    Identifier("Console"): next(INTERNAL_TYPE_COUNTER),
    Identifier("File"): next(INTERNAL_TYPE_COUNTER),
    Identifier("Directory"): next(INTERNAL_TYPE_COUNTER),
    Identifier("Time"): next(INTERNAL_TYPE_COUNTER),
    Identifier("Exception"): next(INTERNAL_TYPE_COUNTER)
}
```

new_contents:

```python
import glob

import os

from thinglang.lexer.values.identifier import Identifier

"""
The internal ordering of core types used by the compiler and runtime
"""

CURRENT_PATH = os.path.dirname(os.path.abspath(__file__))
SOURCE_PATTERN = os.path.join(CURRENT_PATH, 'source/**/*.thing')


def list_types():
    for path in glob.glob(SOURCE_PATTERN, recursive=True):
        name = os.path.basename(path).replace('.thing', '')
        yield name, path


PRIMITIVE_TYPES = [
    'text',
    'number'
]

INTERNAL_SOURCES = {Identifier(name): path for name, path in list_types()}
```

subject: Remove manual INTERNAL_TYPE_ORDERING map in favor of explicit import tables
message: Remove manual INTERNAL_TYPE_ORDERING map in favor of explicit import tables
lang: Python
license: mit
repos:
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
instruction: Remove manual INTERNAL_TYPE_ORDERING map in favor of explicit import tables

commit: 3b2fae7875d89adb8537b75c7e9b48a8663a9d4f
old_file: src/rnaseq_lib/web/synapse.py
new_file: src/rnaseq_lib/web/synapse.py
old_contents:

```python
import os

from synapseclient import Synapse, File

expression = 'syn11311347'
metadata = 'syn11311931'


def upload_file(file_path, login, parent, description=None):
    """
    Uploads file to Synapse. Password must be stored in environment variable SYNAPSE_PASS

    :param str file_path: Path to file
    :param str login: Login (usually an email address)
    :param str parent: Parent Synapse ID (example: syn12312415) where file will be placed
    :param str description: Optional description to add
    """
    description = '' if None else description
    f = File(file_path, description=description, parent=parent)

    assert 'SYNAPSE_PASS' in os.environ, 'SYNAPSE_PASS must be set as an environment variable'
    syn = Synapse()
    syn.login(login, os.environ['SYNAPSE_PASS'])
    syn.store(f)
```

new_contents:

```python
import os

from synapseclient import Synapse, File

expression = 'syn11311347'
metadata = 'syn11311931'


def upload_file(file_path, login, parent, description=None):
    """
    Uploads file to Synapse. Password must be stored in environment variable SYNAPSE_PASS

    :param str file_path: Path to file
    :param str login: Login (usually an email address)
    :param str parent: Parent Synapse ID (example: syn12312415) where file will be placed
    :param str description: Optional description to add
    """
    description = '' if None else description
    f = File(file_path, description=description, parent=parent)

    syn = _syn_login(login)
    syn.store(f)


def download_file(synid, login, download_location='.'):
    """
    Synapse ID of file to download

    :param str synid: Synapse ID
    :param str login: Synapse ID
    :param str download_location: Download location for file
    """
    syn = _syn_login(login)
    syn.get(synid, downloadLocation=download_location)


def _syn_login(login):
    """
    Login to synapse. Set environment variable SYNAPSE_PASS to the password for `login`

    :param str login:
    :return: Synapse instance
    :rtype: instance
    """
    assert 'SYNAPSE_PASS' in os.environ, 'SYNAPSE_PASS must be set as an environment variable'
    syn = Synapse()
    syn.login(login, os.environ['SYNAPSE_PASS'])
    return syn
```

subject: Add download and login functions
message: Add download and login functions
lang: Python
license: mit
repos:
jvivian/rnaseq-lib,jvivian/rnaseq-lib
instruction: Add download and login functions

commit: b7335f5c011d9fad3570a097fb1165cc6fbd3cef
old_file: src/python/grpcio_tests/tests/unit/_logging_test.py
new_file: src/python/grpcio_tests/tests/unit/_logging_test.py
old_contents:

```python
"""Test of gRPC Python's interaction with the python logging module"""

import unittest
import six
import grpc
import logging


class LoggingTest(unittest.TestCase):

    def test_logger_not_occupied(self):
        self.assertEqual(0, len(logging.getLogger().handlers))


if __name__ == '__main__':
    unittest.main(verbosity=2)
```

new_contents:

```python
"""Test of gRPC Python's interaction with the python logging module"""

import unittest
import six
from six.moves import reload_module
import logging
import grpc
import functools
import sys


class LoggingTest(unittest.TestCase):

    def test_logger_not_occupied(self):
        self.assertEqual(0, len(logging.getLogger().handlers))

    def test_handler_found(self):
        old_stderr = sys.stderr
        sys.stderr = six.StringIO()
        try:
            reload_module(logging)
            logging.basicConfig()
            reload_module(grpc)
            self.assertFalse("No handlers could be found" in sys.stderr.getvalue())
        finally:
            sys.stderr = old_stderr
            reload_module(logging)


if __name__ == '__main__':
    unittest.main(verbosity=2)
```

subject: Add test for 'No handlers could be found' problem
message: Add test for 'No handlers could be found' problem
lang: Python
license: apache-2.0
repos:
mehrdada/grpc,sreecha/grpc,stanley-cheung/grpc,vjpai/grpc,mehrdada/grpc,muxi/grpc,pszemus/grpc,stanley-cheung/grpc,jtattermusch/grpc,donnadionne/grpc,grpc/grpc,mehrdada/grpc,pszemus/grpc,ctiller/grpc,nicolasnoble/grpc,firebase/grpc,donnadionne/grpc,ctiller/grpc,jtattermusch/grpc,donnadionne/grpc,vjpai/grpc,muxi/grpc,donnadionne/grpc,grpc/grpc,vjpai/grpc,carl-mastrangelo/grpc,carl-mastrangelo/grpc,stanley-cheung/grpc,ctiller/grpc,jboeuf/grpc,donnadionne/grpc,ejona86/grpc,jboeuf/grpc,carl-mastrangelo/grpc,carl-mastrangelo/grpc,ctiller/grpc,nicolasnoble/grpc,grpc/grpc,pszemus/grpc,stanley-cheung/grpc,sreecha/grpc,jtattermusch/grpc,stanley-cheung/grpc,ctiller/grpc,nicolasnoble/grpc,nicolasnoble/grpc,nicolasnoble/grpc,grpc/grpc,pszemus/grpc,carl-mastrangelo/grpc,mehrdada/grpc,nicolasnoble/grpc,grpc/grpc,jtattermusch/grpc,pszemus/grpc,muxi/grpc,carl-mastrangelo/grpc,ctiller/grpc,vjpai/grpc,grpc/grpc,ctiller/grpc,jtattermusch/grpc,sreecha/grpc,vjpai/grpc,firebase/grpc,donnadionne/grpc,sreecha/grpc,donnadionne/grpc,muxi/grpc,grpc/grpc,muxi/grpc,sreecha/grpc,pszemus/grpc,vjpai/grpc,firebase/grpc,grpc/grpc,jboeuf/grpc,jboeuf/grpc,carl-mastrangelo/grpc,firebase/grpc,ejona86/grpc,pszemus/grpc,ejona86/grpc,stanley-cheung/grpc,stanley-cheung/grpc,ejona86/grpc,vjpai/grpc,ejona86/grpc,vjpai/grpc,vjpai/grpc,mehrdada/grpc,pszemus/grpc,muxi/grpc,jtattermusch/grpc,jtattermusch/grpc,stanley-cheung/grpc,ctiller/grpc,mehrdada/grpc,ctiller/grpc,grpc/grpc,ejona86/grpc,pszemus/grpc,jtattermusch/grpc,firebase/grpc,ejona86/grpc,firebase/grpc,nicolasnoble/grpc,firebase/grpc,ejona86/grpc,nicolasnoble/grpc,mehrdada/grpc,firebase/grpc,donnadionne/grpc,stanley-cheung/grpc,pszemus/grpc,jboeuf/grpc,donnadionne/grpc,vjpai/grpc,donnadionne/grpc,mehrdada/grpc,ctiller/grpc,muxi/grpc,vjpai/grpc,pszemus/grpc,stanley-cheung/grpc,jboeuf/grpc,mehrdada/grpc,carl-mastrangelo/grpc,jtattermusch/grpc,carl-mastrangelo/grpc,mehrdada/grpc,muxi/grpc,jboeuf/grpc,ctiller/grpc,mehrdada/grpc,nicolasnoble/grpc,carl-mastrangelo/grpc,ejona86/grpc,ejona86/grpc,mehrdada/grpc,muxi/grpc,muxi/grpc,pszemus/grpc,donnadionne/grpc,nicolasnoble/grpc,sreecha/grpc,jboeuf/grpc,sreecha/grpc,carl-mastrangelo/grpc,jtattermusch/grpc,donnadionne/grpc,ctiller/grpc,firebase/grpc,vjpai/grpc,carl-mastrangelo/grpc,jboeuf/grpc,firebase/grpc,jtattermusch/grpc,jtattermusch/grpc,muxi/grpc,grpc/grpc,sreecha/grpc,sreecha/grpc,ejona86/grpc,grpc/grpc,sreecha/grpc,stanley-cheung/grpc,firebase/grpc,muxi/grpc,stanley-cheung/grpc,jboeuf/grpc,jboeuf/grpc,sreecha/grpc,nicolasnoble/grpc,grpc/grpc,firebase/grpc,sreecha/grpc,ejona86/grpc,nicolasnoble/grpc,jboeuf/grpc
"""Test of gRPC Python's interaction with the python logging module""" import unittest import six + from six.moves import reload_module + import logging import grpc - import logging - + import functools + import sys class LoggingTest(unittest.TestCase): def test_logger_not_occupied(self): self.assertEqual(0, len(logging.getLogger().handlers)) + def test_handler_found(self): + old_stderr = sys.stderr + sys.stderr = six.StringIO() + try: + reload_module(logging) + logging.basicConfig() + reload_module(grpc) + self.assertFalse("No handlers could be found" in sys.stderr.getvalue()) + finally: + sys.stderr = old_stderr + reload_module(logging) if __name__ == '__main__': unittest.main(verbosity=2)
instruction: Add test for 'No handlers could be found' problem

commit: 62d9fdfe0ad3fc37286aa19a87e2890aaf90f639
old_file: tasks/check_rd2_enablement.py
new_file: tasks/check_rd2_enablement.py
old_contents:

```python
import simple_salesforce

from cumulusci.tasks.salesforce import BaseSalesforceApiTask


class is_rd2_enabled(BaseSalesforceApiTask):
    def _run_task(self):
        try:
            settings = self.sf.query(
                "SELECT IsRecurringDonations2Enabled__c "
                "FROM npe03__Recurring_Donations_Settings__c "
                "WHERE SetupOwnerId IN (SELECT Id FROM Organization)"
            )
        except simple_salesforce.exceptions.SalesforceMalformedRequest:
            # The field does not exist in the target org, meaning it's
            # pre-RD2
            self.return_values = False
            return

        if settings.get("records"):
            if settings["records"][0]["IsRecurringDonations2Enabled__c"]:
                self.return_values = True

        self.return_values = False
```

new_contents:

```python
import simple_salesforce

from cumulusci.tasks.salesforce import BaseSalesforceApiTask


class is_rd2_enabled(BaseSalesforceApiTask):
    def _run_task(self):
        try:
            settings = self.sf.query(
                "SELECT IsRecurringDonations2Enabled__c "
                "FROM npe03__Recurring_Donations_Settings__c "
                "WHERE SetupOwnerId IN (SELECT Id FROM Organization)"
            )
        except simple_salesforce.exceptions.SalesforceMalformedRequest:
            # The field does not exist in the target org, meaning it's
            # pre-RD2
            self.return_values = False
            return

        if settings.get("records"):
            if settings["records"][0]["IsRecurringDonations2Enabled__c"]:
                self.return_values = True
                return

        self.return_values = False
```

subject: Correct bug in preflight check
message: Correct bug in preflight check
lang: Python
license: bsd-3-clause
repos:
SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus
instruction: Correct bug in preflight check

commit: ea17a76c4ada65dac9e909b930c938a24ddb99b2
old_file: tests/formatter/test_csver.py
new_file: tests/formatter/test_csver.py
old_contents:

```python
import unittest, argparse

from echolalia.formatter.csver import Formatter


class CsverTestCase(unittest.TestCase):

    def setUp(self):
        self.parser = argparse.ArgumentParser()
        self.data = [{'char': chr(i), 'order': i - 96} for i in xrange(97, 100)]
        self.formatter = Formatter()

    def test_add_args(self):
        new_parser = self.formatter.add_args(self.parser)
        self.assertEqual(new_parser, self.parser)
        args = new_parser.parse_args(['--with_header'])
        self.assertTrue(args.with_header)
        args = new_parser.parse_args([])
        self.assertFalse(args.with_header)

    def test_marshall_no_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args([])
        result = self.formatter.marshall(args, self.data)
        expect = "a,1\r\nb,2\r\nc,3\r\n"

    def test_marshall_with_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args(['--with_header'])
        result = self.formatter.marshall(args, self.data)
        expect = "char,order\r\na,1\r\nb,2\r\nc,3\r\n"
        self.assertEqual(result, expect)
```

new_contents:

```python
import unittest, argparse

from echolalia.formatter.csver import Formatter


class CsverTestCase(unittest.TestCase):

    def setUp(self):
        self.parser = argparse.ArgumentParser()
        self.data = [{'char': chr(i), 'order': i - 96} for i in xrange(97, 100)]
        self.formatter = Formatter()

    def test_add_args(self):
        new_parser = self.formatter.add_args(self.parser)
        self.assertEqual(new_parser, self.parser)
        args = new_parser.parse_args(['--with_header'])
        self.assertTrue(args.with_header)
        args = new_parser.parse_args([])
        self.assertFalse(args.with_header)

    def test_marshall_no_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args([])
        result = self.formatter.marshall(args, self.data)
        expect = "a,1\r\nb,2\r\nc,3\r\n"
        self.assertEqual(result, expect)

    def test_marshall_with_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args(['--with_header'])
        result = self.formatter.marshall(args, self.data)
        expect = "char,order\r\na,1\r\nb,2\r\nc,3\r\n"
        self.assertEqual(result, expect)
```

subject: Fix no header test for csv formatter
message: Fix no header test for csv formatter
lang: Python
license: mit
repos:
eiri/echolalia-prototype
instruction: Fix no header test for csv formatter

commit: 3a5432e14c18852758afdf92b913c93906808e3e
old_file: cinder/db/sqlalchemy/migrate_repo/versions/115_add_shared_targets_to_volumes.py
new_file: cinder/db/sqlalchemy/migrate_repo/versions/115_add_shared_targets_to_volumes.py
old_contents:

```python
from sqlalchemy import Boolean, Column, MetaData, Table


def upgrade(migrate_engine):
    """Add shared_targets column to Volumes."""
    meta = MetaData()
    meta.bind = migrate_engine
    volumes = Table('volumes', meta, autoload=True)

    # NOTE(jdg): We use a default of True because it's harmless for a device
    # that does NOT use shared_targets to be treated as if it does
    shared_targets = Column('shared_targets',
                            Boolean,
                            default=True)
    volumes.create_column(shared_targets)
```

new_contents:

```python
from sqlalchemy import Boolean, Column, MetaData, Table


def upgrade(migrate_engine):
    """Add shared_targets column to Volumes."""
    meta = MetaData()
    meta.bind = migrate_engine
    volumes = Table('volumes', meta, autoload=True)

    # NOTE(jdg): We use a default of True because it's harmless for a device
    # that does NOT use shared_targets to be treated as if it does
    if not hasattr(volumes.c, 'shared_targets'):
        volumes.create_column(Column('shared_targets', Boolean, default=True))
```

subject: Add 'shared_targets' only when it doesn't exist
message:
    Add 'shared_targets' only when it doesn't exist

    Add existence check before actually create it.

    Change-Id: I96946f736d7263f80f7ad24f8cbbc9a09eb3cc63
lang: Python
license: apache-2.0
repos:
phenoxim/cinder,Datera/cinder,mahak/cinder,openstack/cinder,j-griffith/cinder,openstack/cinder,mahak/cinder,j-griffith/cinder,Datera/cinder,phenoxim/cinder
instruction: Add 'shared_targets' only when it doesn't exist

commit: 052042e2f48b7936a6057c18a128f497d5e5b1a4
old_file: folium/__init__.py
new_file: folium/__init__.py
old_contents:

```python
from __future__ import absolute_import

__version__ = '0.2.0.dev'

from folium.folium import Map, initialize_notebook
```

new_contents:

```python
from __future__ import absolute_import

__version__ = '0.2.0.dev'

from folium.folium import Map, initialize_notebook, CircleMarker

from folium.map import FeatureGroup, FitBounds,Icon, LayerControl, Marker, Popup, TileLayer

from folium.features import (ClickForMarker, ColorScale, CustomIcon, DivIcon, GeoJson, GeoJsonStyle,
                             ImageOverlay, LatLngPopup, MarkerCluster, MultiPolyLine, PolyLine,
                             RegularPolygonMarker, TopoJson, Vega, WmsTileLayer)
```

subject: Make features accessible from root
message: Make features accessible from root
lang: Python
license: mit
repos:
QuLogic/folium,talespaiva/folium,andrewgiessel/folium,themiurgo/folium,shankari/folium,python-visualization/folium,talespaiva/folium,QuLogic/folium,BibMartin/folium,ocefpaf/folium,themiurgo/folium,talespaiva/folium,andrewgiessel/folium,BibMartin/folium,ocefpaf/folium,python-visualization/folium,shankari/folium,shankari/folium,themiurgo/folium,BibMartin/folium,andrewgiessel/folium,talespaiva/folium,QuLogic/folium
instruction: Make features accessible from root

commit: 0986bbba02a4bb4d2c13835dd91281cce3bb5f10
old_file: alembic/versions/174eb928136a_gdpr_restrict_processing.py
new_file: alembic/versions/174eb928136a_gdpr_restrict_processing.py
old_contents:

```python
# revision identifiers, used by Alembic.
revision = '174eb928136a'
down_revision = 'd5b07c8f0893'

from alembic import op
import sqlalchemy as sa


def upgrade():
    op.add_column('user', sa.Column('restrict', sa.Boolean))


def downgrade():
    op.drop_column('user', 'restrict')
```

new_contents:

```python
# revision identifiers, used by Alembic.
revision = '174eb928136a'
down_revision = 'd5b07c8f0893'

from alembic import op
import sqlalchemy as sa


def upgrade():
    op.add_column('user', sa.Column('restrict', sa.Boolean, default=False))
    sql = 'update "user" set restrict=false'
    op.execute(sql)


def downgrade():
    op.drop_column('user', 'restrict')
```

subject: Set default to False, and update existing users.
message: Set default to False, and update existing users.
lang: Python
license: agpl-3.0
repos:
Scifabric/pybossa,PyBossa/pybossa,Scifabric/pybossa,PyBossa/pybossa
instruction: Set default to False, and update existing users.

commit: 75d435e55e42fefe1c28095dadb9abb56284c1fb
old_file: marked/__init__.py
new_file: marked/__init__.py
old_contents:

```python
import markgen
from bs4 import BeautifulSoup

TAGS = {
    'p': 'paragraph',
    'div': 'paragraph',
    'a': 'link',
    'strong': 'emphasis',
    'em': 'emphasis',
    'b': 'emphasis',
    'i': 'emphasis',
    'u': 'emphasis',
    'img': 'image',
    'image': 'image',
    'blockquote': 'quote',
    'pre': 'pre',
    'code': 'pre',
    'h1': 'header',
    'h2': 'header',
    'h3': 'header',
    'h4': 'header',
    'h5': 'header',
    'h6': 'header',
    'ul': 'ulist',
    'ol': 'olist'
}


def markup_to_markdown(content):
    soup = BeautifulSoup(content)

    # Account for HTML snippets and full documents alike
    contents = soup.body.contents if soup.body is not None else soup.contents

    return _iterate_over_contents(contents)


def _iterate_over_contents(contents):
    out = u''

    for c in contents:
        if hasattr(c, 'contents'):
            c = _iterate_over_contents(c.contents)

            if c.name in TAGS:
                wrap = getattr(markgen, TAGS[c.name])
                c = wrap(c)

        out += u"\n{0}".format(c)

    return out
```

new_contents:

```python
import markgen
from bs4 import BeautifulSoup

TAGS = {
    'p': 'paragraph',
    'div': 'paragraph',
    'a': 'link',
    'strong': 'emphasis',
    'em': 'emphasis',
    'b': 'emphasis',
    'i': 'emphasis',
    'u': 'emphasis',
    'img': 'image',
    'image': 'image',
    'blockquote': 'quote',
    'pre': 'pre',
    'code': 'pre',
    'h1': 'header',
    'h2': 'header',
    'h3': 'header',
    'h4': 'header',
    'h5': 'header',
    'h6': 'header',
    'ul': 'ulist',
    'ol': 'olist'
}


def markup_to_markdown(content):
    soup = BeautifulSoup(content)

    # Account for HTML snippets and full documents alike
    contents = soup.body.contents if soup.body is not None else soup.contents

    return _iterate_over_contents(contents)


def _iterate_over_contents(contents):
    out = u''

    for c in contents:
        if hasattr(c, 'contents'):
            c.string = _iterate_over_contents(c.contents)

            if c.name in TAGS:
                wrap = getattr(markgen, TAGS[c.name])
                c = wrap(c.string)

        out += u"\n{0}".format(c)

    return out
```

subject: Use .string so we keep within BS parse tree
message: Use .string so we keep within BS parse tree
lang: Python
license: bsd-3-clause
repos:
1stvamp/marked
instruction: Use .string so we keep within BS parse tree

commit: 6d964e5ce83b8f07de64ef8ed5b531271725d9c4
old_file: peering/management/commands/deploy_configurations.py
new_file: peering/management/commands/deploy_configurations.py
old_contents:

```python
from __future__ import unicode_literals

import logging

from django.core.management.base import BaseCommand

from peering.models import InternetExchange


class Command(BaseCommand):
    help = ('Deploy configurations each IX having a router and a configuration'
            ' template attached.')
    logger = logging.getLogger('peering.manager.peering')

    def handle(self, *args, **options):
        self.logger.info('Deploying configurations...')

        for ix in InternetExchange.objects.all():
            if ix.configuration_template and ix.router:
                self.logger.info(
                    'Deploying configuration on {}'.format(ix.name))
                ix.router.set_napalm_configuration(ix.generate_configuration(),
                                                   commit=True)
            else:
                self.logger.info(
                    'No configuration to deploy on {}'.format(ix.name))

        self.logger.info('Configurations deployed')
```

new_contents:

```python
from __future__ import unicode_literals

import logging

from django.core.management.base import BaseCommand

from peering.models import InternetExchange


class Command(BaseCommand):
    help = ('Deploy configurations each IX having a router and a configuration'
            ' template attached.')
    logger = logging.getLogger('peering.manager.peering')

    def handle(self, *args, **options):
        self.logger.info('Deploying configurations...')

        for ix in InternetExchange.objects.all():
            # Only deploy config if there are at least a configuration
            # template, a router and a platform for the router
            if ix.configuration_template and ix.router and ix.router.platform:
                self.logger.info(
                    'Deploying configuration on {}'.format(ix.name))
                ix.router.set_napalm_configuration(ix.generate_configuration(),
                                                   commit=True)
            else:
                self.logger.info(
                    'No configuration to deploy on {}'.format(ix.name))

        self.logger.info('Configurations deployed')
```

subject: Check for router platform in auto-deploy script.
message: Check for router platform in auto-deploy script.
lang: Python
license: apache-2.0
repos:
respawner/peering-manager,respawner/peering-manager,respawner/peering-manager,respawner/peering-manager
instruction: Check for router platform in auto-deploy script.

commit: 0983361e6fba5812416d8fb5b695f6b3034bc927
old_file: registration/management/commands/cleanupregistration.py
new_file: registration/management/commands/cleanupregistration.py
old_contents:

```python
from django.core.management.base import NoArgsCommand

from ...models import RegistrationProfile


class Command(NoArgsCommand):
    help = "Delete expired user registrations from the database"

    def handle_noargs(self, **options):
        RegistrationProfile.objects.delete_expired_users()
```

new_contents:

```python
from django.core.management.base import BaseCommand

from ...models import RegistrationProfile


class Command(BaseCommand):
    help = "Delete expired user registrations from the database"

    def handle(self, *args, **options):
        RegistrationProfile.objects.delete_expired_users()
```

subject: Fix deprecated class NoArgsCommand class.
message:
    Fix deprecated class NoArgsCommand class.

    Solve the warning RemovedInDjango110Warning: NoArgsCommand class is
    deprecated and will be removed in Django 1.10. Use BaseCommand instead,
    which takes no arguments by default.
lang: Python
license: bsd-3-clause
repos:
sergafts/django-registration,timgraham/django-registration,sergafts/django-registration,pando85/django-registration,pando85/django-registration,allo-/django-registration,allo-/django-registration,timgraham/django-registration
instruction: Fix deprecated class NoArgsCommand class.
da66b82b4a5d5c0b0bb716b05a8bfd2dae5e2f4c
ookoobah/glutil.py
ookoobah/glutil.py
from contextlib import contextmanager
from pyglet.gl import *

def ptr(*args):
    return (GLfloat * len(args))(*args)

@contextmanager
def gl_disable(*bits):
    glPushAttrib(GL_ENABLE_BIT)
    map(glDisable, bits)
    yield
    glPopAttrib(GL_ENABLE_BIT)

@contextmanager
def gl_ortho(window):
    # clobbers current modelview matrix
    glMatrixMode(GL_PROJECTION)
    glPushMatrix()
    glLoadIdentity()
    gluOrtho2D(0, window.width, 0, window.height, -1, 1)
    glMatrixMode(GL_MODELVIEW)
    glLoadIdentity()
    yield
    glMatrixMode(GL_PROJECTION)
    glPopMatrix()
    glMatrixMode(GL_MODELVIEW)

from contextlib import contextmanager
from pyglet.gl import *

__all__ = [
    'ptr',
    'gl_disable',
    'gl_ortho',
]

def ptr(*args):
    return (GLfloat * len(args))(*args)

@contextmanager
def gl_disable(*bits):
    glPushAttrib(GL_ENABLE_BIT)
    map(glDisable, bits)
    yield
    glPopAttrib(GL_ENABLE_BIT)

@contextmanager
def gl_ortho(window):
    # clobbers current modelview matrix
    glMatrixMode(GL_PROJECTION)
    glPushMatrix()
    glLoadIdentity()
    gluOrtho2D(0, window.width, 0, window.height, -1, 1)
    glMatrixMode(GL_MODELVIEW)
    glLoadIdentity()
    yield
    glMatrixMode(GL_PROJECTION)
    glPopMatrix()
    glMatrixMode(GL_MODELVIEW)
Fix pyglet breakage by controlling exports.

Fix pyglet breakage by controlling exports.
Python
mit
vickenty/ookoobah,vickenty/ookoobah,vickenty/ookoobah,vickenty/ookoobah,vickenty/ookoobah
from contextlib import contextmanager
from pyglet.gl import *
+
+ __all__ = [
+     'ptr',
+     'gl_disable',
+     'gl_ortho',
+ ]

def ptr(*args):
    return (GLfloat * len(args))(*args)

@contextmanager
def gl_disable(*bits):
    glPushAttrib(GL_ENABLE_BIT)
    map(glDisable, bits)
    yield
    glPopAttrib(GL_ENABLE_BIT)

@contextmanager
def gl_ortho(window):
    # clobbers current modelview matrix
    glMatrixMode(GL_PROJECTION)
    glPushMatrix()
    glLoadIdentity()
    gluOrtho2D(0, window.width, 0, window.height, -1, 1)
    glMatrixMode(GL_MODELVIEW)
    glLoadIdentity()
    yield
    glMatrixMode(GL_PROJECTION)
    glPopMatrix()
    glMatrixMode(GL_MODELVIEW)

Fix pyglet breakage by controlling exports.

## Code Before:
from contextlib import contextmanager
from pyglet.gl import *

def ptr(*args):
    return (GLfloat * len(args))(*args)

@contextmanager
def gl_disable(*bits):
    glPushAttrib(GL_ENABLE_BIT)
    map(glDisable, bits)
    yield
    glPopAttrib(GL_ENABLE_BIT)

@contextmanager
def gl_ortho(window):
    # clobbers current modelview matrix
    glMatrixMode(GL_PROJECTION)
    glPushMatrix()
    glLoadIdentity()
    gluOrtho2D(0, window.width, 0, window.height, -1, 1)
    glMatrixMode(GL_MODELVIEW)
    glLoadIdentity()
    yield
    glMatrixMode(GL_PROJECTION)
    glPopMatrix()
    glMatrixMode(GL_MODELVIEW)

## Instruction:
Fix pyglet breakage by controlling exports.

## Code After:
from contextlib import contextmanager
from pyglet.gl import *

__all__ = [
    'ptr',
    'gl_disable',
    'gl_ortho',
]

def ptr(*args):
    return (GLfloat * len(args))(*args)

@contextmanager
def gl_disable(*bits):
    glPushAttrib(GL_ENABLE_BIT)
    map(glDisable, bits)
    yield
    glPopAttrib(GL_ENABLE_BIT)

@contextmanager
def gl_ortho(window):
    # clobbers current modelview matrix
    glMatrixMode(GL_PROJECTION)
    glPushMatrix()
    glLoadIdentity()
    gluOrtho2D(0, window.width, 0, window.height, -1, 1)
    glMatrixMode(GL_MODELVIEW)
    glLoadIdentity()
    yield
    glMatrixMode(GL_PROJECTION)
    glPopMatrix()
    glMatrixMode(GL_MODELVIEW)
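A note on why defining `__all__` fixes this kind of breakage: with `from module import *`, Python re-exports every public name in the module, including everything pulled in by `from pyglet.gl import *`; an explicit `__all__` restricts the star-import surface to just the names listed. A minimal standalone sketch (module and names here are illustrative, not from the repo):

```python
# helpers.py
__all__ = ['ptr']  # only 'ptr' is exported via star-imports

def ptr(*args):
    return list(args)

def extra():  # public, but excluded because it is not in __all__
    pass

# elsewhere:
# from helpers import *   ->  brings in ptr, but not extra
```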
04c8a36c5713e4279f8bf52fa45cdb03de721dbb
example/deploy.py
example/deploy.py
from pyinfra import inventory, state
from pyinfra_docker import deploy_docker
from pyinfra_etcd import deploy_etcd
from pyinfra_kubernetes import deploy_kubernetes_master, deploy_kubernetes_node

SUDO = True
FAIL_PERCENT = 0


def get_etcd_nodes():
    return [
        'http://{0}:2379'.format(
            etcd_node.fact.network_devices[etcd_node.data.etcd_interface]
            ['ipv4']['address'],
        )
        for etcd_node in inventory.get_group('etcd_nodes')
    ]


# Install/configure etcd cluster
with state.limit('etcd_nodes'):
    deploy_etcd()

# Install/configure the masters (apiserver, controller, scheduler)
with state.limit('kubernetes_masters'):
    deploy_kubernetes_master(etcd_nodes=get_etcd_nodes())

# Install/configure the nodes
with state.limit('kubernetes_nodes'):
    # Install Docker
    deploy_docker()

    # Install Kubernetes node components (kubelet, kube-proxy)
    first_master = inventory.get_group('kubernetes_masters')[0]

    deploy_kubernetes_node(
        master_address='http://{0}'.format((
            first_master
            .fact.network_devices[first_master.data.network_interface]
            ['ipv4']['address']
        )),
    )

# deploy_docker(config={
#     # Make Docker use the Vagrant provided interface which has it's own /24
#     'bip': '{{ host.fact.network_devices[host.data.network_interface].ipv4.address }}',
# })

from pyinfra import inventory, state
from pyinfra_docker import deploy_docker
from pyinfra_etcd import deploy_etcd
from pyinfra_kubernetes import deploy_kubernetes_master, deploy_kubernetes_node

SUDO = True
FAIL_PERCENT = 0


def get_etcd_nodes():
    return [
        'http://{0}:2379'.format(
            etcd_node.fact.network_devices[etcd_node.data.etcd_interface]
            ['ipv4']['address'],
        )
        for etcd_node in inventory.get_group('etcd_nodes')
    ]


# Install/configure etcd cluster
with state.limit('etcd_nodes'):
    deploy_etcd()

# Install/configure the masters (apiserver, controller, scheduler)
with state.limit('kubernetes_masters'):
    deploy_kubernetes_master(etcd_nodes=get_etcd_nodes())

# Install/configure the nodes
with state.limit('kubernetes_nodes'):
    # Install Docker
    deploy_docker(config={
        # Make Docker use the Vagrant provided interface which has it's own /24
        'bip': '{{ host.fact.network_devices[host.data.network_interface].ipv4.address }}',
    })

    # Install Kubernetes node components (kubelet, kube-proxy)
    first_master = inventory.get_group('kubernetes_masters')[0]

    deploy_kubernetes_node(
        master_address='http://{0}'.format((
            first_master
            .fact.network_devices[first_master.data.network_interface]
            ['ipv4']['address']
        )),
    )
Use Docker config pointing at the correct interface/subnet for networking.

Use Docker config pointing at the correct interface/subnet for networking.
Python
mit
EDITD/pyinfra-kubernetes,EDITD/pyinfra-kubernetes
from pyinfra import inventory, state
from pyinfra_docker import deploy_docker
from pyinfra_etcd import deploy_etcd
from pyinfra_kubernetes import deploy_kubernetes_master, deploy_kubernetes_node

SUDO = True
FAIL_PERCENT = 0


def get_etcd_nodes():
    return [
        'http://{0}:2379'.format(
            etcd_node.fact.network_devices[etcd_node.data.etcd_interface]
            ['ipv4']['address'],
        )
        for etcd_node in inventory.get_group('etcd_nodes')
    ]


# Install/configure etcd cluster
with state.limit('etcd_nodes'):
    deploy_etcd()

# Install/configure the masters (apiserver, controller, scheduler)
with state.limit('kubernetes_masters'):
    deploy_kubernetes_master(etcd_nodes=get_etcd_nodes())

# Install/configure the nodes
with state.limit('kubernetes_nodes'):
    # Install Docker
-     deploy_docker()
+     deploy_docker(config={
+         # Make Docker use the Vagrant provided interface which has it's own /24
+         'bip': '{{ host.fact.network_devices[host.data.network_interface].ipv4.address }}',
+     })

    # Install Kubernetes node components (kubelet, kube-proxy)
    first_master = inventory.get_group('kubernetes_masters')[0]

    deploy_kubernetes_node(
        master_address='http://{0}'.format((
            first_master
            .fact.network_devices[first_master.data.network_interface]
            ['ipv4']['address']
        )),
    )

- # deploy_docker(config={
- #     # Make Docker use the Vagrant provided interface which has it's own /24
- #     'bip': '{{ host.fact.network_devices[host.data.network_interface].ipv4.address }}',
- # })
-

Use Docker config pointing at the correct interface/subnet for networking.

## Code Before:
from pyinfra import inventory, state
from pyinfra_docker import deploy_docker
from pyinfra_etcd import deploy_etcd
from pyinfra_kubernetes import deploy_kubernetes_master, deploy_kubernetes_node

SUDO = True
FAIL_PERCENT = 0


def get_etcd_nodes():
    return [
        'http://{0}:2379'.format(
            etcd_node.fact.network_devices[etcd_node.data.etcd_interface]
            ['ipv4']['address'],
        )
        for etcd_node in inventory.get_group('etcd_nodes')
    ]


# Install/configure etcd cluster
with state.limit('etcd_nodes'):
    deploy_etcd()

# Install/configure the masters (apiserver, controller, scheduler)
with state.limit('kubernetes_masters'):
    deploy_kubernetes_master(etcd_nodes=get_etcd_nodes())

# Install/configure the nodes
with state.limit('kubernetes_nodes'):
    # Install Docker
    deploy_docker()

    # Install Kubernetes node components (kubelet, kube-proxy)
    first_master = inventory.get_group('kubernetes_masters')[0]

    deploy_kubernetes_node(
        master_address='http://{0}'.format((
            first_master
            .fact.network_devices[first_master.data.network_interface]
            ['ipv4']['address']
        )),
    )

# deploy_docker(config={
#     # Make Docker use the Vagrant provided interface which has it's own /24
#     'bip': '{{ host.fact.network_devices[host.data.network_interface].ipv4.address }}',
# })

## Instruction:
Use Docker config pointing at the correct interface/subnet for networking.

## Code After:
from pyinfra import inventory, state
from pyinfra_docker import deploy_docker
from pyinfra_etcd import deploy_etcd
from pyinfra_kubernetes import deploy_kubernetes_master, deploy_kubernetes_node

SUDO = True
FAIL_PERCENT = 0


def get_etcd_nodes():
    return [
        'http://{0}:2379'.format(
            etcd_node.fact.network_devices[etcd_node.data.etcd_interface]
            ['ipv4']['address'],
        )
        for etcd_node in inventory.get_group('etcd_nodes')
    ]


# Install/configure etcd cluster
with state.limit('etcd_nodes'):
    deploy_etcd()

# Install/configure the masters (apiserver, controller, scheduler)
with state.limit('kubernetes_masters'):
    deploy_kubernetes_master(etcd_nodes=get_etcd_nodes())

# Install/configure the nodes
with state.limit('kubernetes_nodes'):
    # Install Docker
    deploy_docker(config={
        # Make Docker use the Vagrant provided interface which has it's own /24
        'bip': '{{ host.fact.network_devices[host.data.network_interface].ipv4.address }}',
    })

    # Install Kubernetes node components (kubelet, kube-proxy)
    first_master = inventory.get_group('kubernetes_masters')[0]

    deploy_kubernetes_node(
        master_address='http://{0}'.format((
            first_master
            .fact.network_devices[first_master.data.network_interface]
            ['ipv4']['address']
        )),
    )
4714f803b22eda26eb2fc867c1d9e2c7230bdd11
pythonforandroid/recipes/pysdl2/__init__.py
pythonforandroid/recipes/pysdl2/__init__.py
from pythonforandroid.recipe import PythonRecipe


class PySDL2Recipe(PythonRecipe):
    version = '0.9.3'
    url = 'https://bitbucket.org/marcusva/py-sdl2/downloads/PySDL2-{version}.tar.gz'

    depends = ['sdl2']


recipe = PySDL2Recipe()

from pythonforandroid.recipe import PythonRecipe


class PySDL2Recipe(PythonRecipe):
    version = '0.9.6'
    url = 'https://files.pythonhosted.org/packages/source/P/PySDL2/PySDL2-{version}.tar.gz'

    depends = ['sdl2']


recipe = PySDL2Recipe()
Fix outdated PySDL2 version and non-PyPI install source
Fix outdated PySDL2 version and non-PyPI install source
Python
mit
kronenpj/python-for-android,rnixx/python-for-android,germn/python-for-android,PKRoma/python-for-android,germn/python-for-android,kivy/python-for-android,rnixx/python-for-android,rnixx/python-for-android,rnixx/python-for-android,PKRoma/python-for-android,kivy/python-for-android,germn/python-for-android,kronenpj/python-for-android,rnixx/python-for-android,kronenpj/python-for-android,kivy/python-for-android,kivy/python-for-android,kronenpj/python-for-android,germn/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,germn/python-for-android,germn/python-for-android,kronenpj/python-for-android,kivy/python-for-android,PKRoma/python-for-android,PKRoma/python-for-android
from pythonforandroid.recipe import PythonRecipe


class PySDL2Recipe(PythonRecipe):
-     version = '0.9.3'
+     version = '0.9.6'
-     url = 'https://bitbucket.org/marcusva/py-sdl2/downloads/PySDL2-{version}.tar.gz'
+     url = 'https://files.pythonhosted.org/packages/source/P/PySDL2/PySDL2-{version}.tar.gz'

    depends = ['sdl2']


recipe = PySDL2Recipe()

Fix outdated PySDL2 version and non-PyPI install source

## Code Before:
from pythonforandroid.recipe import PythonRecipe


class PySDL2Recipe(PythonRecipe):
    version = '0.9.3'
    url = 'https://bitbucket.org/marcusva/py-sdl2/downloads/PySDL2-{version}.tar.gz'

    depends = ['sdl2']


recipe = PySDL2Recipe()

## Instruction:
Fix outdated PySDL2 version and non-PyPI install source

## Code After:
from pythonforandroid.recipe import PythonRecipe


class PySDL2Recipe(PythonRecipe):
    version = '0.9.6'
    url = 'https://files.pythonhosted.org/packages/source/P/PySDL2/PySDL2-{version}.tar.gz'

    depends = ['sdl2']


recipe = PySDL2Recipe()
32cc988e81bbbecf09f7e7a801e92c6cfc281e75
docs/autogen_config.py
docs/autogen_config.py
from os.path import join, dirname, abspath

from IPython.terminal.ipapp import TerminalIPythonApp
from ipykernel.kernelapp import IPKernelApp

here = abspath(dirname(__file__))
options = join(here, 'source', 'config', 'options')
generated = join(options, 'generated.rst')

def write_doc(name, title, app, preamble=None):
    filename = '%s.rst' % name
    with open(join(options, filename), 'w') as f:
        f.write(title + '\n')
        f.write(('=' * len(title)) + '\n')
        f.write('\n')
        if preamble is not None:
            f.write(preamble + '\n\n')
        f.write(app.document_config_options())
    with open(generated, 'a') as f:
        f.write(filename + '\n')


if __name__ == '__main__':
    # create empty file
    with open(generated, 'w'):
        pass
    write_doc('terminal', 'Terminal IPython options', TerminalIPythonApp())
    write_doc('kernel', 'IPython kernel options', IPKernelApp(),
              preamble=("These options can be used in :file:`ipython_kernel_config.py`. "
                        "The kernel also respects any options in `ipython_config.py`"),
              )

from os.path import join, dirname, abspath

from IPython.terminal.ipapp import TerminalIPythonApp
from ipykernel.kernelapp import IPKernelApp

here = abspath(dirname(__file__))
options = join(here, 'source', 'config', 'options')

def write_doc(name, title, app, preamble=None):
    filename = '%s.rst' % name
    with open(join(options, filename), 'w') as f:
        f.write(title + '\n')
        f.write(('=' * len(title)) + '\n')
        f.write('\n')
        if preamble is not None:
            f.write(preamble + '\n\n')
        f.write(app.document_config_options())


if __name__ == '__main__':
    write_doc('terminal', 'Terminal IPython options', TerminalIPythonApp())
    write_doc('kernel', 'IPython kernel options', IPKernelApp(),
              preamble=("These options can be used in :file:`ipython_kernel_config.py`. "
                        "The kernel also respects any options in `ipython_config.py`"),
              )
Remove generation of unnecessary generated.rst file
Remove generation of unnecessary generated.rst file
Python
bsd-3-clause
ipython/ipython,ipython/ipython
from os.path import join, dirname, abspath

from IPython.terminal.ipapp import TerminalIPythonApp
from ipykernel.kernelapp import IPKernelApp

here = abspath(dirname(__file__))
options = join(here, 'source', 'config', 'options')
- generated = join(options, 'generated.rst')

def write_doc(name, title, app, preamble=None):
    filename = '%s.rst' % name
    with open(join(options, filename), 'w') as f:
        f.write(title + '\n')
        f.write(('=' * len(title)) + '\n')
        f.write('\n')
        if preamble is not None:
            f.write(preamble + '\n\n')
        f.write(app.document_config_options())
-     with open(generated, 'a') as f:
-         f.write(filename + '\n')


if __name__ == '__main__':
-     # create empty file
-     with open(generated, 'w'):
-         pass
    write_doc('terminal', 'Terminal IPython options', TerminalIPythonApp())
    write_doc('kernel', 'IPython kernel options', IPKernelApp(),
              preamble=("These options can be used in :file:`ipython_kernel_config.py`. "
                        "The kernel also respects any options in `ipython_config.py`"),
              )
-

Remove generation of unnecessary generated.rst file

## Code Before:
from os.path import join, dirname, abspath

from IPython.terminal.ipapp import TerminalIPythonApp
from ipykernel.kernelapp import IPKernelApp

here = abspath(dirname(__file__))
options = join(here, 'source', 'config', 'options')
generated = join(options, 'generated.rst')

def write_doc(name, title, app, preamble=None):
    filename = '%s.rst' % name
    with open(join(options, filename), 'w') as f:
        f.write(title + '\n')
        f.write(('=' * len(title)) + '\n')
        f.write('\n')
        if preamble is not None:
            f.write(preamble + '\n\n')
        f.write(app.document_config_options())
    with open(generated, 'a') as f:
        f.write(filename + '\n')


if __name__ == '__main__':
    # create empty file
    with open(generated, 'w'):
        pass
    write_doc('terminal', 'Terminal IPython options', TerminalIPythonApp())
    write_doc('kernel', 'IPython kernel options', IPKernelApp(),
              preamble=("These options can be used in :file:`ipython_kernel_config.py`. "
                        "The kernel also respects any options in `ipython_config.py`"),
              )

## Instruction:
Remove generation of unnecessary generated.rst file

## Code After:
from os.path import join, dirname, abspath

from IPython.terminal.ipapp import TerminalIPythonApp
from ipykernel.kernelapp import IPKernelApp

here = abspath(dirname(__file__))
options = join(here, 'source', 'config', 'options')

def write_doc(name, title, app, preamble=None):
    filename = '%s.rst' % name
    with open(join(options, filename), 'w') as f:
        f.write(title + '\n')
        f.write(('=' * len(title)) + '\n')
        f.write('\n')
        if preamble is not None:
            f.write(preamble + '\n\n')
        f.write(app.document_config_options())


if __name__ == '__main__':
    write_doc('terminal', 'Terminal IPython options', TerminalIPythonApp())
    write_doc('kernel', 'IPython kernel options', IPKernelApp(),
              preamble=("These options can be used in :file:`ipython_kernel_config.py`. "
                        "The kernel also respects any options in `ipython_config.py`"),
              )
b43b555a7803c6afd50fe5992f455cc5d1ad5d86
stonemason/service/tileserver/health/views.py
stonemason/service/tileserver/health/views.py
__author__ = 'ray'
__date__ = '3/2/15'

from flask import make_response


def health_check():
    """Return a dummy response"""
    response = make_response()
    response.headers['Content-Type'] = 'text/plain'
    response.headers['Cache-Control'] = 'public, max-age=0'
    return response

__author__ = 'ray'
__date__ = '3/2/15'

from flask import make_response

import stonemason
import sys
import platform

VERSION_STRING = '''stonemason:%s

Python: %s

Platform: %s''' % (stonemason.__version__,
                   sys.version,
                   platform.version())

del stonemason, sys, platform


def health_check():
    """Return a dummy response"""
    response = make_response(VERSION_STRING)
    response.headers['Content-Type'] = 'text/plain'
    response.headers['Cache-Control'] = 'public, max-age=0'

    return response
Return sys/platform version in tileserver health check
FEATURE: Return sys/platform version in tileserver health check
Python
mit
Kotaimen/stonemason,Kotaimen/stonemason
__author__ = 'ray'
__date__ = '3/2/15'

from flask import make_response

+ import stonemason
+ import sys
+ import platform
+
+ VERSION_STRING = '''stonemason:%s
+
+ Python: %s
+
+ Platform: %s''' % (stonemason.__version__,
+                    sys.version,
+                    platform.version())
+
+ del stonemason, sys, platform


def health_check():
    """Return a dummy response"""
-     response = make_response()
+     response = make_response(VERSION_STRING)
    response.headers['Content-Type'] = 'text/plain'
    response.headers['Cache-Control'] = 'public, max-age=0'
+
    return response

Return sys/platform version in tileserver health check

## Code Before:
__author__ = 'ray'
__date__ = '3/2/15'

from flask import make_response


def health_check():
    """Return a dummy response"""
    response = make_response()
    response.headers['Content-Type'] = 'text/plain'
    response.headers['Cache-Control'] = 'public, max-age=0'
    return response

## Instruction:
Return sys/platform version in tileserver health check

## Code After:
__author__ = 'ray'
__date__ = '3/2/15'

from flask import make_response

import stonemason
import sys
import platform

VERSION_STRING = '''stonemason:%s

Python: %s

Platform: %s''' % (stonemason.__version__,
                   sys.version,
                   platform.version())

del stonemason, sys, platform


def health_check():
    """Return a dummy response"""
    response = make_response(VERSION_STRING)
    response.headers['Content-Type'] = 'text/plain'
    response.headers['Cache-Control'] = 'public, max-age=0'

    return response
de6ac0596b58fac2efc547fe6f81a48f4a06f527
tests/grammar_creation_test/TerminalAdding.py
tests/grammar_creation_test/TerminalAdding.py
from unittest import TestCase, main
from grammpy import *


class TerminalAddingTest(TestCase):
    pass


if __name__ == '__main__':
    main()

from unittest import TestCase, main
from grammpy import *


class TerminalAddingTest(TestCase):
    def test_shouldAddOneTerminal(self):
        g = Grammar(terminals=['asdf'])
        self.assertTrue(g.have_term('asdf'))
        self.assertFalse(g.have_term('a'))

    def test_shouldAddMoreTerminals(self):
        g = Grammar(terminals=[0, 1, 2])
        self.assertTrue(g.have_term([0, 1, 2]))
        self.assertFalse(g.have_term('a'))
        self.assertFalse(g.have_term('asdf'))
        self.assertFalse(g.have_term(3))


if __name__ == '__main__':
    main()
Add tests of terminal adding when grammar is created

Add tests of terminal adding when grammar is created
Python
mit
PatrikValkovic/grammpy
from unittest import TestCase, main
from grammpy import *


class TerminalAddingTest(TestCase):
-     pass
+     def test_shouldAddOneTerminal(self):
+         g = Grammar(terminals=['asdf'])
+         self.assertTrue(g.have_term('asdf'))
+         self.assertFalse(g.have_term('a'))
+
+     def test_shouldAddMoreTerminals(self):
+         g = Grammar(terminals=[0, 1, 2])
+         self.assertTrue(g.have_term([0, 1, 2]))
+         self.assertFalse(g.have_term('a'))
+         self.assertFalse(g.have_term('asdf'))
+         self.assertFalse(g.have_term(3))


if __name__ == '__main__':
    main()
+

Add tests of terminal adding when grammar is created

## Code Before:
from unittest import TestCase, main
from grammpy import *


class TerminalAddingTest(TestCase):
    pass


if __name__ == '__main__':
    main()

## Instruction:
Add tests of terminal adding when grammar is created

## Code After:
from unittest import TestCase, main
from grammpy import *


class TerminalAddingTest(TestCase):
    def test_shouldAddOneTerminal(self):
        g = Grammar(terminals=['asdf'])
        self.assertTrue(g.have_term('asdf'))
        self.assertFalse(g.have_term('a'))

    def test_shouldAddMoreTerminals(self):
        g = Grammar(terminals=[0, 1, 2])
        self.assertTrue(g.have_term([0, 1, 2]))
        self.assertFalse(g.have_term('a'))
        self.assertFalse(g.have_term('asdf'))
        self.assertFalse(g.have_term(3))


if __name__ == '__main__':
    main()
7755dda1449f6264d7d7fe57dc776c731ab22d84
src/satosa/micro_services/processors/scope_processor.py
src/satosa/micro_services/processors/scope_processor.py
from ..attribute_processor import AttributeProcessorError
from .base_processor import BaseProcessor

CONFIG_KEY_SCOPE = 'scope'
CONFIG_DEFAULT_SCOPE = ''


class ScopeProcessor(BaseProcessor):

    def process(self, internal_data, attribute, **kwargs):
        scope = kwargs.get(CONFIG_KEY_SCOPE, CONFIG_DEFAULT_SCOPE)

        if scope is None or scope == '':
            raise AttributeProcessorError("No scope set.")

        attributes = internal_data.attributes
        value = attributes.get(attribute, [None])[0]
        attributes[attribute][0] = value + '@' + scope

from ..attribute_processor import AttributeProcessorError
from .base_processor import BaseProcessor

CONFIG_KEY_SCOPE = 'scope'
CONFIG_DEFAULT_SCOPE = ''


class ScopeProcessor(BaseProcessor):

    def process(self, internal_data, attribute, **kwargs):
        scope = kwargs.get(CONFIG_KEY_SCOPE, CONFIG_DEFAULT_SCOPE)

        if scope is None or scope == '':
            raise AttributeProcessorError("No scope set.")

        attributes = internal_data.attributes
        values = attributes.get(attribute, [])
        if not isinstance(values, list):
            values = [values]
        if values:
            new_values=[]
            for value in values:
                new_values.append(value + '@' + scope)
            attributes[attribute] = new_values
Allow scope processor to handle multivalued attributes
Allow scope processor to handle multivalued attributes
Python
apache-2.0
its-dirg/SATOSA,irtnog/SATOSA,SUNET/SATOSA,SUNET/SATOSA,irtnog/SATOSA
from ..attribute_processor import AttributeProcessorError
from .base_processor import BaseProcessor

CONFIG_KEY_SCOPE = 'scope'
CONFIG_DEFAULT_SCOPE = ''


class ScopeProcessor(BaseProcessor):

    def process(self, internal_data, attribute, **kwargs):
        scope = kwargs.get(CONFIG_KEY_SCOPE, CONFIG_DEFAULT_SCOPE)

        if scope is None or scope == '':
            raise AttributeProcessorError("No scope set.")

        attributes = internal_data.attributes
-         value = attributes.get(attribute, [None])[0]
+         values = attributes.get(attribute, [])
+         if not isinstance(values, list):
+             values = [values]
+         if values:
+             new_values=[]
+             for value in values:
+                 new_values.append(value + '@' + scope)
-         attributes[attribute][0] = value + '@' + scope
+             attributes[attribute] = new_values

Allow scope processor to handle multivalued attributes

## Code Before:
from ..attribute_processor import AttributeProcessorError
from .base_processor import BaseProcessor

CONFIG_KEY_SCOPE = 'scope'
CONFIG_DEFAULT_SCOPE = ''


class ScopeProcessor(BaseProcessor):

    def process(self, internal_data, attribute, **kwargs):
        scope = kwargs.get(CONFIG_KEY_SCOPE, CONFIG_DEFAULT_SCOPE)

        if scope is None or scope == '':
            raise AttributeProcessorError("No scope set.")

        attributes = internal_data.attributes
        value = attributes.get(attribute, [None])[0]
        attributes[attribute][0] = value + '@' + scope

## Instruction:
Allow scope processor to handle multivalued attributes

## Code After:
from ..attribute_processor import AttributeProcessorError
from .base_processor import BaseProcessor

CONFIG_KEY_SCOPE = 'scope'
CONFIG_DEFAULT_SCOPE = ''


class ScopeProcessor(BaseProcessor):

    def process(self, internal_data, attribute, **kwargs):
        scope = kwargs.get(CONFIG_KEY_SCOPE, CONFIG_DEFAULT_SCOPE)

        if scope is None or scope == '':
            raise AttributeProcessorError("No scope set.")

        attributes = internal_data.attributes
        values = attributes.get(attribute, [])
        if not isinstance(values, list):
            values = [values]
        if values:
            new_values=[]
            for value in values:
                new_values.append(value + '@' + scope)
            attributes[attribute] = new_values
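A quick usage sketch of the updated processor. The stub below stands in for SATOSA's internal data object and the attribute values are illustrative; only the `attributes` mapping is exercised:

```python
class FakeInternalData:
    """Minimal stand-in exposing just the .attributes dict the processor uses."""
    def __init__(self, attributes):
        self.attributes = attributes

data = FakeInternalData({'eppn': ['alice', 'bob'], 'uid': 'carol'})
proc = ScopeProcessor()
proc.process(data, 'eppn', scope='example.org')  # list value: each entry scoped
proc.process(data, 'uid', scope='example.org')   # scalar value: wrapped, then scoped
print(data.attributes)
# {'eppn': ['alice@example.org', 'bob@example.org'], 'uid': ['carol@example.org']}
```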
adf3a500e8ab8115520daa16bc008faeec7cfca9
gitfs/views/view.py
gitfs/views/view.py
import os
from abc import ABCMeta, abstractmethod

from gitfs import FuseMethodNotImplemented
from gitfs.filesystems.passthrough import PassthroughFuse


class View(PassthroughFuse):
    __metaclass__ = ABCMeta

    def __init__(self, *args, **kwargs):
        self.args = args

        for attr in kwargs:
            setattr(self, attr, kwargs[attr])


    def getxattr(self, path, name, position=0):
        """Get extended attributes"""
        raise FuseMethodNotImplemented

import os
from abc import ABCMeta, abstractmethod

from gitfs import FuseMethodNotImplemented


class View(object):
    __metaclass__ = ABCMeta

    def __init__(self, *args, **kwargs):
        self.args = args

        for attr in kwargs:
            setattr(self, attr, kwargs[attr])

    def getxattr(self, path, name, position=0):
        """Get extended attributes"""
        raise FuseMethodNotImplemented
Make View inherit from objects instead of PassthroughFuse
Make View inherit from objects instead of PassthroughFuse
Python
apache-2.0
PressLabs/gitfs,PressLabs/gitfs,rowhit/gitfs,bussiere/gitfs,ksmaheshkumar/gitfs
import os
from abc import ABCMeta, abstractmethod

from gitfs import FuseMethodNotImplemented
- from gitfs.filesystems.passthrough import PassthroughFuse


- class View(PassthroughFuse):
+ class View(object):
    __metaclass__ = ABCMeta

    def __init__(self, *args, **kwargs):
        self.args = args

        for attr in kwargs:
            setattr(self, attr, kwargs[attr])

-
    def getxattr(self, path, name, position=0):
        """Get extended attributes"""
        raise FuseMethodNotImplemented

Make View inherit from objects instead of PassthroughFuse

## Code Before:
import os
from abc import ABCMeta, abstractmethod

from gitfs import FuseMethodNotImplemented
from gitfs.filesystems.passthrough import PassthroughFuse


class View(PassthroughFuse):
    __metaclass__ = ABCMeta

    def __init__(self, *args, **kwargs):
        self.args = args

        for attr in kwargs:
            setattr(self, attr, kwargs[attr])


    def getxattr(self, path, name, position=0):
        """Get extended attributes"""
        raise FuseMethodNotImplemented

## Instruction:
Make View inherit from objects instead of PassthroughFuse

## Code After:
import os
from abc import ABCMeta, abstractmethod

from gitfs import FuseMethodNotImplemented


class View(object):
    __metaclass__ = ABCMeta

    def __init__(self, *args, **kwargs):
        self.args = args

        for attr in kwargs:
            setattr(self, attr, kwargs[attr])

    def getxattr(self, path, name, position=0):
        """Get extended attributes"""
        raise FuseMethodNotImplemented
ee28fdc66fbb0f91821ff18ff219791bf5de8f4d
corehq/apps/fixtures/tasks.py
corehq/apps/fixtures/tasks.py
from __future__ import absolute_import
from __future__ import unicode_literals

from corehq.apps.fixtures.upload import upload_fixture_file
from soil import DownloadBase
from celery.task import task


@task(serializer='pickle')
def fixture_upload_async(domain, download_id, replace):
    task = fixture_upload_async
    DownloadBase.set_progress(task, 0, 100)
    download_ref = DownloadBase.get(download_id)
    result = upload_fixture_file(domain, download_ref.get_filename(), replace, task)
    DownloadBase.set_progress(task, 100, 100)
    return {
        'messages': {
            'success': result.success,
            'messages': result.messages,
            'errors': result.errors,
            'number_of_fixtures': result.number_of_fixtures,
        },
    }


@task(serializer='pickle')
def fixture_download_async(prepare_download, *args, **kw):
    task = fixture_download_async
    DownloadBase.set_progress(task, 0, 100)
    prepare_download(task=task, *args, **kw)
    DownloadBase.set_progress(task, 100, 100)

from __future__ import absolute_import, unicode_literals

from celery.task import task

from soil import DownloadBase

from corehq.apps.fixtures.upload import upload_fixture_file


@task
def fixture_upload_async(domain, download_id, replace):
    task = fixture_upload_async
    DownloadBase.set_progress(task, 0, 100)
    download_ref = DownloadBase.get(download_id)
    result = upload_fixture_file(domain, download_ref.get_filename(), replace, task)
    DownloadBase.set_progress(task, 100, 100)
    return {
        'messages': {
            'success': result.success,
            'messages': result.messages,
            'errors': result.errors,
            'number_of_fixtures': result.number_of_fixtures,
        },
    }


@task(serializer='pickle')
def fixture_download_async(prepare_download, *args, **kw):
    task = fixture_download_async
    DownloadBase.set_progress(task, 0, 100)
    prepare_download(task=task, *args, **kw)
    DownloadBase.set_progress(task, 100, 100)
Change fixture upload task to json serializer
Change fixture upload task to json serializer
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
- from __future__ import absolute_import
- from __future__ import unicode_literals
+ from __future__ import absolute_import, unicode_literals
+
- from corehq.apps.fixtures.upload import upload_fixture_file
- from soil import DownloadBase
from celery.task import task

+ from soil import DownloadBase
- @task(serializer='pickle')
+ from corehq.apps.fixtures.upload import upload_fixture_file
+
+
+ @task
def fixture_upload_async(domain, download_id, replace):
    task = fixture_upload_async
    DownloadBase.set_progress(task, 0, 100)
    download_ref = DownloadBase.get(download_id)
    result = upload_fixture_file(domain, download_ref.get_filename(), replace, task)
    DownloadBase.set_progress(task, 100, 100)
    return {
        'messages': {
            'success': result.success,
            'messages': result.messages,
            'errors': result.errors,
            'number_of_fixtures': result.number_of_fixtures,
        },
    }


@task(serializer='pickle')
def fixture_download_async(prepare_download, *args, **kw):
    task = fixture_download_async
    DownloadBase.set_progress(task, 0, 100)
    prepare_download(task=task, *args, **kw)
    DownloadBase.set_progress(task, 100, 100)

Change fixture upload task to json serializer

## Code Before:
from __future__ import absolute_import
from __future__ import unicode_literals

from corehq.apps.fixtures.upload import upload_fixture_file
from soil import DownloadBase
from celery.task import task


@task(serializer='pickle')
def fixture_upload_async(domain, download_id, replace):
    task = fixture_upload_async
    DownloadBase.set_progress(task, 0, 100)
    download_ref = DownloadBase.get(download_id)
    result = upload_fixture_file(domain, download_ref.get_filename(), replace, task)
    DownloadBase.set_progress(task, 100, 100)
    return {
        'messages': {
            'success': result.success,
            'messages': result.messages,
            'errors': result.errors,
            'number_of_fixtures': result.number_of_fixtures,
        },
    }


@task(serializer='pickle')
def fixture_download_async(prepare_download, *args, **kw):
    task = fixture_download_async
    DownloadBase.set_progress(task, 0, 100)
    prepare_download(task=task, *args, **kw)
    DownloadBase.set_progress(task, 100, 100)

## Instruction:
Change fixture upload task to json serializer

## Code After:
from __future__ import absolute_import, unicode_literals

from celery.task import task

from soil import DownloadBase

from corehq.apps.fixtures.upload import upload_fixture_file


@task
def fixture_upload_async(domain, download_id, replace):
    task = fixture_upload_async
    DownloadBase.set_progress(task, 0, 100)
    download_ref = DownloadBase.get(download_id)
    result = upload_fixture_file(domain, download_ref.get_filename(), replace, task)
    DownloadBase.set_progress(task, 100, 100)
    return {
        'messages': {
            'success': result.success,
            'messages': result.messages,
            'errors': result.errors,
            'number_of_fixtures': result.number_of_fixtures,
        },
    }


@task(serializer='pickle')
def fixture_download_async(prepare_download, *args, **kw):
    task = fixture_download_async
    DownloadBase.set_progress(task, 0, 100)
    prepare_download(task=task, *args, **kw)
    DownloadBase.set_progress(task, 100, 100)
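For context on the serializer change: since Celery 4 the default task serializer is JSON, so dropping `serializer='pickle'` means the task arguments (`domain`, `download_id`, `replace`: a string, a string, and a boolean) must be JSON-serializable, which they are. A hedged sketch of the equivalent explicit configuration, using standard Celery 4 setting names on a hypothetical app:

```python
from celery import Celery

app = Celery('demo')
# Celery 4 defaults, shown explicitly for clarity.
app.conf.task_serializer = 'json'
app.conf.accept_content = ['json']  # refuse pickle payloads entirely

@app.task
def fixture_upload_async(domain, download_id, replace):
    ...  # JSON-safe arguments only: str, str, bool
```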
57024104a5951d62ff8a87a281a6d232583dabed
python/new_year_chaos.py
python/new_year_chaos.py
import math
import os
import random
import re
import sys


# Complete the minimumBribes function below.
def minimumBribes(finalLine):
    if invalid(finalLine):
        return "Too chaotic"
    return bubbleSort(finalLine)

def invalid(finalLine):
    return any(didBribeMoreThanTwoPeople(person, index)
               for index, person in enumerate(finalLine))

def didBribeMoreThanTwoPeople(person, index):
    return index + 2 < person - 1

def bubbleSort(line):
    swaps = 0
    numberOfPeople = len(line)
    for person in range(numberOfPeople):
        for i in range(0, numberOfPeople - person - 1):
            if line[i] > line[i + 1]:
                line[i], line[i + 1] = line[i + 1], line[i]
                swaps += 1
    return swaps

if __name__ == '__main__':
    t = int(input())
    for t_itr in range(t):
        n = int(input())
        q = list(map(int, input().rstrip().split()))
        print(minimumBribes(q))

import math
import os
import random
import re
import sys


# Complete the minimumBribes function below.
def minimumBribes(finalLine):
    if invalid(finalLine):
        return "Too chaotic"
    return bubbleSort(finalLine)

def invalid(finalLine):
    return any(didBribeMoreThanTwoPeople(person, index)
               for index, person in enumerate(finalLine))

def didBribeMoreThanTwoPeople(person, index):
    return index + 2 < person - 1

def bubbleSort(line):
    swaps = 0
    swappedInCurrentPass = False
    for person in range(len(line)):
        for i in range(0, len(line) - 1):
            if line[i] > line[i + 1]:
                line[i], line[i + 1] = line[i + 1], line[i]
                swaps += 1
                swappedInCurrentPass = True
        if swappedInCurrentPass:
            swappedInCurrentPass = False
        else:
            break
    return swaps

if __name__ == '__main__':
    t = int(input())
    for t_itr in range(t):
        n = int(input())
        q = list(map(int, input().rstrip().split()))
        print(minimumBribes(q))
Improve efficiency of new year chaos
Improve efficiency of new year chaos
Python
mit
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
import math
import os
import random
import re
import sys


# Complete the minimumBribes function below.
def minimumBribes(finalLine):
    if invalid(finalLine):
        return "Too chaotic"
    return bubbleSort(finalLine)

def invalid(finalLine):
    return any(didBribeMoreThanTwoPeople(person, index)
               for index, person in enumerate(finalLine))

def didBribeMoreThanTwoPeople(person, index):
    return index + 2 < person - 1

def bubbleSort(line):
    swaps = 0
-     numberOfPeople = len(line)
+     swappedInCurrentPass = False
-     for person in range(numberOfPeople):
-         for i in range(0, numberOfPeople - person - 1):
+     for person in range(len(line)):
+         for i in range(0, len(line) - 1):
            if line[i] > line[i + 1]:
                line[i], line[i + 1] = line[i + 1], line[i]
                swaps += 1
+                 swappedInCurrentPass = True
+         if swappedInCurrentPass:
+             swappedInCurrentPass = False
+         else:
+             break
    return swaps

if __name__ == '__main__':
    t = int(input())
    for t_itr in range(t):
        n = int(input())
        q = list(map(int, input().rstrip().split()))
        print(minimumBribes(q))

Improve efficiency of new year chaos

## Code Before:
import math
import os
import random
import re
import sys


# Complete the minimumBribes function below.
def minimumBribes(finalLine):
    if invalid(finalLine):
        return "Too chaotic"
    return bubbleSort(finalLine)

def invalid(finalLine):
    return any(didBribeMoreThanTwoPeople(person, index)
               for index, person in enumerate(finalLine))

def didBribeMoreThanTwoPeople(person, index):
    return index + 2 < person - 1

def bubbleSort(line):
    swaps = 0
    numberOfPeople = len(line)
    for person in range(numberOfPeople):
        for i in range(0, numberOfPeople - person - 1):
            if line[i] > line[i + 1]:
                line[i], line[i + 1] = line[i + 1], line[i]
                swaps += 1
    return swaps

if __name__ == '__main__':
    t = int(input())
    for t_itr in range(t):
        n = int(input())
        q = list(map(int, input().rstrip().split()))
        print(minimumBribes(q))

## Instruction:
Improve efficiency of new year chaos

## Code After:
import math
import os
import random
import re
import sys


# Complete the minimumBribes function below.
def minimumBribes(finalLine):
    if invalid(finalLine):
        return "Too chaotic"
    return bubbleSort(finalLine)

def invalid(finalLine):
    return any(didBribeMoreThanTwoPeople(person, index)
               for index, person in enumerate(finalLine))

def didBribeMoreThanTwoPeople(person, index):
    return index + 2 < person - 1

def bubbleSort(line):
    swaps = 0
    swappedInCurrentPass = False
    for person in range(len(line)):
        for i in range(0, len(line) - 1):
            if line[i] > line[i + 1]:
                line[i], line[i + 1] = line[i + 1], line[i]
                swaps += 1
                swappedInCurrentPass = True
        if swappedInCurrentPass:
            swappedInCurrentPass = False
        else:
            break
    return swaps

if __name__ == '__main__':
    t = int(input())
    for t_itr in range(t):
        n = int(input())
        q = list(map(int, input().rstrip().split()))
        print(minimumBribes(q))
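A worked example of what the code above computes; the expected values follow directly from the bribery rule (nobody may move more than two places forward) and the swap count:

```python
print(minimumBribes([2, 1, 5, 3, 4]))  # 3 swaps: (2,1), (5,3), (5,4)
print(minimumBribes([2, 5, 1, 3, 4]))  # "Too chaotic": 5 moved up three places
print(minimumBribes([1, 2, 3, 4, 5]))  # 0: the early-exit flag stops after one clean pass
```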
d90edf3b4d8fa714e7e24acbc22fb35bc828911d
services/controllers/interpolator.py
services/controllers/interpolator.py
class Interpolator:
    def __init__(self):
        self.data = []

    def addIndexValue(self, index, value):
        self.data.append((index, value))

    def valueAtIndex(self, target_index):
        if target_index < self.data[0][0]:
            return None
        elif self.data[-1][0] < target_index:
            return None
        else:
            start = None
            end = None

            for (index, value) in self.data:
                if index == target_index:
                    return value
                else:
                    if index <= target_index:
                        start = (index, value)
                    elif target_index < index:
                        end = (index, value)
                        break

            index_delta = end[0] - start[0]
            percent = (target_index - start[0]) / index_delta

            value_delta = end[1] - start[1]

            return start[1] + value_delta * percent


if __name__ == "__main__":
    pass

class Interpolator:

    def __init__(self):
        self.data = []

    def addIndexValue(self, index, value):
        self.data.append((index, value))

    def valueAtIndex(self, target_index):
        if target_index < self.data[0][0]:
            return None
        elif self.data[-1][0] < target_index:
            return None
        else:
            start = None
            end = None

            for (index, value) in self.data:
                if index == target_index:
                    return value
                else:
                    if index <= target_index:
                        start = (index, value)
                    elif target_index < index:
                        end = (index, value)
                        break

            index_delta = end[0] - start[0]
            percent = (target_index - start[0]) / index_delta

            value_delta = end[1] - start[1]

            return start[1] + value_delta * percent

    def to_array(self):
        result = []

        for (index, value) in self.data:
            result.append(index)
            result.append(value)

        return result

    def from_array(self, array):
        self.data = []
        for i in range(0, len(array), 2):
            self.addIndexValue(array[i], array[i + 1])


if __name__ == "__main__":
    pass
Add ability to convert to/from an array
Add ability to convert to/from an array This is needed as an easy way to serialize an interpolator for sending/receiving over HTTP
Python
bsd-3-clause
gizmo-cda/g2x-submarine-v2,gizmo-cda/g2x-submarine-v2,gizmo-cda/g2x-submarine-v2,gizmo-cda/g2x-submarine-v2
class Interpolator:
+
    def __init__(self):
        self.data = []

    def addIndexValue(self, index, value):
        self.data.append((index, value))

    def valueAtIndex(self, target_index):
        if target_index < self.data[0][0]:
            return None
        elif self.data[-1][0] < target_index:
            return None
        else:
            start = None
            end = None

            for (index, value) in self.data:
                if index == target_index:
                    return value
                else:
                    if index <= target_index:
                        start = (index, value)
                    elif target_index < index:
                        end = (index, value)
                        break

            index_delta = end[0] - start[0]
            percent = (target_index - start[0]) / index_delta

            value_delta = end[1] - start[1]

            return start[1] + value_delta * percent

+     def to_array(self):
+         result = []
+
+         for (index, value) in self.data:
+             result.append(index)
+             result.append(value)
+
+         return result
+
+     def from_array(self, array):
+         self.data = []
+         for i in range(0, len(array), 2):
+             self.addIndexValue(array[i], array[i + 1])
+

if __name__ == "__main__":
    pass

Add ability to convert to/from an array

## Code Before:
class Interpolator:
    def __init__(self):
        self.data = []

    def addIndexValue(self, index, value):
        self.data.append((index, value))

    def valueAtIndex(self, target_index):
        if target_index < self.data[0][0]:
            return None
        elif self.data[-1][0] < target_index:
            return None
        else:
            start = None
            end = None

            for (index, value) in self.data:
                if index == target_index:
                    return value
                else:
                    if index <= target_index:
                        start = (index, value)
                    elif target_index < index:
                        end = (index, value)
                        break

            index_delta = end[0] - start[0]
            percent = (target_index - start[0]) / index_delta

            value_delta = end[1] - start[1]

            return start[1] + value_delta * percent


if __name__ == "__main__":
    pass

## Instruction:
Add ability to convert to/from an array

## Code After:
class Interpolator:

    def __init__(self):
        self.data = []

    def addIndexValue(self, index, value):
        self.data.append((index, value))

    def valueAtIndex(self, target_index):
        if target_index < self.data[0][0]:
            return None
        elif self.data[-1][0] < target_index:
            return None
        else:
            start = None
            end = None

            for (index, value) in self.data:
                if index == target_index:
                    return value
                else:
                    if index <= target_index:
                        start = (index, value)
                    elif target_index < index:
                        end = (index, value)
                        break

            index_delta = end[0] - start[0]
            percent = (target_index - start[0]) / index_delta

            value_delta = end[1] - start[1]

            return start[1] + value_delta * percent

    def to_array(self):
        result = []

        for (index, value) in self.data:
            result.append(index)
            result.append(value)

        return result

    def from_array(self, array):
        self.data = []
        for i in range(0, len(array), 2):
            self.addIndexValue(array[i], array[i + 1])


if __name__ == "__main__":
    pass
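A round-trip usage sketch of the new array conversion (the values are illustrative; the flat list is what the commit message describes sending over HTTP):

```python
interp = Interpolator()
interp.addIndexValue(0, 0.0)
interp.addIndexValue(10, 100.0)

payload = interp.to_array()       # [0, 0.0, 10, 100.0]: flat and JSON-friendly
restored = Interpolator()
restored.from_array(payload)      # rebuilds the (index, value) pairs
print(restored.valueAtIndex(5))   # 50.0, linearly interpolated
```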
3deffc39e1a489255272c35f7171b7e85942b108
shipyard/shipyard/host/node/build.py
shipyard/shipyard/host/node/build.py
"""Host-only environment for Node.js.""" from pathlib import Path from foreman import define_parameter, decorate_rule from shipyard import install_packages (define_parameter('npm_prefix') .with_doc("""Location host-only npm.""") .with_type(Path) .with_derive(lambda ps: ps['//base:build'] / 'host/npm-host') ) @decorate_rule('//base:build') def install(parameters): """Set up host-only environment for Node.js.""" if not Path('/usr/bin/nodejs').exists(): install_packages(['nodejs', 'npm']) contents = 'prefix = %s\n' % parameters['npm_prefix'].absolute() (Path.home() / '.npmrc').write_text(contents)
"""Host-only environment for Node.js.""" from pathlib import Path from foreman import define_parameter, decorate_rule from shipyard import ( ensure_file, execute, install_packages, ) (define_parameter('npm_prefix') .with_doc("""Location host-only npm.""") .with_type(Path) .with_derive(lambda ps: ps['//base:build'] / 'host/npm-host') ) @decorate_rule('//base:build') def install(parameters): """Set up host-only environment for Node.js.""" if not Path('/usr/bin/node').exists(): install_packages(['nodejs', 'npm']) contents = 'prefix = %s\n' % parameters['npm_prefix'].absolute() (Path.home() / '.npmrc').write_text(contents) # Ubuntu systems use `nodejs` rather than `node` :( if not Path('/usr/bin/node').exists(): ensure_file('/usr/bin/nodejs') execute('sudo ln --symbolic nodejs node'.split(), cwd='/usr/bin')
Fix node/nodejs name conflict on Ubuntu systems
Fix node/nodejs name conflict on Ubuntu systems
Python
mit
clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage
"""Host-only environment for Node.js.""" from pathlib import Path from foreman import define_parameter, decorate_rule - from shipyard import install_packages + from shipyard import ( + ensure_file, + execute, + install_packages, + ) (define_parameter('npm_prefix') .with_doc("""Location host-only npm.""") .with_type(Path) .with_derive(lambda ps: ps['//base:build'] / 'host/npm-host') ) @decorate_rule('//base:build') def install(parameters): """Set up host-only environment for Node.js.""" - if not Path('/usr/bin/nodejs').exists(): + if not Path('/usr/bin/node').exists(): install_packages(['nodejs', 'npm']) contents = 'prefix = %s\n' % parameters['npm_prefix'].absolute() (Path.home() / '.npmrc').write_text(contents) + # Ubuntu systems use `nodejs` rather than `node` :( + if not Path('/usr/bin/node').exists(): + ensure_file('/usr/bin/nodejs') + execute('sudo ln --symbolic nodejs node'.split(), cwd='/usr/bin')
Fix node/nodejs name conflict on Ubuntu systems
## Code Before: """Host-only environment for Node.js.""" from pathlib import Path from foreman import define_parameter, decorate_rule from shipyard import install_packages (define_parameter('npm_prefix') .with_doc("""Location host-only npm.""") .with_type(Path) .with_derive(lambda ps: ps['//base:build'] / 'host/npm-host') ) @decorate_rule('//base:build') def install(parameters): """Set up host-only environment for Node.js.""" if not Path('/usr/bin/nodejs').exists(): install_packages(['nodejs', 'npm']) contents = 'prefix = %s\n' % parameters['npm_prefix'].absolute() (Path.home() / '.npmrc').write_text(contents) ## Instruction: Fix node/nodejs name conflict on Ubuntu systems ## Code After: """Host-only environment for Node.js.""" from pathlib import Path from foreman import define_parameter, decorate_rule from shipyard import ( ensure_file, execute, install_packages, ) (define_parameter('npm_prefix') .with_doc("""Location host-only npm.""") .with_type(Path) .with_derive(lambda ps: ps['//base:build'] / 'host/npm-host') ) @decorate_rule('//base:build') def install(parameters): """Set up host-only environment for Node.js.""" if not Path('/usr/bin/node').exists(): install_packages(['nodejs', 'npm']) contents = 'prefix = %s\n' % parameters['npm_prefix'].absolute() (Path.home() / '.npmrc').write_text(contents) # Ubuntu systems use `nodejs` rather than `node` :( if not Path('/usr/bin/node').exists(): ensure_file('/usr/bin/nodejs') execute('sudo ln --symbolic nodejs node'.split(), cwd='/usr/bin')
edc5564d4c3677dc8b545e9c9a6a51b481247eab
contentcuration/contentcuration/tests/test_makemessages.py
contentcuration/contentcuration/tests/test_makemessages.py
import os
import subprocess
import pathlib

from django.conf import settings
from django.test import TestCase


class MakeMessagesCommandRunTestCase(TestCase):
    """
    Sanity check to make sure makemessages runs to completion.
    """

    def test_command_succeeds_without_postgres(self):
        """
        Test that we can run makemessages when postgres is not activated.
        """

        # this test can make changes to committed files, so only run it
        # on the CI server
        if 'CI' not in os.environ or not os.environ['CI']:
            return

        repo_root = pathlib.Path(settings.BASE_DIR).parent
        cmd = ["make", "makemessages"]
        env = os.environ.copy()
        # We fake postgres not being available, by setting the wrong IP address.
        # hopefully postgres isn't running at 127.0.0.2!
        env.update({"DATA_DB_HOST": "127.0.0.2"})
        subprocess.check_output(
            cmd,
            env=env,
            cwd=str(repo_root)
        )

import os
import subprocess
import pathlib

import pytest
from django.conf import settings
from django.test import TestCase


class MakeMessagesCommandRunTestCase(TestCase):
    """
    Sanity check to make sure makemessages runs to completion.
    """

    # this test can make changes to committed files, so only run it
    # on the CI server
    @pytest.mark.skipif('CI' not in os.environ or not os.environ['CI'], reason="runs only on CI server")
    def test_command_succeeds_without_postgres(self):
        """
        Test that we can run makemessages when postgres is not activated.
        """
        repo_root = pathlib.Path(settings.BASE_DIR).parent
        cmd = ["make", "makemessages"]
        env = os.environ.copy()
        # We fake postgres not being available, by setting the wrong IP address.
        # hopefully postgres isn't running at 127.0.0.2!
        env.update({"DATA_DB_HOST": "127.0.0.2"})
        subprocess.check_output(
            cmd,
            env=env,
            cwd=str(repo_root)
        )
Use pytest.skip so we can check the test wasn't skipped on the CI.
Use pytest.skip so we can check the test wasn't skipped on the CI.
Python
mit
DXCanas/content-curation,DXCanas/content-curation,DXCanas/content-curation,DXCanas/content-curation
import os
import subprocess
import pathlib

+ import pytest
from django.conf import settings
from django.test import TestCase


class MakeMessagesCommandRunTestCase(TestCase):
    """
    Sanity check to make sure makemessages runs to completion.
    """

+     # this test can make changes to committed files, so only run it
+     # on the CI server
+     @pytest.mark.skipif('CI' not in os.environ or not os.environ['CI'], reason="runs only on CI server")
    def test_command_succeeds_without_postgres(self):
        """
        Test that we can run makemessages when postgres is not activated.
        """
-
-         # this test can make changes to committed files, so only run it
-         # on the CI server
-         if 'CI' not in os.environ or not os.environ['CI']:
-             return

        repo_root = pathlib.Path(settings.BASE_DIR).parent
        cmd = ["make", "makemessages"]
        env = os.environ.copy()
        # We fake postgres not being available, by setting the wrong IP address.
        # hopefully postgres isn't running at 127.0.0.2!
        env.update({"DATA_DB_HOST": "127.0.0.2"})
        subprocess.check_output(
            cmd,
            env=env,
            cwd=str(repo_root)
        )

Use pytest.skip so we can check the test wasn't skipped on the CI.

## Code Before:
import os
import subprocess
import pathlib

from django.conf import settings
from django.test import TestCase


class MakeMessagesCommandRunTestCase(TestCase):
    """
    Sanity check to make sure makemessages runs to completion.
    """

    def test_command_succeeds_without_postgres(self):
        """
        Test that we can run makemessages when postgres is not activated.
        """

        # this test can make changes to committed files, so only run it
        # on the CI server
        if 'CI' not in os.environ or not os.environ['CI']:
            return

        repo_root = pathlib.Path(settings.BASE_DIR).parent
        cmd = ["make", "makemessages"]
        env = os.environ.copy()
        # We fake postgres not being available, by setting the wrong IP address.
        # hopefully postgres isn't running at 127.0.0.2!
        env.update({"DATA_DB_HOST": "127.0.0.2"})
        subprocess.check_output(
            cmd,
            env=env,
            cwd=str(repo_root)
        )

## Instruction:
Use pytest.skip so we can check the test wasn't skipped on the CI.

## Code After:
import os
import subprocess
import pathlib

import pytest
from django.conf import settings
from django.test import TestCase


class MakeMessagesCommandRunTestCase(TestCase):
    """
    Sanity check to make sure makemessages runs to completion.
    """

    # this test can make changes to committed files, so only run it
    # on the CI server
    @pytest.mark.skipif('CI' not in os.environ or not os.environ['CI'], reason="runs only on CI server")
    def test_command_succeeds_without_postgres(self):
        """
        Test that we can run makemessages when postgres is not activated.
        """
        repo_root = pathlib.Path(settings.BASE_DIR).parent
        cmd = ["make", "makemessages"]
        env = os.environ.copy()
        # We fake postgres not being available, by setting the wrong IP address.
        # hopefully postgres isn't running at 127.0.0.2!
        env.update({"DATA_DB_HOST": "127.0.0.2"})
        subprocess.check_output(
            cmd,
            env=env,
            cwd=str(repo_root)
        )
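The switch from an early `return` to `pytest.mark.skipif` matters because a skipped test is reported as SKIPPED, while a silent `return` counts as a pass and hides the fact that nothing ran. A minimal standalone illustration of the same pattern:

```python
import os
import pytest

@pytest.mark.skipif('CI' not in os.environ or not os.environ['CI'],
                    reason="runs only on CI server")
def test_ci_only():
    assert True  # reported as SKIPPED locally, actually runs on CI
```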
e29b1f6243fb7f9d2322b80573617ff9a0582d01
pinax/blog/parsers/markdown_parser.py
pinax/blog/parsers/markdown_parser.py
from markdown import Markdown
from markdown.inlinepatterns import ImagePattern, IMAGE_LINK_RE

from ..models import Image


class ImageLookupImagePattern(ImagePattern):

    def sanitize_url(self, url):
        if url.startswith("http"):
            return url
        else:
            try:
                image = Image.objects.get(pk=int(url))
                return image.image_path.url
            except Image.DoesNotExist:
                pass
            except ValueError:
                return url
        return ""


def parse(text):
    md = Markdown(extensions=["codehilite"])
    md.inlinePatterns["image_link"] = ImageLookupImagePattern(IMAGE_LINK_RE, md)
    html = md.convert(text)
    return html

from markdown import Markdown
from markdown.inlinepatterns import ImagePattern, IMAGE_LINK_RE

from ..models import Image


class ImageLookupImagePattern(ImagePattern):

    def sanitize_url(self, url):
        if url.startswith("http"):
            return url
        else:
            try:
                image = Image.objects.get(pk=int(url))
                return image.image_path.url
            except Image.DoesNotExist:
                pass
            except ValueError:
                return url
        return ""


def parse(text):
    md = Markdown(extensions=["codehilite", "tables", "smarty", "admonition", "toc"])
    md.inlinePatterns["image_link"] = ImageLookupImagePattern(IMAGE_LINK_RE, md)
    html = md.convert(text)
    return html
Add some extensions to the markdown parser
Add some extensions to the markdown parser Ultimately we should make this a setting or hookset so it could be overridden at the site level.
Python
mit
swilcox/pinax-blog,pinax/pinax-blog,miurahr/pinax-blog,miurahr/pinax-blog,swilcox/pinax-blog,easton402/pinax-blog,pinax/pinax-blog,pinax/pinax-blog,easton402/pinax-blog
from markdown import Markdown
from markdown.inlinepatterns import ImagePattern, IMAGE_LINK_RE

from ..models import Image


class ImageLookupImagePattern(ImagePattern):

    def sanitize_url(self, url):
        if url.startswith("http"):
            return url
        else:
            try:
                image = Image.objects.get(pk=int(url))
                return image.image_path.url
            except Image.DoesNotExist:
                pass
            except ValueError:
                return url
        return ""


def parse(text):
-     md = Markdown(extensions=["codehilite"])
+     md = Markdown(extensions=["codehilite", "tables", "smarty", "admonition", "toc"])
    md.inlinePatterns["image_link"] = ImageLookupImagePattern(IMAGE_LINK_RE, md)
    html = md.convert(text)
    return html

Add some extensions to the markdown parser

## Code Before:
from markdown import Markdown
from markdown.inlinepatterns import ImagePattern, IMAGE_LINK_RE

from ..models import Image


class ImageLookupImagePattern(ImagePattern):

    def sanitize_url(self, url):
        if url.startswith("http"):
            return url
        else:
            try:
                image = Image.objects.get(pk=int(url))
                return image.image_path.url
            except Image.DoesNotExist:
                pass
            except ValueError:
                return url
        return ""


def parse(text):
    md = Markdown(extensions=["codehilite"])
    md.inlinePatterns["image_link"] = ImageLookupImagePattern(IMAGE_LINK_RE, md)
    html = md.convert(text)
    return html

## Instruction:
Add some extensions to the markdown parser

## Code After:
from markdown import Markdown
from markdown.inlinepatterns import ImagePattern, IMAGE_LINK_RE

from ..models import Image


class ImageLookupImagePattern(ImagePattern):

    def sanitize_url(self, url):
        if url.startswith("http"):
            return url
        else:
            try:
                image = Image.objects.get(pk=int(url))
                return image.image_path.url
            except Image.DoesNotExist:
                pass
            except ValueError:
                return url
        return ""


def parse(text):
    md = Markdown(extensions=["codehilite", "tables", "smarty", "admonition", "toc"])
    md.inlinePatterns["image_link"] = ImageLookupImagePattern(IMAGE_LINK_RE, md)
    html = md.convert(text)
    return html
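A small demonstration of what the extra extensions buy. All five are bundled with Python-Markdown; `codehilite` additionally needs Pygments installed for actual highlighting, so the sketch below exercises only `tables`:

```python
from markdown import Markdown

md = Markdown(extensions=["tables", "smarty", "admonition", "toc"])
text = """\
First | Second
----- | ------
a     | b
"""
print(md.convert(text))  # renders an HTML <table> instead of literal pipes
```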
606b2b6c84e9f9f67606a4d7e521cf4805855a98
migrations/versions/0311_populate_returned_letters.py
migrations/versions/0311_populate_returned_letters.py
from alembic import op

from app.dao.returned_letters_dao import insert_or_update_returned_letters

revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'


def upgrade():
    conn = op.get_bind()
    sql = """
        select id, service_id, reference
        from notification_history
        where notification_type = 'letter'
        and notification_status = 'returned-letter'"""

    results = conn.execute(sql)
    returned_letters = results.fetchall()
    references = [x.reference for x in returned_letters]
    insert_or_update_returned_letters(references)


def downgrade():
    pass

from alembic import op

revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'


def upgrade():
    conn = op.get_bind()
    sql = """
        select id, service_id, reference, updated_at
        from notification_history
        where notification_type = 'letter'
        and notification_status = 'returned-letter'"""

    insert_sql = """
        insert into returned_letters(id, reported_at, service_id, notification_id, created_at, updated_at)
        values(uuid_in(md5(random()::text)::cstring), '{}', '{}', '{}', now(), null)
    """

    results = conn.execute(sql)
    returned_letters = results.fetchall()

    for x in returned_letters:
        f = insert_sql.format(x.updated_at.date(), x.service_id, x.id)
        conn.execute(f)


def downgrade():
    pass
Change the insert to use updated_at as the reported_at date
Change the insert to use updated_at as the reported_at date
Python
mit
alphagov/notifications-api,alphagov/notifications-api
from alembic import op
-
- from app.dao.returned_letters_dao import insert_or_update_returned_letters

revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'


def upgrade():
    conn = op.get_bind()
    sql = """
-         select id, service_id, reference
+         select id, service_id, reference, updated_at
        from notification_history
        where notification_type = 'letter'
        and notification_status = 'returned-letter'"""

+     insert_sql = """
+         insert into returned_letters(id, reported_at, service_id, notification_id, created_at, updated_at)
+         values(uuid_in(md5(random()::text)::cstring), '{}', '{}', '{}', now(), null)
+     """
+
    results = conn.execute(sql)
    returned_letters = results.fetchall()
-     references = [x.reference for x in returned_letters]
-     insert_or_update_returned_letters(references)
+
+     for x in returned_letters:
+         f = insert_sql.format(x.updated_at.date(), x.service_id, x.id)
+         conn.execute(f)


def downgrade():
    pass

Change the insert to use updated_at as the reported_at date

## Code Before:
from alembic import op

from app.dao.returned_letters_dao import insert_or_update_returned_letters

revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'


def upgrade():
    conn = op.get_bind()
    sql = """
        select id, service_id, reference
        from notification_history
        where notification_type = 'letter'
        and notification_status = 'returned-letter'"""

    results = conn.execute(sql)
    returned_letters = results.fetchall()
    references = [x.reference for x in returned_letters]
    insert_or_update_returned_letters(references)


def downgrade():
    pass

## Instruction:
Change the insert to use updated_at as the reported_at date

## Code After:
from alembic import op

revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'


def upgrade():
    conn = op.get_bind()
    sql = """
        select id, service_id, reference, updated_at
        from notification_history
        where notification_type = 'letter'
        and notification_status = 'returned-letter'"""

    insert_sql = """
        insert into returned_letters(id, reported_at, service_id, notification_id, created_at, updated_at)
        values(uuid_in(md5(random()::text)::cstring), '{}', '{}', '{}', now(), null)
    """

    results = conn.execute(sql)
    returned_letters = results.fetchall()

    for x in returned_letters:
        f = insert_sql.format(x.updated_at.date(), x.service_id, x.id)
        conn.execute(f)


def downgrade():
    pass
853d2907432a8d7fbedbed12ff28efbe520d4c80
project_euler/library/number_theory/continued_fractions.py
project_euler/library/number_theory/continued_fractions.py
from fractions import Fraction from math import sqrt from itertools import chain, cycle from typing import Generator, Iterable, List, Tuple def convergent_sequence(generator: Iterable[int]) -> \ Generator[Fraction, None, None]: h = (0, 1) k = (1, 0) for a in generator: h = h[1], a * h[1] + h[0] k = k[1], a * k[1] + k[0] yield Fraction(h[-1], k[-1]) def continued_fraction_sqrt(n: int) -> Tuple[List[int], List[int]]: remainders = [] continued_fraction = [] remainder = (Fraction(1), Fraction(0)) # remainder is sqrt(n) + 0. sqrt_n = sqrt(n) while remainder not in remainders: remainders.append(remainder) a = int(remainder[0] * sqrt_n + remainder[1]) continued_fraction.append(a) norm = (remainder[1] - a) ** 2 - remainder[0] ** 2 * n remainder = (-remainder[0] / norm, (remainder[1] - a) / norm) index = remainders.index(remainder) return continued_fraction[:index], continued_fraction[index:] def convergents_sqrt(n: int) -> Generator[Fraction, None, None]: initial, repeat = continued_fraction_sqrt(n) convergents = convergent_sequence(chain(initial, cycle(repeat))) yield from convergents
from fractions import Fraction from math import sqrt from itertools import chain, cycle from typing import Generator, Iterable, List, Tuple from .gcd import gcd from ..sqrt import fsqrt def convergent_sequence(generator: Iterable[int]) -> \ Generator[Fraction, None, None]: h = (0, 1) k = (1, 0) for a in generator: h = h[1], a * h[1] + h[0] k = k[1], a * k[1] + k[0] yield Fraction(h[-1], k[-1]) def continued_fraction_sqrt(n: int) -> Tuple[List[int], List[int]]: sqrt_n = sqrt(n) remainders = [] remainder = (0, 1) # remainder is an + (sqrt(n) - p) / q and these are initial. continued_fraction = [] while remainder not in remainders: remainders.append(remainder) p, q = remainder q = (n - (p * p)) // q a = int((sqrt_n + p) / q) p = a * q - p continued_fraction.append(a) remainder = (p, q) index = remainders.index(remainder) return continued_fraction[1:index], continued_fraction[index:] def convergents_sqrt(n: int) -> Generator[Fraction, None, None]: initial, repeat = continued_fraction_sqrt(n) convergents = convergent_sequence(chain(initial, cycle(repeat))) yield from convergents
Make continued fractions sqrt much faster
Make continued fractions sqrt much faster
Python
mit
cryvate/project-euler,cryvate/project-euler
from fractions import Fraction from math import sqrt from itertools import chain, cycle from typing import Generator, Iterable, List, Tuple + + from .gcd import gcd + from ..sqrt import fsqrt def convergent_sequence(generator: Iterable[int]) -> \ Generator[Fraction, None, None]: h = (0, 1) k = (1, 0) for a in generator: h = h[1], a * h[1] + h[0] k = k[1], a * k[1] + k[0] yield Fraction(h[-1], k[-1]) def continued_fraction_sqrt(n: int) -> Tuple[List[int], List[int]]: + sqrt_n = sqrt(n) remainders = [] + remainder = (0, 1) + # remainder is an + (sqrt(n) - p) / q and these are initial. continued_fraction = [] - remainder = (Fraction(1), Fraction(0)) # remainder is sqrt(n) + 0. - - sqrt_n = sqrt(n) while remainder not in remainders: remainders.append(remainder) + p, q = remainder - a = int(remainder[0] * sqrt_n + remainder[1]) + q = (n - (p * p)) // q + a = int((sqrt_n + p) / q) + p = a * q - p + continued_fraction.append(a) + remainder = (p, q) - norm = (remainder[1] - a) ** 2 - remainder[0] ** 2 * n - remainder = (-remainder[0] / norm, (remainder[1] - a) / norm) index = remainders.index(remainder) - return continued_fraction[:index], continued_fraction[index:] + return continued_fraction[1:index], continued_fraction[index:] def convergents_sqrt(n: int) -> Generator[Fraction, None, None]: initial, repeat = continued_fraction_sqrt(n) convergents = convergent_sequence(chain(initial, cycle(repeat))) yield from convergents
Make continued fractions sqrt much faster
## Code Before: from fractions import Fraction from math import sqrt from itertools import chain, cycle from typing import Generator, Iterable, List, Tuple def convergent_sequence(generator: Iterable[int]) -> \ Generator[Fraction, None, None]: h = (0, 1) k = (1, 0) for a in generator: h = h[1], a * h[1] + h[0] k = k[1], a * k[1] + k[0] yield Fraction(h[-1], k[-1]) def continued_fraction_sqrt(n: int) -> Tuple[List[int], List[int]]: remainders = [] continued_fraction = [] remainder = (Fraction(1), Fraction(0)) # remainder is sqrt(n) + 0. sqrt_n = sqrt(n) while remainder not in remainders: remainders.append(remainder) a = int(remainder[0] * sqrt_n + remainder[1]) continued_fraction.append(a) norm = (remainder[1] - a) ** 2 - remainder[0] ** 2 * n remainder = (-remainder[0] / norm, (remainder[1] - a) / norm) index = remainders.index(remainder) return continued_fraction[:index], continued_fraction[index:] def convergents_sqrt(n: int) -> Generator[Fraction, None, None]: initial, repeat = continued_fraction_sqrt(n) convergents = convergent_sequence(chain(initial, cycle(repeat))) yield from convergents ## Instruction: Make continued fractions sqrt much faster ## Code After: from fractions import Fraction from math import sqrt from itertools import chain, cycle from typing import Generator, Iterable, List, Tuple from .gcd import gcd from ..sqrt import fsqrt def convergent_sequence(generator: Iterable[int]) -> \ Generator[Fraction, None, None]: h = (0, 1) k = (1, 0) for a in generator: h = h[1], a * h[1] + h[0] k = k[1], a * k[1] + k[0] yield Fraction(h[-1], k[-1]) def continued_fraction_sqrt(n: int) -> Tuple[List[int], List[int]]: sqrt_n = sqrt(n) remainders = [] remainder = (0, 1) # remainder is an + (sqrt(n) - p) / q and these are initial. continued_fraction = [] while remainder not in remainders: remainders.append(remainder) p, q = remainder q = (n - (p * p)) // q a = int((sqrt_n + p) / q) p = a * q - p continued_fraction.append(a) remainder = (p, q) index = remainders.index(remainder) return continued_fraction[1:index], continued_fraction[index:] def convergents_sqrt(n: int) -> Generator[Fraction, None, None]: initial, repeat = continued_fraction_sqrt(n) convergents = convergent_sequence(chain(initial, cycle(repeat))) yield from convergents
b86d23b0302bb4d0efa2aa203883a78d3dcbf26e
scipy/integrate/_ivp/tests/test_rk.py
scipy/integrate/_ivp/tests/test_rk.py
import pytest from numpy.testing import assert_allclose import numpy as np from scipy.integrate import RK23, RK45, DOP853 from scipy.integrate._ivp import dop853_coefficients @pytest.mark.parametrize("solver", [RK23, RK45, DOP853]) def test_coefficient_properties(solver): assert_allclose(np.sum(solver.B), 1, rtol=1e-15) assert_allclose(np.sum(solver.A, axis=1), solver.C, rtol=1e-14) def test_coefficient_properties_dop853(): assert_allclose(np.sum(dop853_coefficients.B), 1, rtol=1e-15) assert_allclose(np.sum(dop853_coefficients.A, axis=1), dop853_coefficients.C, rtol=1e-14)
import pytest from numpy.testing import assert_allclose, assert_ import numpy as np from scipy.integrate import RK23, RK45, DOP853 from scipy.integrate._ivp import dop853_coefficients @pytest.mark.parametrize("solver", [RK23, RK45, DOP853]) def test_coefficient_properties(solver): assert_allclose(np.sum(solver.B), 1, rtol=1e-15) assert_allclose(np.sum(solver.A, axis=1), solver.C, rtol=1e-14) def test_coefficient_properties_dop853(): assert_allclose(np.sum(dop853_coefficients.B), 1, rtol=1e-15) assert_allclose(np.sum(dop853_coefficients.A, axis=1), dop853_coefficients.C, rtol=1e-14) @pytest.mark.parametrize("solver_class", [RK23, RK45, DOP853]) def test_error_estimation(solver_class): step = 0.2 solver = solver_class(lambda t, y: y, 0, [1], 1, first_step=step) solver.step() error_estimate = solver._estimate_errors(solver.K, step) error = solver.y - np.exp([step]) assert_(np.abs(error) < np.abs(error_estimate))
Test of error estimation of Runge-Kutta methods
TST: Test of error estimation of Runge-Kutta methods
Python
bsd-3-clause
jor-/scipy,zerothi/scipy,mdhaber/scipy,anntzer/scipy,ilayn/scipy,Eric89GXL/scipy,mdhaber/scipy,matthew-brett/scipy,endolith/scipy,jor-/scipy,anntzer/scipy,grlee77/scipy,vigna/scipy,mdhaber/scipy,andyfaff/scipy,aarchiba/scipy,aeklant/scipy,tylerjereddy/scipy,aeklant/scipy,andyfaff/scipy,perimosocordiae/scipy,tylerjereddy/scipy,aeklant/scipy,person142/scipy,jamestwebber/scipy,e-q/scipy,rgommers/scipy,endolith/scipy,perimosocordiae/scipy,jor-/scipy,Stefan-Endres/scipy,pizzathief/scipy,mdhaber/scipy,rgommers/scipy,perimosocordiae/scipy,lhilt/scipy,Eric89GXL/scipy,grlee77/scipy,person142/scipy,matthew-brett/scipy,aarchiba/scipy,gertingold/scipy,mdhaber/scipy,person142/scipy,pizzathief/scipy,matthew-brett/scipy,jamestwebber/scipy,scipy/scipy,perimosocordiae/scipy,aarchiba/scipy,endolith/scipy,Eric89GXL/scipy,Stefan-Endres/scipy,tylerjereddy/scipy,perimosocordiae/scipy,e-q/scipy,nmayorov/scipy,pizzathief/scipy,jor-/scipy,arokem/scipy,vigna/scipy,gertingold/scipy,andyfaff/scipy,scipy/scipy,pizzathief/scipy,matthew-brett/scipy,aeklant/scipy,scipy/scipy,rgommers/scipy,jamestwebber/scipy,Stefan-Endres/scipy,lhilt/scipy,nmayorov/scipy,jor-/scipy,mdhaber/scipy,vigna/scipy,grlee77/scipy,Eric89GXL/scipy,Eric89GXL/scipy,andyfaff/scipy,ilayn/scipy,jamestwebber/scipy,person142/scipy,gertingold/scipy,nmayorov/scipy,nmayorov/scipy,nmayorov/scipy,perimosocordiae/scipy,ilayn/scipy,WarrenWeckesser/scipy,arokem/scipy,lhilt/scipy,WarrenWeckesser/scipy,Stefan-Endres/scipy,person142/scipy,jamestwebber/scipy,scipy/scipy,zerothi/scipy,anntzer/scipy,Stefan-Endres/scipy,scipy/scipy,scipy/scipy,grlee77/scipy,zerothi/scipy,ilayn/scipy,endolith/scipy,zerothi/scipy,rgommers/scipy,zerothi/scipy,endolith/scipy,tylerjereddy/scipy,WarrenWeckesser/scipy,andyfaff/scipy,lhilt/scipy,rgommers/scipy,gertingold/scipy,e-q/scipy,pizzathief/scipy,vigna/scipy,ilayn/scipy,aarchiba/scipy,e-q/scipy,WarrenWeckesser/scipy,matthew-brett/scipy,lhilt/scipy,arokem/scipy,zerothi/scipy,WarrenWeckesser/scipy,aeklant/scipy,ilayn/scipy,grlee77/scipy,tylerjereddy/scipy,Stefan-Endres/scipy,e-q/scipy,Eric89GXL/scipy,andyfaff/scipy,anntzer/scipy,endolith/scipy,vigna/scipy,anntzer/scipy,aarchiba/scipy,arokem/scipy,WarrenWeckesser/scipy,anntzer/scipy,arokem/scipy,gertingold/scipy
import pytest - from numpy.testing import assert_allclose + from numpy.testing import assert_allclose, assert_ import numpy as np from scipy.integrate import RK23, RK45, DOP853 from scipy.integrate._ivp import dop853_coefficients @pytest.mark.parametrize("solver", [RK23, RK45, DOP853]) def test_coefficient_properties(solver): assert_allclose(np.sum(solver.B), 1, rtol=1e-15) assert_allclose(np.sum(solver.A, axis=1), solver.C, rtol=1e-14) def test_coefficient_properties_dop853(): assert_allclose(np.sum(dop853_coefficients.B), 1, rtol=1e-15) assert_allclose(np.sum(dop853_coefficients.A, axis=1), dop853_coefficients.C, rtol=1e-14) + + @pytest.mark.parametrize("solver_class", [RK23, RK45, DOP853]) + def test_error_estimation(solver_class): + step = 0.2 + solver = solver_class(lambda t, y: y, 0, [1], 1, first_step=step) + solver.step() + error_estimate = solver._estimate_errors(solver.K, step) + error = solver.y - np.exp([step]) + assert_(np.abs(error) < np.abs(error_estimate)) +
Test of error estimation of Runge-Kutta methods
## Code Before: import pytest from numpy.testing import assert_allclose import numpy as np from scipy.integrate import RK23, RK45, DOP853 from scipy.integrate._ivp import dop853_coefficients @pytest.mark.parametrize("solver", [RK23, RK45, DOP853]) def test_coefficient_properties(solver): assert_allclose(np.sum(solver.B), 1, rtol=1e-15) assert_allclose(np.sum(solver.A, axis=1), solver.C, rtol=1e-14) def test_coefficient_properties_dop853(): assert_allclose(np.sum(dop853_coefficients.B), 1, rtol=1e-15) assert_allclose(np.sum(dop853_coefficients.A, axis=1), dop853_coefficients.C, rtol=1e-14) ## Instruction: Test of error estimation of Runge-Kutta methods ## Code After: import pytest from numpy.testing import assert_allclose, assert_ import numpy as np from scipy.integrate import RK23, RK45, DOP853 from scipy.integrate._ivp import dop853_coefficients @pytest.mark.parametrize("solver", [RK23, RK45, DOP853]) def test_coefficient_properties(solver): assert_allclose(np.sum(solver.B), 1, rtol=1e-15) assert_allclose(np.sum(solver.A, axis=1), solver.C, rtol=1e-14) def test_coefficient_properties_dop853(): assert_allclose(np.sum(dop853_coefficients.B), 1, rtol=1e-15) assert_allclose(np.sum(dop853_coefficients.A, axis=1), dop853_coefficients.C, rtol=1e-14) @pytest.mark.parametrize("solver_class", [RK23, RK45, DOP853]) def test_error_estimation(solver_class): step = 0.2 solver = solver_class(lambda t, y: y, 0, [1], 1, first_step=step) solver.step() error_estimate = solver._estimate_errors(solver.K, step) error = solver.y - np.exp([step]) assert_(np.abs(error) < np.abs(error_estimate))
81dfb5cb952fbca90882bd39e76887f0fa6479eb
msmexplorer/tests/test_msm_plot.py
msmexplorer/tests/test_msm_plot.py
import numpy as np from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel from matplotlib.axes import SubplotBase from seaborn.apionly import JointGrid from ..plots import plot_pop_resids, plot_msm_network, plot_timescales rs = np.random.RandomState(42) data = rs.randint(low=0, high=10, size=100000) msm = MarkovStateModel() msm.fit(data) bmsm = BayesianMarkovStateModel() bmsm.fit(data) def test_plot_pop_resids(): ax = plot_pop_resids(msm) assert isinstance(ax, JointGrid) def test_plot_msm_network(): ax = plot_msm_network(msm) assert isinstance(ax, SubplotBase) def test_plot_timescales_msm(): ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y') assert isinstance(ax, SubplotBase) def test_plot_timescales_bmsm(): ax = plot_timescales(bmsm) assert isinstance(ax, SubplotBase)
import numpy as np from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel from matplotlib.axes import SubplotBase from seaborn.apionly import JointGrid from ..plots import plot_pop_resids, plot_msm_network, plot_timescales, plot_implied_timescales rs = np.random.RandomState(42) data = rs.randint(low=0, high=10, size=100000) msm = MarkovStateModel() msm.fit(data) bmsm = BayesianMarkovStateModel() bmsm.fit(data) def test_plot_pop_resids(): ax = plot_pop_resids(msm) assert isinstance(ax, JointGrid) def test_plot_msm_network(): ax = plot_msm_network(msm) assert isinstance(ax, SubplotBase) def test_plot_timescales_msm(): ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y') assert isinstance(ax, SubplotBase) def test_plot_timescales_bmsm(): ax = plot_timescales(bmsm) assert isinstance(ax, SubplotBase) def test_plot_implied_timescales(): lag_times = [1, 10, 50, 100, 200, 250, 500] msm_objs = [] for lag in lag_times: # Construct MSM msm = MarkovStateModel(lag_time=lag, n_timescales=5) msm.fit(clustered_trajs) msm_objs.append(msm) ax = plot_implied_timescales(msm_objs) assert isinstance(ax, SubplotBase)
Add test for implied timescales plot
Add test for implied timescales plot
Python
mit
msmexplorer/msmexplorer,msmexplorer/msmexplorer
import numpy as np from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel from matplotlib.axes import SubplotBase from seaborn.apionly import JointGrid - from ..plots import plot_pop_resids, plot_msm_network, plot_timescales + from ..plots import plot_pop_resids, plot_msm_network, plot_timescales, plot_implied_timescales rs = np.random.RandomState(42) data = rs.randint(low=0, high=10, size=100000) msm = MarkovStateModel() msm.fit(data) bmsm = BayesianMarkovStateModel() bmsm.fit(data) def test_plot_pop_resids(): ax = plot_pop_resids(msm) assert isinstance(ax, JointGrid) def test_plot_msm_network(): ax = plot_msm_network(msm) assert isinstance(ax, SubplotBase) def test_plot_timescales_msm(): ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y') assert isinstance(ax, SubplotBase) def test_plot_timescales_bmsm(): ax = plot_timescales(bmsm) assert isinstance(ax, SubplotBase) + + def test_plot_implied_timescales(): + lag_times = [1, 10, 50, 100, 200, 250, 500] + msm_objs = [] + for lag in lag_times: + # Construct MSM + msm = MarkovStateModel(lag_time=lag, n_timescales=5) + msm.fit(clustered_trajs) + msm_objs.append(msm) + ax = plot_implied_timescales(msm_objs) + assert isinstance(ax, SubplotBase) +
Add test for implied timescales plot
## Code Before: import numpy as np from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel from matplotlib.axes import SubplotBase from seaborn.apionly import JointGrid from ..plots import plot_pop_resids, plot_msm_network, plot_timescales rs = np.random.RandomState(42) data = rs.randint(low=0, high=10, size=100000) msm = MarkovStateModel() msm.fit(data) bmsm = BayesianMarkovStateModel() bmsm.fit(data) def test_plot_pop_resids(): ax = plot_pop_resids(msm) assert isinstance(ax, JointGrid) def test_plot_msm_network(): ax = plot_msm_network(msm) assert isinstance(ax, SubplotBase) def test_plot_timescales_msm(): ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y') assert isinstance(ax, SubplotBase) def test_plot_timescales_bmsm(): ax = plot_timescales(bmsm) assert isinstance(ax, SubplotBase) ## Instruction: Add test for implied timescales plot ## Code After: import numpy as np from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel from matplotlib.axes import SubplotBase from seaborn.apionly import JointGrid from ..plots import plot_pop_resids, plot_msm_network, plot_timescales, plot_implied_timescales rs = np.random.RandomState(42) data = rs.randint(low=0, high=10, size=100000) msm = MarkovStateModel() msm.fit(data) bmsm = BayesianMarkovStateModel() bmsm.fit(data) def test_plot_pop_resids(): ax = plot_pop_resids(msm) assert isinstance(ax, JointGrid) def test_plot_msm_network(): ax = plot_msm_network(msm) assert isinstance(ax, SubplotBase) def test_plot_timescales_msm(): ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y') assert isinstance(ax, SubplotBase) def test_plot_timescales_bmsm(): ax = plot_timescales(bmsm) assert isinstance(ax, SubplotBase) def test_plot_implied_timescales(): lag_times = [1, 10, 50, 100, 200, 250, 500] msm_objs = [] for lag in lag_times: # Construct MSM msm = MarkovStateModel(lag_time=lag, n_timescales=5) msm.fit(clustered_trajs) msm_objs.append(msm) ax = plot_implied_timescales(msm_objs) assert isinstance(ax, SubplotBase)
5f39fd311c735593ac41ba17a060f9cadbe80e18
nlpipe/scripts/amcat_background.py
nlpipe/scripts/amcat_background.py
import sys, argparse from nlpipe import tasks from nlpipe.pipeline import parse_background from nlpipe.backend import get_input_ids from nlpipe.celery import app modules = {n.split(".")[-1]: t for (n,t) in app.tasks.iteritems() if n.startswith("nlpipe")} parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('module', help='nlpipe module (task) name ({})'.format(", ".join(sorted(modules))), choices=modules, metavar="module") parser.add_argument('sets', type=int, nargs='+', help='Article set id(s)') parser.add_argument('--max', type=int, help='maximum number of articles to assign') parser.add_argument('--queue', default='background', help='Celery queue to put the articles on') args = parser.parse_args() task = modules[args.module] body = {u'filter': {'terms': {u'sets': args.sets}}} print("Assigning {max} articles from set(s) {args.sets} for processing by {task.name}" .format(max=("up to {}".format(args.max) if args.max is not None else "all"), **locals())) ids = list(get_input_ids(body)) parse_background(ids, task, max=args.max, queue=args.queue)
import sys, argparse from nlpipe import tasks from nlpipe.pipeline import parse_background from nlpipe.backend import get_input_ids from nlpipe.celery import app import logging FORMAT = '[%(asctime)-15s] %(message)s' logging.basicConfig(format=FORMAT, level=logging.INFO) modules = {n.split(".")[-1]: t for (n,t) in app.tasks.iteritems() if n.startswith("nlpipe")} parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('module', help='nlpipe module (task) name ({})'.format(", ".join(sorted(modules))), choices=modules, metavar="module") parser.add_argument('sets', type=int, nargs='+', help='Article set id(s)') parser.add_argument('--max', type=int, help='maximum number of articles to assign') parser.add_argument('--queue', default='background', help='Celery queue to put the articles on') args = parser.parse_args() task = modules[args.module] body = {u'filter': {'terms': {u'sets': args.sets}}} logging.info("Assigning {max} articles from set(s) {args.sets} for processing by {task.name}" .format(max=("up to {}".format(args.max) if args.max is not None else "all"), **locals())) ids = list(get_input_ids(body)) logging.info("... Found {} articles".format(len(ids))) parse_background(ids, task, max=args.max, queue=args.queue)
Add logging to background assign
Add logging to background assign
Python
mit
amcat/nlpipe
import sys, argparse from nlpipe import tasks from nlpipe.pipeline import parse_background from nlpipe.backend import get_input_ids from nlpipe.celery import app + + import logging + FORMAT = '[%(asctime)-15s] %(message)s' + logging.basicConfig(format=FORMAT, level=logging.INFO) modules = {n.split(".")[-1]: t for (n,t) in app.tasks.iteritems() if n.startswith("nlpipe")} parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('module', help='nlpipe module (task) name ({})'.format(", ".join(sorted(modules))), choices=modules, metavar="module") parser.add_argument('sets', type=int, nargs='+', help='Article set id(s)') parser.add_argument('--max', type=int, help='maximum number of articles to assign') parser.add_argument('--queue', default='background', help='Celery queue to put the articles on') args = parser.parse_args() task = modules[args.module] body = {u'filter': {'terms': {u'sets': args.sets}}} - print("Assigning {max} articles from set(s) {args.sets} for processing by {task.name}" + logging.info("Assigning {max} articles from set(s) {args.sets} for processing by {task.name}" .format(max=("up to {}".format(args.max) if args.max is not None else "all"), **locals())) ids = list(get_input_ids(body)) + logging.info("... Found {} articles".format(len(ids))) parse_background(ids, task, max=args.max, queue=args.queue)
Add logging to background assign
## Code Before: import sys, argparse from nlpipe import tasks from nlpipe.pipeline import parse_background from nlpipe.backend import get_input_ids from nlpipe.celery import app modules = {n.split(".")[-1]: t for (n,t) in app.tasks.iteritems() if n.startswith("nlpipe")} parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('module', help='nlpipe module (task) name ({})'.format(", ".join(sorted(modules))), choices=modules, metavar="module") parser.add_argument('sets', type=int, nargs='+', help='Article set id(s)') parser.add_argument('--max', type=int, help='maximum number of articles to assign') parser.add_argument('--queue', default='background', help='Celery queue to put the articles on') args = parser.parse_args() task = modules[args.module] body = {u'filter': {'terms': {u'sets': args.sets}}} print("Assigning {max} articles from set(s) {args.sets} for processing by {task.name}" .format(max=("up to {}".format(args.max) if args.max is not None else "all"), **locals())) ids = list(get_input_ids(body)) parse_background(ids, task, max=args.max, queue=args.queue) ## Instruction: Add logging to background assign ## Code After: import sys, argparse from nlpipe import tasks from nlpipe.pipeline import parse_background from nlpipe.backend import get_input_ids from nlpipe.celery import app import logging FORMAT = '[%(asctime)-15s] %(message)s' logging.basicConfig(format=FORMAT, level=logging.INFO) modules = {n.split(".")[-1]: t for (n,t) in app.tasks.iteritems() if n.startswith("nlpipe")} parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('module', help='nlpipe module (task) name ({})'.format(", ".join(sorted(modules))), choices=modules, metavar="module") parser.add_argument('sets', type=int, nargs='+', help='Article set id(s)') parser.add_argument('--max', type=int, help='maximum number of articles to assign') parser.add_argument('--queue', default='background', help='Celery queue to put the articles on') args = parser.parse_args() task = modules[args.module] body = {u'filter': {'terms': {u'sets': args.sets}}} logging.info("Assigning {max} articles from set(s) {args.sets} for processing by {task.name}" .format(max=("up to {}".format(args.max) if args.max is not None else "all"), **locals())) ids = list(get_input_ids(body)) logging.info("... Found {} articles".format(len(ids))) parse_background(ids, task, max=args.max, queue=args.queue)
457f2d1d51b2bf008f837bf3ce8ee3cb47d5ba6b
var/spack/packages/libpng/package.py
var/spack/packages/libpng/package.py
from spack import * class Libpng(Package): """libpng graphics file format""" homepage = "http://www.libpng.org/pub/png/libpng.html" url = "http://sourceforge.net/projects/libpng/files/libpng16/1.6.14/libpng-1.6.14.tar.gz/download" version('1.6.14', '2101b3de1d5f348925990f9aa8405660') def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() make("install")
from spack import * class Libpng(Package): """libpng graphics file format""" homepage = "http://www.libpng.org/pub/png/libpng.html" url = "http://download.sourceforge.net/libpng/libpng-1.6.16.tar.gz" version('1.6.14', '2101b3de1d5f348925990f9aa8405660') version('1.6.15', '829a256f3de9307731d4f52dc071916d') version('1.6.16', '1a4ad377919ab15b54f6cb6a3ae2622d') def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() make("install")
Fix libpng to use a better URL
Fix libpng to use a better URL Sourceforge URLs like this eventually die when the libpng version is bumped: http://sourceforge.net/projects/libpng/files/libpng16/1.6.14/libpng-1.6.14.tar.gz/download But ones like this give you a "permanently moved", which curl -L will follow: http://download.sourceforge.net/libpng/libpng-1.6.16.tar.gz
Python
lgpl-2.1
mfherbst/spack,tmerrick1/spack,iulian787/spack,TheTimmy/spack,tmerrick1/spack,krafczyk/spack,EmreAtes/spack,matthiasdiener/spack,TheTimmy/spack,lgarren/spack,EmreAtes/spack,lgarren/spack,krafczyk/spack,EmreAtes/spack,mfherbst/spack,LLNL/spack,lgarren/spack,krafczyk/spack,krafczyk/spack,skosukhin/spack,TheTimmy/spack,mfherbst/spack,TheTimmy/spack,skosukhin/spack,tmerrick1/spack,matthiasdiener/spack,EmreAtes/spack,krafczyk/spack,tmerrick1/spack,iulian787/spack,lgarren/spack,mfherbst/spack,matthiasdiener/spack,LLNL/spack,LLNL/spack,matthiasdiener/spack,LLNL/spack,mfherbst/spack,iulian787/spack,lgarren/spack,matthiasdiener/spack,TheTimmy/spack,EmreAtes/spack,iulian787/spack,iulian787/spack,skosukhin/spack,LLNL/spack,skosukhin/spack,tmerrick1/spack,skosukhin/spack
from spack import * class Libpng(Package): """libpng graphics file format""" homepage = "http://www.libpng.org/pub/png/libpng.html" - url = "http://sourceforge.net/projects/libpng/files/libpng16/1.6.14/libpng-1.6.14.tar.gz/download" + url = "http://download.sourceforge.net/libpng/libpng-1.6.16.tar.gz" version('1.6.14', '2101b3de1d5f348925990f9aa8405660') + version('1.6.15', '829a256f3de9307731d4f52dc071916d') + version('1.6.16', '1a4ad377919ab15b54f6cb6a3ae2622d') def install(self, spec, prefix): configure("--prefix=%s" % prefix) - make() make("install")
Fix libpng to use a better URL
## Code Before: from spack import * class Libpng(Package): """libpng graphics file format""" homepage = "http://www.libpng.org/pub/png/libpng.html" url = "http://sourceforge.net/projects/libpng/files/libpng16/1.6.14/libpng-1.6.14.tar.gz/download" version('1.6.14', '2101b3de1d5f348925990f9aa8405660') def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() make("install") ## Instruction: Fix libpng to use a better URL ## Code After: from spack import * class Libpng(Package): """libpng graphics file format""" homepage = "http://www.libpng.org/pub/png/libpng.html" url = "http://download.sourceforge.net/libpng/libpng-1.6.16.tar.gz" version('1.6.14', '2101b3de1d5f348925990f9aa8405660') version('1.6.15', '829a256f3de9307731d4f52dc071916d') version('1.6.16', '1a4ad377919ab15b54f6cb6a3ae2622d') def install(self, spec, prefix): configure("--prefix=%s" % prefix) make() make("install")
f4429e49c8b493fa285d169a41b82cb761716705
tests/explorers_tests/test_additive_ou.py
tests/explorers_tests/test_additive_ou.py
from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() import unittest import numpy as np from chainerrl.explorers.additive_ou import AdditiveOU class TestAdditiveOU(unittest.TestCase): def test(self): action_size = 3 dt = 0.5 sigma = 0.001 theta = 0.3 def greedy_action_func(): return np.asarray([0] * action_size, dtype=np.float32) explorer = AdditiveOU(action_size, dt=dt, theta=theta, sigma=sigma) for t in range(10000): a = explorer.select_action(t, greedy_action_func) print(a)
from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() import unittest import numpy as np from chainerrl.explorers.additive_ou import AdditiveOU class TestAdditiveOU(unittest.TestCase): def test(self): action_size = 3 def greedy_action_func(): return np.asarray([0] * action_size, dtype=np.float32) explorer = AdditiveOU() for t in range(100): a = explorer.select_action(t, greedy_action_func) print(t, a)
Fix a test for AdditiveOU
Fix a test for AdditiveOU
Python
mit
toslunar/chainerrl,toslunar/chainerrl
from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() import unittest import numpy as np from chainerrl.explorers.additive_ou import AdditiveOU class TestAdditiveOU(unittest.TestCase): def test(self): action_size = 3 - dt = 0.5 - sigma = 0.001 - theta = 0.3 def greedy_action_func(): return np.asarray([0] * action_size, dtype=np.float32) - explorer = AdditiveOU(action_size, dt=dt, theta=theta, sigma=sigma) + explorer = AdditiveOU() - for t in range(10000): + for t in range(100): a = explorer.select_action(t, greedy_action_func) - print(a) + print(t, a)
Fix a test for AdditiveOU
## Code Before: from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() import unittest import numpy as np from chainerrl.explorers.additive_ou import AdditiveOU class TestAdditiveOU(unittest.TestCase): def test(self): action_size = 3 dt = 0.5 sigma = 0.001 theta = 0.3 def greedy_action_func(): return np.asarray([0] * action_size, dtype=np.float32) explorer = AdditiveOU(action_size, dt=dt, theta=theta, sigma=sigma) for t in range(10000): a = explorer.select_action(t, greedy_action_func) print(a) ## Instruction: Fix a test for AdditiveOU ## Code After: from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() import unittest import numpy as np from chainerrl.explorers.additive_ou import AdditiveOU class TestAdditiveOU(unittest.TestCase): def test(self): action_size = 3 def greedy_action_func(): return np.asarray([0] * action_size, dtype=np.float32) explorer = AdditiveOU() for t in range(100): a = explorer.select_action(t, greedy_action_func) print(t, a)
bea258e2affc165f610de83248d9f958eec1ef4e
cmsplugin_markdown/models.py
cmsplugin_markdown/models.py
from django.db import models from cms.models import CMSPlugin class MarkdownPlugin(CMSPlugin): markdown_text = models.TextField(max_length=8000)
from django.db import models from cms.models import CMSPlugin from cms.utils.compat.dj import python_2_unicode_compatible @python_2_unicode_compatible class MarkdownPlugin(CMSPlugin): markdown_text = models.TextField(max_length=8000) def __str__(self): text = self.markdown_text return (text[:50] + '...') if len(text) > 53 else text
Add __str__ method for better representation in frontend
Add __str__ method for better representation in frontend
Python
mit
bitmazk/cmsplugin-markdown,bitmazk/cmsplugin-markdown,bitmazk/cmsplugin-markdown
from django.db import models from cms.models import CMSPlugin + from cms.utils.compat.dj import python_2_unicode_compatible + @python_2_unicode_compatible class MarkdownPlugin(CMSPlugin): markdown_text = models.TextField(max_length=8000) + def __str__(self): + text = self.markdown_text + return (text[:50] + '...') if len(text) > 53 else text +
Add __str__ method for better representation in frontend
## Code Before: from django.db import models from cms.models import CMSPlugin class MarkdownPlugin(CMSPlugin): markdown_text = models.TextField(max_length=8000) ## Instruction: Add __str__ method for better representation in frontend ## Code After: from django.db import models from cms.models import CMSPlugin from cms.utils.compat.dj import python_2_unicode_compatible @python_2_unicode_compatible class MarkdownPlugin(CMSPlugin): markdown_text = models.TextField(max_length=8000) def __str__(self): text = self.markdown_text return (text[:50] + '...') if len(text) > 53 else text
9828e5125cdbc01a773c60b1e211d0e434a2c5aa
tests/test_modules/test_pmac/test_pmacstatuspart.py
tests/test_modules/test_pmac/test_pmacstatuspart.py
from malcolm.core import Process from malcolm.modules.builtin.controllers import ManagerController from malcolm.modules.pmac.blocks import pmac_status_block from malcolm.modules.pmac.parts import PmacStatusPart from malcolm.testutil import ChildTestCase class TestPmacStatusPart(ChildTestCase): def setUp(self): self.process = Process("Process") child = self.create_child_block( pmac_status_block, self.process, mri="my_mri", pv_prefix="PV:PRE" ) self.set_attributes(child, i10=1705244) c = ManagerController("PMAC", "/tmp", use_git=False) self.o = PmacStatusPart(name="part", mri="my_mri", initial_visibility=True) c.add_part(self.o) self.process.add_controller(c) self.process.start() self.b = c.block_view() def tearDown(self): self.process.stop(timeout=1) def test_servo_freq(self): freq = self.b.servoFrequency() assert freq == 4919.300698316487
from malcolm.core import Process from malcolm.modules.builtin.controllers import ManagerController from malcolm.modules.pmac.blocks import pmac_status_block from malcolm.modules.pmac.parts import PmacStatusPart from malcolm.testutil import ChildTestCase class TestPmacStatusPart(ChildTestCase): def setUp(self): self.process = Process("Process") child = self.create_child_block( pmac_status_block, self.process, mri="my_mri", pv_prefix="PV:PRE" ) self.set_attributes(child, servoFreq=2500.04) c = ManagerController("PMAC", "/tmp", use_git=False) self.o = PmacStatusPart(name="part", mri="my_mri", initial_visibility=True) c.add_part(self.o) self.process.add_controller(c) self.process.start() self.b = c.block_view() def tearDown(self): self.process.stop(timeout=1) def test_servo_freq(self): freq = self.b.servoFrequency() assert freq == 2500.04
Change TestPmacStatusPart to not use i10
Change TestPmacStatusPart to not use i10
Python
apache-2.0
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
from malcolm.core import Process from malcolm.modules.builtin.controllers import ManagerController from malcolm.modules.pmac.blocks import pmac_status_block from malcolm.modules.pmac.parts import PmacStatusPart from malcolm.testutil import ChildTestCase class TestPmacStatusPart(ChildTestCase): def setUp(self): self.process = Process("Process") child = self.create_child_block( pmac_status_block, self.process, mri="my_mri", pv_prefix="PV:PRE" ) - self.set_attributes(child, i10=1705244) + self.set_attributes(child, servoFreq=2500.04) c = ManagerController("PMAC", "/tmp", use_git=False) self.o = PmacStatusPart(name="part", mri="my_mri", initial_visibility=True) c.add_part(self.o) self.process.add_controller(c) self.process.start() self.b = c.block_view() def tearDown(self): self.process.stop(timeout=1) def test_servo_freq(self): freq = self.b.servoFrequency() - assert freq == 4919.300698316487 + assert freq == 2500.04
Change TestPmacStatusPart to not use i10
## Code Before: from malcolm.core import Process from malcolm.modules.builtin.controllers import ManagerController from malcolm.modules.pmac.blocks import pmac_status_block from malcolm.modules.pmac.parts import PmacStatusPart from malcolm.testutil import ChildTestCase class TestPmacStatusPart(ChildTestCase): def setUp(self): self.process = Process("Process") child = self.create_child_block( pmac_status_block, self.process, mri="my_mri", pv_prefix="PV:PRE" ) self.set_attributes(child, i10=1705244) c = ManagerController("PMAC", "/tmp", use_git=False) self.o = PmacStatusPart(name="part", mri="my_mri", initial_visibility=True) c.add_part(self.o) self.process.add_controller(c) self.process.start() self.b = c.block_view() def tearDown(self): self.process.stop(timeout=1) def test_servo_freq(self): freq = self.b.servoFrequency() assert freq == 4919.300698316487 ## Instruction: Change TestPmacStatusPart to not use i10 ## Code After: from malcolm.core import Process from malcolm.modules.builtin.controllers import ManagerController from malcolm.modules.pmac.blocks import pmac_status_block from malcolm.modules.pmac.parts import PmacStatusPart from malcolm.testutil import ChildTestCase class TestPmacStatusPart(ChildTestCase): def setUp(self): self.process = Process("Process") child = self.create_child_block( pmac_status_block, self.process, mri="my_mri", pv_prefix="PV:PRE" ) self.set_attributes(child, servoFreq=2500.04) c = ManagerController("PMAC", "/tmp", use_git=False) self.o = PmacStatusPart(name="part", mri="my_mri", initial_visibility=True) c.add_part(self.o) self.process.add_controller(c) self.process.start() self.b = c.block_view() def tearDown(self): self.process.stop(timeout=1) def test_servo_freq(self): freq = self.b.servoFrequency() assert freq == 2500.04
58dbfa0b449b8e4171c5f9cef1c15db39b52c1f0
tests/run_tests.py
tests/run_tests.py
import os.path import sys import subprocess import unittest tests_dir = os.path.dirname(__file__) sys.path.insert(0, os.path.dirname(tests_dir)) import secretstorage if __name__ == '__main__': major, minor, patch = sys.version_info[:3] print('Running with Python %d.%d.%d (SecretStorage from %s)' % (major, minor, patch, os.path.dirname(secretstorage.__file__))) mock = None if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]): mock = subprocess.Popen(('/usr/bin/python3', sys.argv[1],), stdout=subprocess.PIPE, universal_newlines=True) bus_name = mock.stdout.readline().rstrip() secretstorage.util.BUS_NAME = bus_name print('Bus name set to %r' % secretstorage.util.BUS_NAME) loader = unittest.TestLoader() runner = unittest.TextTestRunner(verbosity=2) result = runner.run(loader.discover(tests_dir)) if mock is not None: mock.terminate() sys.exit(not result.wasSuccessful())
import os.path import sys import subprocess import unittest tests_dir = os.path.dirname(__file__) sys.path.insert(0, os.path.dirname(tests_dir)) import secretstorage if __name__ == '__main__': major, minor, patch = sys.version_info[:3] print('Running with Python %d.%d.%d (SecretStorage from %s)' % (major, minor, patch, os.path.dirname(secretstorage.__file__))) mock = None if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]): mock = subprocess.Popen(('/usr/bin/python3', sys.argv[1],), stdout=subprocess.PIPE, universal_newlines=True) assert mock.stdout is not None # for mypy bus_name = mock.stdout.readline().rstrip() secretstorage.util.BUS_NAME = bus_name print('Bus name set to %r' % secretstorage.util.BUS_NAME) loader = unittest.TestLoader() runner = unittest.TextTestRunner(verbosity=2) result = runner.run(loader.discover(tests_dir)) if mock is not None: mock.terminate() sys.exit(not result.wasSuccessful())
Add an assert to make mypy check pass again
Add an assert to make mypy check pass again
Python
bsd-3-clause
mitya57/secretstorage
import os.path import sys import subprocess import unittest tests_dir = os.path.dirname(__file__) sys.path.insert(0, os.path.dirname(tests_dir)) import secretstorage if __name__ == '__main__': major, minor, patch = sys.version_info[:3] print('Running with Python %d.%d.%d (SecretStorage from %s)' % (major, minor, patch, os.path.dirname(secretstorage.__file__))) mock = None if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]): mock = subprocess.Popen(('/usr/bin/python3', sys.argv[1],), stdout=subprocess.PIPE, universal_newlines=True) + assert mock.stdout is not None # for mypy bus_name = mock.stdout.readline().rstrip() secretstorage.util.BUS_NAME = bus_name print('Bus name set to %r' % secretstorage.util.BUS_NAME) loader = unittest.TestLoader() runner = unittest.TextTestRunner(verbosity=2) result = runner.run(loader.discover(tests_dir)) if mock is not None: mock.terminate() sys.exit(not result.wasSuccessful())
Add an assert to make mypy check pass again
## Code Before: import os.path import sys import subprocess import unittest tests_dir = os.path.dirname(__file__) sys.path.insert(0, os.path.dirname(tests_dir)) import secretstorage if __name__ == '__main__': major, minor, patch = sys.version_info[:3] print('Running with Python %d.%d.%d (SecretStorage from %s)' % (major, minor, patch, os.path.dirname(secretstorage.__file__))) mock = None if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]): mock = subprocess.Popen(('/usr/bin/python3', sys.argv[1],), stdout=subprocess.PIPE, universal_newlines=True) bus_name = mock.stdout.readline().rstrip() secretstorage.util.BUS_NAME = bus_name print('Bus name set to %r' % secretstorage.util.BUS_NAME) loader = unittest.TestLoader() runner = unittest.TextTestRunner(verbosity=2) result = runner.run(loader.discover(tests_dir)) if mock is not None: mock.terminate() sys.exit(not result.wasSuccessful()) ## Instruction: Add an assert to make mypy check pass again ## Code After: import os.path import sys import subprocess import unittest tests_dir = os.path.dirname(__file__) sys.path.insert(0, os.path.dirname(tests_dir)) import secretstorage if __name__ == '__main__': major, minor, patch = sys.version_info[:3] print('Running with Python %d.%d.%d (SecretStorage from %s)' % (major, minor, patch, os.path.dirname(secretstorage.__file__))) mock = None if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]): mock = subprocess.Popen(('/usr/bin/python3', sys.argv[1],), stdout=subprocess.PIPE, universal_newlines=True) assert mock.stdout is not None # for mypy bus_name = mock.stdout.readline().rstrip() secretstorage.util.BUS_NAME = bus_name print('Bus name set to %r' % secretstorage.util.BUS_NAME) loader = unittest.TestLoader() runner = unittest.TextTestRunner(verbosity=2) result = runner.run(loader.discover(tests_dir)) if mock is not None: mock.terminate() sys.exit(not result.wasSuccessful())
99496d97f3e00284840d2127556bba0e21d1a99e
frappe/tests/test_commands.py
frappe/tests/test_commands.py
from __future__ import unicode_literals import shlex import subprocess import unittest import frappe def clean(value): if isinstance(value, (bytes, str)): value = value.decode().strip() return value class BaseTestCommands: def execute(self, command): command = command.format(**{"site": frappe.local.site}) command = shlex.split(command) self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.stdout = clean(self._proc.stdout) self.stderr = clean(self._proc.stderr) self.returncode = clean(self._proc.returncode)
from __future__ import unicode_literals import shlex import subprocess import unittest import frappe def clean(value): if isinstance(value, (bytes, str)): value = value.decode().strip() return value class BaseTestCommands: def execute(self, command): command = command.format(**{"site": frappe.local.site}) command = shlex.split(command) self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.stdout = clean(self._proc.stdout) self.stderr = clean(self._proc.stderr) self.returncode = clean(self._proc.returncode) class TestCommands(BaseTestCommands, unittest.TestCase): def test_execute(self): # execute a command expecting a numeric output self.execute("bench --site {site} execute frappe.db.get_database_size") self.assertEquals(self.returncode, 0) self.assertIsInstance(float(self.stdout), float) # execute a command expecting an errored output as local won't exist self.execute("bench --site {site} execute frappe.local.site") self.assertEquals(self.returncode, 1) self.assertIsNotNone(self.stderr) # execute a command with kwargs self.execute("""bench --site {site} execute frappe.bold --kwargs '{{"text": "DocType"}}'""") self.assertEquals(self.returncode, 0) self.assertEquals(self.stdout[1:-1], frappe.bold(text='DocType'))
Add tests for bench execute
test: Add tests for bench execute
Python
mit
saurabh6790/frappe,StrellaGroup/frappe,adityahase/frappe,mhbu50/frappe,adityahase/frappe,yashodhank/frappe,mhbu50/frappe,yashodhank/frappe,mhbu50/frappe,mhbu50/frappe,StrellaGroup/frappe,saurabh6790/frappe,yashodhank/frappe,frappe/frappe,almeidapaulopt/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,adityahase/frappe,frappe/frappe,saurabh6790/frappe,almeidapaulopt/frappe,yashodhank/frappe,adityahase/frappe,saurabh6790/frappe,almeidapaulopt/frappe,frappe/frappe
from __future__ import unicode_literals import shlex import subprocess import unittest import frappe def clean(value): if isinstance(value, (bytes, str)): value = value.decode().strip() return value class BaseTestCommands: def execute(self, command): command = command.format(**{"site": frappe.local.site}) command = shlex.split(command) self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.stdout = clean(self._proc.stdout) self.stderr = clean(self._proc.stderr) self.returncode = clean(self._proc.returncode) + + class TestCommands(BaseTestCommands, unittest.TestCase): + def test_execute(self): + # execute a command expecting a numeric output + self.execute("bench --site {site} execute frappe.db.get_database_size") + self.assertEquals(self.returncode, 0) + self.assertIsInstance(float(self.stdout), float) + + # execute a command expecting an errored output as local won't exist + self.execute("bench --site {site} execute frappe.local.site") + self.assertEquals(self.returncode, 1) + self.assertIsNotNone(self.stderr) + + # execute a command with kwargs + self.execute("""bench --site {site} execute frappe.bold --kwargs '{{"text": "DocType"}}'""") + self.assertEquals(self.returncode, 0) + self.assertEquals(self.stdout[1:-1], frappe.bold(text='DocType')) +
Add tests for bench execute
## Code Before: from __future__ import unicode_literals import shlex import subprocess import unittest import frappe def clean(value): if isinstance(value, (bytes, str)): value = value.decode().strip() return value class BaseTestCommands: def execute(self, command): command = command.format(**{"site": frappe.local.site}) command = shlex.split(command) self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.stdout = clean(self._proc.stdout) self.stderr = clean(self._proc.stderr) self.returncode = clean(self._proc.returncode) ## Instruction: Add tests for bench execute ## Code After: from __future__ import unicode_literals import shlex import subprocess import unittest import frappe def clean(value): if isinstance(value, (bytes, str)): value = value.decode().strip() return value class BaseTestCommands: def execute(self, command): command = command.format(**{"site": frappe.local.site}) command = shlex.split(command) self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.stdout = clean(self._proc.stdout) self.stderr = clean(self._proc.stderr) self.returncode = clean(self._proc.returncode) class TestCommands(BaseTestCommands, unittest.TestCase): def test_execute(self): # execute a command expecting a numeric output self.execute("bench --site {site} execute frappe.db.get_database_size") self.assertEquals(self.returncode, 0) self.assertIsInstance(float(self.stdout), float) # execute a command expecting an errored output as local won't exist self.execute("bench --site {site} execute frappe.local.site") self.assertEquals(self.returncode, 1) self.assertIsNotNone(self.stderr) # execute a command with kwargs self.execute("""bench --site {site} execute frappe.bold --kwargs '{{"text": "DocType"}}'""") self.assertEquals(self.returncode, 0) self.assertEquals(self.stdout[1:-1], frappe.bold(text='DocType'))
fac280a022c8728f14bbe1194cf74af761b7ec3f
vfp2py/__main__.py
vfp2py/__main__.py
import argparse import vfp2py def parse_args(argv=None): parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python') parser.add_argument("infile", help="file to convert", type=str) parser.add_argument("outfile", help="file to output to", type=str) parser.add_argument("search", help="directories to search for included files", type=str, nargs='*') return parser.parse_args(argv) def main(argv=None): args = parse_args(argv) global SEARCH_PATH SEARCH_PATH = args.search vfp2py.convert_file(args.infile, args.outfile) if __name__ == '__main__': try: main() except KeyboardInterrupt: pass
import argparse import vfp2py def parse_args(argv=None): parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python') parser.add_argument("infile", help="file to convert", type=str) parser.add_argument("outfile", help="file to output to", type=str) parser.add_argument("search", help="directories to search for included files", type=str, nargs='*') return parser.parse_args(argv) def main(argv=None): args = parse_args(argv) vfp2py.SEARCH_PATH += args.search vfp2py.convert_file(args.infile, args.outfile) if __name__ == '__main__': try: main() except KeyboardInterrupt: pass
Fix search paths not being added from arguments.
Fix search paths not being added from arguments.
Python
mit
mwisslead/vfp2py,mwisslead/vfp2py
import argparse import vfp2py def parse_args(argv=None): parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python') parser.add_argument("infile", help="file to convert", type=str) parser.add_argument("outfile", help="file to output to", type=str) parser.add_argument("search", help="directories to search for included files", type=str, nargs='*') return parser.parse_args(argv) def main(argv=None): args = parse_args(argv) - global SEARCH_PATH - SEARCH_PATH = args.search + vfp2py.SEARCH_PATH += args.search vfp2py.convert_file(args.infile, args.outfile) if __name__ == '__main__': try: main() except KeyboardInterrupt: pass
Fix search paths not being added from arguments.
## Code Before: import argparse import vfp2py def parse_args(argv=None): parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python') parser.add_argument("infile", help="file to convert", type=str) parser.add_argument("outfile", help="file to output to", type=str) parser.add_argument("search", help="directories to search for included files", type=str, nargs='*') return parser.parse_args(argv) def main(argv=None): args = parse_args(argv) global SEARCH_PATH SEARCH_PATH = args.search vfp2py.convert_file(args.infile, args.outfile) if __name__ == '__main__': try: main() except KeyboardInterrupt: pass ## Instruction: Fix search paths not being added from arguments. ## Code After: import argparse import vfp2py def parse_args(argv=None): parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python') parser.add_argument("infile", help="file to convert", type=str) parser.add_argument("outfile", help="file to output to", type=str) parser.add_argument("search", help="directories to search for included files", type=str, nargs='*') return parser.parse_args(argv) def main(argv=None): args = parse_args(argv) vfp2py.SEARCH_PATH += args.search vfp2py.convert_file(args.infile, args.outfile) if __name__ == '__main__': try: main() except KeyboardInterrupt: pass
2088b3df274fd31c28baa6193c937046c04b98a6
scripts/generate_wiki_languages.py
scripts/generate_wiki_languages.py
from urllib2 import urlopen import csv import lxml.builder as lb from lxml import etree # Returns CSV of all wikipedias, ordered by number of 'good' articles URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good" data = csv.reader(urlopen(URL)) # Column 2 is the language code lang_keys = [row[2] for row in data] del lang_keys[0] # Get rid of the headers # Generate the XML x = lb.E keys = [x.item(k) for k in lang_keys] resources = x.resources( getattr(x, 'string-array')(*keys, name="preference_language_keys"), ) open("languages_list.xml", "w").write( etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True) )
from urllib2 import urlopen import csv import json import lxml.builder as lb from lxml import etree # Returns CSV of all wikipedias, ordered by number of 'good' articles URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good" data = csv.reader(urlopen(URL)) lang_keys = [] lang_local_names = [] lang_eng_names = [] for row in data: lang_keys.append(row[2]) lang_local_names.append(row[10]) lang_eng_names.append(row[1]) # Generate the XML, for Android x = lb.E keys = [x.item(k) for k in lang_keys] # Skip the headers! del keys[0] resources = x.resources( getattr(x, 'string-array')(*keys, name="preference_language_keys"), ) open("languages_list.xml", "w").write( etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True) ) # Generate the JSON, for iOS langs_json = [] # Start from 1, to skip the headers for i in xrange(1, len(lang_keys)): langs_json.append({ "code": lang_keys[i], "name": lang_local_names[i], "canonical_name": lang_eng_names[i] }) open("languages_list.json", "w").write(json.dumps(langs_json, indent=4))
Modify language generation script to make JSON for iOS
Modify language generation script to make JSON for iOS Change-Id: Ib5aec2f6cfcb5bd1187cf8863ecd50f1b1a2d20c
Python
apache-2.0
Wikinaut/wikipedia-app,carloshwa/apps-android-wikipedia,dbrant/apps-android-wikipedia,creaITve/apps-android-tbrc-works,reproio/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,reproio/apps-android-wikipedia,wikimedia/apps-android-wikipedia,BrunoMRodrigues/apps-android-tbrc-work,BrunoMRodrigues/apps-android-tbrc-work,carloshwa/apps-android-wikipedia,creaITve/apps-android-tbrc-works,BrunoMRodrigues/apps-android-tbrc-work,Wikinaut/wikipedia-app,Wikinaut/wikipedia-app,BrunoMRodrigues/apps-android-tbrc-work,wikimedia/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,carloshwa/apps-android-wikipedia,wikimedia/apps-android-wikipedia,Wikinaut/wikipedia-app,parvez3019/apps-android-wikipedia,carloshwa/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,anirudh24seven/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,parvez3019/apps-android-wikipedia,dbrant/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,dbrant/apps-android-wikipedia,reproio/apps-android-wikipedia,reproio/apps-android-wikipedia,creaITve/apps-android-tbrc-works,anirudh24seven/apps-android-wikipedia,dbrant/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,parvez3019/apps-android-wikipedia,parvez3019/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,creaITve/apps-android-tbrc-works,wikimedia/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,SAGROUP2/apps-android-wikipedia,parvez3019/apps-android-wikipedia,carloshwa/apps-android-wikipedia,dbrant/apps-android-wikipedia,reproio/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,Duct-and-rice/KrswtkhrWiki4Android,Wikinaut/wikipedia-app
from urllib2 import urlopen
import csv
+ import json
import lxml.builder as lb
from lxml import etree

# Returns CSV of all wikipedias, ordered by number of 'good' articles
URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good"

data = csv.reader(urlopen(URL))

- # Column 2 is the language code
- lang_keys = [row[2] for row in data]
+ lang_keys = []
+ lang_local_names = []
+ lang_eng_names = []
+ for row in data:
+     lang_keys.append(row[2])
+     lang_local_names.append(row[10])
+     lang_eng_names.append(row[1])

+ # Generate the XML, for Android
- del lang_keys[0] # Get rid of the headers
-
- # Generate the XML
x = lb.E
keys = [x.item(k) for k in lang_keys]
+ # Skip the headers!
+ del keys[0]
resources = x.resources(
    getattr(x, 'string-array')(*keys, name="preference_language_keys"),
)

open("languages_list.xml", "w").write(
    etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True)
)

+ # Generate the JSON, for iOS
+ langs_json = []
+ # Start from 1, to skip the headers
+ for i in xrange(1, len(lang_keys)):
+     langs_json.append({
+         "code": lang_keys[i],
+         "name": lang_local_names[i],
+         "canonical_name": lang_eng_names[i]
+     })
+
+ open("languages_list.json", "w").write(json.dumps(langs_json, indent=4))
+
Modify language generation script to make JSON for iOS
## Code Before:
from urllib2 import urlopen
import csv
import lxml.builder as lb
from lxml import etree

# Returns CSV of all wikipedias, ordered by number of 'good' articles
URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good"

data = csv.reader(urlopen(URL))

# Column 2 is the language code
lang_keys = [row[2] for row in data]
del lang_keys[0] # Get rid of the headers

# Generate the XML
x = lb.E
keys = [x.item(k) for k in lang_keys]
resources = x.resources(
    getattr(x, 'string-array')(*keys, name="preference_language_keys"),
)

open("languages_list.xml", "w").write(
    etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True)
)

## Instruction:
Modify language generation script to make JSON for iOS

## Code After:
from urllib2 import urlopen
import csv
import json
import lxml.builder as lb
from lxml import etree

# Returns CSV of all wikipedias, ordered by number of 'good' articles
URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good"

data = csv.reader(urlopen(URL))

lang_keys = []
lang_local_names = []
lang_eng_names = []
for row in data:
    lang_keys.append(row[2])
    lang_local_names.append(row[10])
    lang_eng_names.append(row[1])

# Generate the XML, for Android
x = lb.E
keys = [x.item(k) for k in lang_keys]
# Skip the headers!
del keys[0]
resources = x.resources(
    getattr(x, 'string-array')(*keys, name="preference_language_keys"),
)

open("languages_list.xml", "w").write(
    etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True)
)

# Generate the JSON, for iOS
langs_json = []
# Start from 1, to skip the headers
for i in xrange(1, len(lang_keys)):
    langs_json.append({
        "code": lang_keys[i],
        "name": lang_local_names[i],
        "canonical_name": lang_eng_names[i]
    })

open("languages_list.json", "w").write(json.dumps(langs_json, indent=4))
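A quick consumer sketch (editorial, not part of the record): it assumes the languages_list.json written by the new script sits in the working directory and that the Wikistats dump contains an "en" row.

import json

# Each entry written by the script has "code", "name" and "canonical_name".
with open("languages_list.json") as fp:
    langs = json.load(fp)

# Index by language code for constant-time lookups.
by_code = {lang["code"]: lang for lang in langs}
print(by_code["en"]["canonical_name"])  # e.g. "English", if the row exists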
053147c19acbf467bb0e044f2fb58304b759b72d
frameworks/Python/pyramid/create_database.py
frameworks/Python/pyramid/create_database.py
import codecs

from frameworkbenchmarks.models import DBSession

if __name__ == "__main__":
    """
    Initialize database
    """
    with codecs.open('../config/create-postgres.sql', 'r', encoding='utf-8') as fp:
        sql = fp.read()
    DBSession.execute(sql)
    DBSession.commit()
import codecs

from frameworkbenchmarks.models import DBSession

if __name__ == "__main__":
    """
    Initialize database
    """
    with codecs.open('../../../config/create-postgres.sql',
                     'r',
                     encoding='utf-8') as fp:
        sql = fp.read()
    DBSession.execute(sql)
    DBSession.commit()
Fix the path to create-postgres.sql
Fix the path to create-postgres.sql
Python
bsd-3-clause
k-r-g/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,sxend/FrameworkBenchmarks,doom369/FrameworkBenchmarks,herloct/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,actframework/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zloster/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,khellang/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,methane/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,testn/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sxend/FrameworkBenchmarks,grob/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,denkab/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,herloct/FrameworkBenchmarks,zloster/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,khellang/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,joshk/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zloster/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,doom369/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,valyala/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,methane/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zloster/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,grob/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,testn/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,joshk/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,grob/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,mfirry/Framewo
rkBenchmarks,zloster/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Verber/FrameworkBenchmarks,khellang/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zapov/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,actframework/FrameworkBenchmarks,herloct/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,joshk/FrameworkBenchmarks,denkab/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,valyala/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,joshk/FrameworkBenchmarks,doom369/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,doom369/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jamming/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,grob/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,grob/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,actframework/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,doom369/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,denkab/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,grob/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,grob/FrameworkBenchmarks,methane/FrameworkBenchmarks,joshk/FrameworkBenchmarks,circlespainter/Framework
Benchmarks,victorbriz/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sxend/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,methane/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zloster/FrameworkBenchmarks,testn/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,grob/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,jamming/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,valyala/FrameworkBenchmarks,herloct/FrameworkBenchmarks,khellang/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,valyala/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,methane/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,denkab/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,denkab/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,methane/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zapov/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Verber/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,testn/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zloster/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,jamming/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,valyala/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,hamiltont/FrameworkBenchma
rks,kostya-sh/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,valyala/FrameworkBenchmarks,doom369/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,herloct/FrameworkBenchmarks,valyala/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,denkab/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,methane/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,sgml/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zloster/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,sgml/FrameworkBenchmarks,testn/FrameworkBenchmarks,khellang/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Verber/FrameworkBenchmarks,denkab/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,methane/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Verber/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zloster/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,testn/FrameworkBenchmarks,testn/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,valyala/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sgml/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,khellang/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,methane/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,joshk/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,joshk/FrameworkBenchmarks,denkab/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Verber/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,khellang/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,denkab/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,testn/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zapov/
FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jamming/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,joshk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,actframework/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sxend/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jamming/FrameworkBenchmarks,joshk/FrameworkBenchmarks,zapov/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sgml/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zloster/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,denkab/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jamming/FrameworkBenchmarks,denkab/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,methane/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,actframework/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,testn/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,joshk/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,herloct/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,raziel057/F
rameworkBenchmarks,greg-hellings/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zloster/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,methane/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,doom369/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sgml/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sgml/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,khellang/FrameworkBenchmarks,sxend/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,grob/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,grob/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,sgml/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sxend/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zapov/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Verber/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,herloct/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,grob/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,joshk/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zloster/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zapov/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,herloct/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,denkab/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,khellang/FrameworkBenchmarks,actframework/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,circlespainte
r/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,jamming/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jamming/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,sxend/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,herloct/FrameworkBenchmarks,zapov/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,grob/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Verber/FrameworkBenchmarks,actframework/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,joshk/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jamming/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jamming/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,khellang/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zloster/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sgml/FrameworkBenchmarks,doom369/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,testn/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,testn/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,grob/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zapov/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,herloct/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sxend/FrameworkBenchmarks,doom369/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,methane/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,s
teveklabnik/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zloster/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Verber/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zapov/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,actframework/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,zapov/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zapov/FrameworkBenchmarks,herloct/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,khellang/FrameworkBenchmarks,methane/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,grob/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zapov/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zloster/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,doom369/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,methane/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,testn/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,herloct/FrameworkBenchmarks,testn/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zdanek/FrameworkB
enchmarks,zane-techempower/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,khellang/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Verber/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jamming/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,sgml/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Verber/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,denkab/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,doom369/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zloster/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,sxend/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sgml/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zapov/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,testn/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks
import codecs

from frameworkbenchmarks.models import DBSession

if __name__ == "__main__":
    """
    Initialize database
    """
-     with codecs.open('../config/create-postgres.sql', 'r', encoding='utf-8') as fp:
+     with codecs.open('../../../config/create-postgres.sql',
+                      'r',
+                      encoding='utf-8') as fp:
        sql = fp.read()
    DBSession.execute(sql)
    DBSession.commit()
Fix the path to create-postgres.sql
## Code Before:
import codecs

from frameworkbenchmarks.models import DBSession

if __name__ == "__main__":
    """
    Initialize database
    """
    with codecs.open('../config/create-postgres.sql', 'r', encoding='utf-8') as fp:
        sql = fp.read()
    DBSession.execute(sql)
    DBSession.commit()

## Instruction:
Fix the path to create-postgres.sql

## Code After:
import codecs

from frameworkbenchmarks.models import DBSession

if __name__ == "__main__":
    """
    Initialize database
    """
    with codecs.open('../../../config/create-postgres.sql',
                     'r',
                     encoding='utf-8') as fp:
        sql = fp.read()
    DBSession.execute(sql)
    DBSession.commit()
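A small editorial sketch of why three '../' levels are needed: relative paths resolve against the script's directory under frameworks/Python/pyramid, while the SQL file lives in config/ at the repository root (directory names assumed from the file path in this record).

import os

script_dir = "frameworks/Python/pyramid"  # assumed location of create_database.py

old = os.path.normpath(os.path.join(script_dir, "../config/create-postgres.sql"))
new = os.path.normpath(os.path.join(script_dir, "../../../config/create-postgres.sql"))

print(old)  # frameworks/Python/config/create-postgres.sql (nonexistent)
print(new)  # config/create-postgres.sql (repository root)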
310553e1282231c35093ff355c61129e9f073a0a
src/lib/verify_email_google.py
src/lib/verify_email_google.py
import DNS
from validate_email import validate_email
from DNS.Lib import PackError


def is_google_apps_email(email):
    hostname = email[email.find('@')+1:]
    try:
        mx_hosts = DNS.mxlookup(hostname)
    except DNS.ServerError as e:
        return False
    except PackError as e:
        return False
    for mx in mx_hosts:
        if len(mx) == 2:
            priority, host_server = mx
        else:
            host_server = mx
        if 'google' in str(host_server).lower() and 'aspmx' in str(host_server).lower():
            return True
    return False
import DNS
import re
from validate_email import validate_email
from DNS.Lib import PackError

EMAIL_RE = re.compile('^[a-zA-Z0-9\.\@]+$')


def is_valid_email(email):
    if email.count('@') != 1:
        return False
    return bool(EMAIL_RE.match(email))


def is_google_apps_email(email):
    if not is_valid_email(email):
        return False

    hostname = email[email.find('@')+1:]
    try:
        mx_hosts = DNS.mxlookup(hostname)
    except DNS.ServerError as e:
        return False
    except PackError as e:
        return False
    for mx in mx_hosts:
        if len(mx) == 2:
            priority, host_server = mx
        else:
            host_server = mx
        if 'google' in str(host_server).lower() and 'aspmx' in str(host_server).lower():
            return True
    return False
Add Google Apps email address validation
Add Google Apps email address validation
Python
agpl-3.0
juposocial/jupo,juposocial/jupo,juposocial/jupo,juposocial/jupo
import DNS
+ import re
from validate_email import validate_email
from DNS.Lib import PackError

+ EMAIL_RE = re.compile('^[a-zA-Z0-9\.\@]+$')
+
+ def is_valid_email(email):
+     if email.count('@') != 1:
+         return False
+     return bool(EMAIL_RE.match(email))
+
def is_google_apps_email(email):
+     if not is_valid_email(email):
+         return False
+
    hostname = email[email.find('@')+1:]
    try:
        mx_hosts = DNS.mxlookup(hostname)
    except DNS.ServerError as e:
        return False
    except PackError as e:
        return False
    for mx in mx_hosts:
        if len(mx) == 2:
            priority, host_server = mx
        else:
            host_server = mx
        if 'google' in str(host_server).lower() and 'aspmx' in str(host_server).lower():
            return True
    return False
Add Google Apps email address validation
## Code Before:
import DNS
from validate_email import validate_email
from DNS.Lib import PackError


def is_google_apps_email(email):
    hostname = email[email.find('@')+1:]
    try:
        mx_hosts = DNS.mxlookup(hostname)
    except DNS.ServerError as e:
        return False
    except PackError as e:
        return False
    for mx in mx_hosts:
        if len(mx) == 2:
            priority, host_server = mx
        else:
            host_server = mx
        if 'google' in str(host_server).lower() and 'aspmx' in str(host_server).lower():
            return True
    return False

## Instruction:
Add Google Apps email address validation

## Code After:
import DNS
import re
from validate_email import validate_email
from DNS.Lib import PackError

EMAIL_RE = re.compile('^[a-zA-Z0-9\.\@]+$')


def is_valid_email(email):
    if email.count('@') != 1:
        return False
    return bool(EMAIL_RE.match(email))


def is_google_apps_email(email):
    if not is_valid_email(email):
        return False

    hostname = email[email.find('@')+1:]
    try:
        mx_hosts = DNS.mxlookup(hostname)
    except DNS.ServerError as e:
        return False
    except PackError as e:
        return False
    for mx in mx_hosts:
        if len(mx) == 2:
            priority, host_server = mx
        else:
            host_server = mx
        if 'google' in str(host_server).lower() and 'aspmx' in str(host_server).lower():
            return True
    return False
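A standalone sketch of the new pre-check (editorial; the validator is inlined so it runs without the DNS dependency). Note how narrow the character class is: addresses containing '-', '_' or '+' are rejected before any MX lookup happens.

import re

EMAIL_RE = re.compile(r'^[a-zA-Z0-9.@]+$')

def is_valid_email(email):
    # Exactly one '@', and only characters the pattern allows.
    if email.count('@') != 1:
        return False
    return bool(EMAIL_RE.match(email))

print(is_valid_email("user@example.com"))      # True
print(is_valid_email("user@@example.com"))     # False: two '@'
print(is_valid_email("user+tag@example.com"))  # False: '+' not in the class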
0dc1412ad6e7cbe47eda1e476ce16603b7f6a030
raspigibbon_bringup/scripts/raspigibbon_joint_subscriber.py
raspigibbon_bringup/scripts/raspigibbon_joint_subscriber.py
from futaba_serial_servo import RS30X
import rospy
from sensor_msgs.msg import JointState


class Slave:
    def __init__(self):
        self.rs = RS30X.RS304MD()
        self.sub = rospy.Subscriber("/raspigibbon/master_joint_state", JointState, self.joint_callback, queue_size=10)
        for i in range(1,6):
            self.rs.setTorque(i, True)
            rospy.sleep(0.01)
        rospy.loginfo("servo initialized")

    def joint_callback(self, msg):
        for i in range(1, 6):
            self.rs.setAngle(i, msg.position[i-1])
            rospy.sleep(0.01)

if __name__ == "__main__":
    try:
        while not rospy.is_shutdown():
            rospy.init_node("slave_joint_state")
            slave = Slave()
            rospy.spin()
    except rospy.ROSInterruptException:
        pass
from futaba_serial_servo import RS30X
import rospy
from sensor_msgs.msg import JointState


class Slave:
    def __init__(self):
        self.rs = RS30X.RS304MD()
        self.sub = rospy.Subscriber("/raspigibbon/master_joint_state", JointState, self.joint_callback, queue_size=10)
        for i in range(1,6):
            self.rs.setTorque(i, True)
            rospy.sleep(0.01)
        rospy.loginfo("servo initialized")

    def joint_callback(self, msg):
        for i in range(1, 6):
            self.rs.setAngle(i, msg.position[i-1])
            rospy.sleep(0.01)

    def shutdown(self):
        for i in range(1,6):
            self.rs.setTorque(i, False)
            rospy.sleep(0.01)
        rospy.loginfo("set all servo torque_off")

if __name__ == "__main__":
    try:
        while not rospy.is_shutdown():
            rospy.init_node("slave_joint_state")
            slave = Slave()
            # Register the hook on the created instance.
            rospy.on_shutdown(slave.shutdown)
            rospy.spin()
    except rospy.ROSInterruptException:
        pass
Add shutdown script to turn off servos after subscribing
Add shutdown script to turn off servos after subscribing
Python
mit
raspberrypigibbon/raspigibbon_ros
from futaba_serial_servo import RS30X
import rospy
from sensor_msgs.msg import JointState


class Slave:
    def __init__(self):
        self.rs = RS30X.RS304MD()
        self.sub = rospy.Subscriber("/raspigibbon/master_joint_state", JointState, self.joint_callback, queue_size=10)
        for i in range(1,6):
            self.rs.setTorque(i, True)
            rospy.sleep(0.01)
        rospy.loginfo("servo initialized")

    def joint_callback(self, msg):
        for i in range(1, 6):
            self.rs.setAngle(i, msg.position[i-1])
            rospy.sleep(0.01)

+     def shutdown(self):
+         for i in range(1,6):
+             self.rs.setTorque(i, False)
+             rospy.sleep(0.01)
+         rospy.loginfo("set all servo torque_off")
+
if __name__ == "__main__":
    try:
        while not rospy.is_shutdown():
            rospy.init_node("slave_joint_state")
            slave = Slave()
+             rospy.on_shutdown(slave.shutdown)
            rospy.spin()
    except rospy.ROSInterruptException:
        pass
Add shutdown script to turn off servos after subscribing
## Code Before:
from futaba_serial_servo import RS30X
import rospy
from sensor_msgs.msg import JointState


class Slave:
    def __init__(self):
        self.rs = RS30X.RS304MD()
        self.sub = rospy.Subscriber("/raspigibbon/master_joint_state", JointState, self.joint_callback, queue_size=10)
        for i in range(1,6):
            self.rs.setTorque(i, True)
            rospy.sleep(0.01)
        rospy.loginfo("servo initialized")

    def joint_callback(self, msg):
        for i in range(1, 6):
            self.rs.setAngle(i, msg.position[i-1])
            rospy.sleep(0.01)

if __name__ == "__main__":
    try:
        while not rospy.is_shutdown():
            rospy.init_node("slave_joint_state")
            slave = Slave()
            rospy.spin()
    except rospy.ROSInterruptException:
        pass

## Instruction:
Add shutdown script to turn off servos after subscribing

## Code After:
from futaba_serial_servo import RS30X
import rospy
from sensor_msgs.msg import JointState


class Slave:
    def __init__(self):
        self.rs = RS30X.RS304MD()
        self.sub = rospy.Subscriber("/raspigibbon/master_joint_state", JointState, self.joint_callback, queue_size=10)
        for i in range(1,6):
            self.rs.setTorque(i, True)
            rospy.sleep(0.01)
        rospy.loginfo("servo initialized")

    def joint_callback(self, msg):
        for i in range(1, 6):
            self.rs.setAngle(i, msg.position[i-1])
            rospy.sleep(0.01)

    def shutdown(self):
        for i in range(1,6):
            self.rs.setTorque(i, False)
            rospy.sleep(0.01)
        rospy.loginfo("set all servo torque_off")

if __name__ == "__main__":
    try:
        while not rospy.is_shutdown():
            rospy.init_node("slave_joint_state")
            slave = Slave()
            # Register the hook on the created instance.
            rospy.on_shutdown(slave.shutdown)
            rospy.spin()
    except rospy.ROSInterruptException:
        pass
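An editorial sketch of the registration pattern without ROS: the hook handed to rospy.on_shutdown has to be a bound method of a live instance, which atexit can mimic on any machine (the print stands in for the serial setTorque calls).

import atexit

class Slave(object):
    def shutdown(self):
        print("set all servo torque_off")  # stand-in for setTorque(i, False)

slave = Slave()
atexit.register(slave.shutdown)  # analogous to rospy.on_shutdown(slave.shutdown)
print("running...")
# On normal interpreter exit, atexit invokes slave.shutdown().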
cf58ebf492cd0dfaf640d2fd8d3cf4e5b2706424
alembic/versions/47dd43c1491_create_category_tabl.py
alembic/versions/47dd43c1491_create_category_tabl.py
# revision identifiers, used by Alembic.
revision = '47dd43c1491'
down_revision = '27bf0aefa49d'

from alembic import op
import sqlalchemy as sa
import datetime


def make_timestamp():
    now = datetime.datetime.utcnow()
    return now.isoformat()


def upgrade():
    op.create_table(
        'category',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.Text, nullable=False, unique=True),
        sa.Column('short_name', sa.Text, nullable=False, unique=True),
        sa.Column('created', sa.Text, default=make_timestamp),
    )


def downgrade():
    op.drop_table('category')
# revision identifiers, used by Alembic.
revision = '47dd43c1491'
down_revision = '27bf0aefa49d'

from alembic import op
import sqlalchemy as sa
import datetime


def make_timestamp():
    now = datetime.datetime.utcnow()
    return now.isoformat()


def upgrade():
    op.create_table(
        'category',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.Text, nullable=False, unique=True),
        sa.Column('short_name', sa.Text, nullable=False, unique=True),
        sa.Column('description', sa.Text, nullable=False),
        sa.Column('created', sa.Text, default=make_timestamp),
    )

    # Add two categories
    query = 'INSERT INTO category (name, short_name, description) VALUES (\'Thinking\', \'thinking\', \'Applications where you can help using your skills\')'
    op.execute(query)
    query = 'INSERT INTO category (name, short_name, description) VALUES (\'Sensing\', \'sensing\', \'Applications where you can help gathering data\')'
    op.execute(query)


def downgrade():
    op.drop_table('category')
Add description to the table and populate it with two categories
Add description to the table and populate it with two categories
Python
agpl-3.0
geotagx/geotagx-pybossa-archive,OpenNewsLabs/pybossa,PyBossa/pybossa,proyectos-analizo-info/pybossa-analizo-info,Scifabric/pybossa,CulturePlex/pybossa,geotagx/pybossa,proyectos-analizo-info/pybossa-analizo-info,CulturePlex/pybossa,OpenNewsLabs/pybossa,geotagx/geotagx-pybossa-archive,harihpr/tweetclickers,geotagx/geotagx-pybossa-archive,PyBossa/pybossa,geotagx/pybossa,harihpr/tweetclickers,stefanhahmann/pybossa,stefanhahmann/pybossa,inteligencia-coletiva-lsd/pybossa,proyectos-analizo-info/pybossa-analizo-info,Scifabric/pybossa,jean/pybossa,inteligencia-coletiva-lsd/pybossa,jean/pybossa,CulturePlex/pybossa,geotagx/geotagx-pybossa-archive,geotagx/geotagx-pybossa-archive
# revision identifiers, used by Alembic.
revision = '47dd43c1491'
down_revision = '27bf0aefa49d'

from alembic import op
import sqlalchemy as sa
import datetime


def make_timestamp():
    now = datetime.datetime.utcnow()
    return now.isoformat()


def upgrade():
    op.create_table(
        'category',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.Text, nullable=False, unique=True),
        sa.Column('short_name', sa.Text, nullable=False, unique=True),
+         sa.Column('description', sa.Text, nullable=False),
        sa.Column('created', sa.Text, default=make_timestamp),
    )
+
+     # Add two categories
+     query = 'INSERT INTO category (name, short_name, description) VALUES (\'Thinking\', \'thinking\', \'Applications where you can help using your skills\')'
+     op.execute(query)
+     query = 'INSERT INTO category (name, short_name, description) VALUES (\'Sensing\', \'sensing\', \'Applications where you can help gathering data\')'
+     op.execute(query)


def downgrade():
    op.drop_table('category')
Add description to the table and populate it with two categories
## Code Before:
# revision identifiers, used by Alembic.
revision = '47dd43c1491'
down_revision = '27bf0aefa49d'

from alembic import op
import sqlalchemy as sa
import datetime


def make_timestamp():
    now = datetime.datetime.utcnow()
    return now.isoformat()


def upgrade():
    op.create_table(
        'category',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.Text, nullable=False, unique=True),
        sa.Column('short_name', sa.Text, nullable=False, unique=True),
        sa.Column('created', sa.Text, default=make_timestamp),
    )


def downgrade():
    op.drop_table('category')

## Instruction:
Add description to the table and populate it with two categories

## Code After:
# revision identifiers, used by Alembic.
revision = '47dd43c1491'
down_revision = '27bf0aefa49d'

from alembic import op
import sqlalchemy as sa
import datetime


def make_timestamp():
    now = datetime.datetime.utcnow()
    return now.isoformat()


def upgrade():
    op.create_table(
        'category',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.Text, nullable=False, unique=True),
        sa.Column('short_name', sa.Text, nullable=False, unique=True),
        sa.Column('description', sa.Text, nullable=False),
        sa.Column('created', sa.Text, default=make_timestamp),
    )

    # Add two categories
    query = 'INSERT INTO category (name, short_name, description) VALUES (\'Thinking\', \'thinking\', \'Applications where you can help using your skills\')'
    op.execute(query)
    query = 'INSERT INTO category (name, short_name, description) VALUES (\'Sensing\', \'sensing\', \'Applications where you can help gathering data\')'
    op.execute(query)


def downgrade():
    op.drop_table('category')
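A sketch of applying this revision programmatically (editorial; it assumes an alembic.ini wired to the project's database URL, and the usual route is the alembic CLI instead):

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")          # hypothetical config path
command.upgrade(cfg, "47dd43c1491")  # runs upgrade(): adds the column, seeds the rows
# command.downgrade(cfg, "27bf0aefa49d") would drop the table again.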
8b7ab303340ba65aa219103c568ce9d88ea39689
airmozilla/main/context_processors.py
airmozilla/main/context_processors.py
from django.conf import settings

from airmozilla.main.models import Event


def sidebar(request):
    featured = Event.objects.approved().filter(public=True, featured=True)
    upcoming = Event.objects.upcoming().order_by('start_time')
    if not request.user.is_active:
        featured = featured.filter(public=True)
        upcoming = upcoming.filter(public=True)
    upcoming = upcoming[:settings.UPCOMING_SIDEBAR_COUNT]
    return {
        'upcoming': upcoming,
        'featured': featured
    }
from django.conf import settings

from airmozilla.main.models import Event


def sidebar(request):
    featured = Event.objects.approved().filter(featured=True)
    upcoming = Event.objects.upcoming().order_by('start_time')
    if not request.user.is_active:
        featured = featured.filter(public=True)
        upcoming = upcoming.filter(public=True)
    upcoming = upcoming[:settings.UPCOMING_SIDEBAR_COUNT]
    return {
        'upcoming': upcoming,
        'featured': featured
    }
Fix context processor to correctly display internal featured videos.
Fix context processor to correctly display internal featured videos.
Python
bsd-3-clause
EricSekyere/airmozilla,lcamacho/airmozilla,kenrick95/airmozilla,tannishk/airmozilla,tannishk/airmozilla,a-buck/airmozilla,bugzPDX/airmozilla,ehsan/airmozilla,mythmon/airmozilla,Nolski/airmozilla,blossomica/airmozilla,EricSekyere/airmozilla,blossomica/airmozilla,zofuthan/airmozilla,bugzPDX/airmozilla,EricSekyere/airmozilla,bugzPDX/airmozilla,chirilo/airmozilla,lcamacho/airmozilla,mythmon/airmozilla,kenrick95/airmozilla,Nolski/airmozilla,lcamacho/airmozilla,chirilo/airmozilla,anjalymehla/airmozilla,tannishk/airmozilla,ehsan/airmozilla,ehsan/airmozilla,anjalymehla/airmozilla,peterbe/airmozilla,chirilo/airmozilla,mozilla/airmozilla,EricSekyere/airmozilla,peterbe/airmozilla,EricSekyere/airmozilla,zofuthan/airmozilla,chirilo/airmozilla,anu7495/airmozilla,Nolski/airmozilla,chirilo/airmozilla,anu7495/airmozilla,mozilla/airmozilla,anjalymehla/airmozilla,anjalymehla/airmozilla,mozilla/airmozilla,mythmon/airmozilla,lcamacho/airmozilla,ehsan/airmozilla,kenrick95/airmozilla,lcamacho/airmozilla,tannishk/airmozilla,zofuthan/airmozilla,a-buck/airmozilla,mythmon/airmozilla,zofuthan/airmozilla,anjalymehla/airmozilla,peterbe/airmozilla,kenrick95/airmozilla,blossomica/airmozilla,Nolski/airmozilla,kenrick95/airmozilla,a-buck/airmozilla,anu7495/airmozilla,ehsan/airmozilla,a-buck/airmozilla,bugzPDX/airmozilla,Nolski/airmozilla,zofuthan/airmozilla,tannishk/airmozilla,blossomica/airmozilla,mythmon/airmozilla,mozilla/airmozilla,anu7495/airmozilla,anu7495/airmozilla
from django.conf import settings

from airmozilla.main.models import Event


def sidebar(request):
-     featured = Event.objects.approved().filter(public=True, featured=True)
+     featured = Event.objects.approved().filter(featured=True)
    upcoming = Event.objects.upcoming().order_by('start_time')
    if not request.user.is_active:
        featured = featured.filter(public=True)
        upcoming = upcoming.filter(public=True)
    upcoming = upcoming[:settings.UPCOMING_SIDEBAR_COUNT]
    return {
        'upcoming': upcoming,
        'featured': featured
    }
Fix context processor to correctly display internal featured videos.
## Code Before:
from django.conf import settings

from airmozilla.main.models import Event


def sidebar(request):
    featured = Event.objects.approved().filter(public=True, featured=True)
    upcoming = Event.objects.upcoming().order_by('start_time')
    if not request.user.is_active:
        featured = featured.filter(public=True)
        upcoming = upcoming.filter(public=True)
    upcoming = upcoming[:settings.UPCOMING_SIDEBAR_COUNT]
    return {
        'upcoming': upcoming,
        'featured': featured
    }

## Instruction:
Fix context processor to correctly display internal featured videos.

## Code After:
from django.conf import settings

from airmozilla.main.models import Event


def sidebar(request):
    featured = Event.objects.approved().filter(featured=True)
    upcoming = Event.objects.upcoming().order_by('start_time')
    if not request.user.is_active:
        featured = featured.filter(public=True)
        upcoming = upcoming.filter(public=True)
    upcoming = upcoming[:settings.UPCOMING_SIDEBAR_COUNT]
    return {
        'upcoming': upcoming,
        'featured': featured
    }
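A runnable stand-in (editorial, with invented sample data) for the queryset semantics the fix relies on: filters chain, so dropping public=True from the base query widens what logged-in users see while the anonymous branch still narrows it.

# List-based analogue of chained QuerySet.filter() calls.
events = [
    {"featured": True, "public": True},
    {"featured": True, "public": False},   # internal featured event
    {"featured": False, "public": True},
]

featured = [e for e in events if e["featured"]]       # logged-in view: 2 events
anon_featured = [e for e in featured if e["public"]]  # anonymous view: 1 event
print(len(featured), len(anon_featured))              # -> 2 1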
ac44332d53736f1ac3e067eecf1064bcef038b3a
core/platform/transactions/django_transaction_services.py
core/platform/transactions/django_transaction_services.py
"""Provides a seam for transaction services.""" __author__ = 'Sean Lip' def run_in_transaction(fn, *args, **kwargs): """Run a function in a transaction.""" # TODO(sll): Actually run the function in a transaction. return fn(*args, **kwargs)
"""Provides a seam for transaction services.""" __author__ = 'Sean Lip' from django.db import transaction def run_in_transaction(fn, *args, **kwargs): """Run a function in a transaction.""" with transaction.commit_on_success(): return fn(*args, **kwargs)
Add transaction support for django models.
Add transaction support for django models.
Python
apache-2.0
oulan/oppia,directorlive/oppia,google-code-export/oppia,oulan/oppia,michaelWagner/oppia,edallison/oppia,terrameijar/oppia,Dev4X/oppia,amitdeutsch/oppia,zgchizi/oppia-uc,virajprabhu/oppia,won0089/oppia,sunu/oppia,mit0110/oppia,sanyaade-teachings/oppia,kennho/oppia,BenHenning/oppia,CMDann/oppia,whygee/oppia,gale320/oppia,kevinlee12/oppia,won0089/oppia,bjvoth/oppia,kaffeel/oppia,won0089/oppia,cleophasmashiri/oppia,danieljjh/oppia,openhatch/oh-missions-oppia-beta,nagyistoce/oppia,kaffeel/oppia,mit0110/oppia,kevinlee12/oppia,kennho/oppia,rackstar17/oppia,toooooper/oppia,won0089/oppia,jestapinski/oppia,Dev4X/oppia,mit0110/oppia,BenHenning/oppia,sdulal/oppia,sanyaade-teachings/oppia,himanshu-dixit/oppia,leandrotoledo/oppia,kevinlee12/oppia,edallison/oppia,toooooper/oppia,souravbadami/oppia,openhatch/oh-missions-oppia-beta,google-code-export/oppia,aldeka/oppia,MaximLich/oppia,kingctan/oppia,oppia/oppia,google-code-export/oppia,AllanYangZhou/oppia,sunu/oppia,sdulal/oppia,sbhowmik89/oppia,sunu/oppia,danieljjh/oppia,Cgruppo/oppia,nagyistoce/oppia,terrameijar/oppia,toooooper/oppia,souravbadami/oppia,mindpin/mindpin_oppia,fernandopinhati/oppia,bjvoth/oppia,danieljjh/oppia,felipecocco/oppia,Atlas-Sailed-Co/oppia,miyucy/oppia,kaffeel/oppia,Atlas-Sailed-Co/oppia,nagyistoce/oppia,aldeka/oppia,dippatel1994/oppia,Cgruppo/oppia,leandrotoledo/oppia,leandrotoledo/oppia,mindpin/mindpin_oppia,raju249/oppia,BenHenning/oppia,kennho/oppia,raju249/oppia,dippatel1994/oppia,VictoriaRoux/oppia,oppia/oppia,mindpin/mindpin_oppia,wangsai/oppia,virajprabhu/oppia,miyucy/oppia,VictoriaRoux/oppia,fernandopinhati/oppia,infinyte/oppia,infinyte/oppia,anthkris/oppia,Dev4X/oppia,MaximLich/oppia,CMDann/oppia,brianrodri/oppia,jestapinski/oppia,kingctan/oppia,amitdeutsch/oppia,brylie/oppia,brianrodri/oppia,MAKOSCAFEE/oppia,nagyistoce/oppia,fernandopinhati/oppia,mindpin/mindpin_oppia,whygee/oppia,Atlas-Sailed-Co/oppia,amitdeutsch/oppia,MAKOSCAFEE/oppia,whygee/oppia,infinyte/oppia,danieljjh/oppia,Cgruppo/oppia,directorlive/oppia,CMDann/oppia,asandyz/oppia,gale320/oppia,souravbadami/oppia,cleophasmashiri/oppia,virajprabhu/oppia,brylie/oppia,cleophasmashiri/oppia,amgowano/oppia,sarahfo/oppia,bjvoth/oppia,sunu/oh-missions-oppia-beta,dippatel1994/oppia,prasanna08/oppia,brylie/oppia,himanshu-dixit/oppia,edallison/oppia,mit0110/oppia,zgchizi/oppia-uc,DewarM/oppia,anthkris/oppia,infinyte/oppia,DewarM/oppia,edallison/oppia,cleophasmashiri/oppia,himanshu-dixit/oppia,sanyaade-teachings/oppia,dippatel1994/oppia,sarahfo/oppia,michaelWagner/oppia,amgowano/oppia,bjvoth/oppia,sanyaade-teachings/oppia,amgowano/oppia,kevinlee12/oppia,shaz13/oppia,oulan/oppia,sbhowmik89/oppia,kevinlee12/oppia,zgchizi/oppia-uc,felipecocco/oppia,wangsai/oppia,openhatch/oh-missions-oppia-beta,BenHenning/oppia,MAKOSCAFEE/oppia,rackstar17/oppia,michaelWagner/oppia,sdulal/oppia,leandrotoledo/oppia,oppia/oppia,gale320/oppia,shaz13/oppia,sanyaade-teachings/oppia,virajprabhu/oppia,prasanna08/oppia,amitdeutsch/oppia,kingctan/oppia,himanshu-dixit/oppia,rackstar17/oppia,sunu/oppia,MAKOSCAFEE/oppia,oppia/oppia,felipecocco/oppia,hazmatzo/oppia,sunu/oppia,VictoriaRoux/oppia,aldeka/oppia,directorlive/oppia,sdulal/oppia,shaz13/oppia,wangsai/oppia,toooooper/oppia,oulan/oppia,bjvoth/oppia,aldeka/oppia,zgchizi/oppia-uc,jestapinski/oppia,danieljjh/oppia,Dev4X/oppia,anthkris/oppia,Atlas-Sailed-Co/oppia,kaffeel/oppia,oppia/oppia,BenHenning/oppia,asandyz/oppia,DewarM/oppia,CMDann/oppia,won0089/oppia,VictoriaRoux/oppia,miyucy/oppia,sunu/oh-missions-oppia-beta,kennho/oppia,wangsai/oppia,fernandopi
nhati/oppia,oulan/oppia,terrameijar/oppia,Cgruppo/oppia,shaz13/oppia,brylie/oppia,anthkris/oppia,miyucy/oppia,souravbadami/oppia,mit0110/oppia,sarahfo/oppia,kingctan/oppia,felipecocco/oppia,openhatch/oh-missions-oppia-beta,hazmatzo/oppia,anggorodewanto/oppia,amitdeutsch/oppia,felipecocco/oppia,prasanna08/oppia,dippatel1994/oppia,sarahfo/oppia,kennho/oppia,CMDann/oppia,terrameijar/oppia,fernandopinhati/oppia,prasanna08/oppia,google-code-export/oppia,gale320/oppia,hazmatzo/oppia,leandrotoledo/oppia,sdulal/oppia,brylie/oppia,toooooper/oppia,hazmatzo/oppia,anggorodewanto/oppia,Cgruppo/oppia,gale320/oppia,sunu/oh-missions-oppia-beta,kingctan/oppia,anggorodewanto/oppia,brianrodri/oppia,MaximLich/oppia,AllanYangZhou/oppia,raju249/oppia,anggorodewanto/oppia,sbhowmik89/oppia,asandyz/oppia,sunu/oh-missions-oppia-beta,brianrodri/oppia,DewarM/oppia,hazmatzo/oppia,sbhowmik89/oppia,asandyz/oppia,asandyz/oppia,sbhowmik89/oppia,AllanYangZhou/oppia,directorlive/oppia,DewarM/oppia,whygee/oppia,Atlas-Sailed-Co/oppia,jestapinski/oppia,wangsai/oppia,amgowano/oppia,infinyte/oppia,prasanna08/oppia,brianrodri/oppia,google-code-export/oppia,raju249/oppia,VictoriaRoux/oppia,sarahfo/oppia,virajprabhu/oppia,whygee/oppia,michaelWagner/oppia,MaximLich/oppia,souravbadami/oppia,kaffeel/oppia,Dev4X/oppia,michaelWagner/oppia,rackstar17/oppia,directorlive/oppia,AllanYangZhou/oppia,cleophasmashiri/oppia,nagyistoce/oppia
"""Provides a seam for transaction services.""" __author__ = 'Sean Lip' + from django.db import transaction + + def run_in_transaction(fn, *args, **kwargs): """Run a function in a transaction.""" - # TODO(sll): Actually run the function in a transaction. + with transaction.commit_on_success(): - return fn(*args, **kwargs) + return fn(*args, **kwargs)
Add transaction support for django models.
## Code Before:
"""Provides a seam for transaction services."""

__author__ = 'Sean Lip'


def run_in_transaction(fn, *args, **kwargs):
    """Run a function in a transaction."""
    # TODO(sll): Actually run the function in a transaction.
    return fn(*args, **kwargs)

## Instruction:
Add transaction support for django models.

## Code After:
"""Provides a seam for transaction services."""

__author__ = 'Sean Lip'

from django.db import transaction


def run_in_transaction(fn, *args, **kwargs):
    """Run a function in a transaction."""
    with transaction.commit_on_success():
        return fn(*args, **kwargs)
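A side note on the API in this record: transaction.commit_on_success() is the old Django transaction context manager; it was deprecated in Django 1.6 and removed in 1.8. A minimal sketch of the same seam on modern Django (an assumption, not part of this row) would use transaction.atomic():

from django.db import transaction

def run_in_transaction(fn, *args, **kwargs):
    """Run fn inside a database transaction; rolls back if fn raises."""
    with transaction.atomic():
        return fn(*args, **kwargs)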
e5bd4884fc7ea4389315d0d2b8ff248bbda9a905
custom/enikshay/integrations/utils.py
custom/enikshay/integrations/utils.py
from corehq.apps.locations.models import SQLLocation
from dimagi.utils.logging import notify_exception


def is_submission_from_test_location(person_case):
    try:
        phi_location = SQLLocation.objects.get(location_id=person_case.owner_id)
    except SQLLocation.DoesNotExist:
        message = ("Location with id {location_id} not found. This is the owner for person with id: {person_id}"
                   .format(location_id=person_case.owner_id, person_id=person_case.case_id))
        notify_exception(None, message="[ENIKSHAY] {}".format(message))
        return True

    return phi_location.metadata.get('is_test', "yes") == "yes"
from corehq.apps.locations.models import SQLLocation
from custom.enikshay.exceptions import NikshayLocationNotFound


def is_submission_from_test_location(person_case):
    try:
        phi_location = SQLLocation.objects.get(location_id=person_case.owner_id)
    except SQLLocation.DoesNotExist:
        raise NikshayLocationNotFound(
            "Location with id {location_id} not found. This is the owner for person with id: {person_id}"
            .format(location_id=person_case.owner_id, person_id=person_case.case_id)
        )

    return phi_location.metadata.get('is_test', "yes") == "yes"
Revert "Fallback is test location"
Revert "Fallback is test location" This reverts commit 2ba9865fa0f05e9ae244b2513e046c961540fca1.
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
from corehq.apps.locations.models import SQLLocation
- from dimagi.utils.logging import notify_exception
+ from custom.enikshay.exceptions import NikshayLocationNotFound


def is_submission_from_test_location(person_case):
    try:
        phi_location = SQLLocation.objects.get(location_id=person_case.owner_id)
    except SQLLocation.DoesNotExist:
+         raise NikshayLocationNotFound(
-         message = ("Location with id {location_id} not found. This is the owner for person with id: {person_id}"
+             "Location with id {location_id} not found. This is the owner for person with id: {person_id}"
-                    .format(location_id=person_case.owner_id, person_id=person_case.case_id))
+             .format(location_id=person_case.owner_id, person_id=person_case.case_id)
+         )
-         notify_exception(None, message="[ENIKSHAY] {}".format(message))
-         return True
-
    return phi_location.metadata.get('is_test', "yes") == "yes"
Revert "Fallback is test location"
## Code Before:
from corehq.apps.locations.models import SQLLocation
from dimagi.utils.logging import notify_exception


def is_submission_from_test_location(person_case):
    try:
        phi_location = SQLLocation.objects.get(location_id=person_case.owner_id)
    except SQLLocation.DoesNotExist:
        message = ("Location with id {location_id} not found. This is the owner for person with id: {person_id}"
                   .format(location_id=person_case.owner_id, person_id=person_case.case_id))
        notify_exception(None, message="[ENIKSHAY] {}".format(message))
        return True

    return phi_location.metadata.get('is_test', "yes") == "yes"

## Instruction:
Revert "Fallback is test location"

## Code After:
from corehq.apps.locations.models import SQLLocation
from custom.enikshay.exceptions import NikshayLocationNotFound


def is_submission_from_test_location(person_case):
    try:
        phi_location = SQLLocation.objects.get(location_id=person_case.owner_id)
    except SQLLocation.DoesNotExist:
        raise NikshayLocationNotFound(
            "Location with id {location_id} not found. This is the owner for person with id: {person_id}"
            .format(location_id=person_case.owner_id, person_id=person_case.case_id)
        )

    return phi_location.metadata.get('is_test', "yes") == "yes"
78136c619ebafb54e4bd65af3cfd85a8ff67766b
osfclient/tests/test_cloning.py
osfclient/tests/test_cloning.py
"""Test `osf clone` command.""" import os from mock import patch, mock_open, call from osfclient import OSF from osfclient.cli import clone from osfclient.tests.mocks import MockProject from osfclient.tests.mocks import MockArgs @patch.object(OSF, 'project', return_value=MockProject('1234')) def test_clone_project(OSF_project): # check that `osf clone` opens files with the right names and modes args = MockArgs(project='1234') mock_open_func = mock_open() with patch('osfclient.cli.open', mock_open_func): with patch('osfclient.cli.os.makedirs'): clone(args) OSF_project.assert_called_once_with('1234') # check that the project and the files have been accessed for store in OSF_project.return_value.storages: assert store._name_mock.called for f in store.files: assert f._path_mock.called fname = f._path_mock.return_value if fname.startswith('/'): fname = fname[1:] full_path = os.path.join('1234', store._name_mock.return_value, fname) assert call(full_path, 'wb') in mock_open_func.mock_calls
"""Test `osf clone` command.""" import os from mock import patch, mock_open, call from osfclient import OSF from osfclient.cli import clone from osfclient.tests.mocks import MockProject from osfclient.tests.mocks import MockArgs @patch.object(OSF, 'project', return_value=MockProject('1234')) def test_clone_project(OSF_project): # check that `osf clone` opens files with the right names and modes args = MockArgs(project='1234') mock_open_func = mock_open() with patch('osfclient.cli.open', mock_open_func): with patch('osfclient.cli.os.makedirs'): with patch('osfclient.cli.os.getenv', side_effect='SECRET'): clone(args) OSF_project.assert_called_once_with('1234') # check that the project and the files have been accessed for store in OSF_project.return_value.storages: assert store._name_mock.called for f in store.files: assert f._path_mock.called fname = f._path_mock.return_value if fname.startswith('/'): fname = fname[1:] full_path = os.path.join('1234', store._name_mock.return_value, fname) assert call(full_path, 'wb') in mock_open_func.mock_calls
Fix osf clone test that was asking for a password
Fix osf clone test that was asking for a password
Python
bsd-3-clause
betatim/osf-cli,betatim/osf-cli
"""Test `osf clone` command.""" import os from mock import patch, mock_open, call from osfclient import OSF from osfclient.cli import clone from osfclient.tests.mocks import MockProject from osfclient.tests.mocks import MockArgs @patch.object(OSF, 'project', return_value=MockProject('1234')) def test_clone_project(OSF_project): # check that `osf clone` opens files with the right names and modes args = MockArgs(project='1234') mock_open_func = mock_open() with patch('osfclient.cli.open', mock_open_func): with patch('osfclient.cli.os.makedirs'): + with patch('osfclient.cli.os.getenv', side_effect='SECRET'): - clone(args) + clone(args) OSF_project.assert_called_once_with('1234') # check that the project and the files have been accessed for store in OSF_project.return_value.storages: assert store._name_mock.called for f in store.files: assert f._path_mock.called fname = f._path_mock.return_value if fname.startswith('/'): fname = fname[1:] full_path = os.path.join('1234', store._name_mock.return_value, fname) assert call(full_path, 'wb') in mock_open_func.mock_calls
Fix osf clone test that was asking for a password
## Code Before:
"""Test `osf clone` command."""

import os

from mock import patch, mock_open, call

from osfclient import OSF
from osfclient.cli import clone

from osfclient.tests.mocks import MockProject
from osfclient.tests.mocks import MockArgs


@patch.object(OSF, 'project', return_value=MockProject('1234'))
def test_clone_project(OSF_project):
    # check that `osf clone` opens files with the right names and modes
    args = MockArgs(project='1234')

    mock_open_func = mock_open()

    with patch('osfclient.cli.open', mock_open_func):
        with patch('osfclient.cli.os.makedirs'):
            clone(args)

    OSF_project.assert_called_once_with('1234')

    # check that the project and the files have been accessed
    for store in OSF_project.return_value.storages:
        assert store._name_mock.called

        for f in store.files:
            assert f._path_mock.called

            fname = f._path_mock.return_value
            if fname.startswith('/'):
                fname = fname[1:]

            full_path = os.path.join('1234', store._name_mock.return_value, fname)
            assert call(full_path, 'wb') in mock_open_func.mock_calls

## Instruction:
Fix osf clone test that was asking for a password

## Code After:
"""Test `osf clone` command."""

import os

from mock import patch, mock_open, call

from osfclient import OSF
from osfclient.cli import clone

from osfclient.tests.mocks import MockProject
from osfclient.tests.mocks import MockArgs


@patch.object(OSF, 'project', return_value=MockProject('1234'))
def test_clone_project(OSF_project):
    # check that `osf clone` opens files with the right names and modes
    args = MockArgs(project='1234')

    mock_open_func = mock_open()

    with patch('osfclient.cli.open', mock_open_func):
        with patch('osfclient.cli.os.makedirs'):
            with patch('osfclient.cli.os.getenv', side_effect='SECRET'):
                clone(args)

    OSF_project.assert_called_once_with('1234')

    # check that the project and the files have been accessed
    for store in OSF_project.return_value.storages:
        assert store._name_mock.called

        for f in store.files:
            assert f._path_mock.called

            fname = f._path_mock.return_value
            if fname.startswith('/'):
                fname = fname[1:]

            full_path = os.path.join('1234', store._name_mock.return_value, fname)
            assert call(full_path, 'wb') in mock_open_func.mock_calls
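One detail worth flagging in the row above (an observation, not dataset content): side_effect='SECRET' makes the mock treat the string as an iterable, returning one character per call; pinning a single value is normally spelled return_value. A stand-alone sketch:

import os
from mock import patch  # unittest.mock behaves the same

with patch('os.getenv', side_effect='SECRET'):
    print(os.getenv('A'), os.getenv('B'))  # S E  (one character per call)

with patch('os.getenv', return_value='SECRET'):
    print(os.getenv('OSF_PASSWORD'))       # SECRET (same value every call)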
9c7ff0d98d324e3a52664f9dcd6fe64334778e00
web/dbconfig/dbconfigbock7k.py
web/dbconfig/dbconfigbock7k.py
import dbconfig

class dbConfigBock7k ( dbconfig.dbConfig ):

  # cubedim is a dictionary so it can vary
  # size of the cube at resolution
  cubedim = { 0: [128, 128, 16] }

  #information about the image stack
  slicerange = [0,61]
  tilesz = [ 256,256 ]

  #resolution information -- lowest resolution and list of resolution
  resolutions = [ 0 ]

  imagesz = { 0: [ 7198, 7352 ] }

  # Resize factor to eliminate distortion
  zscale = { 0: 1.0 }
import dbconfig

class dbConfigBock7k ( dbconfig.dbConfig ):

  # cubedim is a dictionary so it can vary
  # size of the cube at resolution
  cubedim = { 0: [128, 128, 16],
              1: [128, 128, 16],
              2: [128, 128, 16],
              3: [128, 128, 16] }

  #information about the image stack
  slicerange = [0,61]
  tilesz = [ 256,256 ]

  #resolution information -- lowest resolution and list of resolution
  resolutions = [ 0, 1, 2, 3 ]

  imagesz = { 0: [ 7198, 7352 ],
              1: [ 3599, 3676 ],
              2: [ 1800, 1838 ],
              3: [ 900, 919 ] }

  # Resize factor to eliminate distortion
  zscale = { 0: 10.0,
             1: 5.0,
             2: 2.5,
             3: 1.25 }
Expand bock7k to be a multi-resolution project.
Expand bock7k to be a multi-resolution project.
Python
apache-2.0
neurodata/ndstore,openconnectome/open-connectome,openconnectome/open-connectome,neurodata/ndstore,neurodata/ndstore,openconnectome/open-connectome,openconnectome/open-connectome,neurodata/ndstore,openconnectome/open-connectome,openconnectome/open-connectome
import dbconfig

class dbConfigBock7k ( dbconfig.dbConfig ):

  # cubedim is a dictionary so it can vary
  # size of the cube at resolution
-   cubedim = { 0: [128, 128, 16] }
+   cubedim = { 0: [128, 128, 16],
+               1: [128, 128, 16],
+               2: [128, 128, 16],
+               3: [128, 128, 16] }

  #information about the image stack
  slicerange = [0,61]
  tilesz = [ 256,256 ]

  #resolution information -- lowest resolution and list of resolution
-   resolutions = [ 0 ]
+   resolutions = [ 0, 1, 2, 3 ]

-   imagesz = { 0: [ 7198, 7352 ] }
+   imagesz = { 0: [ 7198, 7352 ],
+               1: [ 3599, 3676 ],
+               2: [ 1800, 1838 ],
+               3: [ 900, 919 ] }

  # Resize factor to eliminate distortion
-   zscale = { 0: 1.0 }
+   zscale = { 0: 10.0,
+              1: 5.0,
+              2: 2.5,
+              3: 1.25 }
Expand bock7k to be a multi-resolution project.
## Code Before:
import dbconfig

class dbConfigBock7k ( dbconfig.dbConfig ):

  # cubedim is a dictionary so it can vary
  # size of the cube at resolution
  cubedim = { 0: [128, 128, 16] }

  #information about the image stack
  slicerange = [0,61]
  tilesz = [ 256,256 ]

  #resolution information -- lowest resolution and list of resolution
  resolutions = [ 0 ]

  imagesz = { 0: [ 7198, 7352 ] }

  # Resize factor to eliminate distortion
  zscale = { 0: 1.0 }

## Instruction:
Expand bock7k to be a multi-resolution project.

## Code After:
import dbconfig

class dbConfigBock7k ( dbconfig.dbConfig ):

  # cubedim is a dictionary so it can vary
  # size of the cube at resolution
  cubedim = { 0: [128, 128, 16],
              1: [128, 128, 16],
              2: [128, 128, 16],
              3: [128, 128, 16] }

  #information about the image stack
  slicerange = [0,61]
  tilesz = [ 256,256 ]

  #resolution information -- lowest resolution and list of resolution
  resolutions = [ 0, 1, 2, 3 ]

  imagesz = { 0: [ 7198, 7352 ],
              1: [ 3599, 3676 ],
              2: [ 1800, 1838 ],
              3: [ 900, 919 ] }

  # Resize factor to eliminate distortion
  zscale = { 0: 10.0,
             1: 5.0,
             2: 2.5,
             3: 1.25 }
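The numbers in this row form a standard image pyramid: each level halves x and y (rounding up) and halves zscale. A quick arithmetic check (illustrative reconstruction, not project code):

import math

imagesz = {0: [7198, 7352]}
zscale = {0: 10.0}
for res in range(1, 4):
    imagesz[res] = [int(math.ceil(imagesz[res - 1][0] / 2.0)),
                    int(math.ceil(imagesz[res - 1][1] / 2.0))]
    zscale[res] = zscale[res - 1] / 2

print(imagesz)  # {0: [7198, 7352], 1: [3599, 3676], 2: [1800, 1838], 3: [900, 919]}
print(zscale)   # {0: 10.0, 1: 5.0, 2: 2.5, 3: 1.25}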
d82111c5415176ea07674723151f14445e4b52ab
fire_rs/firemodel/test_propagation.py
fire_rs/firemodel/test_propagation.py
import unittest

import fire_rs.firemodel.propagation as propagation


class TestPropagation(unittest.TestCase):

    def test_propagate(self):
        env = propagation.Environment([[475060.0, 477060.0], [6200074.0, 6202074.0]], wind_speed=4.11, wind_dir=0)
        prop = propagation.propagate(env, 10, 20)
        # prop.plot(blocking=True)
import unittest

import fire_rs.firemodel.propagation as propagation


class TestPropagation(unittest.TestCase):

    def test_propagate(self):
        env = propagation.Environment([[480060.0, 490060.0], [6210074.0, 6220074.0]], wind_speed=4.11, wind_dir=0)
        prop = propagation.propagate(env, 10, 20, horizon=3*3600)
        # prop.plot(blocking=True)
Set test area to a burnable one.
[fire-models] Set test area to a burnable one.
Python
bsd-2-clause
fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop
import unittest

import fire_rs.firemodel.propagation as propagation


class TestPropagation(unittest.TestCase):

    def test_propagate(self):
-         env = propagation.Environment([[475060.0, 477060.0], [6200074.0, 6202074.0]], wind_speed=4.11, wind_dir=0)
+         env = propagation.Environment([[480060.0, 490060.0], [6210074.0, 6220074.0]], wind_speed=4.11, wind_dir=0)
-         prop = propagation.propagate(env, 10, 20)
+         prop = propagation.propagate(env, 10, 20, horizon=3*3600)
        # prop.plot(blocking=True)
Set test area to a burnable one.
## Code Before:
import unittest

import fire_rs.firemodel.propagation as propagation


class TestPropagation(unittest.TestCase):

    def test_propagate(self):
        env = propagation.Environment([[475060.0, 477060.0], [6200074.0, 6202074.0]], wind_speed=4.11, wind_dir=0)
        prop = propagation.propagate(env, 10, 20)
        # prop.plot(blocking=True)

## Instruction:
Set test area to a burnable one.

## Code After:
import unittest

import fire_rs.firemodel.propagation as propagation


class TestPropagation(unittest.TestCase):

    def test_propagate(self):
        env = propagation.Environment([[480060.0, 490060.0], [6210074.0, 6220074.0]], wind_speed=4.11, wind_dir=0)
        prop = propagation.propagate(env, 10, 20, horizon=3*3600)
        # prop.plot(blocking=True)
d919c1e29645a52e795e85686de6de8f1e57196e
glue/plugins/ginga_viewer/__init__.py
glue/plugins/ginga_viewer/__init__.py
try:
    from .client import *
    from .qt_widget import *
except ImportError:
    import warnings
    warnings.warn("Could not import ginga plugin, since ginga is required")

# Register qt client
from ...config import qt_client
qt_client.add(GingaWidget)
try:
    from .client import *
    from .qt_widget import *
except ImportError:
    import warnings
    warnings.warn("Could not import ginga plugin, since ginga is required")
else:
    # Register qt client
    from ...config import qt_client
    qt_client.add(GingaWidget)
Fix if ginga is not installed
Fix if ginga is not installed
Python
bsd-3-clause
JudoWill/glue,stscieisenhamer/glue,saimn/glue,JudoWill/glue,saimn/glue,stscieisenhamer/glue
try:
    from .client import *
    from .qt_widget import *
except ImportError:
    import warnings
    warnings.warn("Could not import ginga plugin, since ginga is required")
+ else:
+     # Register qt client
+     from ...config import qt_client
+     qt_client.add(GingaWidget)
- # Register qt client
- from ...config import qt_client
- qt_client.add(GingaWidget)
-
Fix if ginga is not installed
## Code Before:
try:
    from .client import *
    from .qt_widget import *
except ImportError:
    import warnings
    warnings.warn("Could not import ginga plugin, since ginga is required")

# Register qt client
from ...config import qt_client
qt_client.add(GingaWidget)

## Instruction:
Fix if ginga is not installed

## Code After:
try:
    from .client import *
    from .qt_widget import *
except ImportError:
    import warnings
    warnings.warn("Could not import ginga plugin, since ginga is required")
else:
    # Register qt client
    from ...config import qt_client
    qt_client.add(GingaWidget)
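The pattern behind this fix generalizes: code in a try/except's else clause runs only when the try body raised nothing, so registration that depends on the optional import is skipped cleanly instead of crashing on a name that never got imported. Self-contained illustration (hypothetical module):

try:
    import ginga  # any optional dependency works for the pattern
except ImportError:
    print("optional dependency missing; skipping registration")
else:
    # Reached only when the import succeeded, so its names are safe to use.
    print("registering plugin for", ginga.__name__)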
ee425b43502054895986c447e4cdae2c7e6c9278
Lib/fontTools/misc/timeTools.py
Lib/fontTools/misc/timeTools.py
"""fontTools.misc.timeTools.py -- miscellaneous routines.""" from __future__ import print_function, division, absolute_import from fontTools.misc.py23 import * import time import calendar # OpenType timestamp handling epoch_diff = calendar.timegm((1904, 1, 1, 0, 0, 0, 0, 0, 0)) def timestampToString(value): try: value = time.asctime(time.gmtime(max(0, value + epoch_diff))) except ValueError: value = time.asctime(time.gmtime(0)) def timestampFromString(value): return calendar.timegm(time.strptime(value)) - epoch_diff def timestampNow(): return int(time.time() - epoch_diff)
"""fontTools.misc.timeTools.py -- miscellaneous routines.""" from __future__ import print_function, division, absolute_import from fontTools.misc.py23 import * import time import calendar # OpenType timestamp handling epoch_diff = calendar.timegm((1904, 1, 1, 0, 0, 0, 0, 0, 0)) def timestampToString(value): # https://github.com/behdad/fonttools/issues/99#issuecomment-66776810 try: value = time.asctime(time.gmtime(max(0, value + epoch_diff))) except (OverflowError, ValueError): value = time.asctime(time.gmtime(0)) def timestampFromString(value): return calendar.timegm(time.strptime(value)) - epoch_diff def timestampNow(): return int(time.time() - epoch_diff)
Adjust for Python 3.3 change in gmtime() exception type
Adjust for Python 3.3 change in gmtime() exception type

https://github.com/behdad/fonttools/issues/99#issuecomment-66776810

Fixes https://github.com/behdad/fonttools/issues/99
Python
mit
googlefonts/fonttools,fonttools/fonttools
"""fontTools.misc.timeTools.py -- miscellaneous routines.""" from __future__ import print_function, division, absolute_import from fontTools.misc.py23 import * import time import calendar # OpenType timestamp handling epoch_diff = calendar.timegm((1904, 1, 1, 0, 0, 0, 0, 0, 0)) def timestampToString(value): + # https://github.com/behdad/fonttools/issues/99#issuecomment-66776810 try: value = time.asctime(time.gmtime(max(0, value + epoch_diff))) - except ValueError: + except (OverflowError, ValueError): value = time.asctime(time.gmtime(0)) def timestampFromString(value): return calendar.timegm(time.strptime(value)) - epoch_diff def timestampNow(): return int(time.time() - epoch_diff)
Adjust for Python 3.3 change in gmtime() exception type
## Code Before:
"""fontTools.misc.timeTools.py -- miscellaneous routines."""

from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
import time
import calendar


# OpenType timestamp handling

epoch_diff = calendar.timegm((1904, 1, 1, 0, 0, 0, 0, 0, 0))

def timestampToString(value):
    try:
        value = time.asctime(time.gmtime(max(0, value + epoch_diff)))
    except ValueError:
        value = time.asctime(time.gmtime(0))

def timestampFromString(value):
    return calendar.timegm(time.strptime(value)) - epoch_diff

def timestampNow():
    return int(time.time() - epoch_diff)

## Instruction:
Adjust for Python 3.3 change in gmtime() exception type

## Code After:
"""fontTools.misc.timeTools.py -- miscellaneous routines."""

from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
import time
import calendar


# OpenType timestamp handling

epoch_diff = calendar.timegm((1904, 1, 1, 0, 0, 0, 0, 0, 0))

def timestampToString(value):
    # https://github.com/behdad/fonttools/issues/99#issuecomment-66776810
    try:
        value = time.asctime(time.gmtime(max(0, value + epoch_diff)))
    except (OverflowError, ValueError):
        value = time.asctime(time.gmtime(0))

def timestampFromString(value):
    return calendar.timegm(time.strptime(value)) - epoch_diff

def timestampNow():
    return int(time.time() - epoch_diff)
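Context for this row (my note, not part of the record): Python 2 raised ValueError for an out-of-range gmtime() argument, while CPython 3.3+ raises OverflowError when the value does not fit the platform's time_t, hence catching both. Illustrative only:

import time

try:
    time.gmtime(2 ** 100)  # far outside any platform's time_t range
except (OverflowError, ValueError) as exc:
    print(type(exc).__name__)  # OverflowError on CPython 3.3+, ValueError on 2.x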
fbdc69e218a71e984982a39fc36de19b7cf56f90
Publishers/SamplePachube.py
Publishers/SamplePachube.py
import clr from System import * from System.Net import WebClient from System.Xml import XmlDocument from System.Diagnostics import Trace url = "http://pachube.com/api/" apiKey = "40ab667a92d6f892fef6099f38ad5eb31e619dffd793ff8842ae3b00eaf7d7cb" environmentId = 2065 def Publish(topic, data): ms = MemoryStream() Trace.WriteLine("Pachube Sample") client = WebClient() client.Headers.Add('X-PachubeApiKey', apiKey) watts, temp = 25, 44 resp = client.UploadString(CreateFullUrl(), "PUT", str(watts) + "," + str(temp)) client.Dispose(); return 1 def CreateFullUrl(): return url + str(environmentId) + '.csv' def Shutdown(): return 1 def GetTopics(): return ["PowerMeter/CC128/Mark"]
import clr from System import * from System.Net import WebClient from System.Xml import XmlDocument from System.Diagnostics import Trace url = "http://pachube.com/api/" apiKey = "<Your-Pachube-Api-Key-Here>" environmentId = -1 def Publish(topic, data): ms = MemoryStream() Trace.WriteLine("Pachube Sample") client = WebClient() client.Headers.Add('X-PachubeApiKey', apiKey) watts, temp = 25, 44 resp = client.UploadString(CreateFullUrl(), "PUT", str(watts) + "," + str(temp)) client.Dispose(); return 1 def CreateFullUrl(): return url + str(environmentId) + '.csv' def Shutdown(): return 1 def GetTopics(): return ["PowerMeter/CC128/Mark"]
Change to sample pachube script
Change to sample pachube script
Python
mit
markallanson/sspe,markallanson/sspe
import clr from System import * from System.Net import WebClient from System.Xml import XmlDocument from System.Diagnostics import Trace url = "http://pachube.com/api/" - apiKey = "40ab667a92d6f892fef6099f38ad5eb31e619dffd793ff8842ae3b00eaf7d7cb" + apiKey = "<Your-Pachube-Api-Key-Here>" - environmentId = 2065 + environmentId = -1 def Publish(topic, data): ms = MemoryStream() Trace.WriteLine("Pachube Sample") client = WebClient() client.Headers.Add('X-PachubeApiKey', apiKey) watts, temp = 25, 44 resp = client.UploadString(CreateFullUrl(), "PUT", str(watts) + "," + str(temp)) client.Dispose(); return 1 def CreateFullUrl(): return url + str(environmentId) + '.csv' def Shutdown(): return 1 def GetTopics(): return ["PowerMeter/CC128/Mark"]
Change to sample pachube script
## Code Before:
from __future__ import absolute_import
from __future__ import unicode_literals
from memoized import memoized
from custom.intrahealth.filters import RecapPassageLocationFilter2, FRMonthFilter, FRYearFilter
from custom.intrahealth.sqldata import RecapPassageData2, DateSource2
from custom.intrahealth.reports.tableu_de_board_report_v2 import MultiReport


class RecapPassageReport2(MultiReport):
    title = "Recap Passage NEW"
    name = "Recap Passage NEW"
    slug = 'recap_passage2'
    report_title = "Recap Passage"
    exportable = True
    default_rows = 10
    fields = [FRMonthFilter, FRYearFilter, RecapPassageLocationFilter2]

    def config_update(self, config):
        if self.location and self.location.location_type_name.lower() == 'pps':
            config['location_id'] = self.location.location_id

    @property
    @memoized
    def data_providers(self):
        dates = DateSource2(config=self.report_config).rows
        data_providers = []
        for date in dates:
            config = self.report_config
            config.update(dict(startdate=date, enddate=date))
            data_providers.append(RecapPassageData2(config=config))
        if not data_providers:
            data_providers.append(RecapPassageData2(config=self.report_config))
        return data_providers

## Instruction:
Fix month filter for recap passage report

## Code After:
from __future__ import absolute_import
from __future__ import unicode_literals
from memoized import memoized

from corehq.apps.reports.standard import MonthYearMixin
from custom.intrahealth.filters import RecapPassageLocationFilter2, FRMonthFilter, FRYearFilter
from custom.intrahealth.sqldata import RecapPassageData2, DateSource2
from custom.intrahealth.reports.tableu_de_board_report_v2 import MultiReport


class RecapPassageReport2(MonthYearMixin, MultiReport):
    title = "Recap Passage NEW"
    name = "Recap Passage NEW"
    slug = 'recap_passage2'
    report_title = "Recap Passage"
    exportable = True
    default_rows = 10
    fields = [FRMonthFilter, FRYearFilter, RecapPassageLocationFilter2]

    def config_update(self, config):
        if self.location and self.location.location_type_name.lower() == 'pps':
            config['location_id'] = self.location.location_id

    @property
    @memoized
    def data_providers(self):
        dates = DateSource2(config=self.report_config).rows
        data_providers = []
        for date in dates:
            config = self.report_config
            config.update(dict(startdate=date, enddate=date))
            data_providers.append(RecapPassageData2(config=config))
        if not data_providers:
            data_providers.append(RecapPassageData2(config=self.report_config))
        return data_providers
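Why the mixin is placed first (my gloss on the fix): Python resolves attributes left to right along the MRO, so anything MonthYearMixin defines must precede MultiReport's version to take effect. Stand-in classes (hypothetical attribute name) to illustrate:

class Mixin(object):
    default_datespan = 'month'

class Base(object):
    default_datespan = 'none'

class Report(Mixin, Base):
    pass

print([c.__name__ for c in Report.__mro__])  # ['Report', 'Mixin', 'Base', 'object']
print(Report.default_datespan)               # 'month' -- Mixin wins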
2a8a564fbd48fba25c4876ff3d4317152a1d647c
tests/basics/builtin_range.py
tests/basics/builtin_range.py
print(range(4))

# bool
print(bool(range(0)))
print(bool(range(10)))

# len
print(len(range(0)))
print(len(range(4)))
print(len(range(1, 4)))
print(len(range(1, 4, 2)))
print(len(range(1, 4, -1)))
print(len(range(4, 1, -1)))
print(len(range(4, 1, -2)))

# subscr
print(range(4)[0])
print(range(4)[1])
print(range(4)[-1])

# slice
print(range(4)[0:])
print(range(4)[1:])
print(range(4)[1:2])
print(range(4)[1:3])
print(range(4)[1::2])
print(range(4)[1:-2:2])

# attrs
print(range(1, 2, 3).start)
print(range(1, 2, 3).stop)
print(range(1, 2, 3).step)

# bad unary op
try:
    -range(1)
except TypeError:
    print("TypeError")

# bad subscription (can't store)
try:
    range(1)[0] = 1
except TypeError:
    print("TypeError")
print(range(4))

# bool
print(bool(range(0)))
print(bool(range(10)))

# len
print(len(range(0)))
print(len(range(4)))
print(len(range(1, 4)))
print(len(range(1, 4, 2)))
print(len(range(1, 4, -1)))
print(len(range(4, 1, -1)))
print(len(range(4, 1, -2)))

# subscr
print(range(4)[0])
print(range(4)[1])
print(range(4)[-1])

# slice
print(range(4)[0:])
print(range(4)[1:])
print(range(4)[1:2])
print(range(4)[1:3])
print(range(4)[1::2])
print(range(4)[1:-2:2])
print(range(1,4)[:])
print(range(1,4)[0:])
print(range(1,4)[1:])
print(range(1,4)[:-1])
print(range(7,-2,-4)[:])

# attrs
print(range(1, 2, 3).start)
print(range(1, 2, 3).stop)
print(range(1, 2, 3).step)

# bad unary op
try:
    -range(1)
except TypeError:
    print("TypeError")

# bad subscription (can't store)
try:
    range(1)[0] = 1
except TypeError:
    print("TypeError")
Test slicing a range that does not start at zero.
tests: Test slicing a range that does not start at zero.
Python
mit
torwag/micropython,TDAbboud/micropython,dinau/micropython,dmazzella/micropython,pramasoul/micropython,adafruit/micropython,danicampora/micropython,misterdanb/micropython,trezor/micropython,misterdanb/micropython,redbear/micropython,noahwilliamsson/micropython,adafruit/circuitpython,alex-robbins/micropython,torwag/micropython,alex-robbins/micropython,mpalomer/micropython,ernesto-g/micropython,drrk/micropython,trezor/micropython,dinau/micropython,adamkh/micropython,pfalcon/micropython,matthewelse/micropython,xhat/micropython,cloudformdesign/micropython,chrisdearman/micropython,puuu/micropython,ernesto-g/micropython,turbinenreiter/micropython,bvernoux/micropython,alex-robbins/micropython,dxxb/micropython,alex-march/micropython,deshipu/micropython,MrSurly/micropython,adafruit/micropython,hiway/micropython,SHA2017-badge/micropython-esp32,ryannathans/micropython,ernesto-g/micropython,matthewelse/micropython,HenrikSolver/micropython,jmarcelino/pycom-micropython,MrSurly/micropython-esp32,pramasoul/micropython,adafruit/micropython,ganshun666/micropython,puuu/micropython,selste/micropython,martinribelotta/micropython,ruffy91/micropython,omtinez/micropython,Peetz0r/micropython-esp32,hosaka/micropython,lowRISC/micropython,kerneltask/micropython,selste/micropython,micropython/micropython-esp32,misterdanb/micropython,cloudformdesign/micropython,deshipu/micropython,supergis/micropython,tuc-osg/micropython,mianos/micropython,alex-robbins/micropython,xuxiaoxin/micropython,mpalomer/micropython,ChuckM/micropython,blmorris/micropython,turbinenreiter/micropython,PappaPeppar/micropython,Timmenem/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,pramasoul/micropython,redbear/micropython,xhat/micropython,dhylands/micropython,ganshun666/micropython,supergis/micropython,infinnovation/micropython,praemdonck/micropython,cloudformdesign/micropython,mhoffma/micropython,AriZuu/micropython,bvernoux/micropython,turbinenreiter/micropython,bvernoux/micropython,mhoffma/micropython,selste/micropython,xuxiaoxin/micropython,infinnovation/micropython,bvernoux/micropython,dmazzella/micropython,Timmenem/micropython,tralamazza/micropython,dxxb/micropython,adafruit/circuitpython,SHA2017-badge/micropython-esp32,kerneltask/micropython,infinnovation/micropython,vitiral/micropython,misterdanb/micropython,lowRISC/micropython,tobbad/micropython,adamkh/micropython,jmarcelino/pycom-micropython,pfalcon/micropython,tuc-osg/micropython,pramasoul/micropython,omtinez/micropython,blmorris/micropython,puuu/micropython,swegener/micropython,Peetz0r/micropython-esp32,emfcamp/micropython,hosaka/micropython,cloudformdesign/micropython,mianos/micropython,tuc-osg/micropython,xuxiaoxin/micropython,xuxiaoxin/micropython,pozetroninc/micropython,MrSurly/micropython-esp32,misterdanb/micropython,mianos/micropython,swegener/micropython,noahwilliamsson/micropython,pozetroninc/micropython,cwyark/micropython,chrisdearman/micropython,hosaka/micropython,deshipu/micropython,toolmacher/micropython,dhylands/micropython,SHA2017-badge/micropython-esp32,henriknelson/micropython,cwyark/micropython,drrk/micropython,MrSurly/micropython,redbear/micropython,henriknelson/micropython,matthewelse/micropython,mhoffma/micropython,dinau/micropython,trezor/micropython,kerneltask/micropython,feilongfl/micropython,galenhz/micropython,ChuckM/micropython,toolmacher/micropython,alex-march/micropython,dhylands/micropython,torwag/micropython,selste/micropython,EcmaXp/micropython,noahwilliamsson/micropython,galenhz/micropython,micropython/micropython-esp32,feilongfl/micropyt
hon,ruffy91/micropython,micropython/micropython-esp32,SHA2017-badge/micropython-esp32,kerneltask/micropython,ryannathans/micropython,Timmenem/micropython,vitiral/micropython,chrisdearman/micropython,supergis/micropython,adamkh/micropython,lowRISC/micropython,MrSurly/micropython,dhylands/micropython,blazewicz/micropython,adamkh/micropython,toolmacher/micropython,omtinez/micropython,cwyark/micropython,tuc-osg/micropython,swegener/micropython,danicampora/micropython,Peetz0r/micropython-esp32,dxxb/micropython,xhat/micropython,toolmacher/micropython,ruffy91/micropython,henriknelson/micropython,mpalomer/micropython,oopy/micropython,danicampora/micropython,ChuckM/micropython,deshipu/micropython,EcmaXp/micropython,PappaPeppar/micropython,feilongfl/micropython,ganshun666/micropython,alex-march/micropython,hiway/micropython,trezor/micropython,mhoffma/micropython,dhylands/micropython,AriZuu/micropython,chrisdearman/micropython,puuu/micropython,swegener/micropython,drrk/micropython,mhoffma/micropython,HenrikSolver/micropython,emfcamp/micropython,pfalcon/micropython,turbinenreiter/micropython,AriZuu/micropython,blazewicz/micropython,hiway/micropython,hosaka/micropython,neilh10/micropython,adafruit/circuitpython,EcmaXp/micropython,hosaka/micropython,martinribelotta/micropython,hiway/micropython,feilongfl/micropython,xhat/micropython,pozetroninc/micropython,pfalcon/micropython,supergis/micropython,emfcamp/micropython,HenrikSolver/micropython,praemdonck/micropython,hiway/micropython,vitiral/micropython,oopy/micropython,chrisdearman/micropython,pozetroninc/micropython,noahwilliamsson/micropython,PappaPeppar/micropython,pramasoul/micropython,ryannathans/micropython,xuxiaoxin/micropython,blmorris/micropython,adafruit/micropython,blmorris/micropython,tobbad/micropython,ChuckM/micropython,selste/micropython,matthewelse/micropython,Timmenem/micropython,jmarcelino/pycom-micropython,PappaPeppar/micropython,lowRISC/micropython,EcmaXp/micropython,galenhz/micropython,MrSurly/micropython-esp32,deshipu/micropython,TDAbboud/micropython,matthewelse/micropython,HenrikSolver/micropython,AriZuu/micropython,pozetroninc/micropython,feilongfl/micropython,blazewicz/micropython,henriknelson/micropython,vitiral/micropython,SHA2017-badge/micropython-esp32,omtinez/micropython,bvernoux/micropython,tralamazza/micropython,adafruit/circuitpython,galenhz/micropython,jmarcelino/pycom-micropython,turbinenreiter/micropython,AriZuu/micropython,xhat/micropython,tobbad/micropython,TDAbboud/micropython,danicampora/micropython,ChuckM/micropython,infinnovation/micropython,tralamazza/micropython,martinribelotta/micropython,blmorris/micropython,MrSurly/micropython,noahwilliamsson/micropython,henriknelson/micropython,martinribelotta/micropython,redbear/micropython,puuu/micropython,ryannathans/micropython,ernesto-g/micropython,omtinez/micropython,adamkh/micropython,dxxb/micropython,EcmaXp/micropython,dmazzella/micropython,mianos/micropython,torwag/micropython,cwyark/micropython,drrk/micropython,danicampora/micropython,ruffy91/micropython,ernesto-g/micropython,TDAbboud/micropython,MrSurly/micropython-esp32,drrk/micropython,ganshun666/micropython,neilh10/micropython,MrSurly/micropython,alex-march/micropython,neilh10/micropython,adafruit/circuitpython,TDAbboud/micropython,jmarcelino/pycom-micropython,cwyark/micropython,ruffy91/micropython,praemdonck/micropython,swegener/micropython,cloudformdesign/micropython,Peetz0r/micropython-esp32,pfalcon/micropython,infinnovation/micropython,supergis/micropython,Peetz0r/micropython-esp32,blazewicz/micropython,dxxb
/micropython,Timmenem/micropython,matthewelse/micropython,oopy/micropython,PappaPeppar/micropython,tuc-osg/micropython,trezor/micropython,praemdonck/micropython,ganshun666/micropython,lowRISC/micropython,oopy/micropython,tobbad/micropython,dinau/micropython,mianos/micropython,tralamazza/micropython,mpalomer/micropython,tobbad/micropython,dmazzella/micropython,praemdonck/micropython,redbear/micropython,alex-march/micropython,neilh10/micropython,galenhz/micropython,vitiral/micropython,toolmacher/micropython,neilh10/micropython,oopy/micropython,blazewicz/micropython,micropython/micropython-esp32,kerneltask/micropython,adafruit/micropython,mpalomer/micropython,emfcamp/micropython,alex-robbins/micropython,martinribelotta/micropython,emfcamp/micropython,adafruit/circuitpython,ryannathans/micropython,torwag/micropython,HenrikSolver/micropython,dinau/micropython
print(range(4))

# bool
print(bool(range(0)))
print(bool(range(10)))

# len
print(len(range(0)))
print(len(range(4)))
print(len(range(1, 4)))
print(len(range(1, 4, 2)))
print(len(range(1, 4, -1)))
print(len(range(4, 1, -1)))
print(len(range(4, 1, -2)))

# subscr
print(range(4)[0])
print(range(4)[1])
print(range(4)[-1])

# slice
print(range(4)[0:])
print(range(4)[1:])
print(range(4)[1:2])
print(range(4)[1:3])
print(range(4)[1::2])
print(range(4)[1:-2:2])
+ print(range(1,4)[:])
+ print(range(1,4)[0:])
+ print(range(1,4)[1:])
+ print(range(1,4)[:-1])
+ print(range(7,-2,-4)[:])

# attrs
print(range(1, 2, 3).start)
print(range(1, 2, 3).stop)
print(range(1, 2, 3).step)

# bad unary op
try:
    -range(1)
except TypeError:
    print("TypeError")

# bad subscription (can't store)
try:
    range(1)[0] = 1
except TypeError:
    print("TypeError")
Test slicing a range that does not start at zero.
## Code Before:
print(range(4))

# bool
print(bool(range(0)))
print(bool(range(10)))

# len
print(len(range(0)))
print(len(range(4)))
print(len(range(1, 4)))
print(len(range(1, 4, 2)))
print(len(range(1, 4, -1)))
print(len(range(4, 1, -1)))
print(len(range(4, 1, -2)))

# subscr
print(range(4)[0])
print(range(4)[1])
print(range(4)[-1])

# slice
print(range(4)[0:])
print(range(4)[1:])
print(range(4)[1:2])
print(range(4)[1:3])
print(range(4)[1::2])
print(range(4)[1:-2:2])

# attrs
print(range(1, 2, 3).start)
print(range(1, 2, 3).stop)
print(range(1, 2, 3).step)

# bad unary op
try:
    -range(1)
except TypeError:
    print("TypeError")

# bad subscription (can't store)
try:
    range(1)[0] = 1
except TypeError:
    print("TypeError")

## Instruction:
Test slicing a range that does not start at zero.

## Code After:
print(range(4))

# bool
print(bool(range(0)))
print(bool(range(10)))

# len
print(len(range(0)))
print(len(range(4)))
print(len(range(1, 4)))
print(len(range(1, 4, 2)))
print(len(range(1, 4, -1)))
print(len(range(4, 1, -1)))
print(len(range(4, 1, -2)))

# subscr
print(range(4)[0])
print(range(4)[1])
print(range(4)[-1])

# slice
print(range(4)[0:])
print(range(4)[1:])
print(range(4)[1:2])
print(range(4)[1:3])
print(range(4)[1::2])
print(range(4)[1:-2:2])
print(range(1,4)[:])
print(range(1,4)[0:])
print(range(1,4)[1:])
print(range(1,4)[:-1])
print(range(7,-2,-4)[:])

# attrs
print(range(1, 2, 3).start)
print(range(1, 2, 3).stop)
print(range(1, 2, 3).step)

# bad unary op
try:
    -range(1)
except TypeError:
    print("TypeError")

# bad subscription (can't store)
try:
    range(1)[0] = 1
except TypeError:
    print("TypeError")
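As a reading aid for the new assertions (not part of the test file): in Python 3, slicing a range yields another range with adjusted start/stop/step, for example:

print(list(range(7, -2, -4)))  # [7, 3, -1]
print(range(7, -2, -4)[:])     # range(7, -2, -4)
print(range(1, 4)[:-1])        # range(1, 3)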
73cb3c6883940e96e656b9b7dd6033ed2e41cb33
custom/intrahealth/reports/recap_passage_report_v2.py
custom/intrahealth/reports/recap_passage_report_v2.py
from __future__ import absolute_import from __future__ import unicode_literals from memoized import memoized from custom.intrahealth.filters import RecapPassageLocationFilter2, FRMonthFilter, FRYearFilter from custom.intrahealth.sqldata import RecapPassageData2, DateSource2 from custom.intrahealth.reports.tableu_de_board_report_v2 import MultiReport class RecapPassageReport2(MultiReport): title = "Recap Passage NEW" name = "Recap Passage NEW" slug = 'recap_passage2' report_title = "Recap Passage" exportable = True default_rows = 10 fields = [FRMonthFilter, FRYearFilter, RecapPassageLocationFilter2] def config_update(self, config): if self.location and self.location.location_type_name.lower() == 'pps': config['location_id'] = self.location.location_id @property @memoized def data_providers(self): dates = DateSource2(config=self.report_config).rows data_providers = [] for date in dates: config = self.report_config config.update(dict(startdate=date, enddate=date)) data_providers.append(RecapPassageData2(config=config)) if not data_providers: data_providers.append(RecapPassageData2(config=self.report_config)) return data_providers
from __future__ import absolute_import from __future__ import unicode_literals from memoized import memoized from corehq.apps.reports.standard import MonthYearMixin from custom.intrahealth.filters import RecapPassageLocationFilter2, FRMonthFilter, FRYearFilter from custom.intrahealth.sqldata import RecapPassageData2, DateSource2 from custom.intrahealth.reports.tableu_de_board_report_v2 import MultiReport class RecapPassageReport2(MonthYearMixin, MultiReport): title = "Recap Passage NEW" name = "Recap Passage NEW" slug = 'recap_passage2' report_title = "Recap Passage" exportable = True default_rows = 10 fields = [FRMonthFilter, FRYearFilter, RecapPassageLocationFilter2] def config_update(self, config): if self.location and self.location.location_type_name.lower() == 'pps': config['location_id'] = self.location.location_id @property @memoized def data_providers(self): dates = DateSource2(config=self.report_config).rows data_providers = [] for date in dates: config = self.report_config config.update(dict(startdate=date, enddate=date)) data_providers.append(RecapPassageData2(config=config)) if not data_providers: data_providers.append(RecapPassageData2(config=self.report_config)) return data_providers
Fix month filter for recap passage report
Fix month filter for recap passage report
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
from __future__ import absolute_import from __future__ import unicode_literals from memoized import memoized + + from corehq.apps.reports.standard import MonthYearMixin from custom.intrahealth.filters import RecapPassageLocationFilter2, FRMonthFilter, FRYearFilter from custom.intrahealth.sqldata import RecapPassageData2, DateSource2 from custom.intrahealth.reports.tableu_de_board_report_v2 import MultiReport - class RecapPassageReport2(MultiReport): + class RecapPassageReport2(MonthYearMixin, MultiReport): title = "Recap Passage NEW" name = "Recap Passage NEW" slug = 'recap_passage2' report_title = "Recap Passage" exportable = True default_rows = 10 fields = [FRMonthFilter, FRYearFilter, RecapPassageLocationFilter2] def config_update(self, config): if self.location and self.location.location_type_name.lower() == 'pps': config['location_id'] = self.location.location_id @property @memoized def data_providers(self): dates = DateSource2(config=self.report_config).rows data_providers = [] for date in dates: config = self.report_config config.update(dict(startdate=date, enddate=date)) data_providers.append(RecapPassageData2(config=config)) if not data_providers: data_providers.append(RecapPassageData2(config=self.report_config)) return data_providers
Fix month filter for recap passage report
## Code Before:
from flask import Blueprint, render_template, redirect, session, url_for

from app.decorators import login_required

blueprint = Blueprint('public', __name__)


@blueprint.route('/')
def home():
    """Return Home Page"""
    return render_template('public/index.html')


@blueprint.route('/login', methods=['GET', 'POST'])
def login():
    """Return Login page"""
    session['logged_in'] = True
    return render_template('public/login.html')


@blueprint.route('/logout')
def logout():
    session.pop('logged_in', None)
    return redirect(url_for('home'))

## Instruction:
Add logic to verify and login admin

## Code After:
import os
from flask import Blueprint, redirect, render_template, request, session, url_for

from app.decorators import login_required

ADMIN_USERNAME = os.environ['CUSTOMER_INFO_ADMIN_USERNAME']
ADMIN_PASSWORD_HASH = os.environ['CUSTOMER_INFO_ADMIN_PASSWORD_HASH']

blueprint = Blueprint('public', __name__)


@blueprint.route('/')
def home():
    """Return Home Page"""
    return render_template('public/index.html')


def _validate_credentials(username, password):
    return (username == ADMIN_USERNAME and
            check_password_hash(ADMIN_PASSWORD_HASH, password))


@blueprint.route('/login', methods=['GET', 'POST'])
def login():
    """Return Login page"""
    error = None
    if request.method == 'POST':
        username = request.form['username']
        password = request.form['password']
        if _validate_credentials(username, password):
            session['logged_in'] = True
            return redirect(url_for('customer/index.html'))
        else:
            error = 'Invalid username or password'
    return render_template('public/login.html', error=error)


@blueprint.route('/logout')
def logout():
    session.pop('logged_in', None)
    return redirect(url_for('home'))
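Worth noting about this row (my observation): the new code calls check_password_hash without importing it, so _validate_credentials would raise NameError when first exercised. In a Flask stack the helper normally comes from Werkzeug (an assumption based on the usual pairing):

from werkzeug.security import generate_password_hash, check_password_hash

pw_hash = generate_password_hash('hunter2')     # hypothetical password
print(check_password_hash(pw_hash, 'hunter2'))  # True
print(check_password_hash(pw_hash, 'wrong'))    # False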
23a88191e5d827dea84ad533853657110c94c840
app/public/views.py
app/public/views.py
from flask import Blueprint, render_template, redirect, session, url_for from app.decorators import login_required blueprint = Blueprint('public', __name__) @blueprint.route('/') def home(): """Return Home Page""" return render_template('public/index.html') @blueprint.route('/login', methods=['GET', 'POST']) def login(): """Return Login page""" session['logged_in'] = True return render_template('public/login.html') @blueprint.route('/logout') def logout(): session.pop('logged_in', None) return redirect(url_for('home'))
import os from flask import Blueprint, redirect, render_template, request, session, url_for from app.decorators import login_required ADMIN_USERNAME = os.environ['CUSTOMER_INFO_ADMIN_USERNAME'] ADMIN_PASSWORD_HASH = os.environ['CUSTOMER_INFO_ADMIN_PASSWORD_HASH'] blueprint = Blueprint('public', __name__) @blueprint.route('/') def home(): """Return Home Page""" return render_template('public/index.html') def _validate_credentials(username, password): return (username == ADMIN_USERNAME and check_password_hash(ADMIN_PASSWORD_HASH, password)) @blueprint.route('/login', methods=['GET', 'POST']) def login(): """Return Login page""" error = None if request.method == 'POST': username = request.form['username'] password = request.form['password'] if _validate_credentials(username, password): session['logged_in'] = True return redirect(url_for('customer/index.html')) else: error = 'Invalid username or password' return render_template('public/login.html', error=error) @blueprint.route('/logout') def logout(): session.pop('logged_in', None) return redirect(url_for('home'))
Add logic to verify and login admin
Add logic to verify and login admin
Python
apache-2.0
ueg1990/customer-info,ueg1990/customer-info
+ import os - from flask import Blueprint, render_template, redirect, session, url_for + from flask import Blueprint, redirect, render_template, request, session, url_for from app.decorators import login_required + + ADMIN_USERNAME = os.environ['CUSTOMER_INFO_ADMIN_USERNAME'] + ADMIN_PASSWORD_HASH = os.environ['CUSTOMER_INFO_ADMIN_PASSWORD_HASH'] blueprint = Blueprint('public', __name__) @blueprint.route('/') def home(): """Return Home Page""" return render_template('public/index.html') + def _validate_credentials(username, password): + return (username == ADMIN_USERNAME and + check_password_hash(ADMIN_PASSWORD_HASH, password)) + + @blueprint.route('/login', methods=['GET', 'POST']) def login(): """Return Login page""" + error = None + if request.method == 'POST': + username = request.form['username'] + password = request.form['password'] + if _validate_credentials(username, password): - session['logged_in'] = True + session['logged_in'] = True + return redirect(url_for('customer/index.html')) + else: + error = 'Invalid username or password' - return render_template('public/login.html') + return render_template('public/login.html', error=error) @blueprint.route('/logout') def logout(): session.pop('logged_in', None) return redirect(url_for('home'))
Add logic to verify and login admin
## Code Before:
'''
Created on Jun 20, 2010

@author: jnaous
'''
from django import forms
from expedient.ui.html.models import SliceFlowSpace

class FlowSpaceForm(forms.ModelForm):
    """
    Form to edit flowspace.
    """
    class Meta:
        model = SliceFlowSpace
        exclude = ["slice"]

## Instruction:
Modify FlowSpaceForm to use actual stored rules

## Code After:
'''
Created on Jun 20, 2010

@author: jnaous
'''
from django import forms
from openflow.plugin.models import FlowSpaceRule

class FlowSpaceForm(forms.ModelForm):
    """
    Form to edit flowspace.
    """
    class Meta:
        model = FlowSpaceRule

    def __init__(self, sliver_qs, *args, **kwargs):
        super(FlowSpaceForm, self).__init__(*args, **kwargs)
        self.fields["slivers"].queryset = sliver_qs
0c6dfa4ad297562ec263a8e98bb75d836d2ab054
src/python/expedient/ui/html/forms.py
src/python/expedient/ui/html/forms.py
''' Created on Jun 20, 2010 @author: jnaous ''' from django import forms from expedient.ui.html.models import SliceFlowSpace class FlowSpaceForm(forms.ModelForm): """ Form to edit flowspace. """ class Meta: model = SliceFlowSpace exclude = ["slice"]
''' Created on Jun 20, 2010 @author: jnaous ''' from django import forms from openflow.plugin.models import FlowSpaceRule class FlowSpaceForm(forms.ModelForm): """ Form to edit flowspace. """ class Meta: model = FlowSpaceRule def __init__(self, sliver_qs, *args, **kwargs): super(FlowSpaceForm, self).__init__(*args, **kwargs) self.fields["slivers"].queryset = sliver_qs
Modify FlowSpaceForm to use actual stored rules
Modify FlowSpaceForm to use actual stored rules
Python
bsd-3-clause
avlach/univbris-ocf,avlach/univbris-ocf,avlach/univbris-ocf,avlach/univbris-ocf
''' Created on Jun 20, 2010 @author: jnaous ''' from django import forms - from expedient.ui.html.models import SliceFlowSpace + from openflow.plugin.models import FlowSpaceRule class FlowSpaceForm(forms.ModelForm): """ Form to edit flowspace. """ class Meta: - model = SliceFlowSpace + model = FlowSpaceRule - exclude = ["slice"] + + def __init__(self, sliver_qs, *args, **kwargs): + super(FlowSpaceForm, self).__init__(*args, **kwargs) + self.fields["slivers"].queryset = sliver_qs
Modify FlowSpaceForm to use actual stored rules
## Code Before:
name = 'telegram-bot-framework'
description = 'Python Telegram bot API framework'
url = 'https://github.com/alvarogzp/telegram-bot-framework'

author_name = 'Alvaro Gutierrez Perez'
author_email = 'alvarogzp@gmail.com'
authors_credits = (
    ("@AlvaroGP", "main developer"),
    ("@KouteiCheke", "i18n & UI/UX support")
)

is_open_source = True

license_name = 'GNU AGPL 3.0+'
license_url = 'https://www.gnu.org/licenses/agpl-3.0.en.html'

donation_addresses = ()

## Instruction:
Add bitcoin address to donation addresses

## Code After:
name = 'telegram-bot-framework'
description = 'Python Telegram bot API framework'
url = 'https://github.com/alvarogzp/telegram-bot-framework'

author_name = 'Alvaro Gutierrez Perez'
author_email = 'alvarogzp@gmail.com'
authors_credits = (
    ("@AlvaroGP", "main developer"),
    ("@KouteiCheke", "i18n & UI/UX support")
)

is_open_source = True

license_name = 'GNU AGPL 3.0+'
license_url = 'https://www.gnu.org/licenses/agpl-3.0.en.html'

donation_addresses = (
    ("Bitcoin", "36rwcSgcU1H9fuMvZoebZD3auus6h9wVXk"),
    ("Bitcoin (bech32 format)", "bc1q4943c5p5dl0hujmmcg2g0568hetynajd3qqtv0")
)
cf1da65820085a84eee51884431b0020d3018f23
bot/project_info.py
bot/project_info.py
name = 'telegram-bot-framework' description = 'Python Telegram bot API framework' url = 'https://github.com/alvarogzp/telegram-bot-framework' author_name = 'Alvaro Gutierrez Perez' author_email = 'alvarogzp@gmail.com' authors_credits = ( ("@AlvaroGP", "main developer"), ("@KouteiCheke", "i18n & UI/UX support") ) is_open_source = True license_name = 'GNU AGPL 3.0+' license_url = 'https://www.gnu.org/licenses/agpl-3.0.en.html' donation_addresses = ()
name = 'telegram-bot-framework' description = 'Python Telegram bot API framework' url = 'https://github.com/alvarogzp/telegram-bot-framework' author_name = 'Alvaro Gutierrez Perez' author_email = 'alvarogzp@gmail.com' authors_credits = ( ("@AlvaroGP", "main developer"), ("@KouteiCheke", "i18n & UI/UX support") ) is_open_source = True license_name = 'GNU AGPL 3.0+' license_url = 'https://www.gnu.org/licenses/agpl-3.0.en.html' donation_addresses = ( ("Bitcoin", "36rwcSgcU1H9fuMvZoebZD3auus6h9wVXk"), ("Bitcoin (bech32 format)", "bc1q4943c5p5dl0hujmmcg2g0568hetynajd3qqtv0") )
Add bitcoin address to donation addresses
Add bitcoin address to donation addresses
Python
agpl-3.0
alvarogzp/telegram-bot,alvarogzp/telegram-bot
name = 'telegram-bot-framework' description = 'Python Telegram bot API framework' url = 'https://github.com/alvarogzp/telegram-bot-framework' author_name = 'Alvaro Gutierrez Perez' author_email = 'alvarogzp@gmail.com' authors_credits = ( ("@AlvaroGP", "main developer"), ("@KouteiCheke", "i18n & UI/UX support") ) is_open_source = True license_name = 'GNU AGPL 3.0+' license_url = 'https://www.gnu.org/licenses/agpl-3.0.en.html' - donation_addresses = () + donation_addresses = ( + ("Bitcoin", "36rwcSgcU1H9fuMvZoebZD3auus6h9wVXk"), + ("Bitcoin (bech32 format)", "bc1q4943c5p5dl0hujmmcg2g0568hetynajd3qqtv0") + )
Add bitcoin address to donation addresses
## Code Before:
import os.path

from blanc_basic_assets.models import Image
from django.apps import apps
from django.core.files import File
from django.core.management.base import BaseCommand

IMAGE_LIST = [
    (1, 'remember.jpg'),
    (2, 'sample-image-1.jpg'),
    (3, 'sample-image-2.jpg'),
    (4, 'sample-image-3.jpg'),
    (5, 'sample-image-4.jpg'),
]


class Command(BaseCommand):
    help = 'Load Django Church images'

    def handle(self, directory=None, *args, **options):
        image_dir = os.path.join(apps.get_app_path('djangochurch_data'), 'images')

        for image_id, image_name in IMAGE_LIST:
            self.stdout.write('Importing: %s' % (image_name,))
            image = Image.objects.get(id=image_id)
            image_file = os.path.join(image_dir, image_name)

            with open(image_file, 'rb') as f:
                image.file.save(image_name, File(f))

## Instruction:
Use updated app config for getting the path

## Code After:
import os.path

from blanc_basic_assets.models import Image
from django.apps import apps
from django.core.files import File
from django.core.management.base import BaseCommand

IMAGE_LIST = [
    (1, 'remember.jpg'),
    (2, 'sample-image-1.jpg'),
    (3, 'sample-image-2.jpg'),
    (4, 'sample-image-3.jpg'),
    (5, 'sample-image-4.jpg'),
]


class Command(BaseCommand):
    help = 'Load Django Church images'

    def handle(self, directory=None, *args, **options):
        data_app = apps.get_app_config('djangochurch_data')
        image_dir = os.path.join(data_app.path, 'images')

        for image_id, image_name in IMAGE_LIST:
            self.stdout.write('Importing: %s' % (image_name,))
            image = Image.objects.get(id=image_id)
            image_file = os.path.join(image_dir, image_name)

            with open(image_file, 'rb') as f:
                image.file.save(image_name, File(f))
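Background for this row (my note): since Django 1.7 the app registry exposes per-app metadata through AppConfig objects, whose path attribute is the app package's directory; that is the supported API the fix switches to. Minimal usage sketch (the app label is the record's; any installed app works):

from django.apps import apps

data_app = apps.get_app_config('djangochurch_data')
print(data_app.name)  # dotted module path of the app
print(data_app.path)  # filesystem directory of the app package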
2adf8e8bbf1d0f623e14b8490d511ac45cbb7430
djangochurch_data/management/commands/djangochurchimages.py
djangochurch_data/management/commands/djangochurchimages.py
import os.path from blanc_basic_assets.models import Image from django.apps import apps from django.core.files import File from django.core.management.base import BaseCommand IMAGE_LIST = [ (1, 'remember.jpg'), (2, 'sample-image-1.jpg'), (3, 'sample-image-2.jpg'), (4, 'sample-image-3.jpg'), (5, 'sample-image-4.jpg'), ] class Command(BaseCommand): help = 'Load Django Church images' def handle(self, directory=None, *args, **options): image_dir = os.path.join(apps.get_app_path('djangochurch_data'), 'images') for image_id, image_name in IMAGE_LIST: self.stdout.write('Importing: %s' % (image_name,)) image = Image.objects.get(id=image_id) image_file = os.path.join(image_dir, image_name) with open(image_file, 'rb') as f: image.file.save(image_name, File(f))
import os.path from blanc_basic_assets.models import Image from django.apps import apps from django.core.files import File from django.core.management.base import BaseCommand IMAGE_LIST = [ (1, 'remember.jpg'), (2, 'sample-image-1.jpg'), (3, 'sample-image-2.jpg'), (4, 'sample-image-3.jpg'), (5, 'sample-image-4.jpg'), ] class Command(BaseCommand): help = 'Load Django Church images' def handle(self, directory=None, *args, **options): data_app = apps.get_app_config('djangochurch_data') image_dir = os.path.join(data_app.path, 'images') for image_id, image_name in IMAGE_LIST: self.stdout.write('Importing: %s' % (image_name,)) image = Image.objects.get(id=image_id) image_file = os.path.join(image_dir, image_name) with open(image_file, 'rb') as f: image.file.save(image_name, File(f))
Use updated app config for getting the path
Use updated app config for getting the path Prevent warning with Django 1.8, fixes #3
Python
bsd-3-clause
djangochurch/djangochurch-data
import os.path from blanc_basic_assets.models import Image from django.apps import apps from django.core.files import File from django.core.management.base import BaseCommand IMAGE_LIST = [ (1, 'remember.jpg'), (2, 'sample-image-1.jpg'), (3, 'sample-image-2.jpg'), (4, 'sample-image-3.jpg'), (5, 'sample-image-4.jpg'), ] class Command(BaseCommand): help = 'Load Django Church images' def handle(self, directory=None, *args, **options): + data_app = apps.get_app_config('djangochurch_data') - image_dir = os.path.join(apps.get_app_path('djangochurch_data'), 'images') + image_dir = os.path.join(data_app.path, 'images') for image_id, image_name in IMAGE_LIST: self.stdout.write('Importing: %s' % (image_name,)) image = Image.objects.get(id=image_id) image_file = os.path.join(image_dir, image_name) with open(image_file, 'rb') as f: image.file.save(image_name, File(f))
Use updated app config for getting the path
## Code Before: import os.path from blanc_basic_assets.models import Image from django.apps import apps from django.core.files import File from django.core.management.base import BaseCommand IMAGE_LIST = [ (1, 'remember.jpg'), (2, 'sample-image-1.jpg'), (3, 'sample-image-2.jpg'), (4, 'sample-image-3.jpg'), (5, 'sample-image-4.jpg'), ] class Command(BaseCommand): help = 'Load Django Church images' def handle(self, directory=None, *args, **options): image_dir = os.path.join(apps.get_app_path('djangochurch_data'), 'images') for image_id, image_name in IMAGE_LIST: self.stdout.write('Importing: %s' % (image_name,)) image = Image.objects.get(id=image_id) image_file = os.path.join(image_dir, image_name) with open(image_file, 'rb') as f: image.file.save(image_name, File(f)) ## Instruction: Use updated app config for getting the path ## Code After: import os.path from blanc_basic_assets.models import Image from django.apps import apps from django.core.files import File from django.core.management.base import BaseCommand IMAGE_LIST = [ (1, 'remember.jpg'), (2, 'sample-image-1.jpg'), (3, 'sample-image-2.jpg'), (4, 'sample-image-3.jpg'), (5, 'sample-image-4.jpg'), ] class Command(BaseCommand): help = 'Load Django Church images' def handle(self, directory=None, *args, **options): data_app = apps.get_app_config('djangochurch_data') image_dir = os.path.join(data_app.path, 'images') for image_id, image_name in IMAGE_LIST: self.stdout.write('Importing: %s' % (image_name,)) image = Image.objects.get(id=image_id) image_file = os.path.join(image_dir, image_name) with open(image_file, 'rb') as f: image.file.save(image_name, File(f))
43e3df5a07caa1370e71858f593c9c8bd73d1e2f
cloudly/rqworker.py
cloudly/rqworker.py
from rq import Worker, Queue, Connection from rq.job import Job from cloudly.cache import redis from cloudly.memoized import Memoized def enqueue(function, *args): return _get_queue().enqueue(function, *args) def fetch_job(job_id): return Job.fetch(job_id, redis) @Memoized def _get_queue(): return Queue(connection=redis) def work(setup_fct=None): if setup_fct: setup_fct() listen = ['high', 'default', 'low'] with Connection(redis): worker = Worker(map(Queue, listen)) worker.work() if __name__ == '__main__': work()
from rq import Worker, Queue, Connection from rq.job import Job from cloudly.cache import redis from cloudly.memoized import Memoized def enqueue(function, *args, **kwargs): return _get_queue().enqueue(function, *args, **kwargs) def fetch_job(job_id): return Job.fetch(job_id, redis) @Memoized def _get_queue(): return Queue(connection=redis) def work(setup_fct=None): if setup_fct: setup_fct() listen = ['high', 'default', 'low'] with Connection(redis): worker = Worker(map(Queue, listen)) worker.work() if __name__ == '__main__': work()
Fix missing `kwargs` argument to enqueue.
Fix missing `kwargs` argument to enqueue.
Python
mit
ooda/cloudly,ooda/cloudly
from rq import Worker, Queue, Connection from rq.job import Job from cloudly.cache import redis from cloudly.memoized import Memoized + - def enqueue(function, *args): + def enqueue(function, *args, **kwargs): - return _get_queue().enqueue(function, *args) + return _get_queue().enqueue(function, *args, **kwargs) def fetch_job(job_id): return Job.fetch(job_id, redis) @Memoized def _get_queue(): return Queue(connection=redis) def work(setup_fct=None): if setup_fct: setup_fct() listen = ['high', 'default', 'low'] with Connection(redis): worker = Worker(map(Queue, listen)) worker.work() if __name__ == '__main__': work()
Fix missing `kwargs` argument to enqueue.
## Code Before: from rq import Worker, Queue, Connection from rq.job import Job from cloudly.cache import redis from cloudly.memoized import Memoized def enqueue(function, *args): return _get_queue().enqueue(function, *args) def fetch_job(job_id): return Job.fetch(job_id, redis) @Memoized def _get_queue(): return Queue(connection=redis) def work(setup_fct=None): if setup_fct: setup_fct() listen = ['high', 'default', 'low'] with Connection(redis): worker = Worker(map(Queue, listen)) worker.work() if __name__ == '__main__': work() ## Instruction: Fix missing `kwargs` argument to enqueue. ## Code After: from rq import Worker, Queue, Connection from rq.job import Job from cloudly.cache import redis from cloudly.memoized import Memoized def enqueue(function, *args, **kwargs): return _get_queue().enqueue(function, *args, **kwargs) def fetch_job(job_id): return Job.fetch(job_id, redis) @Memoized def _get_queue(): return Queue(connection=redis) def work(setup_fct=None): if setup_fct: setup_fct() listen = ['high', 'default', 'low'] with Connection(redis): worker = Worker(map(Queue, listen)) worker.work() if __name__ == '__main__': work()
0c0e81798b078547bc5931c26dd2b0ab6507db94
devilry/project/common/devilry_test_runner.py
devilry/project/common/devilry_test_runner.py
import warnings from django.test.runner import DiscoverRunner from django.utils.deprecation import RemovedInDjango20Warning, RemovedInDjango110Warning class DevilryTestRunner(DiscoverRunner): def setup_test_environment(self, **kwargs): # warnings.filterwarnings('ignore', category=RemovedInDjango) super(DevilryTestRunner, self).setup_test_environment(**kwargs) warnings.filterwarnings('ignore', category=RemovedInDjango20Warning) warnings.filterwarnings('ignore', category=DeprecationWarning) warnings.filterwarnings('ignore', category=RemovedInDjango110Warning)
import warnings from django.test.runner import DiscoverRunner from django.utils.deprecation import RemovedInDjango20Warning class DevilryTestRunner(DiscoverRunner): def setup_test_environment(self, **kwargs): # warnings.filterwarnings('ignore', category=RemovedInDjango) super(DevilryTestRunner, self).setup_test_environment(**kwargs) warnings.filterwarnings('ignore', category=RemovedInDjango20Warning) warnings.filterwarnings('ignore', category=DeprecationWarning)
Update warning ignores for Django 1.10.
project...DevilryTestRunner: Update warning ignores for Django 1.10.
Python
bsd-3-clause
devilry/devilry-django,devilry/devilry-django,devilry/devilry-django,devilry/devilry-django
import warnings from django.test.runner import DiscoverRunner - from django.utils.deprecation import RemovedInDjango20Warning, RemovedInDjango110Warning + from django.utils.deprecation import RemovedInDjango20Warning class DevilryTestRunner(DiscoverRunner): def setup_test_environment(self, **kwargs): # warnings.filterwarnings('ignore', category=RemovedInDjango) super(DevilryTestRunner, self).setup_test_environment(**kwargs) warnings.filterwarnings('ignore', category=RemovedInDjango20Warning) warnings.filterwarnings('ignore', category=DeprecationWarning) - warnings.filterwarnings('ignore', category=RemovedInDjango110Warning)
Update warning ignores for Django 1.10.
## Code Before: import warnings from django.test.runner import DiscoverRunner from django.utils.deprecation import RemovedInDjango20Warning, RemovedInDjango110Warning class DevilryTestRunner(DiscoverRunner): def setup_test_environment(self, **kwargs): # warnings.filterwarnings('ignore', category=RemovedInDjango) super(DevilryTestRunner, self).setup_test_environment(**kwargs) warnings.filterwarnings('ignore', category=RemovedInDjango20Warning) warnings.filterwarnings('ignore', category=DeprecationWarning) warnings.filterwarnings('ignore', category=RemovedInDjango110Warning) ## Instruction: Update warning ignores for Django 1.10. ## Code After: import warnings from django.test.runner import DiscoverRunner from django.utils.deprecation import RemovedInDjango20Warning class DevilryTestRunner(DiscoverRunner): def setup_test_environment(self, **kwargs): # warnings.filterwarnings('ignore', category=RemovedInDjango) super(DevilryTestRunner, self).setup_test_environment(**kwargs) warnings.filterwarnings('ignore', category=RemovedInDjango20Warning) warnings.filterwarnings('ignore', category=DeprecationWarning)
979d84f965b0118f86a8df7aa0311f65f8e36170
indra/tools/reading/readers/trips/__init__.py
indra/tools/reading/readers/trips/__init__.py
from indra.tools.reading.readers.core import EmptyReader from indra.sources import trips class TripsReader(EmptyReader): """A stand-in for TRIPS reading. Currently, we do not run TRIPS (more specifically DRUM) regularly at large scales, however on occasion we have outputs from TRIPS that were generated a while ago. """ name = 'TRIPS' def __init__(self, *args, **kwargs): self.version = self.get_version() return def _read(self, *args, **kwargs): return [] @classmethod def get_version(cls): return 'STATIC' @staticmethod def get_processor(content): return trips.process_xml(content)
import os import subprocess as sp from indra.tools.reading.readers.core import Reader from indra.sources.trips import client, process_xml from indra_db import formats class TripsReader(Reader): """A stand-in for TRIPS reading. Currently, we do not run TRIPS (more specifically DRUM) regularly at large scales, however on occasion we have outputs from TRIPS that were generated a while ago. """ name = 'TRIPS' result_format = formats.XML def __init__(self, *args, **kwargs): self.version = self.get_version() return def _read(self, content_iter, verbose=False, log=False, n_per_proc=None): # Start trips running if os.environ.get("IN_TRIPS_DOCKER", 'false') != 'true': return [] p = sp.Popen('/sw/drum/bin/startup.sh', stdout=sp.PIPE, stderr=sp.STDOUT) service_endpoint = 'http://localhost:80/cgi/' service_host = 'drum' # Process all the content. for content in content_iter: html = client.send_query(content.get_text(), service_endpoint=service_endpoint, service_host=service_host) xml = client.get_xml(html) self.add_result(content.get_id(), xml) return self.results @classmethod def get_version(cls): return 'STATIC' @staticmethod def get_processor(reading_content): return process_xml(reading_content)
Implement the basics of the TRIPS reader.
Implement the basics of the TRIPS reader.
Python
bsd-2-clause
sorgerlab/indra,johnbachman/belpy,sorgerlab/belpy,sorgerlab/belpy,sorgerlab/indra,johnbachman/belpy,johnbachman/indra,johnbachman/belpy,bgyori/indra,sorgerlab/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/indra,bgyori/indra,bgyori/indra
- from indra.tools.reading.readers.core import EmptyReader + import os + import subprocess as sp - from indra.sources import trips + from indra.tools.reading.readers.core import Reader + + from indra.sources.trips import client, process_xml + from indra_db import formats - class TripsReader(EmptyReader): + class TripsReader(Reader): """A stand-in for TRIPS reading. Currently, we do not run TRIPS (more specifically DRUM) regularly at large scales, however on occasion we have outputs from TRIPS that were generated a while ago. """ name = 'TRIPS' + result_format = formats.XML def __init__(self, *args, **kwargs): self.version = self.get_version() return - def _read(self, *args, **kwargs): + def _read(self, content_iter, verbose=False, log=False, n_per_proc=None): + # Start trips running + if os.environ.get("IN_TRIPS_DOCKER", 'false') != 'true': - return [] + return [] + + p = sp.Popen('/sw/drum/bin/startup.sh', stdout=sp.PIPE, + stderr=sp.STDOUT) + service_endpoint = 'http://localhost:80/cgi/' + service_host = 'drum' + + # Process all the content. + for content in content_iter: + html = client.send_query(content.get_text(), + service_endpoint=service_endpoint, + service_host=service_host) + xml = client.get_xml(html) + self.add_result(content.get_id(), xml) + + return self.results @classmethod def get_version(cls): return 'STATIC' @staticmethod - def get_processor(content): + def get_processor(reading_content): - return trips.process_xml(content) + return process_xml(reading_content)
Implement the basics of the TRIPS reader.
## Code Before: from indra.tools.reading.readers.core import EmptyReader from indra.sources import trips class TripsReader(EmptyReader): """A stand-in for TRIPS reading. Currently, we do not run TRIPS (more specifically DRUM) regularly at large scales, however on occasion we have outputs from TRIPS that were generated a while ago. """ name = 'TRIPS' def __init__(self, *args, **kwargs): self.version = self.get_version() return def _read(self, *args, **kwargs): return [] @classmethod def get_version(cls): return 'STATIC' @staticmethod def get_processor(content): return trips.process_xml(content) ## Instruction: Implement the basics of the TRIPS reader. ## Code After: import os import subprocess as sp from indra.tools.reading.readers.core import Reader from indra.sources.trips import client, process_xml from indra_db import formats class TripsReader(Reader): """A stand-in for TRIPS reading. Currently, we do not run TRIPS (more specifically DRUM) regularly at large scales, however on occasion we have outputs from TRIPS that were generated a while ago. """ name = 'TRIPS' result_format = formats.XML def __init__(self, *args, **kwargs): self.version = self.get_version() return def _read(self, content_iter, verbose=False, log=False, n_per_proc=None): # Start trips running if os.environ.get("IN_TRIPS_DOCKER", 'false') != 'true': return [] p = sp.Popen('/sw/drum/bin/startup.sh', stdout=sp.PIPE, stderr=sp.STDOUT) service_endpoint = 'http://localhost:80/cgi/' service_host = 'drum' # Process all the content. for content in content_iter: html = client.send_query(content.get_text(), service_endpoint=service_endpoint, service_host=service_host) xml = client.get_xml(html) self.add_result(content.get_id(), xml) return self.results @classmethod def get_version(cls): return 'STATIC' @staticmethod def get_processor(reading_content): return process_xml(reading_content)
493ce497e5d84d8db9c37816aefea9099df42e90
pywatson/answer/synonym.py
pywatson/answer/synonym.py
class Synonym(object): def __init__(self): pass
from pywatson.util.map_initializable import MapInitializable class SynSetSynonym(MapInitializable): def __init__(self, is_chosen, value, weight): self.is_chosen = is_chosen self.value = value self.weight = weight @classmethod def from_mapping(cls, syn_mapping): return cls(is_chosen=syn_mapping['isChosen'], value=syn_mapping['value'], weight=syn_mapping['weight']) class SynSet(MapInitializable): def __init__(self, name, synonyms=()): self.name = name self.synonyms = tuple(synonyms) @classmethod def from_mapping(cls, synset_mapping): return cls(name=synset_mapping[0]['name'], synonyms=(SynSetSynonym.from_mapping(s) for s in synset_mapping[0]['synonym'])) class Synonym(MapInitializable): def __init__(self, part_of_speech, lemma, value, syn_set): self.part_of_speech = part_of_speech self.lemma = lemma self.value = value self.syn_set = syn_set @classmethod def from_mapping(cls, synonym_mapping): return cls(part_of_speech=synonym_mapping['partOfSpeech'], lemma=synonym_mapping['lemma'], value=synonym_mapping['value'], syn_set=SynSet.from_mapping(synonym_mapping['synSet']))
Add Synonym and related classes
Add Synonym and related classes
Python
mit
sherlocke/pywatson
+ from pywatson.util.map_initializable import MapInitializable - class Synonym(object): - def __init__(self): - pass + + class SynSetSynonym(MapInitializable): + def __init__(self, is_chosen, value, weight): + self.is_chosen = is_chosen + self.value = value + self.weight = weight + + @classmethod + def from_mapping(cls, syn_mapping): + return cls(is_chosen=syn_mapping['isChosen'], + value=syn_mapping['value'], + weight=syn_mapping['weight']) + + + class SynSet(MapInitializable): + def __init__(self, name, synonyms=()): + self.name = name + self.synonyms = tuple(synonyms) + + @classmethod + def from_mapping(cls, synset_mapping): + return cls(name=synset_mapping[0]['name'], + synonyms=(SynSetSynonym.from_mapping(s) for s in synset_mapping[0]['synonym'])) + + + class Synonym(MapInitializable): + def __init__(self, part_of_speech, lemma, value, syn_set): + self.part_of_speech = part_of_speech + self.lemma = lemma + self.value = value + self.syn_set = syn_set + + @classmethod + def from_mapping(cls, synonym_mapping): + return cls(part_of_speech=synonym_mapping['partOfSpeech'], + lemma=synonym_mapping['lemma'], + value=synonym_mapping['value'], + syn_set=SynSet.from_mapping(synonym_mapping['synSet'])) +
Add Synonym and related classes
## Code Before: class Synonym(object): def __init__(self): pass ## Instruction: Add Synonym and related classes ## Code After: from pywatson.util.map_initializable import MapInitializable class SynSetSynonym(MapInitializable): def __init__(self, is_chosen, value, weight): self.is_chosen = is_chosen self.value = value self.weight = weight @classmethod def from_mapping(cls, syn_mapping): return cls(is_chosen=syn_mapping['isChosen'], value=syn_mapping['value'], weight=syn_mapping['weight']) class SynSet(MapInitializable): def __init__(self, name, synonyms=()): self.name = name self.synonyms = tuple(synonyms) @classmethod def from_mapping(cls, synset_mapping): return cls(name=synset_mapping[0]['name'], synonyms=(SynSetSynonym.from_mapping(s) for s in synset_mapping[0]['synonym'])) class Synonym(MapInitializable): def __init__(self, part_of_speech, lemma, value, syn_set): self.part_of_speech = part_of_speech self.lemma = lemma self.value = value self.syn_set = syn_set @classmethod def from_mapping(cls, synonym_mapping): return cls(part_of_speech=synonym_mapping['partOfSpeech'], lemma=synonym_mapping['lemma'], value=synonym_mapping['value'], syn_set=SynSet.from_mapping(synonym_mapping['synSet']))
fb53f2ed0e6337d6f5766f47cb67c204c89c0568
src/oauth2client/__init__.py
src/oauth2client/__init__.py
"""Client library for using OAuth2, especially with Google APIs.""" __version__ = '4.1.3' GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth' GOOGLE_DEVICE_URI = 'https://oauth2.googleapis.com/device/code' GOOGLE_REVOKE_URI = 'https://oauth2.googleapis.com/revoke' GOOGLE_TOKEN_URI = 'https://oauth2.googleapis.com/token' GOOGLE_TOKEN_INFO_URI = 'https://oauth2.googleapis.com/tokeninfo'
"""Client library for using OAuth2, especially with Google APIs.""" __version__ = '4.1.3' GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth' GOOGLE_DEVICE_URI = 'https://oauth2.googleapis.com/device/code' GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke' GOOGLE_TOKEN_URI = 'https://oauth2.googleapis.com/token' GOOGLE_TOKEN_INFO_URI = 'https://oauth2.googleapis.com/tokeninfo'
Fix oauth2 revoke URI, new URL doesn't seem to work
Fix oauth2 revoke URI, new URL doesn't seem to work
Python
apache-2.0
GAM-team/GAM,GAM-team/GAM
"""Client library for using OAuth2, especially with Google APIs.""" __version__ = '4.1.3' GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth' GOOGLE_DEVICE_URI = 'https://oauth2.googleapis.com/device/code' - GOOGLE_REVOKE_URI = 'https://oauth2.googleapis.com/revoke' + GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke' GOOGLE_TOKEN_URI = 'https://oauth2.googleapis.com/token' GOOGLE_TOKEN_INFO_URI = 'https://oauth2.googleapis.com/tokeninfo'
Fix oauth2 revoke URI, new URL doesn't seem to work
## Code Before: """Client library for using OAuth2, especially with Google APIs.""" __version__ = '4.1.3' GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth' GOOGLE_DEVICE_URI = 'https://oauth2.googleapis.com/device/code' GOOGLE_REVOKE_URI = 'https://oauth2.googleapis.com/revoke' GOOGLE_TOKEN_URI = 'https://oauth2.googleapis.com/token' GOOGLE_TOKEN_INFO_URI = 'https://oauth2.googleapis.com/tokeninfo' ## Instruction: Fix oauth2 revoke URI, new URL doesn't seem to work ## Code After: """Client library for using OAuth2, especially with Google APIs.""" __version__ = '4.1.3' GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth' GOOGLE_DEVICE_URI = 'https://oauth2.googleapis.com/device/code' GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke' GOOGLE_TOKEN_URI = 'https://oauth2.googleapis.com/token' GOOGLE_TOKEN_INFO_URI = 'https://oauth2.googleapis.com/tokeninfo'
f0246b9897d89c1ec6f2361bbb488c4e162e5c5e
reddit_liveupdate/utils.py
reddit_liveupdate/utils.py
import itertools import pytz from babel.dates import format_time from pylons import c def pairwise(iterable): a, b = itertools.tee(iterable) next(b, None) return itertools.izip(a, b) def pretty_time(dt): display_tz = pytz.timezone(c.liveupdate_event.timezone) return format_time( time=dt, tzinfo=display_tz, format="HH:mm z", locale=c.locale, )
import datetime import itertools import pytz from babel.dates import format_time, format_datetime from pylons import c def pairwise(iterable): a, b = itertools.tee(iterable) next(b, None) return itertools.izip(a, b) def pretty_time(dt): display_tz = pytz.timezone(c.liveupdate_event.timezone) today = datetime.datetime.now(display_tz).date() date = dt.astimezone(display_tz).date() if date == today: return format_time( time=dt, tzinfo=display_tz, format="HH:mm z", locale=c.locale, ) elif today - date < datetime.timedelta(days=365): return format_datetime( datetime=dt, tzinfo=display_tz, format="dd MMM HH:mm z", locale=c.locale, ) else: return format_datetime( datetime=dt, tzinfo=display_tz, format="dd MMM YYYY HH:mm z", locale=c.locale, )
Make timestamps more specific as temporal context fades.
Make timestamps more specific as temporal context fades. Fixes #6.
Python
bsd-3-clause
madbook/reddit-plugin-liveupdate,sim642/reddit-plugin-liveupdate,florenceyeun/reddit-plugin-liveupdate,sim642/reddit-plugin-liveupdate,florenceyeun/reddit-plugin-liveupdate,madbook/reddit-plugin-liveupdate,sim642/reddit-plugin-liveupdate,madbook/reddit-plugin-liveupdate,florenceyeun/reddit-plugin-liveupdate
+ import datetime import itertools import pytz - from babel.dates import format_time + from babel.dates import format_time, format_datetime from pylons import c def pairwise(iterable): a, b = itertools.tee(iterable) next(b, None) return itertools.izip(a, b) def pretty_time(dt): display_tz = pytz.timezone(c.liveupdate_event.timezone) + today = datetime.datetime.now(display_tz).date() + date = dt.astimezone(display_tz).date() + if date == today: - return format_time( + return format_time( - time=dt, + time=dt, - tzinfo=display_tz, + tzinfo=display_tz, - format="HH:mm z", + format="HH:mm z", - locale=c.locale, + locale=c.locale, - ) + ) + elif today - date < datetime.timedelta(days=365): + return format_datetime( + datetime=dt, + tzinfo=display_tz, + format="dd MMM HH:mm z", + locale=c.locale, + ) + else: + return format_datetime( + datetime=dt, + tzinfo=display_tz, + format="dd MMM YYYY HH:mm z", + locale=c.locale, + )
Make timestamps more specific as temporal context fades.
## Code Before: import itertools import pytz from babel.dates import format_time from pylons import c def pairwise(iterable): a, b = itertools.tee(iterable) next(b, None) return itertools.izip(a, b) def pretty_time(dt): display_tz = pytz.timezone(c.liveupdate_event.timezone) return format_time( time=dt, tzinfo=display_tz, format="HH:mm z", locale=c.locale, ) ## Instruction: Make timestamps more specific as temporal context fades. ## Code After: import datetime import itertools import pytz from babel.dates import format_time, format_datetime from pylons import c def pairwise(iterable): a, b = itertools.tee(iterable) next(b, None) return itertools.izip(a, b) def pretty_time(dt): display_tz = pytz.timezone(c.liveupdate_event.timezone) today = datetime.datetime.now(display_tz).date() date = dt.astimezone(display_tz).date() if date == today: return format_time( time=dt, tzinfo=display_tz, format="HH:mm z", locale=c.locale, ) elif today - date < datetime.timedelta(days=365): return format_datetime( datetime=dt, tzinfo=display_tz, format="dd MMM HH:mm z", locale=c.locale, ) else: return format_datetime( datetime=dt, tzinfo=display_tz, format="dd MMM YYYY HH:mm z", locale=c.locale, )
540c5f2969e75a0f461e9d46090cfe8d92c53b00
Simulator/plot.py
Simulator/plot.py
from Simulator import * import XMLParser import textToXML def getHistoryFileName(xmlFileName): y = xmlFileName[:-3] return 'history_' + y + 'txt' def plotFromXML(fileName,simulationTime,chemicalList): historyFile = getHistoryFileName(fileName) sim = XMLParser.getSimulator(fileName) sim.simulate(int(simulationTime),historyFile) sim.plot(chemicalList) def plotFromTxt(fileName,simulationTime,chemicalList): xmlFile = textToXML.getXMLFromTxt(fileName) plotFromXML(xmlFile,simulationTime,chemicalList)
from Simulator import * import XMLParser import textToXML def getHistoryFileName(xmlFileName): y = xmlFileName[:-3] y = y + 'txt' i = len(y) - 1 while i>=0 : if y[i]=='\\' or y[i]=='/' : break i-=1 if i>=0 : return y[:i+1] + 'history_' + y[i+1:] else: return 'history_' + y def plotFromXML(fileName,simulationTime,chemicalList): historyFile = getHistoryFileName(fileName) sim = XMLParser.getSimulator(fileName) sim.simulate(int(simulationTime),historyFile) sim.plot(chemicalList) def plotFromTxt(fileName,simulationTime,chemicalList): xmlFile = textToXML.getXMLFromTxt(fileName) plotFromXML(xmlFile,simulationTime,chemicalList)
Remove history name error for absolute paths
Remove history name error for absolute paths
Python
mit
aayushkapadia/chemical_reaction_simulator
from Simulator import * import XMLParser import textToXML + def getHistoryFileName(xmlFileName): y = xmlFileName[:-3] + y = y + 'txt' + + i = len(y) - 1 + while i>=0 : + if y[i]=='\\' or y[i]=='/' : + break + i-=1 + + if i>=0 : + return y[:i+1] + 'history_' + y[i+1:] + else: - return 'history_' + y + 'txt' + return 'history_' + y + def plotFromXML(fileName,simulationTime,chemicalList): historyFile = getHistoryFileName(fileName) sim = XMLParser.getSimulator(fileName) sim.simulate(int(simulationTime),historyFile) sim.plot(chemicalList) def plotFromTxt(fileName,simulationTime,chemicalList): xmlFile = textToXML.getXMLFromTxt(fileName) plotFromXML(xmlFile,simulationTime,chemicalList)
Remove history name error for absolute paths
## Code Before: from Simulator import * import XMLParser import textToXML def getHistoryFileName(xmlFileName): y = xmlFileName[:-3] return 'history_' + y + 'txt' def plotFromXML(fileName,simulationTime,chemicalList): historyFile = getHistoryFileName(fileName) sim = XMLParser.getSimulator(fileName) sim.simulate(int(simulationTime),historyFile) sim.plot(chemicalList) def plotFromTxt(fileName,simulationTime,chemicalList): xmlFile = textToXML.getXMLFromTxt(fileName) plotFromXML(xmlFile,simulationTime,chemicalList) ## Instruction: Remove history name error for absolute paths ## Code After: from Simulator import * import XMLParser import textToXML def getHistoryFileName(xmlFileName): y = xmlFileName[:-3] y = y + 'txt' i = len(y) - 1 while i>=0 : if y[i]=='\\' or y[i]=='/' : break i-=1 if i>=0 : return y[:i+1] + 'history_' + y[i+1:] else: return 'history_' + y def plotFromXML(fileName,simulationTime,chemicalList): historyFile = getHistoryFileName(fileName) sim = XMLParser.getSimulator(fileName) sim.simulate(int(simulationTime),historyFile) sim.plot(chemicalList) def plotFromTxt(fileName,simulationTime,chemicalList): xmlFile = textToXML.getXMLFromTxt(fileName) plotFromXML(xmlFile,simulationTime,chemicalList)
ffab98b03588cef69ab11a10a440d02952661edf
cyder/cydns/soa/forms.py
cyder/cydns/soa/forms.py
from django.forms import ModelForm from cyder.base.mixins import UsabilityFormMixin from cyder.base.eav.forms import get_eav_form from cyder.cydns.soa.models import SOA, SOAAV class SOAForm(ModelForm, UsabilityFormMixin): class Meta: model = SOA fields = ('root_domain', 'primary', 'contact', 'expire', 'retry', 'refresh', 'minimum', 'ttl', 'description', 'is_signed', 'dns_enabled') exclude = ('serial', 'dirty',) SOAAVForm = get_eav_form(SOAAV, SOA)
from django.forms import ModelForm from cyder.base.mixins import UsabilityFormMixin from cyder.base.eav.forms import get_eav_form from cyder.cydns.soa.models import SOA, SOAAV class SOAForm(ModelForm, UsabilityFormMixin): class Meta: model = SOA fields = ('root_domain', 'primary', 'contact', 'expire', 'retry', 'refresh', 'minimum', 'ttl', 'description', 'is_signed', 'dns_enabled') exclude = ('serial', 'dirty',) def clean(self, *args, **kwargs): contact = self.cleaned_data['contact'] self.cleaned_data['contact'] = contact.replace('@', '.') return super(SOAForm, self).clean(*args, **kwargs) SOAAVForm = get_eav_form(SOAAV, SOA)
Replace @ with . in soa form clean
Replace @ with . in soa form clean
Python
bsd-3-clause
OSU-Net/cyder,OSU-Net/cyder,akeym/cyder,drkitty/cyder,murrown/cyder,OSU-Net/cyder,drkitty/cyder,akeym/cyder,murrown/cyder,drkitty/cyder,akeym/cyder,murrown/cyder,akeym/cyder,drkitty/cyder,murrown/cyder,OSU-Net/cyder
from django.forms import ModelForm from cyder.base.mixins import UsabilityFormMixin from cyder.base.eav.forms import get_eav_form from cyder.cydns.soa.models import SOA, SOAAV class SOAForm(ModelForm, UsabilityFormMixin): class Meta: model = SOA fields = ('root_domain', 'primary', 'contact', 'expire', 'retry', 'refresh', 'minimum', 'ttl', 'description', 'is_signed', 'dns_enabled') exclude = ('serial', 'dirty',) + def clean(self, *args, **kwargs): + contact = self.cleaned_data['contact'] + self.cleaned_data['contact'] = contact.replace('@', '.') + return super(SOAForm, self).clean(*args, **kwargs) + + SOAAVForm = get_eav_form(SOAAV, SOA)
Replace @ with . in soa form clean
## Code Before: from django.forms import ModelForm from cyder.base.mixins import UsabilityFormMixin from cyder.base.eav.forms import get_eav_form from cyder.cydns.soa.models import SOA, SOAAV class SOAForm(ModelForm, UsabilityFormMixin): class Meta: model = SOA fields = ('root_domain', 'primary', 'contact', 'expire', 'retry', 'refresh', 'minimum', 'ttl', 'description', 'is_signed', 'dns_enabled') exclude = ('serial', 'dirty',) SOAAVForm = get_eav_form(SOAAV, SOA) ## Instruction: Replace @ with . in soa form clean ## Code After: from django.forms import ModelForm from cyder.base.mixins import UsabilityFormMixin from cyder.base.eav.forms import get_eav_form from cyder.cydns.soa.models import SOA, SOAAV class SOAForm(ModelForm, UsabilityFormMixin): class Meta: model = SOA fields = ('root_domain', 'primary', 'contact', 'expire', 'retry', 'refresh', 'minimum', 'ttl', 'description', 'is_signed', 'dns_enabled') exclude = ('serial', 'dirty',) def clean(self, *args, **kwargs): contact = self.cleaned_data['contact'] self.cleaned_data['contact'] = contact.replace('@', '.') return super(SOAForm, self).clean(*args, **kwargs) SOAAVForm = get_eav_form(SOAAV, SOA)
26f984a7732491e87e4eb756caf0056a7ac71484
contract_invoice_merge_by_partner/models/account_analytic_analysis.py
contract_invoice_merge_by_partner/models/account_analytic_analysis.py
from openerp import api, models class PurchaseOrderLine(models.Model): _inherit = 'account.analytic.account' @api.multi def _recurring_create_invoice(self, automatic=False): invoice_obj = self.env['account.invoice'] invoices = invoice_obj.browse( super(PurchaseOrderLine, self)._recurring_create_invoice(automatic)) res = [] unlink_list = [] for partner in invoices.mapped('partner_id'): inv_to_merge = invoices.filtered( lambda x: x.partner_id.id == partner) if partner.contract_invoice_merge: invoices_merged = inv_to_merge.do_merge() res.extend(invoices_merged) unlink_list.extend(inv_to_merge) else: res.extend(inv_to_merge) if unlink_list: invoice_obj.unlink([x.id for x in unlink_list]) return res
from openerp import api, models class PurchaseOrderLine(models.Model): _inherit = 'account.analytic.account' @api.multi def _recurring_create_invoice(self, automatic=False): invoice_obj = self.env['account.invoice'] invoices = invoice_obj.browse( super(PurchaseOrderLine, self)._recurring_create_invoice( automatic)) res = [] unlink_list = [] for partner in invoices.mapped('partner_id'): inv_to_merge = invoices.filtered( lambda x: x.partner_id.id == partner) if partner.contract_invoice_merge and len(inv_to_merge) > 1: invoices_merged = inv_to_merge.do_merge() res.extend(invoices_merged) unlink_list.extend(inv_to_merge) else: res.extend(inv_to_merge) if unlink_list: invoice_obj.browse(unlink_list).unlink() return res
Fix unlink, >1 filter and lines too long
Fix unlink, >1 filter and lines too long
Python
agpl-3.0
bullet92/contract,open-synergy/contract
from openerp import api, models class PurchaseOrderLine(models.Model): _inherit = 'account.analytic.account' @api.multi def _recurring_create_invoice(self, automatic=False): invoice_obj = self.env['account.invoice'] invoices = invoice_obj.browse( - super(PurchaseOrderLine, self)._recurring_create_invoice(automatic)) + super(PurchaseOrderLine, self)._recurring_create_invoice( + automatic)) res = [] unlink_list = [] for partner in invoices.mapped('partner_id'): inv_to_merge = invoices.filtered( lambda x: x.partner_id.id == partner) - if partner.contract_invoice_merge: + if partner.contract_invoice_merge and len(inv_to_merge) > 1: invoices_merged = inv_to_merge.do_merge() res.extend(invoices_merged) unlink_list.extend(inv_to_merge) else: res.extend(inv_to_merge) if unlink_list: - invoice_obj.unlink([x.id for x in unlink_list]) + invoice_obj.browse(unlink_list).unlink() return res
Fix unlink, >1 filter and lines too long
## Code Before: from openerp import api, models class PurchaseOrderLine(models.Model): _inherit = 'account.analytic.account' @api.multi def _recurring_create_invoice(self, automatic=False): invoice_obj = self.env['account.invoice'] invoices = invoice_obj.browse( super(PurchaseOrderLine, self)._recurring_create_invoice(automatic)) res = [] unlink_list = [] for partner in invoices.mapped('partner_id'): inv_to_merge = invoices.filtered( lambda x: x.partner_id.id == partner) if partner.contract_invoice_merge: invoices_merged = inv_to_merge.do_merge() res.extend(invoices_merged) unlink_list.extend(inv_to_merge) else: res.extend(inv_to_merge) if unlink_list: invoice_obj.unlink([x.id for x in unlink_list]) return res ## Instruction: Fix unlink, >1 filter and lines too long ## Code After: from openerp import api, models class PurchaseOrderLine(models.Model): _inherit = 'account.analytic.account' @api.multi def _recurring_create_invoice(self, automatic=False): invoice_obj = self.env['account.invoice'] invoices = invoice_obj.browse( super(PurchaseOrderLine, self)._recurring_create_invoice( automatic)) res = [] unlink_list = [] for partner in invoices.mapped('partner_id'): inv_to_merge = invoices.filtered( lambda x: x.partner_id.id == partner) if partner.contract_invoice_merge and len(inv_to_merge) > 1: invoices_merged = inv_to_merge.do_merge() res.extend(invoices_merged) unlink_list.extend(inv_to_merge) else: res.extend(inv_to_merge) if unlink_list: invoice_obj.browse(unlink_list).unlink() return res
7ad47fad53be18a07aede85c02e41176a96c5de2
learnwithpeople/__init__.py
learnwithpeople/__init__.py
from .celery import app as celery_app __version__ = "dev" GIT_REVISION = "dev"
from .celery import app as celery_app __all__ = ('celery_app',) __version__ = "dev" GIT_REVISION = "dev"
Update celery setup according to docs
Update celery setup according to docs
Python
mit
p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles
from .celery import app as celery_app + + __all__ = ('celery_app',) __version__ = "dev" GIT_REVISION = "dev"
Update celery setup according to docs
## Code Before: from .celery import app as celery_app __version__ = "dev" GIT_REVISION = "dev" ## Instruction: Update celery setup according to docs ## Code After: from .celery import app as celery_app __all__ = ('celery_app',) __version__ = "dev" GIT_REVISION = "dev"
e67c57128f88b61eac08e488e54343d48f1454c7
ddcz/forms/authentication.py
ddcz/forms/authentication.py
import logging from django import forms from django.contrib.auth import forms as authforms from ..models import UserProfile logger = logging.getLogger(__name__) class LoginForm(forms.Form): nick = forms.CharField(label="Nick", max_length=20) password = forms.CharField(label="Heslo", max_length=50, widget=forms.PasswordInput) class PasswordResetForm(authforms.PasswordResetForm): def get_users(self, email): """Given an email, return matching user(s) who should receive a reset. This is overridem from original form to use UserProfile instead of standard user model since that is normative for email storage. """ user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email) users = tuple( list( up.user for up in user_profiles if up.user.has_usable_password() and up.user.is_active ) ) logger.info( "Selected users for password reset: %s" % ", ".join([str(u.pk) for u in users]) ) return users
import logging from django import forms from django.contrib.auth import forms as authforms from ..models import UserProfile logger = logging.getLogger(__name__) class LoginForm(forms.Form): nick = forms.CharField(label="Nick", max_length=25) password = forms.CharField( label="Heslo", max_length=100, widget=forms.PasswordInput ) class PasswordResetForm(authforms.PasswordResetForm): def get_users(self, email): """Given an email, return matching user(s) who should receive a reset. This is overridem from original form to use UserProfile instead of standard user model since that is normative for email storage. """ user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email) users = tuple( list( up.user for up in user_profiles if up.user.has_usable_password() and up.user.is_active ) ) logger.info( "Selected users for password reset: %s" % ", ".join([str(u.pk) for u in users]) ) return users
Update LoginForm to match reality
Update LoginForm to match reality
Python
mit
dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard
import logging from django import forms from django.contrib.auth import forms as authforms from ..models import UserProfile logger = logging.getLogger(__name__) class LoginForm(forms.Form): - nick = forms.CharField(label="Nick", max_length=20) + nick = forms.CharField(label="Nick", max_length=25) + password = forms.CharField( - password = forms.CharField(label="Heslo", max_length=50, widget=forms.PasswordInput) + label="Heslo", max_length=100, widget=forms.PasswordInput + ) class PasswordResetForm(authforms.PasswordResetForm): def get_users(self, email): """Given an email, return matching user(s) who should receive a reset. This is overridem from original form to use UserProfile instead of standard user model since that is normative for email storage. """ user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email) users = tuple( list( up.user for up in user_profiles if up.user.has_usable_password() and up.user.is_active ) ) logger.info( "Selected users for password reset: %s" % ", ".join([str(u.pk) for u in users]) ) return users
Update LoginForm to match reality
## Code Before: import logging from django import forms from django.contrib.auth import forms as authforms from ..models import UserProfile logger = logging.getLogger(__name__) class LoginForm(forms.Form): nick = forms.CharField(label="Nick", max_length=20) password = forms.CharField(label="Heslo", max_length=50, widget=forms.PasswordInput) class PasswordResetForm(authforms.PasswordResetForm): def get_users(self, email): """Given an email, return matching user(s) who should receive a reset. This is overridem from original form to use UserProfile instead of standard user model since that is normative for email storage. """ user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email) users = tuple( list( up.user for up in user_profiles if up.user.has_usable_password() and up.user.is_active ) ) logger.info( "Selected users for password reset: %s" % ", ".join([str(u.pk) for u in users]) ) return users ## Instruction: Update LoginForm to match reality ## Code After: import logging from django import forms from django.contrib.auth import forms as authforms from ..models import UserProfile logger = logging.getLogger(__name__) class LoginForm(forms.Form): nick = forms.CharField(label="Nick", max_length=25) password = forms.CharField( label="Heslo", max_length=100, widget=forms.PasswordInput ) class PasswordResetForm(authforms.PasswordResetForm): def get_users(self, email): """Given an email, return matching user(s) who should receive a reset. This is overridem from original form to use UserProfile instead of standard user model since that is normative for email storage. """ user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email) users = tuple( list( up.user for up in user_profiles if up.user.has_usable_password() and up.user.is_active ) ) logger.info( "Selected users for password reset: %s" % ", ".join([str(u.pk) for u in users]) ) return users
14d6955118893c532c1d9f8f6037d1da1b18dbbb
analysis/plot-skeleton.py
analysis/plot-skeleton.py
import climate import database import plots @climate.annotate( root='plot data rooted at this path', pattern=('plot data from files matching this pattern', 'option'), ) def main(root, pattern='*/*block02/*trial00*.csv.gz'): for trial in database.Experiment(root).trials_matching(pattern): with plots.space() as ax: plots.skeleton(ax, trial, 100) if __name__ == '__main__': climate.call(main)
import climate import pandas as pd import database import plots @climate.annotate( root='plot data rooted at this path', pattern=('plot data from files matching this pattern', 'option'), ) def main(root, pattern='*/*block03/*trial00*.csv.gz'): for trial in database.Experiment(root).trials_matching(pattern): with plots.space() as ax: for i in range(3): plots.skeleton(ax, trial, 1000 + 300 * i, lw=2, color='#fd3220', alpha=0.3) #trial.rotate_heading(pd.Series([-6.28 / 10] * len(trial.df))) trial.make_body_relative() for i in range(3): plots.skeleton(ax, trial, 1000 + 300 * i, offset=(0.5 * i, 0.5 * i), lw=2, color='#111111', alpha=0.3) if __name__ == '__main__': climate.call(main)
Add multiple skeletons for the moment.
Add multiple skeletons for the moment.
Python
mit
lmjohns3/cube-experiment,lmjohns3/cube-experiment,lmjohns3/cube-experiment
import climate + import pandas as pd import database import plots @climate.annotate( root='plot data rooted at this path', pattern=('plot data from files matching this pattern', 'option'), ) - def main(root, pattern='*/*block02/*trial00*.csv.gz'): + def main(root, pattern='*/*block03/*trial00*.csv.gz'): for trial in database.Experiment(root).trials_matching(pattern): with plots.space() as ax: - plots.skeleton(ax, trial, 100) + for i in range(3): + plots.skeleton(ax, trial, 1000 + 300 * i, lw=2, color='#fd3220', alpha=0.3) + #trial.rotate_heading(pd.Series([-6.28 / 10] * len(trial.df))) + trial.make_body_relative() + for i in range(3): + plots.skeleton(ax, trial, 1000 + 300 * i, offset=(0.5 * i, 0.5 * i), lw=2, color='#111111', alpha=0.3) if __name__ == '__main__': climate.call(main)
Add multiple skeletons for the moment.
## Code Before: import climate import database import plots @climate.annotate( root='plot data rooted at this path', pattern=('plot data from files matching this pattern', 'option'), ) def main(root, pattern='*/*block02/*trial00*.csv.gz'): for trial in database.Experiment(root).trials_matching(pattern): with plots.space() as ax: plots.skeleton(ax, trial, 100) if __name__ == '__main__': climate.call(main) ## Instruction: Add multiple skeletons for the moment. ## Code After: import climate import pandas as pd import database import plots @climate.annotate( root='plot data rooted at this path', pattern=('plot data from files matching this pattern', 'option'), ) def main(root, pattern='*/*block03/*trial00*.csv.gz'): for trial in database.Experiment(root).trials_matching(pattern): with plots.space() as ax: for i in range(3): plots.skeleton(ax, trial, 1000 + 300 * i, lw=2, color='#fd3220', alpha=0.3) #trial.rotate_heading(pd.Series([-6.28 / 10] * len(trial.df))) trial.make_body_relative() for i in range(3): plots.skeleton(ax, trial, 1000 + 300 * i, offset=(0.5 * i, 0.5 * i), lw=2, color='#111111', alpha=0.3) if __name__ == '__main__': climate.call(main)
bfd75a927da2b46cb8630fab0cd3828ba71bf4ee
dependencies.py
dependencies.py
from setuptools.command import easy_install requires = ["dnslib", "dkimpy>=0.7.1", "pyyaml", "ddt", "authheaders"] for module in requires: easy_install.main( ["-U",module] )
import subprocess import sys requires = ["dnslib", "dkimpy>=0.7.1", "pyyaml", "ddt", "authheaders"] def install(package): subprocess.call([sys.executable, "-m", "pip", "install", package]) for module in requires: install(module)
Use pip instead of easy_install
Use pip instead of easy_install
Python
mit
ValiMail/arc_test_suite
- from setuptools.command import easy_install + import subprocess + import sys requires = ["dnslib", "dkimpy>=0.7.1", "pyyaml", "ddt", "authheaders"] + def install(package): + subprocess.call([sys.executable, "-m", "pip", "install", package]) + for module in requires: - easy_install.main( ["-U",module] ) + install(module)
Use pip instead of easy_install
## Code Before: from setuptools.command import easy_install requires = ["dnslib", "dkimpy>=0.7.1", "pyyaml", "ddt", "authheaders"] for module in requires: easy_install.main( ["-U",module] ) ## Instruction: Use pip instead of easy_install ## Code After: import subprocess import sys requires = ["dnslib", "dkimpy>=0.7.1", "pyyaml", "ddt", "authheaders"] def install(package): subprocess.call([sys.executable, "-m", "pip", "install", package]) for module in requires: install(module)
3171e7e355536f41a6c517ca7128a152c2577829
anndata/tests/test_uns.py
anndata/tests/test_uns.py
import numpy as np import pandas as pd from anndata import AnnData def test_uns_color_subset(): # Tests for https://github.com/theislab/anndata/issues/257 obs = pd.DataFrame(index=[f"cell{i}" for i in range(5)]) obs["cat1"] = pd.Series(list("aabcd"), index=obs.index, dtype="category") obs["cat2"] = pd.Series(list("aabbb"), index=obs.index, dtype="category") uns = dict( cat1_colors=["red", "green", "blue"], cat2_colors=["red", "green", "blue"], ) adata = AnnData(np.ones((5, 5)), obs=obs, uns=uns) # If number of categories does not match number of colors, # they should be reset v = adata[:, [0, 1]] assert "cat1_colors" not in v.uns assert "cat2_colors" not in v.uns # Otherwise the colors should still match after reseting adata.uns["cat1_colors"] = ["red", "green", "blue", "yellow"] v = adata[[0, 1], :] assert len(v.uns["cat1_colors"]) == 1 assert v.uns["cat1_colors"][0] == "red"
import numpy as np import pandas as pd from anndata import AnnData def test_uns_color_subset(): # Tests for https://github.com/theislab/anndata/issues/257 obs = pd.DataFrame(index=[f"cell{i}" for i in range(5)]) obs["cat1"] = pd.Series(list("aabcd"), index=obs.index, dtype="category") obs["cat2"] = pd.Series(list("aabbb"), index=obs.index, dtype="category") uns = dict( cat1_colors=["red", "green", "blue"], cat2_colors=["red", "green", "blue"], ) adata = AnnData(np.ones((5, 5)), obs=obs, uns=uns) # If number of categories does not match number of colors, # they should be reset v = adata[:, [0, 1]] assert "cat1_colors" not in v.uns assert "cat2_colors" not in v.uns # Otherwise the colors should still match after reseting cat1_colors = ["red", "green", "blue", "yellow"] adata.uns["cat1_colors"] = cat1_colors.copy() v = adata[[0, 1], :] assert len(v.uns["cat1_colors"]) == 1 assert v.uns["cat1_colors"][0] == "red" # But original object should not change assert list(adata.uns["cat1_colors"]) == cat1_colors
Add test for categorical colors staying around after subsetting
Add test for categorical colors staying around after subsetting
Python
bsd-3-clause
theislab/anndata
import numpy as np import pandas as pd from anndata import AnnData def test_uns_color_subset(): # Tests for https://github.com/theislab/anndata/issues/257 obs = pd.DataFrame(index=[f"cell{i}" for i in range(5)]) obs["cat1"] = pd.Series(list("aabcd"), index=obs.index, dtype="category") obs["cat2"] = pd.Series(list("aabbb"), index=obs.index, dtype="category") uns = dict( cat1_colors=["red", "green", "blue"], cat2_colors=["red", "green", "blue"], ) adata = AnnData(np.ones((5, 5)), obs=obs, uns=uns) # If number of categories does not match number of colors, # they should be reset v = adata[:, [0, 1]] assert "cat1_colors" not in v.uns assert "cat2_colors" not in v.uns # Otherwise the colors should still match after reseting - adata.uns["cat1_colors"] = ["red", "green", "blue", "yellow"] + cat1_colors = ["red", "green", "blue", "yellow"] + adata.uns["cat1_colors"] = cat1_colors.copy() v = adata[[0, 1], :] assert len(v.uns["cat1_colors"]) == 1 assert v.uns["cat1_colors"][0] == "red" + # But original object should not change + assert list(adata.uns["cat1_colors"]) == cat1_colors
Add test for categorical colors staying around after subsetting
## Code Before: import numpy as np import pandas as pd from anndata import AnnData def test_uns_color_subset(): # Tests for https://github.com/theislab/anndata/issues/257 obs = pd.DataFrame(index=[f"cell{i}" for i in range(5)]) obs["cat1"] = pd.Series(list("aabcd"), index=obs.index, dtype="category") obs["cat2"] = pd.Series(list("aabbb"), index=obs.index, dtype="category") uns = dict( cat1_colors=["red", "green", "blue"], cat2_colors=["red", "green", "blue"], ) adata = AnnData(np.ones((5, 5)), obs=obs, uns=uns) # If number of categories does not match number of colors, # they should be reset v = adata[:, [0, 1]] assert "cat1_colors" not in v.uns assert "cat2_colors" not in v.uns # Otherwise the colors should still match after reseting adata.uns["cat1_colors"] = ["red", "green", "blue", "yellow"] v = adata[[0, 1], :] assert len(v.uns["cat1_colors"]) == 1 assert v.uns["cat1_colors"][0] == "red" ## Instruction: Add test for categorical colors staying around after subsetting ## Code After: import numpy as np import pandas as pd from anndata import AnnData def test_uns_color_subset(): # Tests for https://github.com/theislab/anndata/issues/257 obs = pd.DataFrame(index=[f"cell{i}" for i in range(5)]) obs["cat1"] = pd.Series(list("aabcd"), index=obs.index, dtype="category") obs["cat2"] = pd.Series(list("aabbb"), index=obs.index, dtype="category") uns = dict( cat1_colors=["red", "green", "blue"], cat2_colors=["red", "green", "blue"], ) adata = AnnData(np.ones((5, 5)), obs=obs, uns=uns) # If number of categories does not match number of colors, # they should be reset v = adata[:, [0, 1]] assert "cat1_colors" not in v.uns assert "cat2_colors" not in v.uns # Otherwise the colors should still match after reseting cat1_colors = ["red", "green", "blue", "yellow"] adata.uns["cat1_colors"] = cat1_colors.copy() v = adata[[0, 1], :] assert len(v.uns["cat1_colors"]) == 1 assert v.uns["cat1_colors"][0] == "red" # But original object should not change assert list(adata.uns["cat1_colors"]) == cat1_colors
2dece45476170e24e14903f19f9bf400c10ebf42
djangocms_wow/cms_plugins.py
djangocms_wow/cms_plugins.py
from django.utils.translation import ugettext_lazy as _ from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from . import models class AnimationPlugin(CMSPluginBase): model = models.Animation name = _('Animation') render_template = 'djangocms_wow/animation.html' allow_children = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(AnimationPlugin) class WOWAnimationPlugin(CMSPluginBase): model = models.WOWAnimation name = _("Wow Animation") render_template = 'djangocms_wow/wow_animation.html' allow_children = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(WOWAnimationPlugin)
from django.utils.translation import ugettext_lazy as _ from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from . import models class AnimationPlugin(CMSPluginBase): model = models.Animation name = _('Animation') render_template = 'djangocms_wow/animation.html' allow_children = True text_enabled = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(AnimationPlugin) class WOWAnimationPlugin(CMSPluginBase): model = models.WOWAnimation name = _("Wow Animation") render_template = 'djangocms_wow/wow_animation.html' allow_children = True text_enabled = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(WOWAnimationPlugin)
Allow WOW animations to be used in text plugin.
Allow WOW animations to be used in text plugin.
Python
bsd-3-clause
narayanaditya95/djangocms-wow,narayanaditya95/djangocms-wow,narayanaditya95/djangocms-wow
from django.utils.translation import ugettext_lazy as _ from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from . import models class AnimationPlugin(CMSPluginBase): model = models.Animation name = _('Animation') render_template = 'djangocms_wow/animation.html' allow_children = True + text_enabled = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(AnimationPlugin) class WOWAnimationPlugin(CMSPluginBase): model = models.WOWAnimation name = _("Wow Animation") render_template = 'djangocms_wow/wow_animation.html' allow_children = True + text_enabled = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(WOWAnimationPlugin)
Allow WOW animations to be used in text plugin.
## Code Before: from django.utils.translation import ugettext_lazy as _ from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from . import models class AnimationPlugin(CMSPluginBase): model = models.Animation name = _('Animation') render_template = 'djangocms_wow/animation.html' allow_children = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(AnimationPlugin) class WOWAnimationPlugin(CMSPluginBase): model = models.WOWAnimation name = _("Wow Animation") render_template = 'djangocms_wow/wow_animation.html' allow_children = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(WOWAnimationPlugin) ## Instruction: Allow WOW animations to be used in text plugin. ## Code After: from django.utils.translation import ugettext_lazy as _ from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from . import models class AnimationPlugin(CMSPluginBase): model = models.Animation name = _('Animation') render_template = 'djangocms_wow/animation.html' allow_children = True text_enabled = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(AnimationPlugin) class WOWAnimationPlugin(CMSPluginBase): model = models.WOWAnimation name = _("Wow Animation") render_template = 'djangocms_wow/wow_animation.html' allow_children = True text_enabled = True cache = True def render(self, context, instance, placeholder): context.update({'instance': instance}) return context plugin_pool.register_plugin(WOWAnimationPlugin)
99fba41b7392b1e5e4216145f1e8913698b60914
mopidy_gmusic/commands.py
mopidy_gmusic/commands.py
import gmusicapi from mopidy import commands from oauth2client.client import OAuth2WebServerFlow class GMusicCommand(commands.Command): def __init__(self): super().__init__() self.add_child("login", LoginCommand()) class LoginCommand(commands.Command): def run(self, args, config): oauth_info = gmusicapi.Mobileclient._session_class.oauth flow = OAuth2WebServerFlow(**oauth_info._asdict()) print() print( "Go to the following URL to get an initial auth code, then " + "provide it below: " + flow.step1_get_authorize_url() ) print() try: initial_code = raw_input("code: ") except NameError: # Python 3 initial_code = input("code: ") credentials = flow.step2_exchange(initial_code) refresh_token = credentials.refresh_token print("\nPlease update your config to include the following:") print() print("[gmusic]") print("refresh_token =", refresh_token) print()
import gmusicapi from mopidy import commands from oauth2client.client import OAuth2WebServerFlow class GMusicCommand(commands.Command): def __init__(self): super().__init__() self.add_child("login", LoginCommand()) class LoginCommand(commands.Command): def run(self, args, config): oauth_info = gmusicapi.Mobileclient._session_class.oauth flow = OAuth2WebServerFlow(**oauth_info._asdict()) print() print( "Go to the following URL to get an initial auth code, " "then provide it below:" ) print(flow.step1_get_authorize_url()) print() initial_code = input("code: ") credentials = flow.step2_exchange(initial_code) refresh_token = credentials.refresh_token print("\nPlease update your config to include the following:") print() print("[gmusic]") print("refresh_token =", refresh_token) print()
Remove Python 2 compatibility code
py3: Remove Python 2 compatibility code
Python
apache-2.0
hechtus/mopidy-gmusic,mopidy/mopidy-gmusic
import gmusicapi
from mopidy import commands
from oauth2client.client import OAuth2WebServerFlow


class GMusicCommand(commands.Command):
    def __init__(self):
        super().__init__()
        self.add_child("login", LoginCommand())


class LoginCommand(commands.Command):
    def run(self, args, config):
        oauth_info = gmusicapi.Mobileclient._session_class.oauth
        flow = OAuth2WebServerFlow(**oauth_info._asdict())

        print()
        print(
-            "Go to the following URL to get an initial auth code, then "
+            "Go to the following URL to get an initial auth code, "
-            + "provide it below: "
+            "then provide it below:"
-            + flow.step1_get_authorize_url()
        )
+        print(flow.step1_get_authorize_url())
        print()
-        try:
-            initial_code = raw_input("code: ")
-        except NameError:
-            # Python 3
-            initial_code = input("code: ")
+        initial_code = input("code: ")
        credentials = flow.step2_exchange(initial_code)
        refresh_token = credentials.refresh_token
        print("\nPlease update your config to include the following:")
        print()
        print("[gmusic]")
        print("refresh_token =", refresh_token)
        print()
Remove Python 2 compatibility code
## Code Before:
import gmusicapi
from mopidy import commands
from oauth2client.client import OAuth2WebServerFlow


class GMusicCommand(commands.Command):
    def __init__(self):
        super().__init__()
        self.add_child("login", LoginCommand())


class LoginCommand(commands.Command):
    def run(self, args, config):
        oauth_info = gmusicapi.Mobileclient._session_class.oauth
        flow = OAuth2WebServerFlow(**oauth_info._asdict())

        print()
        print(
            "Go to the following URL to get an initial auth code, then "
            + "provide it below: "
            + flow.step1_get_authorize_url()
        )
        print()
        try:
            initial_code = raw_input("code: ")
        except NameError:
            # Python 3
            initial_code = input("code: ")
        credentials = flow.step2_exchange(initial_code)
        refresh_token = credentials.refresh_token
        print("\nPlease update your config to include the following:")
        print()
        print("[gmusic]")
        print("refresh_token =", refresh_token)
        print()
## Instruction:
Remove Python 2 compatibility code
## Code After:
import gmusicapi
from mopidy import commands
from oauth2client.client import OAuth2WebServerFlow


class GMusicCommand(commands.Command):
    def __init__(self):
        super().__init__()
        self.add_child("login", LoginCommand())


class LoginCommand(commands.Command):
    def run(self, args, config):
        oauth_info = gmusicapi.Mobileclient._session_class.oauth
        flow = OAuth2WebServerFlow(**oauth_info._asdict())

        print()
        print(
            "Go to the following URL to get an initial auth code, "
            "then provide it below:"
        )
        print(flow.step1_get_authorize_url())
        print()
        initial_code = input("code: ")
        credentials = flow.step2_exchange(initial_code)
        refresh_token = credentials.refresh_token
        print("\nPlease update your config to include the following:")
        print()
        print("[gmusic]")
        print("refresh_token =", refresh_token)
        print()
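The rewritten `print` call also drops the explicit `+` between the two string literals: adjacent literals inside parentheses are concatenated by the Python parser at compile time, so no runtime work is needed. A small illustration of the idiom:

```python
message = (
    "Go to the following URL to get an initial auth code, "
    "then provide it below:"
)
# Adjacent string literals are joined by the parser, not at runtime.
assert message == ("Go to the following URL to get an initial auth code, "
                   "then provide it below:")
```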
c322e4f2202f3b004a4f41bd4c2786f88292cf37
deconstrst/deconstrst.py
deconstrst/deconstrst.py
import argparse
import sys

from os import path

from builder import DeconstJSONBuilder
from sphinx.application import Sphinx
from sphinx.builders import BUILTIN_BUILDERS


def build(argv):
    """
    Invoke Sphinx with locked arguments to generate JSON content.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-s", "--submit", help="Submit results to the content store.",
                        action="store_true")
    args = parser.parse_args(argv[1:])

    # I am a terrible person
    BUILTIN_BUILDERS['deconst'] = DeconstJSONBuilder

    # Lock source and destination to the same paths as the Makefile.
    srcdir, destdir = '.', '_build/deconst'
    doctreedir = path.join(destdir, '.doctrees')

    app = Sphinx(srcdir=srcdir, confdir=srcdir, outdir=destdir,
                 doctreedir=doctreedir, buildername="deconst",
                 confoverrides={}, status=sys.stdout, warning=sys.stderr,
                 freshenv=True, warningiserror=False, tags=[], verbosity=0,
                 parallel=1)
    app.build(True, [])

    if app.statuscode != 0 or not args.submit:
        return app.statuscode

    print("submit active")

    return 0
from __future__ import print_function

import argparse
import sys
import os

from builder import DeconstJSONBuilder
from sphinx.application import Sphinx
from sphinx.builders import BUILTIN_BUILDERS


def build(argv):
    """
    Invoke Sphinx with locked arguments to generate JSON content.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-s", "--submit", help="Submit results to the content store.",
                        action="store_true")
    args = parser.parse_args(argv[1:])

    content_store_url = os.getenv("CONTENT_STORE")

    if args.submit and not content_store_url:
        print("Please set CONTENT_STORE if submitting results.",
              file=sys.stderr)
        sys.exit(1)

    # I am a terrible person
    BUILTIN_BUILDERS['deconst'] = DeconstJSONBuilder

    # Lock source and destination to the same paths as the Makefile.
    srcdir, destdir = '.', '_build/deconst'
    doctreedir = os.path.join(destdir, '.doctrees')

    app = Sphinx(srcdir=srcdir, confdir=srcdir, outdir=destdir,
                 doctreedir=doctreedir, buildername="deconst",
                 confoverrides={}, status=sys.stdout, warning=sys.stderr,
                 freshenv=True, warningiserror=False, tags=[], verbosity=0,
                 parallel=1)
    app.build(True, [])

    if app.statuscode != 0 or not args.submit:
        return app.statuscode

    print("submit active")

    return 0
Validate the presence of CONTENT_STORE.
Validate the presence of CONTENT_STORE.
Python
apache-2.0
ktbartholomew/preparer-sphinx,ktbartholomew/preparer-sphinx,deconst/preparer-sphinx,deconst/preparer-sphinx
+
+ from __future__ import print_function
import argparse
import sys
- from os import path
+ import os

from builder import DeconstJSONBuilder
from sphinx.application import Sphinx
from sphinx.builders import BUILTIN_BUILDERS


def build(argv):
    """
    Invoke Sphinx with locked arguments to generate JSON content.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-s", "--submit", help="Submit results to the content store.",
                        action="store_true")
    args = parser.parse_args(argv[1:])

+    content_store_url = os.getenv("CONTENT_STORE")
+
+    if args.submit and not content_store_url:
+        print("Please set CONTENT_STORE if submitting results.",
+              file=sys.stderr)
+        sys.exit(1)

    # I am a terrible person
    BUILTIN_BUILDERS['deconst'] = DeconstJSONBuilder

    # Lock source and destination to the same paths as the Makefile.
    srcdir, destdir = '.', '_build/deconst'
-    doctreedir = path.join(destdir, '.doctrees')
+    doctreedir = os.path.join(destdir, '.doctrees')

    app = Sphinx(srcdir=srcdir, confdir=srcdir, outdir=destdir,
                 doctreedir=doctreedir, buildername="deconst",
                 confoverrides={}, status=sys.stdout, warning=sys.stderr,
                 freshenv=True, warningiserror=False, tags=[], verbosity=0,
                 parallel=1)
    app.build(True, [])

    if app.statuscode != 0 or not args.submit:
        return app.statuscode

    print("submit active")

    return 0
Validate the presence of CONTENT_STORE.
## Code Before:
import argparse
import sys

from os import path

from builder import DeconstJSONBuilder
from sphinx.application import Sphinx
from sphinx.builders import BUILTIN_BUILDERS


def build(argv):
    """
    Invoke Sphinx with locked arguments to generate JSON content.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-s", "--submit", help="Submit results to the content store.",
                        action="store_true")
    args = parser.parse_args(argv[1:])

    # I am a terrible person
    BUILTIN_BUILDERS['deconst'] = DeconstJSONBuilder

    # Lock source and destination to the same paths as the Makefile.
    srcdir, destdir = '.', '_build/deconst'
    doctreedir = path.join(destdir, '.doctrees')

    app = Sphinx(srcdir=srcdir, confdir=srcdir, outdir=destdir,
                 doctreedir=doctreedir, buildername="deconst",
                 confoverrides={}, status=sys.stdout, warning=sys.stderr,
                 freshenv=True, warningiserror=False, tags=[], verbosity=0,
                 parallel=1)
    app.build(True, [])

    if app.statuscode != 0 or not args.submit:
        return app.statuscode

    print("submit active")

    return 0
## Instruction:
Validate the presence of CONTENT_STORE.
## Code After:
from __future__ import print_function

import argparse
import sys
import os

from builder import DeconstJSONBuilder
from sphinx.application import Sphinx
from sphinx.builders import BUILTIN_BUILDERS


def build(argv):
    """
    Invoke Sphinx with locked arguments to generate JSON content.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-s", "--submit", help="Submit results to the content store.",
                        action="store_true")
    args = parser.parse_args(argv[1:])

    content_store_url = os.getenv("CONTENT_STORE")

    if args.submit and not content_store_url:
        print("Please set CONTENT_STORE if submitting results.",
              file=sys.stderr)
        sys.exit(1)

    # I am a terrible person
    BUILTIN_BUILDERS['deconst'] = DeconstJSONBuilder

    # Lock source and destination to the same paths as the Makefile.
    srcdir, destdir = '.', '_build/deconst'
    doctreedir = os.path.join(destdir, '.doctrees')

    app = Sphinx(srcdir=srcdir, confdir=srcdir, outdir=destdir,
                 doctreedir=doctreedir, buildername="deconst",
                 confoverrides={}, status=sys.stdout, warning=sys.stderr,
                 freshenv=True, warningiserror=False, tags=[], verbosity=0,
                 parallel=1)
    app.build(True, [])

    if app.statuscode != 0 or not args.submit:
        return app.statuscode

    print("submit active")

    return 0
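Failing fast when a required environment variable is missing, as this change does for `CONTENT_STORE`, generalizes well. A sketch of the same pattern as a reusable helper (the helper name is illustrative, not from this repository):

```python
import os
import sys


def require_env(name):
    """Return the value of an environment variable or exit with an error."""
    value = os.getenv(name)
    if not value:
        print("Please set %s." % name, file=sys.stderr)
        sys.exit(1)
    return value


# Only demanded when the feature that needs it is actually requested:
# content_store_url = require_env("CONTENT_STORE")
```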
88de184c1d9daa79e47873b0bd8912ea67b32ec1
app/__init__.py
app/__init__.py
from flask import Flask
import base64
import json

from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch

from dmutils import init_app, flask_featureflags

feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()


def create_app(config_name):
    application = Flask(__name__)

    init_app(
        application,
        configs[config_name],
        feature_flags=feature_flags
    )

    if application.config['VCAP_SERVICES']:
        cf_services = json.loads(application.config['VCAP_SERVICES'])
        application.config['ELASTICSEARCH_HOST'] = cf_services['elasticsearch'][0]['credentials']['uris']
        with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
            es_certfile.write(base64.b64decode(cf_services['elasticsearch'][0]['credentials']['ca_certificate_base64']))

    elasticsearch_client.init_app(
        application,
        verify_certs=True,
        ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
    )

    from .main import main as main_blueprint
    from .status import status as status_blueprint

    application.register_blueprint(status_blueprint)
    application.register_blueprint(main_blueprint)

    return application
from flask import Flask
import base64
import json

from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch

from dmutils import init_app, flask_featureflags

feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()


def create_app(config_name):
    application = Flask(__name__)

    init_app(
        application,
        configs[config_name],
        feature_flags=feature_flags
    )

    if application.config['VCAP_SERVICES']:
        cf_services = json.loads(application.config['VCAP_SERVICES'])
        application.config['ELASTICSEARCH_HOST'] = \
            cf_services['elasticsearch-compose'][0]['credentials']['uris']
        with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
            es_certfile.write(
                base64.b64decode(cf_services['elasticsearch-compose'][0]['credentials']['ca_certificate_base64'])
            )

    elasticsearch_client.init_app(
        application,
        verify_certs=True,
        ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
    )

    from .main import main as main_blueprint
    from .status import status as status_blueprint

    application.register_blueprint(status_blueprint)
    application.register_blueprint(main_blueprint)

    return application
Change the VCAP_SERVICE key for elasticsearch
Change the VCAP_SERVICE key for elasticsearch

GOV.UK PaaS have recently changed the name of their elasticsearch
service in preparation for migration. This quick fix will work until
elasticsearch-compose is withdrawn; a future solution should use a
more robust way of determining the elasticsearch URI.
Python
mit
alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api
from flask import Flask
import base64
import json

from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch

from dmutils import init_app, flask_featureflags

feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()


def create_app(config_name):
    application = Flask(__name__)

    init_app(
        application,
        configs[config_name],
        feature_flags=feature_flags
    )

    if application.config['VCAP_SERVICES']:
        cf_services = json.loads(application.config['VCAP_SERVICES'])
-        application.config['ELASTICSEARCH_HOST'] = cf_services['elasticsearch'][0]['credentials']['uris']
+        application.config['ELASTICSEARCH_HOST'] = \
+            cf_services['elasticsearch-compose'][0]['credentials']['uris']
        with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
+            es_certfile.write(
-            es_certfile.write(base64.b64decode(cf_services['elasticsearch'][0]['credentials']['ca_certificate_base64']))
+                base64.b64decode(cf_services['elasticsearch-compose'][0]['credentials']['ca_certificate_base64'])
+            )

    elasticsearch_client.init_app(
        application,
        verify_certs=True,
        ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
    )

    from .main import main as main_blueprint
    from .status import status as status_blueprint

    application.register_blueprint(status_blueprint)
    application.register_blueprint(main_blueprint)

    return application
Change the VCAP_SERVICE key for elasticsearch
## Code Before:
from flask import Flask
import base64
import json

from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch

from dmutils import init_app, flask_featureflags

feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()


def create_app(config_name):
    application = Flask(__name__)

    init_app(
        application,
        configs[config_name],
        feature_flags=feature_flags
    )

    if application.config['VCAP_SERVICES']:
        cf_services = json.loads(application.config['VCAP_SERVICES'])
        application.config['ELASTICSEARCH_HOST'] = cf_services['elasticsearch'][0]['credentials']['uris']
        with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
            es_certfile.write(base64.b64decode(cf_services['elasticsearch'][0]['credentials']['ca_certificate_base64']))

    elasticsearch_client.init_app(
        application,
        verify_certs=True,
        ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
    )

    from .main import main as main_blueprint
    from .status import status as status_blueprint

    application.register_blueprint(status_blueprint)
    application.register_blueprint(main_blueprint)

    return application
## Instruction:
Change the VCAP_SERVICE key for elasticsearch
## Code After:
from flask import Flask
import base64
import json

from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch

from dmutils import init_app, flask_featureflags

feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()


def create_app(config_name):
    application = Flask(__name__)

    init_app(
        application,
        configs[config_name],
        feature_flags=feature_flags
    )

    if application.config['VCAP_SERVICES']:
        cf_services = json.loads(application.config['VCAP_SERVICES'])
        application.config['ELASTICSEARCH_HOST'] = \
            cf_services['elasticsearch-compose'][0]['credentials']['uris']
        with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
            es_certfile.write(
                base64.b64decode(cf_services['elasticsearch-compose'][0]['credentials']['ca_certificate_base64'])
            )

    elasticsearch_client.init_app(
        application,
        verify_certs=True,
        ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
    )

    from .main import main as main_blueprint
    from .status import status as status_blueprint

    application.register_blueprint(status_blueprint)
    application.register_blueprint(main_blueprint)

    return application
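The commit message itself flags that hard-coding the `elasticsearch-compose` key is fragile: that key is the bound service's label, which the platform can rename again. A more robust lookup, sketched under the assumption that exactly one bound service exposes an elasticsearch URI (the helper is illustrative, not from this repository):

```python
import json


def find_service(vcap_services_json, name_fragment):
    """Return the first bound service whose label or name mentions name_fragment."""
    services = json.loads(vcap_services_json)
    for label, instances in services.items():
        for instance in instances:
            if name_fragment in label or name_fragment in instance.get('name', ''):
                return instance
    raise LookupError('no %r service bound' % name_fragment)


# credentials = find_service(application.config['VCAP_SERVICES'], 'elasticsearch')['credentials']
```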
c037f405de773a3c9e9a7affedf2ee154a3c1766
django_q/migrations/0003_auto_20150708_1326.py
django_q/migrations/0003_auto_20150708_1326.py
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('django_q', '0002_auto_20150630_1624'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='failure',
            options={'verbose_name_plural': 'Failed tasks', 'verbose_name': 'Failed task'},
        ),
        migrations.AlterModelOptions(
            name='schedule',
            options={'verbose_name_plural': 'Scheduled tasks', 'ordering': ['next_run'], 'verbose_name': 'Scheduled task'},
        ),
        migrations.AlterModelOptions(
            name='success',
            options={'verbose_name_plural': 'Successful tasks', 'verbose_name': 'Successful task'},
        ),
        migrations.AlterField(
            model_name='task',
            name='id',
            field=models.CharField(max_length=32, primary_key=True, editable=False, serialize=False),
        ),
    ]
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('django_q', '0002_auto_20150630_1624'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='failure',
            options={'verbose_name_plural': 'Failed tasks', 'verbose_name': 'Failed task'},
        ),
        migrations.AlterModelOptions(
            name='schedule',
            options={'verbose_name_plural': 'Scheduled tasks', 'ordering': ['next_run'], 'verbose_name': 'Scheduled task'},
        ),
        migrations.AlterModelOptions(
            name='success',
            options={'verbose_name_plural': 'Successful tasks', 'verbose_name': 'Successful task'},
        ),
        migrations.RemoveField(
            model_name='task',
            name='id',
        ),
        migrations.AddField(
            model_name='task',
            name='id',
            field=models.CharField(max_length=32, primary_key=True, editable=False, serialize=False),
        ),
    ]
Remove and replace task.id field, instead of Alter
Remove and replace task.id field, instead of Alter
Python
mit
Koed00/django-q
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('django_q', '0002_auto_20150630_1624'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='failure',
            options={'verbose_name_plural': 'Failed tasks', 'verbose_name': 'Failed task'},
        ),
        migrations.AlterModelOptions(
            name='schedule',
            options={'verbose_name_plural': 'Scheduled tasks', 'ordering': ['next_run'], 'verbose_name': 'Scheduled task'},
        ),
        migrations.AlterModelOptions(
            name='success',
            options={'verbose_name_plural': 'Successful tasks', 'verbose_name': 'Successful task'},
        ),
+        migrations.RemoveField(
+            model_name='task',
+            name='id',
+        ),
-        migrations.AlterField(
+        migrations.AddField(
            model_name='task',
            name='id',
            field=models.CharField(max_length=32, primary_key=True, editable=False, serialize=False),
        ),
    ]
Remove and replace task.id field, instead of Alter
## Code Before:
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('django_q', '0002_auto_20150630_1624'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='failure',
            options={'verbose_name_plural': 'Failed tasks', 'verbose_name': 'Failed task'},
        ),
        migrations.AlterModelOptions(
            name='schedule',
            options={'verbose_name_plural': 'Scheduled tasks', 'ordering': ['next_run'], 'verbose_name': 'Scheduled task'},
        ),
        migrations.AlterModelOptions(
            name='success',
            options={'verbose_name_plural': 'Successful tasks', 'verbose_name': 'Successful task'},
        ),
        migrations.AlterField(
            model_name='task',
            name='id',
            field=models.CharField(max_length=32, primary_key=True, editable=False, serialize=False),
        ),
    ]
## Instruction:
Remove and replace task.id field, instead of Alter
## Code After:
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('django_q', '0002_auto_20150630_1624'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='failure',
            options={'verbose_name_plural': 'Failed tasks', 'verbose_name': 'Failed task'},
        ),
        migrations.AlterModelOptions(
            name='schedule',
            options={'verbose_name_plural': 'Scheduled tasks', 'ordering': ['next_run'], 'verbose_name': 'Scheduled task'},
        ),
        migrations.AlterModelOptions(
            name='success',
            options={'verbose_name_plural': 'Successful tasks', 'verbose_name': 'Successful task'},
        ),
        migrations.RemoveField(
            model_name='task',
            name='id',
        ),
        migrations.AddField(
            model_name='task',
            name='id',
            field=models.CharField(max_length=32, primary_key=True, editable=False, serialize=False),
        ),
    ]
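Swapping `AlterField` for a `RemoveField`/`AddField` pair is a common workaround when altering a primary key in place trips up a database backend; the cost is that the column is dropped and recreated rather than converted, so existing values in it are lost. One quick way to see what a given backend would actually run, as a sketch:

```python
# Sketch: render the SQL Django would execute for this migration.
from django.core.management import call_command

call_command('sqlmigrate', 'django_q', '0003')
```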
66e2e3bee9996a0cb55c7b802a638e42bc72ccbe
zazu/plugins/astyle_styler.py
zazu/plugins/astyle_styler.py
"""astyle plugin for zazu""" import zazu.styler import zazu.util __author__ = "Nicholas Wiles" __copyright__ = "Copyright 2017" class AstyleStyler(zazu.styler.Styler): """Astyle plugin for code styling""" def style_file(self, file, verbose, dry_run): """Run astyle on a file""" args = ['astyle', '-v'] + self.options if dry_run: args.append('--dry-run') args.append(file) output = zazu.util.check_output(args) fix_needed = output.startswith('Formatted ') return file, fix_needed @staticmethod def default_extensions(): return ['*.c', '*.cc', '*.cpp', '*.h', '*.hpp', '*.java'] @staticmethod def type(): return 'astyle'
"""astyle plugin for zazu""" import zazu.styler import zazu.util __author__ = "Nicholas Wiles" __copyright__ = "Copyright 2017" class AstyleStyler(zazu.styler.Styler): """Astyle plugin for code styling""" def style_file(self, file, verbose, dry_run): """Run astyle on a file""" args = ['astyle', '--formatted'] + self.options if dry_run: args.append('--dry-run') args.append(file) output = zazu.util.check_output(args) return file, bool(output) @staticmethod def default_extensions(): return ['*.c', '*.cc', '*.cpp', '*.h', '*.hpp', '*.java'] @staticmethod def type(): return 'astyle'
Use formatted flag on astyle to simplify code
Use formatted flag on astyle to simplify code
Python
mit
stopthatcow/zazu,stopthatcow/zazu
"""astyle plugin for zazu""" import zazu.styler import zazu.util __author__ = "Nicholas Wiles" __copyright__ = "Copyright 2017" class AstyleStyler(zazu.styler.Styler): """Astyle plugin for code styling""" def style_file(self, file, verbose, dry_run): """Run astyle on a file""" - args = ['astyle', '-v'] + self.options + args = ['astyle', '--formatted'] + self.options if dry_run: args.append('--dry-run') args.append(file) output = zazu.util.check_output(args) + return file, bool(output) - fix_needed = output.startswith('Formatted ') - return file, fix_needed @staticmethod def default_extensions(): return ['*.c', '*.cc', '*.cpp', '*.h', '*.hpp', '*.java'] @staticmethod def type(): return 'astyle'
Use formatted flag on astyle to simplify code
## Code Before:
"""astyle plugin for zazu"""
import zazu.styler
import zazu.util

__author__ = "Nicholas Wiles"
__copyright__ = "Copyright 2017"


class AstyleStyler(zazu.styler.Styler):
    """Astyle plugin for code styling"""

    def style_file(self, file, verbose, dry_run):
        """Run astyle on a file"""
        args = ['astyle', '-v'] + self.options
        if dry_run:
            args.append('--dry-run')
        args.append(file)
        output = zazu.util.check_output(args)
        fix_needed = output.startswith('Formatted ')
        return file, fix_needed

    @staticmethod
    def default_extensions():
        return ['*.c', '*.cc', '*.cpp', '*.h', '*.hpp', '*.java']

    @staticmethod
    def type():
        return 'astyle'
## Instruction:
Use formatted flag on astyle to simplify code
## Code After:
"""astyle plugin for zazu"""
import zazu.styler
import zazu.util

__author__ = "Nicholas Wiles"
__copyright__ = "Copyright 2017"


class AstyleStyler(zazu.styler.Styler):
    """Astyle plugin for code styling"""

    def style_file(self, file, verbose, dry_run):
        """Run astyle on a file"""
        args = ['astyle', '--formatted'] + self.options
        if dry_run:
            args.append('--dry-run')
        args.append(file)
        output = zazu.util.check_output(args)
        return file, bool(output)

    @staticmethod
    def default_extensions():
        return ['*.c', '*.cc', '*.cpp', '*.h', '*.hpp', '*.java']

    @staticmethod
    def type():
        return 'astyle'
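astyle's `--formatted` flag restricts output to the names of files it actually changed (or would change under `--dry-run`), which is why the truthiness of the captured output is enough here. A standalone sketch of the same check using only the standard library, independent of zazu's internal `check_output` wrapper:

```python
import subprocess


def needs_formatting(path):
    # With --formatted, astyle prints the file name only if it would reformat it.
    output = subprocess.check_output(
        ['astyle', '--formatted', '--dry-run', path])
    return bool(output.strip())
```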
887cb1b1a021b6d4a1952fdeb178e602d8cabfdc
clifford/test/__init__.py
clifford/test/__init__.py
from .test_algebra_initialisation import *
from .test_clifford import *
from .test_io import *
from .test_g3c_tools import *
from .test_tools import *
from .test_g3c_CUDA import *

import unittest


def run_all_tests():
    unittest.main()
import os

import pytest


def run_all_tests(*args):
    """ Invoke pytest, forwarding options to pytest.main """
    pytest.main([os.path.dirname(__file__)] + list(args))
Fix `clifford.test.run_all_tests` to use pytest
Fix `clifford.test.run_all_tests` to use pytest

Closes gh-91. Tests can be run with

```python
import clifford.test
clifford.test.run_all_tests()
```
Python
bsd-3-clause
arsenovic/clifford,arsenovic/clifford
+ import os
+ import pytest
- from .test_algebra_initialisation import *
- from .test_clifford import *
- from .test_io import *
- from .test_g3c_tools import *
- from .test_tools import *
- from .test_g3c_CUDA import *

- import unittest


+ def run_all_tests(*args):
+     """ Invoke pytest, forwarding options to pytest.main """
+     pytest.main([os.path.dirname(__file__)] + list(args))
-
- def run_all_tests():
-     unittest.main()
-
Fix `clifford.test.run_all_tests` to use pytest
## Code Before:
from .test_algebra_initialisation import *
from .test_clifford import *
from .test_io import *
from .test_g3c_tools import *
from .test_tools import *
from .test_g3c_CUDA import *

import unittest


def run_all_tests():
    unittest.main()
## Instruction:
Fix `clifford.test.run_all_tests` to use pytest
## Code After:
import os

import pytest


def run_all_tests(*args):
    """ Invoke pytest, forwarding options to pytest.main """
    pytest.main([os.path.dirname(__file__)] + list(args))
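Because the new `run_all_tests` forwards its arguments straight to `pytest.main`, any pytest option can be passed through unchanged. For example:

```python
import clifford.test

# Run only tests whose names match 'g3c', stopping at the first failure.
clifford.test.run_all_tests('-k', 'g3c', '-x')
```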
9633f3ee1a3431cb373a4652afbfc2cd8b3b4c23
test_utils/anki/__init__.py
test_utils/anki/__init__.py
import sys
from unittest.mock import MagicMock


class MockAnkiModules:
    """
    I'd like to get rid of the situation when this is required, but for now this helps with the situation that
    anki modules are not available during test runtime.
    """
    modules_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
                    'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']

    def __init__(self):
        self.shadowed_modules = {}
        for module in self.modules_list:
            self.shadowed_modules[module] = sys.modules.get(module)
            sys.modules[module] = MagicMock()

    def unmock(self):
        for module in self.modules_list:
            shadowed_module = self.shadowed_modules[module]
            if shadowed_module is not None:
                sys.modules[module] = shadowed_module
            else:
                if module in sys.modules:
                    del sys.modules[module]
from typing import List
from typing import Optional

import sys
from unittest.mock import MagicMock


class MockAnkiModules:
    """
    I'd like to get rid of the situation when this is required, but for now this helps with the situation that
    anki modules are not available during test runtime.
    """
    module_names_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
                         'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']

    def __init__(self, module_names_list: Optional[List[str]] = None):
        if module_names_list is None:
            module_names_list = self.module_names_list

        self.shadowed_modules = {}
        for module_name in module_names_list:
            self.shadowed_modules[module_name] = sys.modules.get(module_name)
            sys.modules[module_name] = MagicMock()

    def unmock(self):
        for module_name, module in self.shadowed_modules.items():
            if module is not None:
                sys.modules[module_name] = module
            else:
                if module_name in sys.modules:
                    del sys.modules[module_name]
Allow specifying modules to be mocked
Allow specifying modules to be mocked
Python
mit
Stvad/CrowdAnki,Stvad/CrowdAnki,Stvad/CrowdAnki
+ from typing import List
+ from typing import Optional
import sys
from unittest.mock import MagicMock


class MockAnkiModules:
    """
    I'd like to get rid of the situation when this is required, but for now this helps with the situation that
    anki modules are not available during test runtime.
    """
-    modules_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
+    module_names_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
-                    'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']
+                         'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']

-    def __init__(self):
+    def __init__(self, module_names_list: Optional[List[str]] = None):
+        if module_names_list is None:
+            module_names_list = self.module_names_list
+
        self.shadowed_modules = {}
-        for module in self.modules_list:
+        for module_name in module_names_list:
-            self.shadowed_modules[module] = sys.modules.get(module)
+            self.shadowed_modules[module_name] = sys.modules.get(module_name)
-            sys.modules[module] = MagicMock()
+            sys.modules[module_name] = MagicMock()

    def unmock(self):
+        for module_name, module in self.shadowed_modules.items():
-        for module in self.modules_list:
-            shadowed_module = self.shadowed_modules[module]
-            if shadowed_module is not None:
+            if module is not None:
-                sys.modules[module] = shadowed_module
+                sys.modules[module_name] = module
            else:
-                if module in sys.modules:
+                if module_name in sys.modules:
-                    del sys.modules[module]
+                    del sys.modules[module_name]
Allow specifying modules to be mocked
## Code Before:
import sys
from unittest.mock import MagicMock


class MockAnkiModules:
    """
    I'd like to get rid of the situation when this is required, but for now this helps with the situation that
    anki modules are not available during test runtime.
    """
    modules_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
                    'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']

    def __init__(self):
        self.shadowed_modules = {}
        for module in self.modules_list:
            self.shadowed_modules[module] = sys.modules.get(module)
            sys.modules[module] = MagicMock()

    def unmock(self):
        for module in self.modules_list:
            shadowed_module = self.shadowed_modules[module]
            if shadowed_module is not None:
                sys.modules[module] = shadowed_module
            else:
                if module in sys.modules:
                    del sys.modules[module]
## Instruction:
Allow specifying modules to be mocked
## Code After:
from typing import List
from typing import Optional

import sys
from unittest.mock import MagicMock


class MockAnkiModules:
    """
    I'd like to get rid of the situation when this is required, but for now this helps with the situation that
    anki modules are not available during test runtime.
    """
    module_names_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
                         'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']

    def __init__(self, module_names_list: Optional[List[str]] = None):
        if module_names_list is None:
            module_names_list = self.module_names_list

        self.shadowed_modules = {}
        for module_name in module_names_list:
            self.shadowed_modules[module_name] = sys.modules.get(module_name)
            sys.modules[module_name] = MagicMock()

    def unmock(self):
        for module_name, module in self.shadowed_modules.items():
            if module is not None:
                sys.modules[module_name] = module
            else:
                if module_name in sys.modules:
                    del sys.modules[module_name]
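Typical use is to install the mocks before importing any module that itself imports `anki` or `aqt`, then restore `sys.modules` afterwards. A sketch (the add-on module name is hypothetical):

```python
mock_modules = MockAnkiModules(['aqt', 'aqt.utils'])
try:
    import my_addon  # would fail outside Anki without the MagicMock stand-ins
finally:
    mock_modules.unmock()  # put back whatever was in sys.modules before
```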
deb87fefcc7fa76de3ae29ae58e816e49184d100
openfisca_core/model_api.py
openfisca_core/model_api.py
from datetime import date  # noqa analysis:ignore

from numpy import maximum as max_, minimum as min_, logical_not as not_, where, select  # noqa analysis:ignore

from .columns import (  # noqa analysis:ignore
    AgeCol,
    BoolCol,
    DateCol,
    EnumCol,
    FixedStrCol,
    FloatCol,
    IntCol,
    PeriodSizeIndependentIntCol,
    StrCol,
    )
from .enumerations import Enum  # noqa analysis:ignore
from .formulas import (  # noqa analysis:ignore
    ADD,
    calculate_output_add,
    calculate_output_divide,
    dated_function,
    DIVIDE,
    set_input_dispatch_by_period,
    set_input_divide_by_period,
    missing_value
    )
from .base_functions import (  # noqa analysis:ignore
    requested_period_added_value,
    requested_period_default_value,
    requested_period_last_or_next_value,
    requested_period_last_value,
    )
from .variables import DatedVariable, Variable  # noqa analysis:ignore
from .formula_helpers import apply_thresholds, switch  # noqa analysis:ignore
from .periods import MONTH, YEAR, ETERNITY  # noqa analysis:ignore
from .reforms import Reform  # noqa analysis:ignore
from datetime import date  # noqa analysis:ignore

from numpy import (  # noqa analysis:ignore
    logical_not as not_,
    maximum as max_,
    minimum as min_,
    round as round_,
    select,
    where,
    )

from .columns import (  # noqa analysis:ignore
    AgeCol,
    BoolCol,
    DateCol,
    EnumCol,
    FixedStrCol,
    FloatCol,
    IntCol,
    PeriodSizeIndependentIntCol,
    StrCol,
    )
from .enumerations import Enum  # noqa analysis:ignore
from .formulas import (  # noqa analysis:ignore
    ADD,
    calculate_output_add,
    calculate_output_divide,
    dated_function,
    DIVIDE,
    set_input_dispatch_by_period,
    set_input_divide_by_period,
    missing_value
    )
from .base_functions import (  # noqa analysis:ignore
    requested_period_added_value,
    requested_period_default_value,
    requested_period_last_or_next_value,
    requested_period_last_value,
    )
from .variables import DatedVariable, Variable  # noqa analysis:ignore
from .formula_helpers import apply_thresholds, switch  # noqa analysis:ignore
from .periods import MONTH, YEAR, ETERNITY  # noqa analysis:ignore
from .reforms import Reform  # noqa analysis:ignore
Add numpy.round to model api
Add numpy.round to model api
Python
agpl-3.0
openfisca/openfisca-core,openfisca/openfisca-core
from datetime import date  # noqa analysis:ignore

- from numpy import maximum as max_, minimum as min_, logical_not as not_, where, select  # noqa analysis:ignore
+ from numpy import (  # noqa analysis:ignore
+     logical_not as not_,
+     maximum as max_,
+     minimum as min_,
+     round as round_,
+     select,
+     where,
+     )

from .columns import (  # noqa analysis:ignore
    AgeCol,
    BoolCol,
    DateCol,
    EnumCol,
    FixedStrCol,
    FloatCol,
    IntCol,
    PeriodSizeIndependentIntCol,
    StrCol,
    )
from .enumerations import Enum  # noqa analysis:ignore
from .formulas import (  # noqa analysis:ignore
    ADD,
    calculate_output_add,
    calculate_output_divide,
    dated_function,
    DIVIDE,
    set_input_dispatch_by_period,
    set_input_divide_by_period,
    missing_value
    )
from .base_functions import (  # noqa analysis:ignore
    requested_period_added_value,
    requested_period_default_value,
    requested_period_last_or_next_value,
    requested_period_last_value,
    )
from .variables import DatedVariable, Variable  # noqa analysis:ignore
from .formula_helpers import apply_thresholds, switch  # noqa analysis:ignore
from .periods import MONTH, YEAR, ETERNITY  # noqa analysis:ignore
from .reforms import Reform  # noqa analysis:ignore
Add numpy.round to model api
## Code Before:
from datetime import date  # noqa analysis:ignore

from numpy import maximum as max_, minimum as min_, logical_not as not_, where, select  # noqa analysis:ignore

from .columns import (  # noqa analysis:ignore
    AgeCol,
    BoolCol,
    DateCol,
    EnumCol,
    FixedStrCol,
    FloatCol,
    IntCol,
    PeriodSizeIndependentIntCol,
    StrCol,
    )
from .enumerations import Enum  # noqa analysis:ignore
from .formulas import (  # noqa analysis:ignore
    ADD,
    calculate_output_add,
    calculate_output_divide,
    dated_function,
    DIVIDE,
    set_input_dispatch_by_period,
    set_input_divide_by_period,
    missing_value
    )
from .base_functions import (  # noqa analysis:ignore
    requested_period_added_value,
    requested_period_default_value,
    requested_period_last_or_next_value,
    requested_period_last_value,
    )
from .variables import DatedVariable, Variable  # noqa analysis:ignore
from .formula_helpers import apply_thresholds, switch  # noqa analysis:ignore
from .periods import MONTH, YEAR, ETERNITY  # noqa analysis:ignore
from .reforms import Reform  # noqa analysis:ignore
## Instruction:
Add numpy.round to model api
## Code After:
from datetime import date  # noqa analysis:ignore

from numpy import (  # noqa analysis:ignore
    logical_not as not_,
    maximum as max_,
    minimum as min_,
    round as round_,
    select,
    where,
    )

from .columns import (  # noqa analysis:ignore
    AgeCol,
    BoolCol,
    DateCol,
    EnumCol,
    FixedStrCol,
    FloatCol,
    IntCol,
    PeriodSizeIndependentIntCol,
    StrCol,
    )
from .enumerations import Enum  # noqa analysis:ignore
from .formulas import (  # noqa analysis:ignore
    ADD,
    calculate_output_add,
    calculate_output_divide,
    dated_function,
    DIVIDE,
    set_input_dispatch_by_period,
    set_input_divide_by_period,
    missing_value
    )
from .base_functions import (  # noqa analysis:ignore
    requested_period_added_value,
    requested_period_default_value,
    requested_period_last_or_next_value,
    requested_period_last_value,
    )
from .variables import DatedVariable, Variable  # noqa analysis:ignore
from .formula_helpers import apply_thresholds, switch  # noqa analysis:ignore
from .periods import MONTH, YEAR, ETERNITY  # noqa analysis:ignore
from .reforms import Reform  # noqa analysis:ignore
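With `round as round_` re-exported, formulas can round vectorized amounts without shadowing the builtin `round`. A small usage sketch, assuming the package is importable as `openfisca_core`:

```python
from numpy import array

from openfisca_core.model_api import round_

amounts = array([12.344, 15.556, 9.5])
print(round_(amounts, 1))  # rounds elementwise to one decimal place
```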
ccd2afdc687c3d6b7d01bed130e1b0097a4fdc2d
src/damis/run_experiment.py
src/damis/run_experiment.py
import sys

from damis.models import Experiment

exp_pk = sys.argv[1]
exp = Experiment.objects.get(pk=exp_pk)
exp.status = 'FINISHED'
exp.save()
import sys

from damis.models import Experiment, Connection
from damis.settings import BUILDOUT_DIR
from os.path import splitext
from algorithms.preprocess import transpose


def transpose_data_callable(X, c, *args, **kwargs):
    X_absolute = BUILDOUT_DIR + '/var/www' + X
    Y = '%s_transposed%s' % splitext(X)
    Y_absolute = BUILDOUT_DIR + '/var/www' + Y
    transpose(X_absolute, Y_absolute, int(c))
    return [('Y', Y)]

def do_nothing(*args, **kwargs):
    return []

# Collables which get
SERVICES = {
    "UPLOAD FILE": do_nothing,
    "EXISTING FILE": do_nothing,
    "MIDAS FILE": do_nothing,
    "TECHNICAL DETAILS": do_nothing,
    "CHART": do_nothing,
    # "CLEAN DATA",
    # "FILTER DATA",
    # "SPLIT DATA",
    "TRANSPOSE DATA": transpose_data_callable,
    # "TRANSFORM DATA": transform_data_callable,
    # "STAT PRIMITIVES",
    # "MLP",
    # "C45",
    # "KMEANS",
    # "PCA",
    # "SMACOF",
    # "DMA",
    # "SDS",
    # "SAMANN",
    # "SOM",
    # "SOMMDS",
    # "SELECT FEATURES",
}

## Recursively walk through through tasks.
def execute_tasks(task):
    # Get INPUT and COMMON parameter values.
    kwargs = {}
    for pv in task.parameter_values.all():
        cons = Connection.objects.filter(target=pv)
        if cons:
            value = cons[0].source.value
        else:
            value = pv.value
        kwargs[pv.parameter.name] = value

    # Call executable
    service = SERVICES[task.algorithm.title]
    response = service(**kwargs)  # Response dict: name -> value

    # Set OUTPUT parameter values and save.
    for name, value in response:
        pv = task.parameter_values.get(parameter__name=name)
        pv.value = value
        pv.save()

    task.status = 'SAVED'
    task.save()

    ## Call its following tasks
    for pv in task.parameter_values.all():
        for con in Connection.objects.filter(source=pv):
            next_task = con.target.task
            if next_task.status == 'SAVED':
                execute_tasks(next_task)


if __name__ == '__main__':
    exp_pk = sys.argv[1]
    exp = Experiment.objects.get(pk=exp_pk)
    first_task = exp.tasks.filter(algorithm__category='DATA')[0]
    execute_tasks(first_task)
    exp.status = 'FINISHED'
    exp.save()
Implement experiment workflow execution with transpose method.
Implement experiment workflow execution with transpose method.
Python
agpl-3.0
InScience/DAMIS-old,InScience/DAMIS-old
import sys

- from damis.models import Experiment
+ from damis.models import Experiment, Connection
+ from damis.settings import BUILDOUT_DIR
+ from os.path import splitext
+ from algorithms.preprocess import transpose

- exp_pk = sys.argv[1]
- exp = Experiment.objects.get(pk=exp_pk)
- exp.status = 'FINISHED'
- exp.save()
+ def transpose_data_callable(X, c, *args, **kwargs):
+     X_absolute = BUILDOUT_DIR + '/var/www' + X
+     Y = '%s_transposed%s' % splitext(X)
+     Y_absolute = BUILDOUT_DIR + '/var/www' + Y
+     transpose(X_absolute, Y_absolute, int(c))
+     return [('Y', Y)]

+ def do_nothing(*args, **kwargs):
+     return []
+
+ # Collables which get
+ SERVICES = {
+     "UPLOAD FILE": do_nothing,
+     "EXISTING FILE": do_nothing,
+     "MIDAS FILE": do_nothing,
+     "TECHNICAL DETAILS": do_nothing,
+     "CHART": do_nothing,
+     # "CLEAN DATA",
+     # "FILTER DATA",
+     # "SPLIT DATA",
+     "TRANSPOSE DATA": transpose_data_callable,
+     # "TRANSFORM DATA": transform_data_callable,
+     # "STAT PRIMITIVES",
+     # "MLP",
+     # "C45",
+     # "KMEANS",
+     # "PCA",
+     # "SMACOF",
+     # "DMA",
+     # "SDS",
+     # "SAMANN",
+     # "SOM",
+     # "SOMMDS",
+     # "SELECT FEATURES",
+ }
+
+ ## Recursively walk through through tasks.
+ def execute_tasks(task):
+     # Get INPUT and COMMON parameter values.
+     kwargs = {}
+     for pv in task.parameter_values.all():
+         cons = Connection.objects.filter(target=pv)
+         if cons:
+             value = cons[0].source.value
+         else:
+             value = pv.value
+         kwargs[pv.parameter.name] = value
+
+     # Call executable
+     service = SERVICES[task.algorithm.title]
+     response = service(**kwargs)  # Response dict: name -> value
+
+     # Set OUTPUT parameter values and save.
+     for name, value in response:
+         pv = task.parameter_values.get(parameter__name=name)
+         pv.value = value
+         pv.save()
+
+     task.status = 'SAVED'
+     task.save()
+
+     ## Call its following tasks
+     for pv in task.parameter_values.all():
+         for con in Connection.objects.filter(source=pv):
+             next_task = con.target.task
+             if next_task.status == 'SAVED':
+                 execute_tasks(next_task)
+
+
+ if __name__ == '__main__':
+     exp_pk = sys.argv[1]
+     exp = Experiment.objects.get(pk=exp_pk)
+     first_task = exp.tasks.filter(algorithm__category='DATA')[0]
+     execute_tasks(first_task)
+     exp.status = 'FINISHED'
+     exp.save()
+
Implement experiment workflow execution with transpose method.
## Code Before:
import sys

from damis.models import Experiment

exp_pk = sys.argv[1]
exp = Experiment.objects.get(pk=exp_pk)
exp.status = 'FINISHED'
exp.save()
## Instruction:
Implement experiment workflow execution with transpose method.
## Code After:
import sys

from damis.models import Experiment, Connection
from damis.settings import BUILDOUT_DIR
from os.path import splitext
from algorithms.preprocess import transpose


def transpose_data_callable(X, c, *args, **kwargs):
    X_absolute = BUILDOUT_DIR + '/var/www' + X
    Y = '%s_transposed%s' % splitext(X)
    Y_absolute = BUILDOUT_DIR + '/var/www' + Y
    transpose(X_absolute, Y_absolute, int(c))
    return [('Y', Y)]

def do_nothing(*args, **kwargs):
    return []

# Collables which get
SERVICES = {
    "UPLOAD FILE": do_nothing,
    "EXISTING FILE": do_nothing,
    "MIDAS FILE": do_nothing,
    "TECHNICAL DETAILS": do_nothing,
    "CHART": do_nothing,
    # "CLEAN DATA",
    # "FILTER DATA",
    # "SPLIT DATA",
    "TRANSPOSE DATA": transpose_data_callable,
    # "TRANSFORM DATA": transform_data_callable,
    # "STAT PRIMITIVES",
    # "MLP",
    # "C45",
    # "KMEANS",
    # "PCA",
    # "SMACOF",
    # "DMA",
    # "SDS",
    # "SAMANN",
    # "SOM",
    # "SOMMDS",
    # "SELECT FEATURES",
}

## Recursively walk through through tasks.
def execute_tasks(task):
    # Get INPUT and COMMON parameter values.
    kwargs = {}
    for pv in task.parameter_values.all():
        cons = Connection.objects.filter(target=pv)
        if cons:
            value = cons[0].source.value
        else:
            value = pv.value
        kwargs[pv.parameter.name] = value

    # Call executable
    service = SERVICES[task.algorithm.title]
    response = service(**kwargs)  # Response dict: name -> value

    # Set OUTPUT parameter values and save.
    for name, value in response:
        pv = task.parameter_values.get(parameter__name=name)
        pv.value = value
        pv.save()

    task.status = 'SAVED'
    task.save()

    ## Call its following tasks
    for pv in task.parameter_values.all():
        for con in Connection.objects.filter(source=pv):
            next_task = con.target.task
            if next_task.status == 'SAVED':
                execute_tasks(next_task)


if __name__ == '__main__':
    exp_pk = sys.argv[1]
    exp = Experiment.objects.get(pk=exp_pk)
    first_task = exp.tasks.filter(algorithm__category='DATA')[0]
    execute_tasks(first_task)
    exp.status = 'FINISHED'
    exp.save()
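The traversal treats the workflow as a directed graph: task outputs are wired to downstream inputs through `Connection` rows, and the `status == 'SAVED'` check keeps a branch from running before its upstream task has produced output. Nothing guards against a cycle in the connections, though. A sketch of the same walk with an explicit guard (model names follow the record above):

```python
def execute_tasks_safely(task, seen=None):
    """Sketch: execute_tasks with a cycle guard for miswired workflows."""
    seen = set() if seen is None else seen
    if task.pk in seen:  # a cyclic workflow would otherwise recurse forever
        return
    seen.add(task.pk)
    # ... run the task exactly as execute_tasks() does ...
    for pv in task.parameter_values.all():
        for con in Connection.objects.filter(source=pv):
            execute_tasks_safely(con.target.task, seen)
```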
00cea9f8e51f53f338e19adf0165031d2f9cad77
c2corg_ui/templates/utils/format.py
c2corg_ui/templates/utils/format.py
import bbcode
import markdown
import html

from c2corg_ui.format.wikilinks import C2CWikiLinkExtension

_markdown_parser = None
_bbcode_parser = None


def _get_markdown_parser():
    global _markdown_parser
    if not _markdown_parser:
        extensions = [
            C2CWikiLinkExtension(),
        ]
        _markdown_parser = markdown.Markdown(output_format='xhtml5',
                                             extensions=extensions)
    return _markdown_parser


def _get_bbcode_parser():
    global _bbcode_parser
    if not _bbcode_parser:
        _bbcode_parser = bbcode.Parser(escape_html=False, newline='\n')
    return _bbcode_parser


def parse_code(text, md=True, bb=True):
    if md:
        text = _get_markdown_parser().convert(text)
    if bb:
        text = _get_bbcode_parser().format(text)
    return text


def sanitize(text):
    return html.escape(text)
import bbcode
import markdown
import html

from c2corg_ui.format.wikilinks import C2CWikiLinkExtension
from markdown.extensions.nl2br import Nl2BrExtension
from markdown.extensions.toc import TocExtension

_markdown_parser = None
_bbcode_parser = None


def _get_markdown_parser():
    global _markdown_parser
    if not _markdown_parser:
        extensions = [
            C2CWikiLinkExtension(),
            Nl2BrExtension(),
            TocExtension(marker='[toc]', baselevel=2),
        ]
        _markdown_parser = markdown.Markdown(output_format='xhtml5',
                                             extensions=extensions)
    return _markdown_parser


def _get_bbcode_parser():
    global _bbcode_parser
    if not _bbcode_parser:
        _bbcode_parser = bbcode.Parser(escape_html=False, newline='\n')
    return _bbcode_parser


def parse_code(text, md=True, bb=True):
    if md:
        text = _get_markdown_parser().convert(text)
    if bb:
        text = _get_bbcode_parser().format(text)
    return text


def sanitize(text):
    return html.escape(text)
Enable markdown extensions for TOC and linebreaks
Enable markdown extensions for TOC and linebreaks
Python
agpl-3.0
Courgetteandratatouille/v6_ui,Courgetteandratatouille/v6_ui,olaurendeau/v6_ui,c2corg/v6_ui,c2corg/v6_ui,c2corg/v6_ui,Courgetteandratatouille/v6_ui,olaurendeau/v6_ui,olaurendeau/v6_ui,c2corg/v6_ui,Courgetteandratatouille/v6_ui,olaurendeau/v6_ui
import bbcode
import markdown
import html

from c2corg_ui.format.wikilinks import C2CWikiLinkExtension
+ from markdown.extensions.nl2br import Nl2BrExtension
+ from markdown.extensions.toc import TocExtension

_markdown_parser = None
_bbcode_parser = None


def _get_markdown_parser():
    global _markdown_parser
    if not _markdown_parser:
        extensions = [
            C2CWikiLinkExtension(),
+            Nl2BrExtension(),
+            TocExtension(marker='[toc]', baselevel=2),
        ]
        _markdown_parser = markdown.Markdown(output_format='xhtml5',
                                             extensions=extensions)
    return _markdown_parser


def _get_bbcode_parser():
    global _bbcode_parser
    if not _bbcode_parser:
        _bbcode_parser = bbcode.Parser(escape_html=False, newline='\n')
    return _bbcode_parser


def parse_code(text, md=True, bb=True):
    if md:
        text = _get_markdown_parser().convert(text)
    if bb:
        text = _get_bbcode_parser().format(text)
    return text


def sanitize(text):
    return html.escape(text)
Enable markdown extensions for TOC and linebreaks
## Code Before:
import bbcode
import markdown
import html

from c2corg_ui.format.wikilinks import C2CWikiLinkExtension

_markdown_parser = None
_bbcode_parser = None


def _get_markdown_parser():
    global _markdown_parser
    if not _markdown_parser:
        extensions = [
            C2CWikiLinkExtension(),
        ]
        _markdown_parser = markdown.Markdown(output_format='xhtml5',
                                             extensions=extensions)
    return _markdown_parser


def _get_bbcode_parser():
    global _bbcode_parser
    if not _bbcode_parser:
        _bbcode_parser = bbcode.Parser(escape_html=False, newline='\n')
    return _bbcode_parser


def parse_code(text, md=True, bb=True):
    if md:
        text = _get_markdown_parser().convert(text)
    if bb:
        text = _get_bbcode_parser().format(text)
    return text


def sanitize(text):
    return html.escape(text)
## Instruction:
Enable markdown extensions for TOC and linebreaks
## Code After:
import bbcode
import markdown
import html

from c2corg_ui.format.wikilinks import C2CWikiLinkExtension
from markdown.extensions.nl2br import Nl2BrExtension
from markdown.extensions.toc import TocExtension

_markdown_parser = None
_bbcode_parser = None


def _get_markdown_parser():
    global _markdown_parser
    if not _markdown_parser:
        extensions = [
            C2CWikiLinkExtension(),
            Nl2BrExtension(),
            TocExtension(marker='[toc]', baselevel=2),
        ]
        _markdown_parser = markdown.Markdown(output_format='xhtml5',
                                             extensions=extensions)
    return _markdown_parser


def _get_bbcode_parser():
    global _bbcode_parser
    if not _bbcode_parser:
        _bbcode_parser = bbcode.Parser(escape_html=False, newline='\n')
    return _bbcode_parser


def parse_code(text, md=True, bb=True):
    if md:
        text = _get_markdown_parser().convert(text)
    if bb:
        text = _get_bbcode_parser().format(text)
    return text


def sanitize(text):
    return html.escape(text)
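With these two extensions enabled, single newlines become `<br>` tags and a `[toc]` marker expands into a table of contents whose headings start at `<h2>`. A standalone sketch of the same parser configuration (the `xhtml5` output format matches the Markdown version this project pins; newer releases may only accept `xhtml`):

```python
import markdown
from markdown.extensions.nl2br import Nl2BrExtension
from markdown.extensions.toc import TocExtension

md = markdown.Markdown(
    output_format='xhtml5',
    extensions=[Nl2BrExtension(), TocExtension(marker='[toc]', baselevel=2)])

print(md.convert('[toc]\n\n# Approach\nfirst line\nsecond line'))
# '# Approach' renders as <h2> (baselevel=2) and the single newline as <br>.
```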
53f7acf5fc04ca6f86456fda95504ba41046d860
openedx/features/specializations/templatetags/sso_meta_tag.py
openedx/features/specializations/templatetags/sso_meta_tag.py
from django import template
from django.template import Template

register = template.Library()


@register.simple_tag(takes_context=True)
def sso_meta(context):
    return Template('<meta name="title" content="${ title }">' + ' ' +
                    '<meta name="description" content="${ subtitle }">' + ' ' +
                    ## OG (Open Graph) title and description added below to give social media info to display
                    ## (https://developers.facebook.com/docs/opengraph/howtos/maximizing-distribution-media-content#tags)
                    '<meta property="og:title" content="${ title }">' + ' ' +
                    '<meta property="og:description" content="${ subtitle }">' + ' ' +
                    '<meta prefix="og: http://ogp.me/ns#" name="image" property="og:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' +
                    '<meta property="og:image:width" content="512">' + ' ' +
                    '<meta property="og:image:height" content="512">' + ' ' +
                    '<meta name="twitter:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' +

                    '<meta name="twitter:card" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' +
                    '<meta name="twitter:site" content="@PhilanthropyUni">' + ' ' +
                    '<meta name="twitter:title" content="${ title }">' + ' ' +
                    '<meta name="twitter:description" content="${ subtitle }">').render(context);
from django import template
from django.template.loader import get_template

register = template.Library()


@register.simple_tag(takes_context=True)
def sso_meta(context):
    return get_template('features/specializations/sso_meta_template.html').render(context.flatten())
Add Django Custom Tag SSO
Add Django Custom Tag SSO
Python
agpl-3.0
philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform
from django import template
- from django.template import Template
+ from django.template.loader import get_template

register = template.Library()


@register.simple_tag(takes_context=True)
def sso_meta(context):
+    return get_template('features/specializations/sso_meta_template.html').render(context.flatten())
-    return Template('<meta name="title" content="${ title }">' + ' ' +
-                    '<meta name="description" content="${ subtitle }">' + ' ' +
-                    ## OG (Open Graph) title and description added below to give social media info to display
-                    ## (https://developers.facebook.com/docs/opengraph/howtos/maximizing-distribution-media-content#tags)
-                    '<meta property="og:title" content="${ title }">' + ' ' +
-                    '<meta property="og:description" content="${ subtitle }">' + ' ' +
-                    '<meta prefix="og: http://ogp.me/ns#" name="image" property="og:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' +
-                    '<meta property="og:image:width" content="512">' + ' ' +
-                    '<meta property="og:image:height" content="512">' + ' ' +
-                    '<meta name="twitter:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' +
-
-                    '<meta name="twitter:card" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' +
-                    '<meta name="twitter:site" content="@PhilanthropyUni">' + ' ' +
-                    '<meta name="twitter:title" content="${ title }">' + ' ' +
-                    '<meta name="twitter:description" content="${ subtitle }">').render(context);
-
-
Add Django Custom Tag SSO
## Code Before:
from django import template
from django.template import Template

register = template.Library()


@register.simple_tag(takes_context=True)
def sso_meta(context):
    return Template('<meta name="title" content="${ title }">' + ' ' +
                    '<meta name="description" content="${ subtitle }">' + ' ' +
                    ## OG (Open Graph) title and description added below to give social media info to display
                    ## (https://developers.facebook.com/docs/opengraph/howtos/maximizing-distribution-media-content#tags)
                    '<meta property="og:title" content="${ title }">' + ' ' +
                    '<meta property="og:description" content="${ subtitle }">' + ' ' +
                    '<meta prefix="og: http://ogp.me/ns#" name="image" property="og:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' +
                    '<meta property="og:image:width" content="512">' + ' ' +
                    '<meta property="og:image:height" content="512">' + ' ' +
                    '<meta name="twitter:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' +

                    '<meta name="twitter:card" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' +
                    '<meta name="twitter:site" content="@PhilanthropyUni">' + ' ' +
                    '<meta name="twitter:title" content="${ title }">' + ' ' +
                    '<meta name="twitter:description" content="${ subtitle }">').render(context);
## Instruction:
Add Django Custom Tag SSO
## Code After:
from django import template
from django.template.loader import get_template

register = template.Library()


@register.simple_tag(takes_context=True)
def sso_meta(context):
    return get_template('features/specializations/sso_meta_template.html').render(context.flatten())
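The `context.flatten()` call is the key detail: a `@register.simple_tag(takes_context=True)` function receives a `django.template.Context` object, while the template returned by `get_template()` expects a plain dict, so the context is flattened before rendering. A sketch of invoking the tag the way a page template would (the context values are illustrative, and the tag library is assumed to load under its module name):

```python
from django.template import Context, Template

tpl = Template('{% load sso_meta_tag %}{% sso_meta %}')
html = tpl.render(Context({
    'title': 'Course title',
    'subtitle': 'Course subtitle',
    'banner_image': {'large': {'url': '/media/banner.png'}},
}))
```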
27bf030df4c2f46eef8cdcd9441bd5d21a22e5cc
parkings/api/public/urls.py
parkings/api/public/urls.py
from django.conf.urls import include, url
from rest_framework.routers import DefaultRouter

from .parking_area import PublicAPIParkingAreaViewSet
from .parking_area_statistics import PublicAPIParkingAreaStatisticsViewSet

router = DefaultRouter()
router.register(r'parking_area', PublicAPIParkingAreaViewSet)
router.register(r'parking_area_statistics', PublicAPIParkingAreaStatisticsViewSet)

urlpatterns = [
    url(r'^', include(router.urls, namespace='v1')),
]
from django.conf.urls import include, url
from rest_framework.routers import DefaultRouter

from .parking_area import PublicAPIParkingAreaViewSet
from .parking_area_statistics import PublicAPIParkingAreaStatisticsViewSet

router = DefaultRouter()
router.register(r'parking_area', PublicAPIParkingAreaViewSet, base_name='parkingarea')
router.register(r'parking_area_statistics', PublicAPIParkingAreaStatisticsViewSet, base_name='parkingareastatistics')

urlpatterns = [
    url(r'^', include(router.urls, namespace='v1')),
]
Fix public API root view links
Fix public API root view links
Python
mit
tuomas777/parkkihubi
from django.conf.urls import include, url
from rest_framework.routers import DefaultRouter

from .parking_area import PublicAPIParkingAreaViewSet
from .parking_area_statistics import PublicAPIParkingAreaStatisticsViewSet

router = DefaultRouter()
- router.register(r'parking_area', PublicAPIParkingAreaViewSet)
+ router.register(r'parking_area', PublicAPIParkingAreaViewSet, base_name='parkingarea')
- router.register(r'parking_area_statistics', PublicAPIParkingAreaStatisticsViewSet)
+ router.register(r'parking_area_statistics', PublicAPIParkingAreaStatisticsViewSet, base_name='parkingareastatistics')

urlpatterns = [
    url(r'^', include(router.urls, namespace='v1')),
]
Fix public API root view links
## Code Before:
from django.conf.urls import include, url
from rest_framework.routers import DefaultRouter

from .parking_area import PublicAPIParkingAreaViewSet
from .parking_area_statistics import PublicAPIParkingAreaStatisticsViewSet

router = DefaultRouter()
router.register(r'parking_area', PublicAPIParkingAreaViewSet)
router.register(r'parking_area_statistics', PublicAPIParkingAreaStatisticsViewSet)

urlpatterns = [
    url(r'^', include(router.urls, namespace='v1')),
]
## Instruction:
Fix public API root view links
## Code After:
from django.conf.urls import include, url
from rest_framework.routers import DefaultRouter

from .parking_area import PublicAPIParkingAreaViewSet
from .parking_area_statistics import PublicAPIParkingAreaStatisticsViewSet

router = DefaultRouter()
router.register(r'parking_area', PublicAPIParkingAreaViewSet, base_name='parkingarea')
router.register(r'parking_area_statistics', PublicAPIParkingAreaStatisticsViewSet, base_name='parkingareastatistics')

urlpatterns = [
    url(r'^', include(router.urls, namespace='v1')),
]
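An explicit `base_name` pins the URL names (`parkingarea-list`, `parkingarea-detail`, and so on) that `DefaultRouter`'s API root view reverses; without it, DRF derives the name from the viewset's `queryset` attribute, which breaks when a viewset does not define one. Note that DRF 3.9 renamed the keyword, so the same registration on a current release would be, as a sketch:

```python
router.register(r'parking_area', PublicAPIParkingAreaViewSet,
                basename='parkingarea')  # spelled base_name before DRF 3.9
```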
521b4fbec142306fad2347a5dd3a56aeec2f9498
events/search_indexes.py
events/search_indexes.py
from haystack import indexes

from .models import Event, Place, PublicationStatus
from django.utils.html import strip_tags


class EventIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    autosuggest = indexes.EdgeNgramField(model_attr='name')
    start_time = indexes.DateTimeField(model_attr='start_time')
    end_time = indexes.DateTimeField(model_attr='end_time')

    def get_updated_field(self):
        return 'last_modified_time'

    def get_model(self):
        return Event

    def prepare(self, obj):
        #obj.lang_keywords = obj.keywords.filter(language=get_language())
        if obj.description:
            obj.description = strip_tags(obj.description)
        return super(EventIndex, self).prepare(obj)

    def index_queryset(self, using=None):
        return self.get_model().objects.filter(publication_status=PublicationStatus.PUBLIC)


class PlaceIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    autosuggest = indexes.EdgeNgramField(model_attr='name')

    def get_updated_field(self):
        return 'last_modified_time'

    def get_model(self):
        return Place
from haystack import indexes

from .models import Event, Place, PublicationStatus
from django.utils.html import strip_tags


class EventIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    autosuggest = indexes.EdgeNgramField(model_attr='name')
    start_time = indexes.DateTimeField(model_attr='start_time')
    end_time = indexes.DateTimeField(model_attr='end_time')

    def get_updated_field(self):
        return 'last_modified_time'

    def get_model(self):
        return Event

    def prepare(self, obj):
        #obj.lang_keywords = obj.keywords.filter(language=get_language())
        if obj.description:
            obj.description = strip_tags(obj.description)
        return super(EventIndex, self).prepare(obj)

    def index_queryset(self, using=None):
        return self.get_model().objects.filter(publication_status=PublicationStatus.PUBLIC)


class PlaceIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    autosuggest = indexes.EdgeNgramField(model_attr='name')

    def get_updated_field(self):
        return 'last_modified_time'

    def get_model(self):
        return Place

    def index_queryset(self, using=None):
        return self.get_model().objects.filter(deleted=False)
Remove deleted places from place index
Remove deleted places from place index
Python
mit
aapris/linkedevents,aapris/linkedevents,tuomas777/linkedevents,City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents,tuomas777/linkedevents,City-of-Helsinki/linkedevents,tuomas777/linkedevents,aapris/linkedevents
from haystack import indexes from .models import Event, Place, PublicationStatus from django.utils.html import strip_tags class EventIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) autosuggest = indexes.EdgeNgramField(model_attr='name') start_time = indexes.DateTimeField(model_attr='start_time') end_time = indexes.DateTimeField(model_attr='end_time') def get_updated_field(self): return 'last_modified_time' def get_model(self): return Event def prepare(self, obj): #obj.lang_keywords = obj.keywords.filter(language=get_language()) if obj.description: obj.description = strip_tags(obj.description) return super(EventIndex, self).prepare(obj) def index_queryset(self, using=None): return self.get_model().objects.filter(publication_status=PublicationStatus.PUBLIC) class PlaceIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) autosuggest = indexes.EdgeNgramField(model_attr='name') def get_updated_field(self): return 'last_modified_time' def get_model(self): return Place + def index_queryset(self, using=None): + return self.get_model().objects.filter(deleted=False) +
Remove deleted places from place index
## Code Before: from haystack import indexes from .models import Event, Place, PublicationStatus from django.utils.html import strip_tags class EventIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) autosuggest = indexes.EdgeNgramField(model_attr='name') start_time = indexes.DateTimeField(model_attr='start_time') end_time = indexes.DateTimeField(model_attr='end_time') def get_updated_field(self): return 'last_modified_time' def get_model(self): return Event def prepare(self, obj): #obj.lang_keywords = obj.keywords.filter(language=get_language()) if obj.description: obj.description = strip_tags(obj.description) return super(EventIndex, self).prepare(obj) def index_queryset(self, using=None): return self.get_model().objects.filter(publication_status=PublicationStatus.PUBLIC) class PlaceIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) autosuggest = indexes.EdgeNgramField(model_attr='name') def get_updated_field(self): return 'last_modified_time' def get_model(self): return Place ## Instruction: Remove deleted places from place index ## Code After: from haystack import indexes from .models import Event, Place, PublicationStatus from django.utils.html import strip_tags class EventIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) autosuggest = indexes.EdgeNgramField(model_attr='name') start_time = indexes.DateTimeField(model_attr='start_time') end_time = indexes.DateTimeField(model_attr='end_time') def get_updated_field(self): return 'last_modified_time' def get_model(self): return Event def prepare(self, obj): #obj.lang_keywords = obj.keywords.filter(language=get_language()) if obj.description: obj.description = strip_tags(obj.description) return super(EventIndex, self).prepare(obj) def index_queryset(self, using=None): return self.get_model().objects.filter(publication_status=PublicationStatus.PUBLIC) class PlaceIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) autosuggest = indexes.EdgeNgramField(model_attr='name') def get_updated_field(self): return 'last_modified_time' def get_model(self): return Place def index_queryset(self, using=None): return self.get_model().objects.filter(deleted=False)
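One caveat worth knowing about the change above: `index_queryset` governs which rows `update_index`/`rebuild_index` pick up, while Haystack hydrates returned search hits through a separate `read_queryset` hook. A hedged sketch filtering both paths, assuming the same `deleted` flag on the model:

from haystack import indexes
from .models import Place  # assumed relative import, as in the diff

class PlaceIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)

    def get_model(self):
        return Place

    def index_queryset(self, using=None):
        # Controls which rows get (re)indexed.
        return self.get_model().objects.filter(deleted=False)

    def read_queryset(self, using=None):
        # Controls which rows are returned for hits already in the index,
        # so stale soft-deleted entries don't surface before a rebuild.
        return self.get_model().objects.filter(deleted=False)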
84f4626a623283c3c4d98d9be0ccd69fe837f772
download_data.py
download_data.py
from lbtoolbox.download import download import os import inspect import tarfile def here(f): me = inspect.getsourcefile(here) return os.path.join(os.path.dirname(os.path.abspath(me)), f) def download_extract(url, into): fname = download(url, into) print("Extracting...") with tarfile.open(fname) as f: f.extractall(path=into) if __name__ == '__main__': baseurl = 'https://omnomnom.vision.rwth-aachen.de/data/tosato/' datadir = here('data') # First, download the Tosato datasets. download_extract(baseurl + 'CAVIARShoppingCenterFullOccl.tar.bz2', into=datadir) download_extract(baseurl + 'CAVIARShoppingCenterFull.tar.bz2', into=datadir) download_extract(baseurl + 'HIIT6HeadPose.tar.bz2', into=datadir) download_extract(baseurl + 'HOC.tar.bz2', into=datadir) download_extract(baseurl + 'HOCoffee.tar.bz2', into=datadir) download_extract(baseurl + 'IHDPHeadPose.tar.bz2', into=datadir) download_extract(baseurl + 'QMULPoseHeads.tar.bz2', into=datadir)
from lbtoolbox.download import download import os import inspect import tarfile def here(f): me = inspect.getsourcefile(here) return os.path.join(os.path.dirname(os.path.abspath(me)), f) def download_extract(urlbase, name, into): print("Downloading " + name) fname = download(os.path.join(urlbase, name), into) print("Extracting...") with tarfile.open(fname) as f: f.extractall(path=into) if __name__ == '__main__': baseurl = 'https://omnomnom.vision.rwth-aachen.de/data/BiternionNets/' datadir = here('data') # First, download the Tosato datasets. download_extract(baseurl, 'CAVIARShoppingCenterFullOccl.tar.bz2', into=datadir) download_extract(baseurl, 'CAVIARShoppingCenterFull.tar.bz2', into=datadir) download_extract(baseurl, 'HIIT6HeadPose.tar.bz2', into=datadir) download_extract(baseurl, 'HOC.tar.bz2', into=datadir) download_extract(baseurl, 'HOCoffee.tar.bz2', into=datadir) download_extract(baseurl, 'IHDPHeadPose.tar.bz2', into=datadir) download_extract(baseurl, 'QMULPoseHeads.tar.bz2', into=datadir) print("Done.")
Update download URL and add more output to downloader.
Update download URL and add more output to downloader.
Python
mit
lucasb-eyer/BiternionNet
from lbtoolbox.download import download import os import inspect import tarfile def here(f): me = inspect.getsourcefile(here) return os.path.join(os.path.dirname(os.path.abspath(me)), f) - def download_extract(url, into): + def download_extract(urlbase, name, into): - fname = download(url, into) + print("Downloading " + name) + fname = download(os.path.join(urlbase, name), into) print("Extracting...") with tarfile.open(fname) as f: f.extractall(path=into) if __name__ == '__main__': - baseurl = 'https://omnomnom.vision.rwth-aachen.de/data/tosato/' + baseurl = 'https://omnomnom.vision.rwth-aachen.de/data/BiternionNets/' datadir = here('data') # First, download the Tosato datasets. - download_extract(baseurl + 'CAVIARShoppingCenterFullOccl.tar.bz2', into=datadir) + download_extract(baseurl, 'CAVIARShoppingCenterFullOccl.tar.bz2', into=datadir) - download_extract(baseurl + 'CAVIARShoppingCenterFull.tar.bz2', into=datadir) + download_extract(baseurl, 'CAVIARShoppingCenterFull.tar.bz2', into=datadir) - download_extract(baseurl + 'HIIT6HeadPose.tar.bz2', into=datadir) + download_extract(baseurl, 'HIIT6HeadPose.tar.bz2', into=datadir) - download_extract(baseurl + 'HOC.tar.bz2', into=datadir) + download_extract(baseurl, 'HOC.tar.bz2', into=datadir) - download_extract(baseurl + 'HOCoffee.tar.bz2', into=datadir) + download_extract(baseurl, 'HOCoffee.tar.bz2', into=datadir) - download_extract(baseurl + 'IHDPHeadPose.tar.bz2', into=datadir) + download_extract(baseurl, 'IHDPHeadPose.tar.bz2', into=datadir) - download_extract(baseurl + 'QMULPoseHeads.tar.bz2', into=datadir) + download_extract(baseurl, 'QMULPoseHeads.tar.bz2', into=datadir) + print("Done.") +
Update download URL and add more output to downloader.
## Code Before: from lbtoolbox.download import download import os import inspect import tarfile def here(f): me = inspect.getsourcefile(here) return os.path.join(os.path.dirname(os.path.abspath(me)), f) def download_extract(url, into): fname = download(url, into) print("Extracting...") with tarfile.open(fname) as f: f.extractall(path=into) if __name__ == '__main__': baseurl = 'https://omnomnom.vision.rwth-aachen.de/data/tosato/' datadir = here('data') # First, download the Tosato datasets. download_extract(baseurl + 'CAVIARShoppingCenterFullOccl.tar.bz2', into=datadir) download_extract(baseurl + 'CAVIARShoppingCenterFull.tar.bz2', into=datadir) download_extract(baseurl + 'HIIT6HeadPose.tar.bz2', into=datadir) download_extract(baseurl + 'HOC.tar.bz2', into=datadir) download_extract(baseurl + 'HOCoffee.tar.bz2', into=datadir) download_extract(baseurl + 'IHDPHeadPose.tar.bz2', into=datadir) download_extract(baseurl + 'QMULPoseHeads.tar.bz2', into=datadir) ## Instruction: Update download URL and add more output to downloader. ## Code After: from lbtoolbox.download import download import os import inspect import tarfile def here(f): me = inspect.getsourcefile(here) return os.path.join(os.path.dirname(os.path.abspath(me)), f) def download_extract(urlbase, name, into): print("Downloading " + name) fname = download(os.path.join(urlbase, name), into) print("Extracting...") with tarfile.open(fname) as f: f.extractall(path=into) if __name__ == '__main__': baseurl = 'https://omnomnom.vision.rwth-aachen.de/data/BiternionNets/' datadir = here('data') # First, download the Tosato datasets. download_extract(baseurl, 'CAVIARShoppingCenterFullOccl.tar.bz2', into=datadir) download_extract(baseurl, 'CAVIARShoppingCenterFull.tar.bz2', into=datadir) download_extract(baseurl, 'HIIT6HeadPose.tar.bz2', into=datadir) download_extract(baseurl, 'HOC.tar.bz2', into=datadir) download_extract(baseurl, 'HOCoffee.tar.bz2', into=datadir) download_extract(baseurl, 'IHDPHeadPose.tar.bz2', into=datadir) download_extract(baseurl, 'QMULPoseHeads.tar.bz2', into=datadir) print("Done.")
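Since the script above extracts downloaded archives with `tarfile.extractall`, it inherits tarfile's historical trust in member paths. A defensive sketch, independent of this repository's code (on Python 3.12+, `tar.extractall(path=into, filter='data')` achieves the same guard):

import os
import tarfile

def safe_extract(fname, into):
    base = os.path.realpath(into)
    with tarfile.open(fname) as tar:
        for member in tar.getmembers():
            target = os.path.realpath(os.path.join(into, member.name))
            # Reject entries that would escape the target directory.
            if target != base and not target.startswith(base + os.sep):
                raise RuntimeError('blocked path traversal: %s' % member.name)
        tar.extractall(path=into)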
c94c86df52184af6b07dcf58951688cea178b8e6
dmoj/executors/LUA.py
dmoj/executors/LUA.py
from .base_executor import ScriptExecutor class Executor(ScriptExecutor): ext = '.lua' name = 'LUA' command = 'lua' address_grace = 131072 test_program = "io.write(io.read('*all'))" @classmethod def get_version_flags(cls, command): return ['-v']
from .base_executor import ScriptExecutor class Executor(ScriptExecutor): ext = '.lua' name = 'LUA' command = 'lua' command_paths = ['lua', 'lua5.3', 'lua5.2', 'lua5.1'] address_grace = 131072 test_program = "io.write(io.read('*all'))" @classmethod def get_version_flags(cls, command): return ['-v']
Make lua autoconfig work better.
Make lua autoconfig work better.
Python
agpl-3.0
DMOJ/judge,DMOJ/judge,DMOJ/judge
from .base_executor import ScriptExecutor class Executor(ScriptExecutor): ext = '.lua' name = 'LUA' command = 'lua' + command_paths = ['lua', 'lua5.3', 'lua5.2', 'lua5.1'] address_grace = 131072 test_program = "io.write(io.read('*all'))" @classmethod def get_version_flags(cls, command): return ['-v']
Make lua autoconfig work better.
## Code Before: from .base_executor import ScriptExecutor class Executor(ScriptExecutor): ext = '.lua' name = 'LUA' command = 'lua' address_grace = 131072 test_program = "io.write(io.read('*all'))" @classmethod def get_version_flags(cls, command): return ['-v'] ## Instruction: Make lua autoconfig work better. ## Code After: from .base_executor import ScriptExecutor class Executor(ScriptExecutor): ext = '.lua' name = 'LUA' command = 'lua' command_paths = ['lua', 'lua5.3', 'lua5.2', 'lua5.1'] address_grace = 131072 test_program = "io.write(io.read('*all'))" @classmethod def get_version_flags(cls, command): return ['-v']
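The new `command_paths` list implies the judge probes several interpreter names in order; DMOJ's actual autoconfig lives elsewhere in the codebase, but the core idea can be sketched with the standard library alone:

import shutil

def first_available(candidates):
    """Return the resolved path of the first binary found on PATH."""
    for name in candidates:
        path = shutil.which(name)
        if path:
            return path
    return None

print(first_available(['lua', 'lua5.3', 'lua5.2', 'lua5.1']))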
7cef87a81278c227db0cb07329d1b659dbd175b3
mail_factory/models.py
mail_factory/models.py
import django from django.conf import settings from django.utils.importlib import import_module from django.utils.module_loading import module_has_submodule def autodiscover(): """Auto-discover INSTALLED_APPS mails.py modules.""" for app in settings.INSTALLED_APPS: module = '%s.mails' % app # Attempt to import the app's 'mails' module try: import_module(module) except: # Decide whether to bubble up this error. If the app just # doesn't have a mails module, we can ignore the error # attempting to import it, otherwise we want it to bubble up. app_module = import_module(app) if module_has_submodule(app_module, 'mails'): raise # If we're using Django >= 1.7, use the new app-loading mecanism which is way # better. if django.VERSION < (1, 7): autodiscover()
import django from django.conf import settings from django.utils.module_loading import module_has_submodule try: from importlib import import_module except ImportError: # Compatibility for python-2.6 from django.utils.importlib import import_module def autodiscover(): """Auto-discover INSTALLED_APPS mails.py modules.""" for app in settings.INSTALLED_APPS: module = '%s.mails' % app # Attempt to import the app's 'mails' module try: import_module(module) except: # Decide whether to bubble up this error. If the app just # doesn't have a mails module, we can ignore the error # attempting to import it, otherwise we want it to bubble up. app_module = import_module(app) if module_has_submodule(app_module, 'mails'): raise # If we're using Django >= 1.7, use the new app-loading mecanism which is way # better. if django.VERSION < (1, 7): autodiscover()
Use standard library instead of django.utils.importlib
Use standard library instead of django.utils.importlib > django.utils.importlib is a compatibility library for when Python 2.6 was > still supported. It has been obsolete since Django 1.7, which dropped support > for Python 2.6, and is removed in 1.9 per the deprecation cycle. > Use Python's import_module function instead > — [1] References: [1] http://stackoverflow.com/a/32763639 [2] https://docs.djangoproject.com/en/1.9/internals/deprecation/#deprecation-removed-in-1-9
Python
bsd-3-clause
novafloss/django-mail-factory,novafloss/django-mail-factory
import django from django.conf import settings - from django.utils.importlib import import_module from django.utils.module_loading import module_has_submodule + + try: + from importlib import import_module + except ImportError: + # Compatibility for python-2.6 + from django.utils.importlib import import_module def autodiscover(): """Auto-discover INSTALLED_APPS mails.py modules.""" for app in settings.INSTALLED_APPS: module = '%s.mails' % app # Attempt to import the app's 'mails' module try: import_module(module) except: # Decide whether to bubble up this error. If the app just # doesn't have a mails module, we can ignore the error # attempting to import it, otherwise we want it to bubble up. app_module = import_module(app) if module_has_submodule(app_module, 'mails'): raise # If we're using Django >= 1.7, use the new app-loading mecanism which is way # better. if django.VERSION < (1, 7): autodiscover()
Use standard library instead of django.utils.importlib
## Code Before: import django from django.conf import settings from django.utils.importlib import import_module from django.utils.module_loading import module_has_submodule def autodiscover(): """Auto-discover INSTALLED_APPS mails.py modules.""" for app in settings.INSTALLED_APPS: module = '%s.mails' % app # Attempt to import the app's 'mails' module try: import_module(module) except: # Decide whether to bubble up this error. If the app just # doesn't have a mails module, we can ignore the error # attempting to import it, otherwise we want it to bubble up. app_module = import_module(app) if module_has_submodule(app_module, 'mails'): raise # If we're using Django >= 1.7, use the new app-loading mecanism which is way # better. if django.VERSION < (1, 7): autodiscover() ## Instruction: Use standard library instead of django.utils.importlib ## Code After: import django from django.conf import settings from django.utils.module_loading import module_has_submodule try: from importlib import import_module except ImportError: # Compatibility for python-2.6 from django.utils.importlib import import_module def autodiscover(): """Auto-discover INSTALLED_APPS mails.py modules.""" for app in settings.INSTALLED_APPS: module = '%s.mails' % app # Attempt to import the app's 'mails' module try: import_module(module) except: # Decide whether to bubble up this error. If the app just # doesn't have a mails module, we can ignore the error # attempting to import it, otherwise we want it to bubble up. app_module = import_module(app) if module_has_submodule(app_module, 'mails'): raise # If we're using Django >= 1.7, use the new app-loading mecanism which is way # better. if django.VERSION < (1, 7): autodiscover()
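Once Python 2.6 support is dropped entirely, the shim above disappears and only the standard-library import remains. A sketch of the autodiscover core in that form (`module_has_submodule` is a real Django helper; narrowing the bare `except` to `ImportError` is a deliberate tightening, not part of the original commit):

from importlib import import_module
from django.utils.module_loading import module_has_submodule

def discover(app, submodule='mails'):
    try:
        return import_module('%s.%s' % (app, submodule))
    except ImportError:
        app_module = import_module(app)
        if module_has_submodule(app_module, submodule):
            raise  # the submodule exists but failed to import: surface it
        return None  # the app simply has no such submodule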
4b659b7b2552da033753349e059eee172025e00e
adbwp/__init__.py
adbwp/__init__.py
# pylint: disable=wildcard-import from . import exceptions from .exceptions import * from . import header from .header import Header from . import message from .message import Message __all__ = exceptions.__all__ + ['header', 'message', 'Header', 'Message'] __version__ = '0.0.1'
# pylint: disable=wildcard-import from . import exceptions, header, message from .exceptions import * from .header import Header from .message import Message __all__ = exceptions.__all__ + ['header', 'message', 'Header', 'Message'] __version__ = '0.0.1'
Reorder imports based on isort rules.
Reorder imports based on isort rules.
Python
apache-2.0
adbpy/wire-protocol
# pylint: disable=wildcard-import - from . import exceptions + from . import exceptions, header, message from .exceptions import * - from . import header from .header import Header - from . import message from .message import Message __all__ = exceptions.__all__ + ['header', 'message', 'Header', 'Message'] __version__ = '0.0.1'
Reorder imports based on isort rules.
## Code Before: # pylint: disable=wildcard-import from . import exceptions from .exceptions import * from . import header from .header import Header from . import message from .message import Message __all__ = exceptions.__all__ + ['header', 'message', 'Header', 'Message'] __version__ = '0.0.1' ## Instruction: Reorder imports based on isort rules. ## Code After: # pylint: disable=wildcard-import from . import exceptions, header, message from .exceptions import * from .header import Header from .message import Message __all__ = exceptions.__all__ + ['header', 'message', 'Header', 'Message'] __version__ = '0.0.1'
014c8ca68b196c78b9044b194b762cdb3dfe6c78
app/hooks/views.py
app/hooks/views.py
from __future__ import absolute_import from __future__ import unicode_literals from app import app, webhooks @webhooks.hook( app.config.get('GITLAB_HOOK','/hooks/gitlab'), handler='gitlab') class Gitlab: def issue(self, data): pass def push(self, data): pass def tag_push(self, data): pass def merge_request(self, data): pass def commit_comment(self, data): pass def issue_comment(self, data): pass def merge_request_comment(self, data): pass def snippet_comment(self, data): pass
from __future__ import absolute_import from __future__ import unicode_literals from app import app, webhooks @webhooks.hook( app.config.get('GITLAB_HOOK','/hooks/gitlab'), handler='gitlab') class Gitlab: def issue(self, data): # if the repository belongs to a group check if a channel with the same # name (lowercased and hyphened) exists # Check if a channel with the same repository name exists # If the channel exists post to that channel # If not post to general or other defined by configuration # publish the issue to the found channel including the Title, Message # and the creator and responsible if defined pass def push(self, data): # Read commit list to update commit count for user pass def tag_push(self, data): # Publish news of the new version of the repo in general pass def merge_request(self, data): # Notify in the channel pass def commit_comment(self, data): # Notify comment and receiver in the channel pass def issue_comment(self, data): # Notify comment and receiver in the channel pass def merge_request_comment(self, data): # Notify comment and receiver in the channel pass def snippet_comment(self, data): # Do nothing for now pass
Add comment description of methods for gitlab hook
Add comment description of methods for gitlab hook
Python
apache-2.0
pipex/gitbot,pipex/gitbot,pipex/gitbot
from __future__ import absolute_import from __future__ import unicode_literals from app import app, webhooks @webhooks.hook( app.config.get('GITLAB_HOOK','/hooks/gitlab'), handler='gitlab') class Gitlab: def issue(self, data): + # if the repository belongs to a group check if a channel with the same + # name (lowercased and hyphened) exists + # Check if a channel with the same repository name exists + + # If the channel exists post to that channel + + # If not post to general or other defined by configuration + + # publish the issue to the found channel including the Title, Message + # and the creator and responsible if defined pass def push(self, data): + # Read commit list to update commit count for user pass def tag_push(self, data): + # Publish news of the new version of the repo in general pass def merge_request(self, data): + # Notify in the channel pass def commit_comment(self, data): + # Notify comment and receiver in the channel pass def issue_comment(self, data): + # Notify comment and receiver in the channel pass def merge_request_comment(self, data): + # Notify comment and receiver in the channel pass def snippet_comment(self, data): + # Do nothing for now pass
Add comment description of methods for gitlab hook
## Code Before: from __future__ import absolute_import from __future__ import unicode_literals from app import app, webhooks @webhooks.hook( app.config.get('GITLAB_HOOK','/hooks/gitlab'), handler='gitlab') class Gitlab: def issue(self, data): pass def push(self, data): pass def tag_push(self, data): pass def merge_request(self, data): pass def commit_comment(self, data): pass def issue_comment(self, data): pass def merge_request_comment(self, data): pass def snippet_comment(self, data): pass ## Instruction: Add comment description of methods for gitlab hook ## Code After: from __future__ import absolute_import from __future__ import unicode_literals from app import app, webhooks @webhooks.hook( app.config.get('GITLAB_HOOK','/hooks/gitlab'), handler='gitlab') class Gitlab: def issue(self, data): # if the repository belongs to a group check if a channel with the same # name (lowercased and hyphened) exists # Check if a channel with the same repository name exists # If the channel exists post to that channel # If not post to general or other defined by configuration # publish the issue to the found channel including the Title, Message # and the creator and responsible if defined pass def push(self, data): # Read commit list to update commit count for user pass def tag_push(self, data): # Publish news of the new version of the repo in general pass def merge_request(self, data): # Notify in the channel pass def commit_comment(self, data): # Notify comment and receiver in the channel pass def issue_comment(self, data): # Notify comment and receiver in the channel pass def merge_request_comment(self, data): # Notify comment and receiver in the channel pass def snippet_comment(self, data): # Do nothing for now pass
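The comments added above describe one concrete transformation — mapping a repository or group name to a channel name, "lowercased and hyphened". A hedged sketch of just that step (the function name and the 'general' fallback are assumptions, not part of the gitbot code):

import re

def channel_for(repo_name, existing=(), default='general'):
    """Map e.g. 'My Repo' -> 'my-repo', falling back when no channel exists."""
    candidate = re.sub(r'[^a-z0-9]+', '-', repo_name.lower()).strip('-')
    return candidate if candidate in existing else default

assert channel_for('Shopping Center', existing={'shopping-center'}) == 'shopping-center'
assert channel_for('Unknown Repo') == 'general'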
6ecada90e944ee976197e0ee79baf1d711a20803
cla_public/apps/base/forms.py
cla_public/apps/base/forms.py
"Base forms" from flask_wtf import Form from wtforms import StringField, TextAreaField from cla_public.apps.base.fields import MultiRadioField from cla_public.apps.base.constants import FEEL_ABOUT_SERVICE, \ HELP_FILLING_IN_FORM class FeedbackForm(Form): difficulty = TextAreaField(u'Did you have any difficulty with this service?') ideas = TextAreaField(u'Do you have any ideas for how it could be improved?') feel_about_service = MultiRadioField( u'Overall, how did you feel about the service you received today?', choices=FEEL_ABOUT_SERVICE) help_filling_in_form = MultiRadioField( u'Did you have any help filling in this form?', choices=HELP_FILLING_IN_FORM)
"Base forms" from flask_wtf import Form from wtforms import StringField, TextAreaField from cla_public.apps.base.fields import MultiRadioField from cla_public.apps.base.constants import FEEL_ABOUT_SERVICE, \ HELP_FILLING_IN_FORM from cla_public.apps.checker.honeypot import Honeypot class FeedbackForm(Honeypot, Form): difficulty = TextAreaField(u'Did you have any difficulty with this service?') ideas = TextAreaField(u'Do you have any ideas for how it could be improved?') feel_about_service = MultiRadioField( u'Overall, how did you feel about the service you received today?', choices=FEEL_ABOUT_SERVICE) help_filling_in_form = MultiRadioField( u'Did you have any help filling in this form?', choices=HELP_FILLING_IN_FORM)
Add honeypot field to feedback form
Add honeypot field to feedback form
Python
mit
ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public
"Base forms" from flask_wtf import Form from wtforms import StringField, TextAreaField from cla_public.apps.base.fields import MultiRadioField from cla_public.apps.base.constants import FEEL_ABOUT_SERVICE, \ HELP_FILLING_IN_FORM + from cla_public.apps.checker.honeypot import Honeypot + - class FeedbackForm(Form): + class FeedbackForm(Honeypot, Form): difficulty = TextAreaField(u'Did you have any difficulty with this service?') ideas = TextAreaField(u'Do you have any ideas for how it could be improved?') feel_about_service = MultiRadioField( u'Overall, how did you feel about the service you received today?', choices=FEEL_ABOUT_SERVICE) help_filling_in_form = MultiRadioField( u'Did you have any help filling in this form?', choices=HELP_FILLING_IN_FORM)
Add honeypot field to feedback form
## Code Before: "Base forms" from flask_wtf import Form from wtforms import StringField, TextAreaField from cla_public.apps.base.fields import MultiRadioField from cla_public.apps.base.constants import FEEL_ABOUT_SERVICE, \ HELP_FILLING_IN_FORM class FeedbackForm(Form): difficulty = TextAreaField(u'Did you have any difficulty with this service?') ideas = TextAreaField(u'Do you have any ideas for how it could be improved?') feel_about_service = MultiRadioField( u'Overall, how did you feel about the service you received today?', choices=FEEL_ABOUT_SERVICE) help_filling_in_form = MultiRadioField( u'Did you have any help filling in this form?', choices=HELP_FILLING_IN_FORM) ## Instruction: Add honeypot field to feedback form ## Code After: "Base forms" from flask_wtf import Form from wtforms import StringField, TextAreaField from cla_public.apps.base.fields import MultiRadioField from cla_public.apps.base.constants import FEEL_ABOUT_SERVICE, \ HELP_FILLING_IN_FORM from cla_public.apps.checker.honeypot import Honeypot class FeedbackForm(Honeypot, Form): difficulty = TextAreaField(u'Did you have any difficulty with this service?') ideas = TextAreaField(u'Do you have any ideas for how it could be improved?') feel_about_service = MultiRadioField( u'Overall, how did you feel about the service you received today?', choices=FEEL_ABOUT_SERVICE) help_filling_in_form = MultiRadioField( u'Did you have any help filling in this form?', choices=HELP_FILLING_IN_FORM)
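The `Honeypot` mixin itself is imported but not shown in this diff. A plausible minimal sketch of such a mixin for WTForms — the field name, label, and error text are assumptions, not the cla_public implementation:

from wtforms import StringField
from wtforms.validators import ValidationError

class Honeypot(object):
    """Adds a field humans never see (hidden via CSS); bots tend to fill it."""
    comment = StringField(u'Leave this field empty')

    def validate_comment(self, field):
        if field.data:
            raise ValidationError(u'Submission rejected')

Because WTForms' metaclass collects unbound fields from every base class, listing the mixin ahead of `Form` is enough to attach both the field and its inline validator.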
e2fbf646b193284fc5d01684193b9c5aeb415efe
generate_html.py
generate_html.py
from jinja2 import Environment, FileSystemLoader import datetime import json env = Environment(loader=FileSystemLoader('templates'), autoescape=True) names_template = env.get_template('names.html') area_template = env.get_template('areas.html') with open("output/templates.js") as templatesjs: templates = templatesjs.read() with open("processed/area_matches.json") as area_matches_file: area_matches = json.load(area_matches_file) with open('output/areas.html', 'w+') as name_output: name_output.write(area_template.render( templates=templates, area_matches=area_matches, date=datetime.date.today().isoformat(), )) with open("processed/interesting_names.json") as interesting_names_file: interesting_names = json.load(interesting_names_file) with open('output/names.html', 'w+') as name_output: name_output.write(names_template.render( templates=templates, interesting_names=interesting_names, interesting_names_json=json.dumps(interesting_names), date=datetime.date.today().isoformat(), ))
from jinja2 import Environment, FileSystemLoader import datetime import json env = Environment(loader=FileSystemLoader('templates'), autoescape=True) names_template = env.get_template('names.html') area_template = env.get_template('areas.html') with open("output/templates.js") as templatesjs: templates = templatesjs.read() with open("processed/area_matches.json") as area_matches_file: area_matches = json.load(area_matches_file) with open('output/areas.html', 'w+') as name_output: name_output.write(area_template.render( templates=templates, area_matches=area_matches, date=datetime.date.today().isoformat(), )) with open("processed/interesting_names.json") as interesting_names_file: interesting_names = json.load(interesting_names_file) with open('output/names.html', 'w+') as name_output, open("key_field_names.txt") as key_field_names_file: key_fields = list(set([key_field_name.strip() for key_field_name in key_field_names_file])) name_output.write(names_template.render( templates=templates, interesting_names=interesting_names, interesting_names_json=json.dumps(interesting_names), date=datetime.date.today().isoformat(), key_fields_json=json.dumps(key_fields), ))
Fix due to merge conflicts
Fix due to merge conflicts
Python
agpl-3.0
TalkAboutLocal/local-news-engine,TalkAboutLocal/local-news-engine,TalkAboutLocal/local-news-engine,TalkAboutLocal/local-news-engine
from jinja2 import Environment, FileSystemLoader import datetime import json env = Environment(loader=FileSystemLoader('templates'), autoescape=True) names_template = env.get_template('names.html') area_template = env.get_template('areas.html') with open("output/templates.js") as templatesjs: templates = templatesjs.read() with open("processed/area_matches.json") as area_matches_file: area_matches = json.load(area_matches_file) with open('output/areas.html', 'w+') as name_output: name_output.write(area_template.render( templates=templates, area_matches=area_matches, date=datetime.date.today().isoformat(), )) with open("processed/interesting_names.json") as interesting_names_file: interesting_names = json.load(interesting_names_file) - with open('output/names.html', 'w+') as name_output: + with open('output/names.html', 'w+') as name_output, open("key_field_names.txt") as key_field_names_file: + key_fields = list(set([key_field_name.strip() for key_field_name in key_field_names_file])) name_output.write(names_template.render( templates=templates, interesting_names=interesting_names, interesting_names_json=json.dumps(interesting_names), date=datetime.date.today().isoformat(), + key_fields_json=json.dumps(key_fields), )) +
Fix due to merge conflicts
## Code Before: from jinja2 import Environment, FileSystemLoader import datetime import json env = Environment(loader=FileSystemLoader('templates'), autoescape=True) names_template = env.get_template('names.html') area_template = env.get_template('areas.html') with open("output/templates.js") as templatesjs: templates = templatesjs.read() with open("processed/area_matches.json") as area_matches_file: area_matches = json.load(area_matches_file) with open('output/areas.html', 'w+') as name_output: name_output.write(area_template.render( templates=templates, area_matches=area_matches, date=datetime.date.today().isoformat(), )) with open("processed/interesting_names.json") as interesting_names_file: interesting_names = json.load(interesting_names_file) with open('output/names.html', 'w+') as name_output: name_output.write(names_template.render( templates=templates, interesting_names=interesting_names, interesting_names_json=json.dumps(interesting_names), date=datetime.date.today().isoformat(), )) ## Instruction: Fix due to merge conflicts ## Code After: from jinja2 import Environment, FileSystemLoader import datetime import json env = Environment(loader=FileSystemLoader('templates'), autoescape=True) names_template = env.get_template('names.html') area_template = env.get_template('areas.html') with open("output/templates.js") as templatesjs: templates = templatesjs.read() with open("processed/area_matches.json") as area_matches_file: area_matches = json.load(area_matches_file) with open('output/areas.html', 'w+') as name_output: name_output.write(area_template.render( templates=templates, area_matches=area_matches, date=datetime.date.today().isoformat(), )) with open("processed/interesting_names.json") as interesting_names_file: interesting_names = json.load(interesting_names_file) with open('output/names.html', 'w+') as name_output, open("key_field_names.txt") as key_field_names_file: key_fields = list(set([key_field_name.strip() for key_field_name in key_field_names_file])) name_output.write(names_template.render( templates=templates, interesting_names=interesting_names, interesting_names_json=json.dumps(interesting_names), date=datetime.date.today().isoformat(), key_fields_json=json.dumps(key_fields), ))
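One side effect of the fix above: `list(set(...))` yields a different ordering per process because of hash randomization, so the emitted `key_fields_json` is not reproducible across runs. A small hedged variant with stable output (same input file assumed):

with open('key_field_names.txt') as key_field_names_file:
    key_fields = sorted({line.strip() for line in key_field_names_file if line.strip()})
# sorted() gives byte-identical JSON across runs; the set still deduplicates,
# and the truthiness filter drops blank lines as a bonus.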
0ed9e159fa606c9dbdb90dfc64fcb357e9f9cedb
plenum/test/test_request.py
plenum/test/test_request.py
from indy_common.types import Request def test_request_all_identifiers_returns_empty_list_for_request_without_signatures(): req = Request() assert req.all_identifiers == []
from plenum.common.request import Request def test_request_all_identifiers_returns_empty_list_for_request_without_signatures(): req = Request() assert req.all_identifiers == []
Fix wrong import in test
Fix wrong import in test Signed-off-by: Sergey Khoroshavin <b770466c7a06c5fe47531d5f0e31684f1131354d@dsr-corporation.com>
Python
apache-2.0
evernym/zeno,evernym/plenum
- from indy_common.types import Request + from plenum.common.request import Request def test_request_all_identifiers_returns_empty_list_for_request_without_signatures(): req = Request() assert req.all_identifiers == []
Fix wrong import in test
## Code Before: from indy_common.types import Request def test_request_all_identifiers_returns_empty_list_for_request_without_signatures(): req = Request() assert req.all_identifiers == [] ## Instruction: Fix wrong import in test ## Code After: from plenum.common.request import Request def test_request_all_identifiers_returns_empty_list_for_request_without_signatures(): req = Request() assert req.all_identifiers == []
0241e253c68ca6862a3da26d29a649f65c27ae36
demos/chatroom/experiment.py
demos/chatroom/experiment.py
"""Coordination chatroom game.""" import dallinger as dlgr from dallinger.config import get_config try: unicode = unicode except NameError: # Python 3 unicode = str config = get_config() def extra_settings(): config.register('network', unicode) config.register('n', int) class CoordinationChatroom(dlgr.experiments.Experiment): """Define the structure of the experiment.""" def __init__(self, session): """Initialize the experiment.""" super(CoordinationChatroom, self).__init__(session) self.experiment_repeats = 1 self.num_participants = config.get('n') self.initial_recruitment_size = self.num_participants self.quorum = self.num_participants self.config = config if not self.config.ready: self.config.load_config() self.setup() def create_network(self): """Create a new network by reading the configuration file.""" class_ = getattr( dlgr.networks, self.config.get('network') ) return class_(max_size=self.num_participants) def info_post_request(self, node, info): """Run when a request to create an info is complete.""" for agent in node.neighbors(): node.transmit(what=info, to_whom=agent) def create_node(self, participant, network): """Create a node for a participant.""" return dlgr.nodes.Agent(network=network, participant=participant)
"""Coordination chatroom game.""" import dallinger as dlgr from dallinger.compat import unicode from dallinger.config import get_config config = get_config() def extra_settings(): config.register('network', unicode) config.register('n', int) class CoordinationChatroom(dlgr.experiments.Experiment): """Define the structure of the experiment.""" def __init__(self, session): """Initialize the experiment.""" super(CoordinationChatroom, self).__init__(session) self.experiment_repeats = 1 self.num_participants = config.get('n') self.initial_recruitment_size = self.num_participants self.quorum = self.num_participants self.config = config if not self.config.ready: self.config.load_config() self.setup() def create_network(self): """Create a new network by reading the configuration file.""" class_ = getattr( dlgr.networks, self.config.get('network') ) return class_(max_size=self.num_participants) def info_post_request(self, node, info): """Run when a request to create an info is complete.""" for agent in node.neighbors(): node.transmit(what=info, to_whom=agent) def create_node(self, participant, network): """Create a node for a participant.""" return dlgr.nodes.Agent(network=network, participant=participant)
Use compat for unicode import
Use compat for unicode import
Python
mit
Dallinger/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger
"""Coordination chatroom game.""" import dallinger as dlgr + from dallinger.compat import unicode from dallinger.config import get_config - try: - unicode = unicode - except NameError: # Python 3 - unicode = str config = get_config() def extra_settings(): config.register('network', unicode) config.register('n', int) class CoordinationChatroom(dlgr.experiments.Experiment): """Define the structure of the experiment.""" def __init__(self, session): """Initialize the experiment.""" super(CoordinationChatroom, self).__init__(session) self.experiment_repeats = 1 self.num_participants = config.get('n') self.initial_recruitment_size = self.num_participants self.quorum = self.num_participants self.config = config if not self.config.ready: self.config.load_config() self.setup() def create_network(self): """Create a new network by reading the configuration file.""" class_ = getattr( dlgr.networks, self.config.get('network') ) return class_(max_size=self.num_participants) def info_post_request(self, node, info): """Run when a request to create an info is complete.""" for agent in node.neighbors(): node.transmit(what=info, to_whom=agent) def create_node(self, participant, network): """Create a node for a participant.""" return dlgr.nodes.Agent(network=network, participant=participant)
Use compat for unicode import
## Code Before: """Coordination chatroom game.""" import dallinger as dlgr from dallinger.config import get_config try: unicode = unicode except NameError: # Python 3 unicode = str config = get_config() def extra_settings(): config.register('network', unicode) config.register('n', int) class CoordinationChatroom(dlgr.experiments.Experiment): """Define the structure of the experiment.""" def __init__(self, session): """Initialize the experiment.""" super(CoordinationChatroom, self).__init__(session) self.experiment_repeats = 1 self.num_participants = config.get('n') self.initial_recruitment_size = self.num_participants self.quorum = self.num_participants self.config = config if not self.config.ready: self.config.load_config() self.setup() def create_network(self): """Create a new network by reading the configuration file.""" class_ = getattr( dlgr.networks, self.config.get('network') ) return class_(max_size=self.num_participants) def info_post_request(self, node, info): """Run when a request to create an info is complete.""" for agent in node.neighbors(): node.transmit(what=info, to_whom=agent) def create_node(self, participant, network): """Create a node for a participant.""" return dlgr.nodes.Agent(network=network, participant=participant) ## Instruction: Use compat for unicode import ## Code After: """Coordination chatroom game.""" import dallinger as dlgr from dallinger.compat import unicode from dallinger.config import get_config config = get_config() def extra_settings(): config.register('network', unicode) config.register('n', int) class CoordinationChatroom(dlgr.experiments.Experiment): """Define the structure of the experiment.""" def __init__(self, session): """Initialize the experiment.""" super(CoordinationChatroom, self).__init__(session) self.experiment_repeats = 1 self.num_participants = config.get('n') self.initial_recruitment_size = self.num_participants self.quorum = self.num_participants self.config = config if not self.config.ready: self.config.load_config() self.setup() def create_network(self): """Create a new network by reading the configuration file.""" class_ = getattr( dlgr.networks, self.config.get('network') ) return class_(max_size=self.num_participants) def info_post_request(self, node, info): """Run when a request to create an info is complete.""" for agent in node.neighbors(): node.transmit(what=info, to_whom=agent) def create_node(self, participant, network): """Create a node for a participant.""" return dlgr.nodes.Agent(network=network, participant=participant)
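The shim removed here reappears as `dallinger.compat`; its contents are not shown in the diff, but such a module is typically just the same few lines, centralized (a sketch of the assumed shape, not Dallinger's actual file):

# dallinger/compat.py (assumed shape)
try:
    unicode = unicode   # Python 2: the builtin exists
except NameError:
    unicode = str       # Python 3: the text type is str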
8033b00ebbcb8e294f47ee558e76ee260ec18d2b
orglog-config.py
orglog-config.py
org = "servo" ignore_repos = ["skia", "skia-snapshots", "cairo", "libpng", "libcss", "libhubbub", "libparserutils", "libwapcaplet", "pixman"] count_forks = ["glutin","rust-openssl"] # Path to where we'll dump the bare checkouts. Must end in / clones_dir = "repos/" # Path to the concatenated log log_path = "log.txt" # Nuke the clones_dir afterwards? destroy_clones = True
org = "servo" ignore_repos = ["skia", "skia-snapshots", "cairo", "libpng", "libcss", "libhubbub", "libparserutils", "libwapcaplet", "pixman", "libfreetype2"] count_forks = ["glutin","rust-openssl"] # Path to where we'll dump the bare checkouts. Must end in / clones_dir = "repos/" # Path to the concatenated log log_path = "log.txt" # Nuke the clones_dir afterwards? destroy_clones = True
Remove libfreetype2, which should have been omitted and was breaking the scripts
Remove libfreetype2, which should have been omitted and was breaking the scripts
Python
mit
servo/servo-org-stats,servo/servo-org-stats,servo/servo-org-stats
org = "servo" ignore_repos = ["skia", "skia-snapshots", "cairo", "libpng", "libcss", - "libhubbub", "libparserutils", "libwapcaplet", "pixman"] + "libhubbub", "libparserutils", "libwapcaplet", "pixman", + "libfreetype2"] count_forks = ["glutin","rust-openssl"] # Path to where we'll dump the bare checkouts. Must end in / clones_dir = "repos/" # Path to the concatenated log log_path = "log.txt" # Nuke the clones_dir afterwards? destroy_clones = True
Remove libfreetype2, which should have been omitted and was breaking the scripts
## Code Before: org = "servo" ignore_repos = ["skia", "skia-snapshots", "cairo", "libpng", "libcss", "libhubbub", "libparserutils", "libwapcaplet", "pixman"] count_forks = ["glutin","rust-openssl"] # Path to where we'll dump the bare checkouts. Must end in / clones_dir = "repos/" # Path to the concatenated log log_path = "log.txt" # Nuke the clones_dir afterwards? destroy_clones = True ## Instruction: Remove libfreetype2, which should have been omitted and was breaking the scripts ## Code After: org = "servo" ignore_repos = ["skia", "skia-snapshots", "cairo", "libpng", "libcss", "libhubbub", "libparserutils", "libwapcaplet", "pixman", "libfreetype2"] count_forks = ["glutin","rust-openssl"] # Path to where we'll dump the bare checkouts. Must end in / clones_dir = "repos/" # Path to the concatenated log log_path = "log.txt" # Nuke the clones_dir afterwards? destroy_clones = True
ead9192b4c2acb21df917dfe116785343e9a59a6
scripts/patches/transfer.py
scripts/patches/transfer.py
patches = [ { "op": "move", "from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType", "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType", }, { "op": "replace", "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType", "value": "String", }, { "op": "move", "from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType", "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType", }, { "op": "replace", "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType", "value": "String", }, ]
patches = [ { "op": "move", "from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType", "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType", }, { "op": "replace", "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType", "value": "String", }, { "op": "move", "from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType", "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType", }, { "op": "replace", "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType", "value": "String", }, { "op": "move", "from": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/ItemType", "path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType", }, { "op": "replace", "path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType", "value": "String", }, ]
Fix spec issue with Transfer::Server ProtocolDetails
Fix spec issue with Transfer::Server ProtocolDetails
Python
bsd-2-clause
cloudtools/troposphere,cloudtools/troposphere
patches = [ { "op": "move", "from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType", "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType", }, { "op": "replace", "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType", "value": "String", }, { "op": "move", "from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType", "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType", }, { "op": "replace", "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType", "value": "String", }, + { + "op": "move", + "from": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/ItemType", + "path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType", + }, + { + "op": "replace", + "path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType", + "value": "String", + }, ]
Fix spec issue with Transfer::Server ProtocolDetails
## Code Before: patches = [ { "op": "move", "from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType", "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType", }, { "op": "replace", "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType", "value": "String", }, { "op": "move", "from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType", "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType", }, { "op": "replace", "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType", "value": "String", }, ] ## Instruction: Fix spec issue with Transfer::Server ProtocolDetails ## Code After: patches = [ { "op": "move", "from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType", "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType", }, { "op": "replace", "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType", "value": "String", }, { "op": "move", "from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType", "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType", }, { "op": "replace", "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType", "value": "String", }, { "op": "move", "from": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/ItemType", "path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType", }, { "op": "replace", "path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType", "value": "String", }, ]
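The patch entries above are RFC 6902 (JSON Patch) operations; applying one to a spec fragment can be sketched with the third-party `jsonpatch` package (an assumed dependency here — the troposphere tooling may apply them differently):

import jsonpatch  # pip install jsonpatch

spec = {
    "PropertyTypes": {
        "AWS::Transfer::Server.ProtocolDetails": {
            "Properties": {"As2Transports": {"ItemType": "String"}}
        }
    }
}
patch = jsonpatch.JsonPatch([{
    "op": "move",
    "from": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/ItemType",
    "path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType",
}])
print(patch.apply(spec))  # the ItemType key is renamed to PrimitiveItemType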
4fe19797ba2fb12239ae73da60bb3e726b23ffe9
web/forms.py
web/forms.py
from django.contrib.auth.forms import UserCreationForm, UserChangeForm from .models import UniqueEmailUser class UniqueEmailUserCreationForm(UserCreationForm): """ A form that creates a UniqueEmailUser. """ def __init__(self, *args, **kargs): super(UniqueEmailUserCreationForm, self).__init__(*args, **kargs) del self.fields['username'] class Meta: model = UniqueEmailUser fields = ("email",) class UniqueEmailUserChangeForm(UserChangeForm): """ A form for updating a UniqueEmailUser. """ def __init__(self, *args, **kargs): super(UniqueEmailUserChangeForm, self).__init__(*args, **kargs) del self.fields['username'] class Meta: model = UniqueEmailUser fields = ("email",)
from django.contrib.auth.forms import UserCreationForm, UserChangeForm from .models import UniqueEmailUser class UniqueEmailUserCreationForm(UserCreationForm): """ A form that creates a UniqueEmailUser. """ class Meta: model = UniqueEmailUser fields = ("email",) class UniqueEmailUserChangeForm(UserChangeForm): """ A form for updating a UniqueEmailUser. """ class Meta: model = UniqueEmailUser fields = ("email",)
Fix bug in admin user editing
Fix bug in admin user editing Fixes KeyError when creating or editing a UniqueEmailUser in the admin interface.
Python
mit
uppercounty/uppercounty,uppercounty/uppercounty,uppercounty/uppercounty
from django.contrib.auth.forms import UserCreationForm, UserChangeForm from .models import UniqueEmailUser class UniqueEmailUserCreationForm(UserCreationForm): """ A form that creates a UniqueEmailUser. """ - - def __init__(self, *args, **kargs): - super(UniqueEmailUserCreationForm, self).__init__(*args, **kargs) - del self.fields['username'] class Meta: model = UniqueEmailUser fields = ("email",) class UniqueEmailUserChangeForm(UserChangeForm): """ A form for updating a UniqueEmailUser. """ - def __init__(self, *args, **kargs): - super(UniqueEmailUserChangeForm, self).__init__(*args, **kargs) - del self.fields['username'] - class Meta: model = UniqueEmailUser fields = ("email",)
Fix bug in admin user editing
## Code Before: from django.contrib.auth.forms import UserCreationForm, UserChangeForm from .models import UniqueEmailUser class UniqueEmailUserCreationForm(UserCreationForm): """ A form that creates a UniqueEmailUser. """ def __init__(self, *args, **kargs): super(UniqueEmailUserCreationForm, self).__init__(*args, **kargs) del self.fields['username'] class Meta: model = UniqueEmailUser fields = ("email",) class UniqueEmailUserChangeForm(UserChangeForm): """ A form for updating a UniqueEmailUser. """ def __init__(self, *args, **kargs): super(UniqueEmailUserChangeForm, self).__init__(*args, **kargs) del self.fields['username'] class Meta: model = UniqueEmailUser fields = ("email",) ## Instruction: Fix bug in admin user editing ## Code After: from django.contrib.auth.forms import UserCreationForm, UserChangeForm from .models import UniqueEmailUser class UniqueEmailUserCreationForm(UserCreationForm): """ A form that creates a UniqueEmailUser. """ class Meta: model = UniqueEmailUser fields = ("email",) class UniqueEmailUserChangeForm(UserChangeForm): """ A form for updating a UniqueEmailUser. """ class Meta: model = UniqueEmailUser fields = ("email",)
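The deleted `__init__` overrides were working around a `username` key that a custom user model never defines — which is exactly what raised the KeyError. A sketch of the model side that makes the plain `Meta`-only forms sufficient (the field set is an assumption; a real manager would also implement `create_user`/`create_superuser`):

from django.contrib.auth.models import AbstractBaseUser, BaseUserManager
from django.db import models

class UniqueEmailUser(AbstractBaseUser):
    """User identified by a unique email; no `username` field exists at all."""
    email = models.EmailField(unique=True)
    is_active = models.BooleanField(default=True)

    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = []

    objects = BaseUserManager()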