Dataset column schema (dtype plus observed value lengths or class counts):

| column | dtype | lengths (min / max) or classes |
| --- | --- | --- |
| commit | stringlengths | 40 / 40 |
| old_file | stringlengths | 4 / 118 |
| new_file | stringlengths | 4 / 118 |
| old_contents | stringlengths | 10 / 2.94k |
| new_contents | stringlengths | 21 / 3.18k |
| subject | stringlengths | 16 / 444 |
| message | stringlengths | 17 / 2.63k |
| lang | stringclasses | 1 value |
| license | stringclasses | 13 values |
| repos | stringlengths | 5 / 43k |
| ndiff | stringlengths | 51 / 3.32k |
| instruction | stringlengths | 16 / 444 |
| content | stringlengths | 133 / 4.32k |
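For orientation, a minimal sketch of loading a dataset with this schema through the Hugging Face `datasets` library; the identifier `org/commit-instructions` is a placeholder assumption, not the dataset's real repository id:

```python
from datasets import load_dataset

# Placeholder id -- substitute the dataset's actual repository path.
ds = load_dataset("org/commit-instructions", split="train")

row = ds[0]
print(row["commit"])        # 40-character commit SHA
print(row["instruction"])   # natural-language edit instruction
print(row["new_contents"])  # file contents after the commit
```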
3ca46f1407d8984ca5cbd1eb0581765386533d71
observatory/rcos/tests/test_rcos.py
observatory/rcos/tests/test_rcos.py
old_contents:
```python
import pytest

from django.core.urlresolvers import reverse


@pytest.mark.django_db
def test_homepage(client):
    for url in (
        "/donor",
        "/students",
        "/courses",
        "/talks",
        "/programming-competition",
        "/achievements",
        "/urp-application",
        "/links-and-contacts",
        "/talk-sign-up",
        "/irc",
        "/faq",
        "/calendar",
        "/howtojoin",
        "/past-projects",
    ):
        #Load Site
        response = client.get(url)

        #Check for normal processing
        assert response.status_code in [200, 301]
```
new_contents:
```python
import pytest

from django.core.urlresolvers import reverse


@pytest.mark.django_db
def test_homepage(client):
    for url in (
        "/",
        "/donor",
        "/students",
        "/courses",
        "/talks",
        "/programming-competition",
        "/achievements",
        "/urp-application",
        "/links-and-contacts",
        "/talk-sign-up",
        "/irc",
        "/faq",
        "/calendar",
        "/howtojoin",
        "/past-projects",
    ):
        #Load Site
        response = client.get(url)

        #Check for normal processing
        assert response.status_code in [200, 301]
```
Add / to rcos tests
rcos: Add / to rcos tests
Python
isc
rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory
ndiff:
```diff
  import pytest

  from django.core.urlresolvers import reverse


  @pytest.mark.django_db
  def test_homepage(client):
      for url in (
+         "/",
          "/donor",
          "/students",
          "/courses",
          "/talks",
          "/programming-competition",
          "/achievements",
          "/urp-application",
          "/links-and-contacts",
          "/talk-sign-up",
          "/irc",
          "/faq",
          "/calendar",
          "/howtojoin",
          "/past-projects",
      ):
          #Load Site
          response = client.get(url)

          #Check for normal processing
          assert response.status_code in [200, 301]
```
Add / to rcos tests
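The `ndiff` field's line markers (`+ ` added, `- ` removed, unprefixed context) match the output format of Python's standard-library `difflib.ndiff`. Assuming the column was produced that way, a minimal sketch of recomputing it from the two content fields:

```python
import difflib

def compute_ndiff(old_contents: str, new_contents: str) -> str:
    # difflib.ndiff marks every line: "  " context, "+ " added,
    # "- " removed, "? " intraline hints.
    marked = difflib.ndiff(old_contents.splitlines(keepends=True),
                           new_contents.splitlines(keepends=True))
    # The "? " hint lines do not appear in the dataset, so drop them.
    return "".join(line for line in marked if not line.startswith("? "))
```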
5e3c6d6ab892a87ca27c05c01b39646bd339b3f2
tests/test_event.py
tests/test_event.py
old_contents:
```python
import unittest

from event import Event


class EventTest(unittest.TestCase):

    def test_a_listener_is_notified_when_event_is_raised(self):
        called = False

        def listener():
            nonlocal called
            called = True

        event = Event()
        event.connect(listener)
        event.fire()
        self.assertTrue(called)

    def test_a_listener_is_passed_correct_parameters(self):
        params = ()

        def listener(*args, **kwargs):
            nonlocal params
            params = (args, kwargs)

        event = Event()
        event.connect(listener)
        event.fire(5, shape="square")
        self.assertEquals(((5, ), {"shape": "square"}), params)
```
new_contents:
```python
import unittest

from event import Event


class Mock:
    def __init__(self):
        self.called = False
        self.params = ()

    def __call__(self, *args, **kwargs):
        self.called = True
        self.params = (args, kwargs)


class EventTest(unittest.TestCase):

    def test_a_listener_is_notified_when_event_is_raised(self):
        listener = Mock()
        event = Event()
        event.connect(listener)
        event.fire()
        self.assertTrue(listener.called)

    def test_a_listener_is_passed_correct_parameters(self):
        listener = Mock()
        event = Event()
        event.connect(listener)
        event.fire(5, shape="square")
        self.assertEquals(((5, ), {"shape": "square"}), listener.params)
```
Refactor a lightweight Mock class.
Refactor a lightweight Mock class.
Python
mit
bsmukasa/stock_alerter
ndiff:
```diff
  import unittest

  from event import Event


+ class Mock:
+     def __init__(self):
+         self.called = False
+         self.params = ()
+ 
+     def __call__(self, *args, **kwargs):
+         self.called = True
+         self.params = (args, kwargs)
+ 
+ 
  class EventTest(unittest.TestCase):

      def test_a_listener_is_notified_when_event_is_raised(self):
-         called = False
- 
-         def listener():
+         listener = Mock()
-             nonlocal called
-             called = True
- 
          event = Event()
          event.connect(listener)
          event.fire()
-         self.assertTrue(called)
+         self.assertTrue(listener.called)

      def test_a_listener_is_passed_correct_parameters(self):
+         listener = Mock()
-         params = ()
- 
-         def listener(*args, **kwargs):
-             nonlocal params
-             params = (args, kwargs)
- 
          event = Event()
          event.connect(listener)
          event.fire(5, shape="square")
-         self.assertEquals(((5, ), {"shape": "square"}), params)
+         self.assertEquals(((5, ), {"shape": "square"}), listener.params)
```
Refactor a lightweight Mock class.
291923f4ad1fc0041284a73d6edad43e6047fafc
workspace/commands/status.py
workspace/commands/status.py
old_contents:
```python
from __future__ import absolute_import

import os
import logging

from workspace.commands import AbstractCommand
from workspace.commands.helpers import ProductPager
from workspace.scm import stat_repo, repos, product_name, all_branches, is_repo

log = logging.getLogger(__name__)


class Status(AbstractCommand):
    """ Show status on current product or all products in workspace """
    alias = 'st'

    def run(self):
        try:
            scm_repos = repos()
            in_repo = is_repo(os.getcwd())
            optional = len(scm_repos) == 1
            pager = ProductPager(optional=optional)

            for repo in scm_repos:
                stat_path = os.getcwd() if in_repo else repo
                output = stat_repo(stat_path, True)

                nothing_to_commit = 'nothing to commit' in output and 'Your branch is ahead of' not in output

                branches = all_branches(repo)
                child_branches = [b for b in branches if '@' in b]

                if len(child_branches) > 1:
                    if nothing_to_commit:
                        output = '# Branches: %s' % ' '.join(branches)
                        nothing_to_commit = False
                    elif len(branches) > 1:
                        output = '# Branches: %s\n#\n%s' % (' '.join(branches), output)

                if output and not nothing_to_commit:
                    pager.write(product_name(repo), output)
        finally:
            pager.close_and_wait()
```
new_contents:
```python
from __future__ import absolute_import

import os
import logging

from workspace.commands import AbstractCommand
from workspace.commands.helpers import ProductPager
from workspace.scm import stat_repo, repos, product_name, all_branches, is_repo

log = logging.getLogger(__name__)


class Status(AbstractCommand):
    """ Show status on current product or all products in workspace """
    alias = 'st'

    def run(self):
        try:
            scm_repos = repos()
            in_repo = is_repo(os.getcwd())
            optional = len(scm_repos) == 1
            pager = ProductPager(optional=optional)

            for repo in scm_repos:
                stat_path = os.getcwd() if in_repo else repo
                output = stat_repo(stat_path, True)

                nothing_to_commit = 'nothing to commit' in output and 'Your branch is ahead of' not in output

                branches = all_branches(repo)
                child_branches = [b for b in branches if '@' in b]

                if len(child_branches) > 1 or len(scm_repos) == 1:
                    if nothing_to_commit:
                        output = '# Branches: %s' % ' '.join(branches)
                        nothing_to_commit = False
                    elif len(branches) > 1:
                        output = '# Branches: %s\n#\n%s' % (' '.join(branches), output)

                if output and not nothing_to_commit:
                    pager.write(product_name(repo), output)
        finally:
            pager.close_and_wait()
```
Fix bug to display all branches when there is only 1 repo
Fix bug to display all branches when there is only 1 repo
Python
mit
maxzheng/workspace-tools
ndiff:
```diff
  from __future__ import absolute_import

  import os
  import logging

  from workspace.commands import AbstractCommand
  from workspace.commands.helpers import ProductPager
  from workspace.scm import stat_repo, repos, product_name, all_branches, is_repo

  log = logging.getLogger(__name__)


  class Status(AbstractCommand):
      """ Show status on current product or all products in workspace """
      alias = 'st'

      def run(self):
          try:
              scm_repos = repos()
              in_repo = is_repo(os.getcwd())
              optional = len(scm_repos) == 1
              pager = ProductPager(optional=optional)

              for repo in scm_repos:
                  stat_path = os.getcwd() if in_repo else repo
                  output = stat_repo(stat_path, True)

                  nothing_to_commit = 'nothing to commit' in output and 'Your branch is ahead of' not in output

                  branches = all_branches(repo)
                  child_branches = [b for b in branches if '@' in b]

-                 if len(child_branches) > 1:
+                 if len(child_branches) > 1 or len(scm_repos) == 1:
                      if nothing_to_commit:
                          output = '# Branches: %s' % ' '.join(branches)
                          nothing_to_commit = False
                      elif len(branches) > 1:
                          output = '# Branches: %s\n#\n%s' % (' '.join(branches), output)

                  if output and not nothing_to_commit:
                      pager.write(product_name(repo), output)
          finally:
              pager.close_and_wait()
```
Fix bug to display all branches when there is only 1 repo
b18bdf11141cf47319eed9ba2b861ebc287cf5ff
pyqs/utils.py
pyqs/utils.py
old_contents:
```python
import base64
import json
import pickle


def decode_message(message):
    message_body = message.get_body()
    json_body = json.loads(message_body)
    if 'task' in message_body:
        return json_body
    else:
        # Fallback to processing celery messages
        return decode_celery_message(json_body)


def decode_celery_message(json_task):
    message = base64.decodestring(json_task['body'])
    return pickle.loads(message)


def function_to_import_path(function):
    return "{}.{}".format(function.__module__, function.func_name)
```
new_contents:
```python
import base64
import json
import pickle


def decode_message(message):
    message_body = message.get_body()
    json_body = json.loads(message_body)
    if 'task' in message_body:
        return json_body
    else:
        # Fallback to processing celery messages
        return decode_celery_message(json_body)


def decode_celery_message(json_task):
    message = base64.decodestring(json_task['body'])
    try:
        return json.loads(message)
    except ValueError:
        pass
    return pickle.loads(message)


def function_to_import_path(function):
    return "{}.{}".format(function.__module__, function.func_name)
```
Add fallback for loading json encoded celery messages
Add fallback for loading json encoded celery messages
Python
mit
spulec/PyQS
ndiff:
```diff
  import base64
  import json
  import pickle


  def decode_message(message):
      message_body = message.get_body()
      json_body = json.loads(message_body)
      if 'task' in message_body:
          return json_body
      else:
          # Fallback to processing celery messages
          return decode_celery_message(json_body)


  def decode_celery_message(json_task):
      message = base64.decodestring(json_task['body'])
+     try:
+         return json.loads(message)
+     except ValueError:
+         pass
      return pickle.loads(message)


  def function_to_import_path(function):
      return "{}.{}".format(function.__module__, function.func_name)
```
Add fallback for loading json encoded celery messages
77b87f5657583a5418d57f712b52bbcd6e9421aa
puzzle.py
puzzle.py
old_contents:
```python
class Puzzle:

    def get_all_exits(self, graph):
        exits = []
        for key, value in graph.items():
            for item in value:
                if 'Exit' in item:
                    exits += item
        return exits

    def find_all_paths(self, graph, start, end, path=None):
        if path is None:
            path = []
        path = path + [start]
        if start == end:
            return [path]
        if start not in graph:
            return []
        paths = []
        for node in graph[start]:
            if node not in path:
                newpaths = self.find_all_paths(graph, node, end, path)
                for newpath in newpaths:
                    paths.append(newpath)
        return paths

    def solve(self, graph=None):
        unique_paths = []
        for exit in self.get_all_exits(graph):
            for start, connected_nodes in graph.items():
                unique_paths += self.find_all_paths(graph, start, exit)
        return unique_paths
```
new_contents:
```python
class Puzzle:

    def get_all_exits(self, graph):
        exits = []
        for root_node, connected_nodes in graph.items():
            for node in connected_nodes:
                if 'Exit' in node:
                    exits += node
        return exits

    def find_all_paths(self, graph, start, end, path=None):
        if path is None:
            path = []
        path = path + [start]
        if start == end:
            return [path]
        if start not in graph:
            return []
        paths = []
        for node in graph[start]:
            if node not in path:
                newpaths = self.find_all_paths(graph, node, end, path)
                for newpath in newpaths:
                    paths.append(newpath)
        return paths

    def solve(self, graph=None):
        unique_paths = []
        for exit in self.get_all_exits(graph):
            for start, connected_nodes in graph.items():
                unique_paths += self.find_all_paths(graph, start, exit)
        return unique_paths
```
Rename vars in get_all_exits to make it more clear
Rename vars in get_all_exits to make it more clear
Python
mit
aaronshaver/graph-unique-paths
ndiff:
```diff
  class Puzzle:

      def get_all_exits(self, graph):
          exits = []
-         for key, value in graph.items():
-             for item in value:
+         for root_node, connected_nodes in graph.items():
+             for node in connected_nodes:
-                 if 'Exit' in item:
+                 if 'Exit' in node:
-                     exits += item
+                     exits += node
          return exits

      def find_all_paths(self, graph, start, end, path=None):
          if path is None:
              path = []
          path = path + [start]
          if start == end:
              return [path]
          if start not in graph:
              return []
          paths = []
          for node in graph[start]:
              if node not in path:
                  newpaths = self.find_all_paths(graph, node, end, path)
                  for newpath in newpaths:
                      paths.append(newpath)
          return paths

      def solve(self, graph=None):
          unique_paths = []
          for exit in self.get_all_exits(graph):
              for start, connected_nodes in graph.items():
                  unique_paths += self.find_all_paths(graph, start, exit)
          return unique_paths
```
Rename vars in get_all_exits to make it more clear
f7e2bcf941e2a15a3bc28ebf3f15244df6f0d758
posts/versatileimagefield.py
posts/versatileimagefield.py
old_contents:
```python
from django.conf import settings
from versatileimagefield.datastructures.filteredimage import FilteredImage
from versatileimagefield.registry import versatileimagefield_registry
from PIL import Image, ImageDraw, ImageFont
from io import BytesIO


class Watermark(FilteredImage):

    def process_image(self, image, image_format, save_kwargs={}):
        """
        Returns a BytesIO instance of `image` with inverted colors
        """
        if image.mode != 'RGBA':
            image = image.convert('RGBA')
        txt = Image.new('RGBA', image.size, (255,255,255,0))

        fontsize = int(image.size[1] * 0.1)

        # get a font
        fnt = ImageFont.truetype(
            '/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf',
            fontsize,
        )
        # get a drawing context
        d = ImageDraw.Draw(txt)

        # draw text, half opacity
        d.text(
            (10, image.size[1] - 10 - fontsize),
            settings.WATERMARK_TEXT,
            font=fnt,
            fill=(255,255,255,30)
        )

        out = Image.alpha_composite(image, txt)
        out = out.convert('RGB')

        imagefile = BytesIO()
        out.save(
            imagefile,
            **save_kwargs
        )
        return imagefile


versatileimagefield_registry.register_filter('watermark', Watermark)
```
new_contents:
```python
import os.path

from django.conf import settings
from versatileimagefield.datastructures.filteredimage import FilteredImage
from versatileimagefield.registry import versatileimagefield_registry
from PIL import Image, ImageDraw, ImageFont
from io import BytesIO


class Watermark(FilteredImage):

    def process_image(self, image, image_format, save_kwargs={}):
        """
        Returns a BytesIO instance of `image` with inverted colors
        """
        if image.mode != 'RGBA':
            image = image.convert('RGBA')
        txt = Image.new('RGBA', image.size, (255,255,255,0))
        height = image.size[1]

        fontsize = int(image.size[1] * 0.1)

        # get a font
        fnt = ImageFont.truetype(
            os.path.join(
                os.path.dirname(os.path.dirname(__file__)),
                'font', 'conthrax-sb.ttf'
            ),
            fontsize,
        )
        # get a drawing context
        d = ImageDraw.Draw(txt)

        # draw text, half opacity
        d.text(
            (10 + fontsize * .2, height - 10 - fontsize - fontsize * .2),
            settings.WATERMARK_TEXT,
            font=fnt,
            fill=(255,255,255,30)
        )

        out = Image.alpha_composite(image, txt)
        out = out.convert('RGB')

        imagefile = BytesIO()
        out.save(
            imagefile,
            **save_kwargs
        )
        return imagefile


versatileimagefield_registry.register_filter('watermark', Watermark)
```
Use custom font for watermark
Use custom font for watermark Signed-off-by: Michal Čihař <a2df1e659c9fd2578de0a26565357cb273292eeb@cihar.com>
Python
agpl-3.0
nijel/photoblog,nijel/photoblog
ndiff:
```diff
+ import os.path
  from django.conf import settings
  from versatileimagefield.datastructures.filteredimage import FilteredImage
  from versatileimagefield.registry import versatileimagefield_registry
  from PIL import Image, ImageDraw, ImageFont
  from io import BytesIO


  class Watermark(FilteredImage):

      def process_image(self, image, image_format, save_kwargs={}):
          """
          Returns a BytesIO instance of `image` with inverted colors
          """
          if image.mode != 'RGBA':
              image = image.convert('RGBA')
          txt = Image.new('RGBA', image.size, (255,255,255,0))
+         height = image.size[1]

          fontsize = int(image.size[1] * 0.1)

          # get a font
          fnt = ImageFont.truetype(
-             '/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf',
+             os.path.join(
+                 os.path.dirname(os.path.dirname(__file__)),
+                 'font', 'conthrax-sb.ttf'
+             ),
              fontsize,
          )
          # get a drawing context
          d = ImageDraw.Draw(txt)

          # draw text, half opacity
          d.text(
-             (10, image.size[1] - 10 - fontsize),
+             (10 + fontsize * .2, height - 10 - fontsize - fontsize * .2),
              settings.WATERMARK_TEXT,
              font=fnt,
              fill=(255,255,255,30)
          )

          out = Image.alpha_composite(image, txt)
          out = out.convert('RGB')

          imagefile = BytesIO()
          out.save(
              imagefile,
              **save_kwargs
          )
          return imagefile


  versatileimagefield_registry.register_filter('watermark', Watermark)
```
Use custom font for watermark
01e62119750d0737e396358dbf45727dcbb5732f
tests/__init__.py
tests/__init__.py
old_contents:
```python
import sys
import unittest


def main():
    if sys.version_info[0] >= 3:
        from unittest.main import main
        main(module=None)
    else:
        unittest.main()

if __name__ == '__main__':
    main()
```
new_contents:
```python
from unittest.main import main

if __name__ == '__main__':
    main(module=None, verbosity=2)
```
Drop Python 2 support in tests
Drop Python 2 support in tests
Python
bsd-3-clause
retext-project/pymarkups,mitya57/pymarkups
ndiff:
```diff
- import sys
- import unittest
- 
- 
- def main():
-     if sys.version_info[0] >= 3:
-         from unittest.main import main
+ from unittest.main import main
-         main(module=None)
-     else:
-         unittest.main()

  if __name__ == '__main__':
-     main()
+     main(module=None, verbosity=2)
```
Drop Python 2 support in tests
2b74c8714b659ccf5faa615e9b5c4c4559f8d9c8
artbot_website/views.py
artbot_website/views.py
old_contents:
```python
from django.shortcuts import render
from datetime import date, datetime, timedelta
from .models import Event


def index(request):
    if date.today().isoweekday() in [5,6,7]:
        weekend_start = date.today()
    else:
        weekend_start = date.today() + timedelta((5 - date.today().isoweekday()) % 7 )

    events = Event.objects.filter(start__lte = weekend_start, end__gte = weekend_start).order_by('-start')
    return render(request, 'index.html', {'events': events})
```
new_contents:
```python
from django.shortcuts import render
from datetime import date, datetime, timedelta
from .models import Event


def index(request):
    if date.today().isoweekday() in [5,6,7]:
        weekend_start = date.today()
    else:
        weekend_start = date.today() + timedelta((5 - date.today().isoweekday()) % 7 )

    events = Event.objects.filter(start__lte = weekend_start, end__gte = weekend_start, published = True).order_by('-start')
    return render(request, 'index.html', {'events': events})
```
Index now only displays published articles.
Index now only displays published articles.
Python
mit
coreymcdermott/artbot,coreymcdermott/artbot
ndiff:
```diff
  from django.shortcuts import render
  from datetime import date, datetime, timedelta
  from .models import Event


  def index(request):
      if date.today().isoweekday() in [5,6,7]:
          weekend_start = date.today()
      else:
          weekend_start = date.today() + timedelta((5 - date.today().isoweekday()) % 7 )

-     events = Event.objects.filter(start__lte = weekend_start, end__gte = weekend_start).order_by('-start')
+     events = Event.objects.filter(start__lte = weekend_start, end__gte = weekend_start, published = True).order_by('-start')
      return render(request, 'index.html', {'events': events})
```
Index now only displays published articles.
285eeb1c7565f8fa9fb6ba38ed843601f81cdf4e
tmc/models/document_topic.py
tmc/models/document_topic.py
old_contents:
```python
from odoo import api, fields, models


class DocumentTopic(models.Model):
    _name = 'tmc.document_topic'
    _description = 'document_topic'
    _inherit = 'tmc.category'

    first_parent_id = fields.Many2one(
        comodel_name='tmc.document_topic',
        compute='_compute_first_parent',
        store=True
    )

    document_ids = fields.Many2many(
        comodel_name='tmc.document',
        relation='document_main_topic_rel',
        column1='main_topic_ids'
    )

    parent_id = fields.Many2one(
        comodel_name='tmc.document_topic',
        string='Main Topic'
    )

    child_ids = fields.One2many(
        comodel_name='tmc.document_topic',
        inverse_name='parent_id'
    )

    important = fields.Boolean()

    @api.multi
    @api.depends('parent_id', 'parent_id.parent_id')
    def _compute_first_parent(self):
        for document_topic in self:
            first_parent_id = False
            parent = document_topic.parent_id
            while parent:
                first_parent_id = parent.id
                parent = parent.parent_id
            document_topic.first_parent_id = first_parent_id
```
new_contents:
```python
from odoo import api, fields, models


class DocumentTopic(models.Model):
    _name = 'tmc.document_topic'
    _description = 'document_topic'
    _inherit = 'tmc.category'
    _order = 'name'

    first_parent_id = fields.Many2one(
        comodel_name='tmc.document_topic',
        compute='_compute_first_parent',
        store=True
    )

    document_ids = fields.Many2many(
        comodel_name='tmc.document',
        relation='document_main_topic_rel',
        column1='main_topic_ids'
    )

    parent_id = fields.Many2one(
        comodel_name='tmc.document_topic',
        string='Main Topic'
    )

    child_ids = fields.One2many(
        comodel_name='tmc.document_topic',
        inverse_name='parent_id'
    )

    important = fields.Boolean()

    @api.multi
    @api.depends('parent_id', 'parent_id.parent_id')
    def _compute_first_parent(self):
        for document_topic in self:
            first_parent_id = False
            parent = document_topic.parent_id
            while parent:
                first_parent_id = parent.id
                parent = parent.parent_id
            document_topic.first_parent_id = first_parent_id
```
Order document topics by name
[IMP] Order document topics by name
Python
agpl-3.0
tmcrosario/odoo-tmc
ndiff:
```diff
  from odoo import api, fields, models


  class DocumentTopic(models.Model):
      _name = 'tmc.document_topic'
      _description = 'document_topic'
      _inherit = 'tmc.category'
+     _order = 'name'

      first_parent_id = fields.Many2one(
          comodel_name='tmc.document_topic',
          compute='_compute_first_parent',
          store=True
      )

      document_ids = fields.Many2many(
          comodel_name='tmc.document',
          relation='document_main_topic_rel',
          column1='main_topic_ids'
      )

      parent_id = fields.Many2one(
          comodel_name='tmc.document_topic',
          string='Main Topic'
      )

      child_ids = fields.One2many(
          comodel_name='tmc.document_topic',
          inverse_name='parent_id'
      )

      important = fields.Boolean()

      @api.multi
      @api.depends('parent_id', 'parent_id.parent_id')
      def _compute_first_parent(self):
          for document_topic in self:
              first_parent_id = False
              parent = document_topic.parent_id
              while parent:
                  first_parent_id = parent.id
                  parent = parent.parent_id
              document_topic.first_parent_id = first_parent_id
```
Order document topics by name
ee9f1058107f675f7f12f822ead3feb78ec10d9b
wagtail/utils/urlpatterns.py
wagtail/utils/urlpatterns.py
old_contents:
```python
from __future__ import absolute_import, unicode_literals

from functools import update_wrapper


def decorate_urlpatterns(urlpatterns, decorator):
    for pattern in urlpatterns:
        if hasattr(pattern, 'url_patterns'):
            decorate_urlpatterns(pattern.url_patterns, decorator)

        if hasattr(pattern, '_callback'):
            pattern._callback = update_wrapper(decorator(pattern.callback), pattern.callback)

    return urlpatterns
```
new_contents:
```python
from __future__ import absolute_import, unicode_literals
from functools import update_wrapper
from django import VERSION as DJANGO_VERSION


def decorate_urlpatterns(urlpatterns, decorator):
    """Decorate all the views in the passed urlpatterns list with the given decorator"""
    for pattern in urlpatterns:
        if hasattr(pattern, 'url_patterns'):
            # this is an included RegexURLResolver; recursively decorate the views
            # contained in it
            decorate_urlpatterns(pattern.url_patterns, decorator)

        if DJANGO_VERSION < (1, 10):
            # Prior to Django 1.10, RegexURLPattern accepted both strings and callables as
            # the callback parameter; `callback` is a property that consistently returns it as
            # a callable.
            #
            # * if RegexURLPattern was given a string, _callback will be None, and will be
            #   populated on the first call to the `callback` property
            # * if RegexURLPattern was given a callable, _callback will be set to that callable,
            #   and the `callback` property will return it
            #
            # In either case, we wrap the result of `callback` and write it back to `_callback`,
            # so that future calls to `callback` will return our wrapped version.
            if hasattr(pattern, '_callback'):
                pattern._callback = update_wrapper(decorator(pattern.callback), pattern.callback)
        else:
            # In Django 1.10 and above, RegexURLPattern only accepts a callable as the callback
            # parameter; this is directly accessible as the `callback` attribute.
            if getattr(pattern, 'callback', None):
                pattern.callback = update_wrapper(decorator(pattern.callback), pattern.callback)

    return urlpatterns
```
Test for RegexURLPattern.callback on Django 1.10
Test for RegexURLPattern.callback on Django 1.10 Thanks Paul J Stevens for the initial patch, Tim Graham for review and Matt Westcott for tweak of initial patch
Python
bsd-3-clause
nealtodd/wagtail,torchbox/wagtail,nutztherookie/wagtail,nealtodd/wagtail,kurtw/wagtail,mixxorz/wagtail,rsalmaso/wagtail,kurtw/wagtail,jnns/wagtail,kurtw/wagtail,nutztherookie/wagtail,wagtail/wagtail,Toshakins/wagtail,mixxorz/wagtail,gasman/wagtail,iansprice/wagtail,rsalmaso/wagtail,thenewguy/wagtail,kaedroho/wagtail,thenewguy/wagtail,mixxorz/wagtail,gasman/wagtail,kurtrwall/wagtail,nealtodd/wagtail,chrxr/wagtail,mikedingjan/wagtail,mikedingjan/wagtail,timorieber/wagtail,rsalmaso/wagtail,nilnvoid/wagtail,FlipperPA/wagtail,takeflight/wagtail,nilnvoid/wagtail,wagtail/wagtail,rsalmaso/wagtail,chrxr/wagtail,torchbox/wagtail,Toshakins/wagtail,wagtail/wagtail,FlipperPA/wagtail,Toshakins/wagtail,takeflight/wagtail,iansprice/wagtail,chrxr/wagtail,jnns/wagtail,kurtrwall/wagtail,gasman/wagtail,takeflight/wagtail,mixxorz/wagtail,Toshakins/wagtail,wagtail/wagtail,mikedingjan/wagtail,thenewguy/wagtail,nimasmi/wagtail,iansprice/wagtail,thenewguy/wagtail,jnns/wagtail,kurtw/wagtail,nilnvoid/wagtail,zerolab/wagtail,thenewguy/wagtail,nimasmi/wagtail,nutztherookie/wagtail,kurtrwall/wagtail,mikedingjan/wagtail,timorieber/wagtail,kaedroho/wagtail,mixxorz/wagtail,kaedroho/wagtail,zerolab/wagtail,chrxr/wagtail,timorieber/wagtail,nimasmi/wagtail,torchbox/wagtail,kaedroho/wagtail,iansprice/wagtail,nutztherookie/wagtail,jnns/wagtail,FlipperPA/wagtail,rsalmaso/wagtail,kaedroho/wagtail,wagtail/wagtail,takeflight/wagtail,gasman/wagtail,timorieber/wagtail,zerolab/wagtail,kurtrwall/wagtail,nealtodd/wagtail,zerolab/wagtail,FlipperPA/wagtail,nilnvoid/wagtail,gasman/wagtail,torchbox/wagtail,zerolab/wagtail,nimasmi/wagtail
ndiff:
```diff
  from __future__ import absolute_import, unicode_literals
- 
  from functools import update_wrapper
+ from django import VERSION as DJANGO_VERSION


  def decorate_urlpatterns(urlpatterns, decorator):
+     """Decorate all the views in the passed urlpatterns list with the given decorator"""
      for pattern in urlpatterns:
          if hasattr(pattern, 'url_patterns'):
+             # this is an included RegexURLResolver; recursively decorate the views
+             # contained in it
              decorate_urlpatterns(pattern.url_patterns, decorator)

+         if DJANGO_VERSION < (1, 10):
+             # Prior to Django 1.10, RegexURLPattern accepted both strings and callables as
+             # the callback parameter; `callback` is a property that consistently returns it as
+             # a callable.
+             #
+             # * if RegexURLPattern was given a string, _callback will be None, and will be
+             #   populated on the first call to the `callback` property
+             # * if RegexURLPattern was given a callable, _callback will be set to that callable,
+             #   and the `callback` property will return it
+             #
+             # In either case, we wrap the result of `callback` and write it back to `_callback`,
+             # so that future calls to `callback` will return our wrapped version.
+ 
-         if hasattr(pattern, '_callback'):
+             if hasattr(pattern, '_callback'):
-             pattern._callback = update_wrapper(decorator(pattern.callback), pattern.callback)
+                 pattern._callback = update_wrapper(decorator(pattern.callback), pattern.callback)
+         else:
+             # In Django 1.10 and above, RegexURLPattern only accepts a callable as the callback
+             # parameter; this is directly accessible as the `callback` attribute.
+             if getattr(pattern, 'callback', None):
+                 pattern.callback = update_wrapper(decorator(pattern.callback), pattern.callback)

      return urlpatterns
```
Test for RegexURLPattern.callback on Django 1.10
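The `repos` field is a single comma-separated string and, as the row above shows, may repeat one repository many times; a small sketch of normalising it into an ordered, de-duplicated list:

```python
def parse_repos(repos_field: str) -> list[str]:
    # Split on commas, strip whitespace, and deduplicate
    # while preserving first-seen order.
    seen = set()
    unique = []
    for name in repos_field.split(","):
        name = name.strip()
        if name and name not in seen:
            seen.add(name)
            unique.append(name)
    return unique

# e.g. parse_repos("rcos/Observatory,rcos/Observatory") == ["rcos/Observatory"]
```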
1e8ecd09ce6dc44c4955f8bb2f81aa65232ad9a0
multi_schema/management/commands/loaddata.py
multi_schema/management/commands/loaddata.py
old_contents:
```python
from django.core.management.commands import loaddata
from django.core.management.base import CommandError
from django.db import DatabaseError

from optparse import make_option

from ...models import Schema, template_schema


class Command(loaddata.Command):
    option_list = loaddata.Command.option_list + (
        make_option('--schema', action='store', dest='schema',
            help='Specify which schema to load schema-aware models to',
            default='__template__',
        ),
    )

    def handle(self, *app_labels, **options):
        schema_name = options.get('schema')
        if schema_name == '__template__':
            # Hmm, we don't want to accidentally write data to this, so
            # we should raise an exception if we are going to be
            # writing any schema-aware objects.
            schema = None
        else:
            try:
                schema = Schema.objects.get(schema=options.get('schema'))
            except Schema.DoesNotExist:
                raise CommandError('No Schema found named "%s"' % schema_name)

            schema.activate()

            super(Command, self).handle(*app_labels, **options)

        if schema:
            schema.deactivate()
```
new_contents:
```python
from django.core.management.commands import loaddata
from django.core.management.base import CommandError
from django.db import DatabaseError

from optparse import make_option

from ...models import Schema, template_schema


class Command(loaddata.Command):
    option_list = loaddata.Command.option_list + (
        make_option('--schema', action='store', dest='schema',
            help='Specify which schema to load schema-aware models to',
            default='__template__',
        ),
    )

    def handle(self, *app_labels, **options):
        schema_name = options.get('schema')
        if schema_name == '__template__':
            # Hmm, we don't want to accidentally write data to this, so
            # we should raise an exception if we are going to be
            # writing any schema-aware objects.
            schema = None
        else:
            try:
                schema = Schema.objects.get(schema=options.get('schema'))
            except Schema.DoesNotExist:
                raise CommandError('No Schema found named "%s"' % schema_name)

            schema.activate()

        super(Command, self).handle(*app_labels, **options)

        if schema:
            schema.deactivate()


        for schema in Schema.objects.all():
            schema.create_schema()
```
Fix indenting. Create any schemas that were just loaded.
Fix indenting. Create any schemas that were just loaded.
Python
bsd-3-clause
luzfcb/django-boardinghouse,luzfcb/django-boardinghouse,luzfcb/django-boardinghouse
ndiff:
```diff
  from django.core.management.commands import loaddata
  from django.core.management.base import CommandError
  from django.db import DatabaseError

  from optparse import make_option

  from ...models import Schema, template_schema


  class Command(loaddata.Command):
      option_list = loaddata.Command.option_list + (
          make_option('--schema', action='store', dest='schema',
              help='Specify which schema to load schema-aware models to',
              default='__template__',
          ),
      )

      def handle(self, *app_labels, **options):
          schema_name = options.get('schema')
          if schema_name == '__template__':
              # Hmm, we don't want to accidentally write data to this, so
              # we should raise an exception if we are going to be
              # writing any schema-aware objects.
              schema = None
          else:
              try:
                  schema = Schema.objects.get(schema=options.get('schema'))
              except Schema.DoesNotExist:
                  raise CommandError('No Schema found named "%s"' % schema_name)

              schema.activate()

-             super(Command, self).handle(*app_labels, **options)
+         super(Command, self).handle(*app_labels, **options)

          if schema:
              schema.deactivate()
+ 
+ 
+         for schema in Schema.objects.all():
+             schema.create_schema()
```
Fix indenting. Create any schemas that were just loaded.
c5d22fd143f952ce5e0c86b9e8bce4a06fe47063
bigsi/storage/__init__.py
bigsi/storage/__init__.py
old_contents:
```python
from bigsi.storage.berkeleydb import BerkeleyDBStorage
from bigsi.storage.redis import RedisStorage
from bigsi.storage.rocksdb import RocksDBStorage


def get_storage(config):
    return {
        "rocksdb": RocksDBStorage,
        "berkeleydb": BerkeleyDBStorage,
        "redis": RedisStorage,
    }[config["storage-engine"]](config["storage-config"])
```
new_contents:
```python
from bigsi.storage.redis import RedisStorage

try:
    from bigsi.storage.berkeleydb import BerkeleyDBStorage
except ModuleNotFoundError:
    pass
try:
    from bigsi.storage.rocksdb import RocksDBStorage
except ModuleNotFoundError:
    pass


def get_storage(config):
    return {
        "rocksdb": RocksDBStorage,
        "berkeleydb": BerkeleyDBStorage,
        "redis": RedisStorage,
    }[config["storage-engine"]](config["storage-config"])
```
Allow import without optional requirements
Allow import without optional requirements
Python
mit
Phelimb/cbg,Phelimb/cbg,Phelimb/cbg,Phelimb/cbg
ndiff:
```diff
- from bigsi.storage.berkeleydb import BerkeleyDBStorage
  from bigsi.storage.redis import RedisStorage
+ 
+ try:
+     from bigsi.storage.berkeleydb import BerkeleyDBStorage
+ except ModuleNotFoundError:
+     pass
+ try:
-     from bigsi.storage.rocksdb import RocksDBStorage
+     from bigsi.storage.rocksdb import RocksDBStorage
+ except ModuleNotFoundError:
+     pass


  def get_storage(config):
      return {
          "rocksdb": RocksDBStorage,
          "berkeleydb": BerkeleyDBStorage,
          "redis": RedisStorage,
      }[config["storage-engine"]](config["storage-config"])
```
Allow import without optional requirements
33505f9b4dfeead0b01ee1b8cf3f8f228476e866
openpassword/crypt_utils.py
openpassword/crypt_utils.py
old_contents:
```python
from Crypto.Cipher import AES


def decrypt(data, key_iv):
    key = key_iv[0:16]
    iv = key_iv[16:]
    print(data)
    cipher = AES.new(key, AES.MODE_CBC, iv)
    return cipher.decrypt(data)


def encrypt(data, key_iv):
    key = key_iv[0:16]
    iv = key_iv[16:]
    cipher = AES.new(key, AES.MODE_CBC, iv)
    return cipher.encrypt(data)
```
new_contents:
```python
from Crypto.Cipher import AES


def decrypt(data, key_iv):
    key = key_iv[0:16]
    iv = key_iv[16:]
    cipher = AES.new(key, AES.MODE_CBC, iv)
    return cipher.decrypt(data)


def encrypt(data, key_iv):
    key = key_iv[0:16]
    iv = key_iv[16:]
    cipher = AES.new(key, AES.MODE_CBC, iv)
    return cipher.encrypt(data)
```
Remove print statement from crypto utils...
Remove print statement from crypto utils...
Python
mit
openpassword/blimey,openpassword/blimey
ndiff:
```diff
  from Crypto.Cipher import AES


  def decrypt(data, key_iv):
      key = key_iv[0:16]
      iv = key_iv[16:]
-     print(data)
      cipher = AES.new(key, AES.MODE_CBC, iv)
      return cipher.decrypt(data)


  def encrypt(data, key_iv):
      key = key_iv[0:16]
      iv = key_iv[16:]
      cipher = AES.new(key, AES.MODE_CBC, iv)
      return cipher.encrypt(data)
```
Remove print statement from crypto utils...
f2fc7f1015fc24fdbb69069ac74a21437e94657b
xmantissa/plugins/sineoff.py
xmantissa/plugins/sineoff.py
old_contents:
```python
from axiom import iaxiom, userbase

from xmantissa import website, offering, provisioning

from sine import sipserver, sinetheme

sineproxy = provisioning.BenefactorFactory(
    name = u'sineproxy',
    description = u'Sine SIP Proxy',
    benefactorClass = sipserver.SineBenefactor)

plugin = offering.Offering(
    name = u"Sine",

    description = u"""
    The Sine SIP proxy and registrar.
    """,

    siteRequirements = (
        (userbase.IRealm, userbase.LoginSystem),
        (None, website.WebSite),
        (None, sipserver.SIPServer)),

    appPowerups = (sipserver.SinePublicPage, ),

    benefactorFactories = (sineproxy,),
    loginInterfaces=(),
    themes = (sinetheme.XHTMLDirectoryTheme('base'),)
    )
```
new_contents:
```python
from axiom import iaxiom, userbase

from xmantissa import website, offering, provisioning

from sine import sipserver, sinetheme

sineproxy = provisioning.BenefactorFactory(
    name = u'sineproxy',
    description = u'Sine SIP Proxy',
    benefactorClass = sipserver.SineBenefactor)

plugin = offering.Offering(
    name = u"Sine",

    description = u"""
    The Sine SIP proxy and registrar.
    """,

    siteRequirements = (
        (userbase.IRealm, userbase.LoginSystem),
        (None, website.WebSite),
        (None, sipserver.SIPServer)),

    appPowerups = (sipserver.SinePublicPage, ),

    benefactorFactories = (sineproxy,),

    themes = (sinetheme.XHTMLDirectoryTheme('base'),)
    )
```
Revert 5505 - introduced numerous regressions into the test suite
Revert 5505 - introduced numerous regressions into the test suite
Python
mit
habnabit/divmod-sine,twisted/sine
ndiff:
```diff
  from axiom import iaxiom, userbase

  from xmantissa import website, offering, provisioning

  from sine import sipserver, sinetheme

  sineproxy = provisioning.BenefactorFactory(
      name = u'sineproxy',
      description = u'Sine SIP Proxy',
      benefactorClass = sipserver.SineBenefactor)

  plugin = offering.Offering(
      name = u"Sine",

      description = u"""
      The Sine SIP proxy and registrar.
      """,

      siteRequirements = (
          (userbase.IRealm, userbase.LoginSystem),
          (None, website.WebSite),
          (None, sipserver.SIPServer)),

      appPowerups = (sipserver.SinePublicPage, ),

      benefactorFactories = (sineproxy,),
-     loginInterfaces=(),
+ 
      themes = (sinetheme.XHTMLDirectoryTheme('base'),)
      )
```
Revert 5505 - introduced numerous regressions into the test suite
8bfd49c7aef03f6d2ad541f466e9661b6acc5262
staticassets/compilers/sass.py
staticassets/compilers/sass.py
old_contents:
```python
from .base import CommandCompiler


class SassCompiler(CommandCompiler):
    content_type = 'text/css'
    options = {'compass': True}
    command = 'sass'
    params = ['--trace']

    def compile(self, asset):
        if self.compass:
            self.params.append('--compass')
        if '.scss' in asset.attributes.extensions:
            self.params.append('--scss')
        return super(SassCompiler, self).compile(asset)
```
new_contents:
```python
from .base import CommandCompiler


class SassCompiler(CommandCompiler):
    content_type = 'text/css'
    options = {'compass': True, 'scss': False}
    command = 'sass'
    params = ['--trace']

    def get_args(self):
        args = super(SassCompiler, self).get_args()
        if self.compass:
            args.append('--compass')
        if self.scss:
            args.append('--scss')
        return args
```
Fix args being appended continuously to SassCompiler
Fix args being appended continuously to SassCompiler
Python
mit
davidelias/django-staticassets,davidelias/django-staticassets,davidelias/django-staticassets
ndiff:
```diff
  from .base import CommandCompiler


  class SassCompiler(CommandCompiler):
      content_type = 'text/css'
-     options = {'compass': True}
+     options = {'compass': True, 'scss': False}
      command = 'sass'
      params = ['--trace']

-     def compile(self, asset):
+     def get_args(self):
+         args = super(SassCompiler, self).get_args()
          if self.compass:
-             self.params.append('--compass')
+             args.append('--compass')
-         if '.scss' in asset.attributes.extensions:
+         if self.scss:
-             self.params.append('--scss')
+             args.append('--scss')
-         return super(SassCompiler, self).compile(asset)
+         return args
```
Fix args being appended continuously to SassCompiler
## Code Before: from .base import CommandCompiler class SassCompiler(CommandCompiler): content_type = 'text/css' options = {'compass': True} command = 'sass' params = ['--trace'] def compile(self, asset): if self.compass: self.params.append('--compass') if '.scss' in asset.attributes.extensions: self.params.append('--scss') return super(SassCompiler, self).compile(asset) ## Instruction: Fix args being appended continuously to SassCompiler ## Code After: from .base import CommandCompiler class SassCompiler(CommandCompiler): content_type = 'text/css' options = {'compass': True, 'scss': False} command = 'sass' params = ['--trace'] def get_args(self): args = super(SassCompiler, self).get_args() if self.compass: args.append('--compass') if self.scss: args.append('--scss') return args
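Aside on the record above: the original bug is the classic shared-mutable-class-attribute pitfall in Python. `params` is defined once on the class, so appending to it inside `compile()` grows the list on every call and the `--scss`/`--compass` flags accumulate. Below is a minimal, self-contained sketch of the failure and the copy-per-call fix; the `BuggyCompiler`/`FixedCompiler` names are hypothetical and not part of the record:

```python
class BuggyCompiler:
    params = ['--trace']  # class-level list, shared by every instance and call

    def build_args(self, scss=False):
        if scss:
            self.params.append('--scss')  # mutates the shared class-level list
        return self.params


class FixedCompiler:
    params = ['--trace']

    def build_args(self, scss=False):
        args = list(self.params)  # fresh copy on every call
        if scss:
            args.append('--scss')
        return args


buggy = BuggyCompiler()
assert buggy.build_args(scss=True) == ['--trace', '--scss']
assert buggy.build_args(scss=True) == ['--trace', '--scss', '--scss']  # flags pile up

fixed = FixedCompiler()
assert fixed.build_args(scss=True) == ['--trace', '--scss']
assert fixed.build_args(scss=True) == ['--trace', '--scss']  # stable across calls
```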
f522a464e3f58a9f2ed235b48382c9db15f66029
eva/layers/residual_block.py
eva/layers/residual_block.py
from keras.layers import Convolution2D, Merge from keras.layers.advanced_activations import PReLU from keras.engine.topology import merge from eva.layers.masked_convolution2d import MaskedConvolution2D def ResidualBlock(model, filters): # 2h -> h block = Convolution2D(filters//2, 1, 1)(model) block = PReLU()(block) # h 3x3 -> h block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block) block = PReLU()(block) # h -> 2h block = Convolution2D(filters, 1, 1)(block) return PReLU()(Merge(mode='sum')([model, block])) def ResidualBlockList(model, filters, length): for _ in range(length): model = ResidualBlock(model, filters) return model
from keras.layers import Convolution2D, Merge from keras.layers.advanced_activations import PReLU from keras.engine.topology import merge from eva.layers.masked_convolution2d import MaskedConvolution2D def ResidualBlock(model, filters): # 2h -> h block = Convolution2D(filters//2, 1, 1)(model) block = PReLU()(block) # h 3x3 -> h block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block) block = PReLU()(block) # h -> 2h block = Convolution2D(filters, 1, 1)(block) return PReLU()(merge([model, block], mode='sum')) def ResidualBlockList(model, filters, length): for _ in range(length): model = ResidualBlock(model, filters) return model
Use the functional merge; just for formatting
Use the functional merge; just for formatting
Python
apache-2.0
israelg99/eva
from keras.layers import Convolution2D, Merge from keras.layers.advanced_activations import PReLU from keras.engine.topology import merge from eva.layers.masked_convolution2d import MaskedConvolution2D def ResidualBlock(model, filters): # 2h -> h block = Convolution2D(filters//2, 1, 1)(model) block = PReLU()(block) # h 3x3 -> h block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block) block = PReLU()(block) # h -> 2h block = Convolution2D(filters, 1, 1)(block) - return PReLU()(Merge(mode='sum')([model, block])) + return PReLU()(merge([model, block], mode='sum')) def ResidualBlockList(model, filters, length): for _ in range(length): model = ResidualBlock(model, filters) return model
Use the functional merge; just for formatting
## Code Before: from keras.layers import Convolution2D, Merge from keras.layers.advanced_activations import PReLU from keras.engine.topology import merge from eva.layers.masked_convolution2d import MaskedConvolution2D def ResidualBlock(model, filters): # 2h -> h block = Convolution2D(filters//2, 1, 1)(model) block = PReLU()(block) # h 3x3 -> h block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block) block = PReLU()(block) # h -> 2h block = Convolution2D(filters, 1, 1)(block) return PReLU()(Merge(mode='sum')([model, block])) def ResidualBlockList(model, filters, length): for _ in range(length): model = ResidualBlock(model, filters) return model ## Instruction: Use the functional merge; just for formatting ## Code After: from keras.layers import Convolution2D, Merge from keras.layers.advanced_activations import PReLU from keras.engine.topology import merge from eva.layers.masked_convolution2d import MaskedConvolution2D def ResidualBlock(model, filters): # 2h -> h block = Convolution2D(filters//2, 1, 1)(model) block = PReLU()(block) # h 3x3 -> h block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block) block = PReLU()(block) # h -> 2h block = Convolution2D(filters, 1, 1)(block) return PReLU()(merge([model, block], mode='sum')) def ResidualBlockList(model, filters, length): for _ in range(length): model = ResidualBlock(model, filters) return model
8a7a8c3589b1e3bd3a4d8b0dc832178be26117d3
mozaik_membership/wizards/base_partner_merge_automatic_wizard.py
mozaik_membership/wizards/base_partner_merge_automatic_wizard.py
from odoo import models class BasePartnerMergeAutomaticWizard(models.TransientModel): _inherit = "base.partner.merge.automatic.wizard" def _merge(self, partner_ids, dst_partner=None, extra_checks=True): partners = self.env["res.partner"].browse(partner_ids).exists() # remove dst_partner from partners to merge if dst_partner and dst_partner in partners: src_partners = partners - dst_partner else: ordered_partners = self._get_ordered_partner(partners.ids) src_partners = ordered_partners[:-1] src_partners.mapped("membership_line_ids")._close(force=True) return super(BasePartnerMergeAutomaticWizard, self)._merge( partner_ids, dst_partner, extra_checks )
from odoo import models class BasePartnerMergeAutomaticWizard(models.TransientModel): _inherit = "base.partner.merge.automatic.wizard" def _merge(self, partner_ids, dst_partner=None, extra_checks=True): partners = self.env["res.partner"].browse(partner_ids).exists() # remove dst_partner from partners to merge if dst_partner and dst_partner in partners: src_partners = partners - dst_partner else: ordered_partners = self._get_ordered_partner(partners.ids) dst_partner = ordered_partners[-1] src_partners = ordered_partners[:-1] # since we close the membership we need to keep an instance for the security for p in src_partners: p.force_int_instance_id = p.int_instance_id dst_force_int_instance_id = dst_partner.force_int_instance_id src_partners.mapped("membership_line_ids")._close(force=True) res = super(BasePartnerMergeAutomaticWizard, self)._merge( partner_ids, dst_partner, extra_checks ) # do not modify the force_int_instance_id since it should be empty if # there is a membership_line_id dst_partner.force_int_instance_id = dst_force_int_instance_id return res
Fix the security for the merge after closing memberships
Fix the security for the merge after closing memberships
Python
agpl-3.0
mozaik-association/mozaik,mozaik-association/mozaik
from odoo import models class BasePartnerMergeAutomaticWizard(models.TransientModel): _inherit = "base.partner.merge.automatic.wizard" def _merge(self, partner_ids, dst_partner=None, extra_checks=True): partners = self.env["res.partner"].browse(partner_ids).exists() # remove dst_partner from partners to merge if dst_partner and dst_partner in partners: src_partners = partners - dst_partner else: ordered_partners = self._get_ordered_partner(partners.ids) + dst_partner = ordered_partners[-1] src_partners = ordered_partners[:-1] + + # since we close the membership we need to keep an instance for the security + for p in src_partners: + p.force_int_instance_id = p.int_instance_id + dst_force_int_instance_id = dst_partner.force_int_instance_id src_partners.mapped("membership_line_ids")._close(force=True) - return super(BasePartnerMergeAutomaticWizard, self)._merge( + res = super(BasePartnerMergeAutomaticWizard, self)._merge( partner_ids, dst_partner, extra_checks ) + # do not modify the force_int_instance_id since it should be empty if + # there is a membership_line_id + dst_partner.force_int_instance_id = dst_force_int_instance_id + return res +
Fix the security for the merge after closing memberships
## Code Before: from odoo import models class BasePartnerMergeAutomaticWizard(models.TransientModel): _inherit = "base.partner.merge.automatic.wizard" def _merge(self, partner_ids, dst_partner=None, extra_checks=True): partners = self.env["res.partner"].browse(partner_ids).exists() # remove dst_partner from partners to merge if dst_partner and dst_partner in partners: src_partners = partners - dst_partner else: ordered_partners = self._get_ordered_partner(partners.ids) src_partners = ordered_partners[:-1] src_partners.mapped("membership_line_ids")._close(force=True) return super(BasePartnerMergeAutomaticWizard, self)._merge( partner_ids, dst_partner, extra_checks ) ## Instruction: Fix the security for the merge after closing memberships ## Code After: from odoo import models class BasePartnerMergeAutomaticWizard(models.TransientModel): _inherit = "base.partner.merge.automatic.wizard" def _merge(self, partner_ids, dst_partner=None, extra_checks=True): partners = self.env["res.partner"].browse(partner_ids).exists() # remove dst_partner from partners to merge if dst_partner and dst_partner in partners: src_partners = partners - dst_partner else: ordered_partners = self._get_ordered_partner(partners.ids) dst_partner = ordered_partners[-1] src_partners = ordered_partners[:-1] # since we close the membership we need to keep an instance for the security for p in src_partners: p.force_int_instance_id = p.int_instance_id dst_force_int_instance_id = dst_partner.force_int_instance_id src_partners.mapped("membership_line_ids")._close(force=True) res = super(BasePartnerMergeAutomaticWizard, self)._merge( partner_ids, dst_partner, extra_checks ) # do not modify the force_int_instance_id since it should be empty if # there is a membership_line_id dst_partner.force_int_instance_id = dst_force_int_instance_id return res
0e36a49d6a53f87cbe71fd5ec9dce524dd638122
fireplace/deck.py
fireplace/deck.py
import logging import random from .card import Card from .enums import GameTag, Zone from .utils import CardList class Deck(CardList): MAX_CARDS = 30 MAX_UNIQUE_CARDS = 2 MAX_UNIQUE_LEGENDARIES = 1 @classmethod def fromList(cls, cards, hero): return cls([Card(card) for card in cards], Card(hero)) def __init__(self, cards, hero, name=None): super().__init__(cards) self.hero = hero if name is None: name = "Custom %s" % (hero) self.name = name for card in cards: # Don't use .zone directly as it would double-fill the deck card.tags[GameTag.ZONE] = Zone.DECK def __str__(self): return self.name def __repr__(self): return "<%s (%i cards)>" % (self.hero, len(self)) def shuffle(self): logging.info("Shuffling %r..." % (self)) random.shuffle(self)
import logging import random from .card import Card from .enums import GameTag, Zone from .utils import CardList class Deck(CardList): MAX_CARDS = 30 MAX_UNIQUE_CARDS = 2 MAX_UNIQUE_LEGENDARIES = 1 @classmethod def fromList(cls, cards, hero): return cls([Card(card) for card in cards], Card(hero)) def __init__(self, cards, hero): super().__init__(cards) self.hero = hero for card in cards: # Don't use .zone directly as it would double-fill the deck card.tags[GameTag.ZONE] = Zone.DECK def __repr__(self): return "<Deck(hero=%r, count=%i)>" % (self.hero, len(self)) def shuffle(self): logging.info("Shuffling %r..." % (self)) random.shuffle(self)
Drop support for naming Deck objects
Drop support for naming Deck objects
Python
agpl-3.0
smallnamespace/fireplace,Meerkov/fireplace,amw2104/fireplace,Ragowit/fireplace,beheh/fireplace,butozerca/fireplace,Ragowit/fireplace,amw2104/fireplace,liujimj/fireplace,smallnamespace/fireplace,jleclanche/fireplace,oftc-ftw/fireplace,oftc-ftw/fireplace,butozerca/fireplace,NightKev/fireplace,Meerkov/fireplace,liujimj/fireplace
import logging import random from .card import Card from .enums import GameTag, Zone from .utils import CardList class Deck(CardList): MAX_CARDS = 30 MAX_UNIQUE_CARDS = 2 MAX_UNIQUE_LEGENDARIES = 1 @classmethod def fromList(cls, cards, hero): return cls([Card(card) for card in cards], Card(hero)) - def __init__(self, cards, hero, name=None): + def __init__(self, cards, hero): super().__init__(cards) self.hero = hero - if name is None: - name = "Custom %s" % (hero) - self.name = name for card in cards: # Don't use .zone directly as it would double-fill the deck card.tags[GameTag.ZONE] = Zone.DECK - def __str__(self): - return self.name - def __repr__(self): - return "<%s (%i cards)>" % (self.hero, len(self)) + return "<Deck(hero=%r, count=%i)>" % (self.hero, len(self)) def shuffle(self): logging.info("Shuffling %r..." % (self)) random.shuffle(self)
Drop support for naming Deck objects
## Code Before: import logging import random from .card import Card from .enums import GameTag, Zone from .utils import CardList class Deck(CardList): MAX_CARDS = 30 MAX_UNIQUE_CARDS = 2 MAX_UNIQUE_LEGENDARIES = 1 @classmethod def fromList(cls, cards, hero): return cls([Card(card) for card in cards], Card(hero)) def __init__(self, cards, hero, name=None): super().__init__(cards) self.hero = hero if name is None: name = "Custom %s" % (hero) self.name = name for card in cards: # Don't use .zone directly as it would double-fill the deck card.tags[GameTag.ZONE] = Zone.DECK def __str__(self): return self.name def __repr__(self): return "<%s (%i cards)>" % (self.hero, len(self)) def shuffle(self): logging.info("Shuffling %r..." % (self)) random.shuffle(self) ## Instruction: Drop support for naming Deck objects ## Code After: import logging import random from .card import Card from .enums import GameTag, Zone from .utils import CardList class Deck(CardList): MAX_CARDS = 30 MAX_UNIQUE_CARDS = 2 MAX_UNIQUE_LEGENDARIES = 1 @classmethod def fromList(cls, cards, hero): return cls([Card(card) for card in cards], Card(hero)) def __init__(self, cards, hero): super().__init__(cards) self.hero = hero for card in cards: # Don't use .zone directly as it would double-fill the deck card.tags[GameTag.ZONE] = Zone.DECK def __repr__(self): return "<Deck(hero=%r, count=%i)>" % (self.hero, len(self)) def shuffle(self): logging.info("Shuffling %r..." % (self)) random.shuffle(self)
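Aside on the record above: the rewritten `__repr__` follows the common `<ClassName(field=value, count=n)>` convention, with `%r` so string-valued fields appear quoted. A tiny runnable sketch of the same convention; this standalone `Deck` stub subclasses plain `list` instead of the project's `CardList`:

```python
class Deck(list):
    def __init__(self, cards, hero):
        super().__init__(cards)
        self.hero = hero

    def __repr__(self):
        # %r applies repr() to the field, so strings show up quoted
        return "<Deck(hero=%r, count=%i)>" % (self.hero, len(self))

print(repr(Deck(["Wisp", "Boar"], "Jaina")))  # <Deck(hero='Jaina', count=2)>
```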
82756e5314c2768bb3acf03cf542929d23b73f82
bot/logger/message_sender/synchronized.py
bot/logger/message_sender/synchronized.py
import threading from bot.logger.message_sender import MessageSender, IntermediateMessageSender class SynchronizedMessageSender(IntermediateMessageSender): """ Thread-safe message sender. Wrap your `MessageSender` with this class and its :func:`send` function will be called in a synchronized way, only by one thread at the same time. """ def __init__(self, sender: MessageSender): super().__init__(sender) self.lock = threading.Lock() def send(self, text): with self.lock: self.sender.send(text)
import threading from bot.logger.message_sender import MessageSender, IntermediateMessageSender class SynchronizedMessageSender(IntermediateMessageSender): """ Thread-safe message sender. Wrap your `MessageSender` with this class and its :func:`send` function will be called in a synchronized way, only by one thread at the same time. """ def __init__(self, sender: MessageSender): super().__init__(sender) # Using a reentrant lock to play safe in case the send function somehow invokes this send function again # maybe because a send triggers another send on the same message sender. # Note that if this send throws an exception the lock is released when dealing with it from outside, # so this is not a problem. # But if the exception is handled inside this send call, the lock is still held. self.lock = threading.RLock() def send(self, text): with self.lock: self.sender.send(text)
Use reentrant lock on SynchronizedMessageSender
Use reentrant lock on SynchronizedMessageSender
Python
agpl-3.0
alvarogzp/telegram-bot,alvarogzp/telegram-bot
import threading from bot.logger.message_sender import MessageSender, IntermediateMessageSender class SynchronizedMessageSender(IntermediateMessageSender): """ Thread-safe message sender. Wrap your `MessageSender` with this class and its :func:`send` function will be called in a synchronized way, only by one thread at the same time. """ def __init__(self, sender: MessageSender): super().__init__(sender) + # Using a reentrant lock to play safe in case the send function somehow invokes this send function again + # maybe because a send triggers another send on the same message sender. + # Note that if this send throws an exception the lock is released when dealing with it from outside, + # so this is not a problem. + # But if the exception is handled inside this send call, the lock is still held. - self.lock = threading.Lock() + self.lock = threading.RLock() def send(self, text): with self.lock: self.sender.send(text)
Use reentrant lock on SynchronizedMessageSender
## Code Before: import threading from bot.logger.message_sender import MessageSender, IntermediateMessageSender class SynchronizedMessageSender(IntermediateMessageSender): """ Thread-safe message sender. Wrap your `MessageSender` with this class and its :func:`send` function will be called in a synchronized way, only by one thread at the same time. """ def __init__(self, sender: MessageSender): super().__init__(sender) self.lock = threading.Lock() def send(self, text): with self.lock: self.sender.send(text) ## Instruction: Use reentrant lock on SynchronizedMessageSender ## Code After: import threading from bot.logger.message_sender import MessageSender, IntermediateMessageSender class SynchronizedMessageSender(IntermediateMessageSender): """ Thread-safe message sender. Wrap your `MessageSender` with this class and its :func:`send` function will be called in a synchronized way, only by one thread at the same time. """ def __init__(self, sender: MessageSender): super().__init__(sender) # Using a reentrant lock to play safe in case the send function somehow invokes this send function again # maybe because a send triggers another send on the same message sender. # Note that if this send throws an exception the lock is released when dealing with it from outside, # so this is not a problem. # But if the exception is handled inside this send call, the lock is still held. self.lock = threading.RLock() def send(self, text): with self.lock: self.sender.send(text)
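Aside on the record above: the switch from `threading.Lock` to `threading.RLock` matters because a plain lock is not reentrant, so a thread that already holds it deadlocks if it tries to acquire it again, for example when one send triggers another send on the same sender. A small standalone demonstration, not taken from the repository:

```python
import threading

lock = threading.RLock()  # with threading.Lock() the nested call below would deadlock

def send(text, depth=0):
    with lock:
        if depth == 0:
            send("(nested) " + text, depth=1)  # re-enters while the lock is held
        print(text)

send("hello")  # prints "(nested) hello" then "hello" and returns normally
```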
721703801654af88e8b5064d1bc65569ce1555cf
thumbnails/engines/__init__.py
thumbnails/engines/__init__.py
def get_current_engine(): return None
from thumbnails.engines.pillow import PillowEngine def get_current_engine(): return PillowEngine()
Set pillow engine as default
Set pillow engine as default
Python
mit
python-thumbnails/python-thumbnails,relekang/python-thumbnails
+ from thumbnails.engines.pillow import PillowEngine def get_current_engine(): - return None + return PillowEngine()
Set pillow engine as default
## Code Before: def get_current_engine(): return None ## Instruction: Set pillow engine as default ## Code After: from thumbnails.engines.pillow import PillowEngine def get_current_engine(): return PillowEngine()
c814fe264c93dfa09276474960aa83cdb26e7754
polyaxon/api/searches/serializers.py
polyaxon/api/searches/serializers.py
from rest_framework import serializers from db.models.searches import Search class SearchSerializer(serializers.ModelSerializer): class Meta: model = Search fields = ['id', 'name', 'query', 'meta']
from rest_framework import serializers from rest_framework.exceptions import ValidationError from api.utils.serializers.names import NamesMixin from db.models.searches import Search class SearchSerializer(serializers.ModelSerializer, NamesMixin): class Meta: model = Search fields = ['id', 'name', 'query', 'meta'] def create(self, validated_data): validated_data = self.validated_name(validated_data, project=validated_data['project'], query=Search.all) try: return super().create(validated_data) except Exception as e: raise ValidationError(e)
Add graceful handling for creating search with similar names
Add graceful handling for creating search with similar names
Python
apache-2.0
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
from rest_framework import serializers + from rest_framework.exceptions import ValidationError + from api.utils.serializers.names import NamesMixin from db.models.searches import Search - class SearchSerializer(serializers.ModelSerializer): + class SearchSerializer(serializers.ModelSerializer, NamesMixin): - class Meta: model = Search fields = ['id', 'name', 'query', 'meta'] + def create(self, validated_data): + validated_data = self.validated_name(validated_data, + project=validated_data['project'], + query=Search.all) + try: + return super().create(validated_data) + except Exception as e: + raise ValidationError(e) +
Add graceful handling for creating search with similar names
## Code Before: from rest_framework import serializers from db.models.searches import Search class SearchSerializer(serializers.ModelSerializer): class Meta: model = Search fields = ['id', 'name', 'query', 'meta'] ## Instruction: Add graceful handling for creating search with similar names ## Code After: from rest_framework import serializers from rest_framework.exceptions import ValidationError from api.utils.serializers.names import NamesMixin from db.models.searches import Search class SearchSerializer(serializers.ModelSerializer, NamesMixin): class Meta: model = Search fields = ['id', 'name', 'query', 'meta'] def create(self, validated_data): validated_data = self.validated_name(validated_data, project=validated_data['project'], query=Search.all) try: return super().create(validated_data) except Exception as e: raise ValidationError(e)
d90f249e0865dab0cc9a224f413ea90df8a648ed
srsly/util.py
srsly/util.py
from pathlib import Path from typing import Union, Dict, Any, List, Tuple from collections import OrderedDict # fmt: off FilePath = Union[str, Path] # Superficial JSON input/output types # https://github.com/python/typing/issues/182#issuecomment-186684288 JSONOutput = Union[str, int, float, bool, None, Dict[str, Any], List[Any]] JSONOutputBin = Union[bytes, str, int, float, bool, None, Dict[str, Any], List[Any]] # For input, we also accept tuples, ordered dicts etc. JSONInput = Union[str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any], OrderedDict] JSONInputBin = Union[bytes, str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any], OrderedDict] YAMLInput = JSONInput YAMLOutput = JSONOutput # fmt: on def force_path(location, require_exists=True): if not isinstance(location, Path): location = Path(location) if require_exists and not location.exists(): raise ValueError(f"Can't read file: {location}") return location def force_string(location): if isinstance(location, str): return location return str(location)
from pathlib import Path from typing import Union, Dict, Any, List, Tuple from collections import OrderedDict # fmt: off FilePath = Union[str, Path] # Superficial JSON input/output types # https://github.com/python/typing/issues/182#issuecomment-186684288 JSONOutput = Union[str, int, float, bool, None, Dict[str, Any], List[Any]] JSONOutputBin = Union[bytes, str, int, float, bool, None, Dict[str, Any], List[Any]] # For input, we also accept tuples, ordered dicts etc. JSONInput = Union[str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any, ...], OrderedDict] JSONInputBin = Union[bytes, str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any, ...], OrderedDict] YAMLInput = JSONInput YAMLOutput = JSONOutput # fmt: on def force_path(location, require_exists=True): if not isinstance(location, Path): location = Path(location) if require_exists and not location.exists(): raise ValueError(f"Can't read file: {location}") return location def force_string(location): if isinstance(location, str): return location return str(location)
Fix typing for JSONInput and JSONInputBin.
Fix typing for JSONInput and JSONInputBin.
Python
mit
explosion/srsly,explosion/srsly,explosion/srsly,explosion/srsly
from pathlib import Path from typing import Union, Dict, Any, List, Tuple from collections import OrderedDict # fmt: off FilePath = Union[str, Path] # Superficial JSON input/output types # https://github.com/python/typing/issues/182#issuecomment-186684288 JSONOutput = Union[str, int, float, bool, None, Dict[str, Any], List[Any]] JSONOutputBin = Union[bytes, str, int, float, bool, None, Dict[str, Any], List[Any]] # For input, we also accept tuples, ordered dicts etc. - JSONInput = Union[str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any], OrderedDict] + JSONInput = Union[str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any, ...], OrderedDict] - JSONInputBin = Union[bytes, str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any], OrderedDict] + JSONInputBin = Union[bytes, str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any, ...], OrderedDict] YAMLInput = JSONInput YAMLOutput = JSONOutput # fmt: on def force_path(location, require_exists=True): if not isinstance(location, Path): location = Path(location) if require_exists and not location.exists(): raise ValueError(f"Can't read file: {location}") return location def force_string(location): if isinstance(location, str): return location return str(location)
Fix typing for JSONInput and JSONInputBin.
## Code Before: from pathlib import Path from typing import Union, Dict, Any, List, Tuple from collections import OrderedDict # fmt: off FilePath = Union[str, Path] # Superficial JSON input/output types # https://github.com/python/typing/issues/182#issuecomment-186684288 JSONOutput = Union[str, int, float, bool, None, Dict[str, Any], List[Any]] JSONOutputBin = Union[bytes, str, int, float, bool, None, Dict[str, Any], List[Any]] # For input, we also accept tuples, ordered dicts etc. JSONInput = Union[str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any], OrderedDict] JSONInputBin = Union[bytes, str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any], OrderedDict] YAMLInput = JSONInput YAMLOutput = JSONOutput # fmt: on def force_path(location, require_exists=True): if not isinstance(location, Path): location = Path(location) if require_exists and not location.exists(): raise ValueError(f"Can't read file: {location}") return location def force_string(location): if isinstance(location, str): return location return str(location) ## Instruction: Fix typing for JSONInput and JSONInputBin. ## Code After: from pathlib import Path from typing import Union, Dict, Any, List, Tuple from collections import OrderedDict # fmt: off FilePath = Union[str, Path] # Superficial JSON input/output types # https://github.com/python/typing/issues/182#issuecomment-186684288 JSONOutput = Union[str, int, float, bool, None, Dict[str, Any], List[Any]] JSONOutputBin = Union[bytes, str, int, float, bool, None, Dict[str, Any], List[Any]] # For input, we also accept tuples, ordered dicts etc. JSONInput = Union[str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any, ...], OrderedDict] JSONInputBin = Union[bytes, str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any, ...], OrderedDict] YAMLInput = JSONInput YAMLOutput = JSONOutput # fmt: on def force_path(location, require_exists=True): if not isinstance(location, Path): location = Path(location) if require_exists and not location.exists(): raise ValueError(f"Can't read file: {location}") return location def force_string(location): if isinstance(location, str): return location return str(location)
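Aside on the record above: the one-character change is significant because `Tuple[Any]` means a tuple of exactly one element, while `Tuple[Any, ...]` is a variable-length tuple, which is what arbitrary JSON-like input requires. A sketch of the difference as a type checker such as mypy reports it; at runtime every call below succeeds, since annotations are not enforced:

```python
from typing import Any, Tuple

OneElement = Tuple[Any]       # exactly one item
AnyLength = Tuple[Any, ...]   # zero or more items

def f(x: OneElement) -> None: ...
def g(x: AnyLength) -> None: ...

f((1,))       # ok
f((1, 2, 3))  # mypy: incompatible type -- a 3-tuple is not a 1-tuple
g(())         # ok
g((1, 2, 3))  # ok
```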
20e096ac5261cb7fd4197f6cdeb8b171753c82a7
landlab/values/tests/conftest.py
landlab/values/tests/conftest.py
import pytest from landlab import NetworkModelGrid, RasterModelGrid @pytest.fixture def four_by_four_raster(): mg = RasterModelGrid((4, 4)) return mg @pytest.fixture def simple_network(): y_of_node = (0, 1, 2, 2) x_of_node = (0, 0, -1, 1) nodes_at_link = ((1, 0), (2, 1), (3, 1)) mg = NetworkModelGrid((y_of_node, x_of_node), nodes_at_link) return mg
import pytest from landlab import NetworkModelGrid, RasterModelGrid from landlab.values.synthetic import _STATUS @pytest.fixture def four_by_four_raster(): mg = RasterModelGrid((4, 4)) return mg @pytest.fixture def simple_network(): y_of_node = (0, 1, 2, 2) x_of_node = (0, 0, -1, 1) nodes_at_link = ((1, 0), (2, 1), (3, 1)) mg = NetworkModelGrid((y_of_node, x_of_node), nodes_at_link) return mg def pytest_generate_tests(metafunc): if "at" in metafunc.fixturenames: metafunc.parametrize("at", ("node", "link", "patch", "corner", "face", "cell")) if "node_bc" in metafunc.fixturenames: metafunc.parametrize("node_bc", list(_STATUS["node"].keys())) if "link_bc" in metafunc.fixturenames: metafunc.parametrize("link_bc", list(_STATUS["link"].keys()))
Add parametrized fixture for at, node_bc, link_bc.
Add parametrized fixture for at, node_bc, link_bc.
Python
mit
landlab/landlab,cmshobe/landlab,landlab/landlab,cmshobe/landlab,amandersillinois/landlab,landlab/landlab,amandersillinois/landlab,cmshobe/landlab
import pytest from landlab import NetworkModelGrid, RasterModelGrid + from landlab.values.synthetic import _STATUS @pytest.fixture def four_by_four_raster(): mg = RasterModelGrid((4, 4)) return mg @pytest.fixture def simple_network(): y_of_node = (0, 1, 2, 2) x_of_node = (0, 0, -1, 1) nodes_at_link = ((1, 0), (2, 1), (3, 1)) mg = NetworkModelGrid((y_of_node, x_of_node), nodes_at_link) return mg + + def pytest_generate_tests(metafunc): + if "at" in metafunc.fixturenames: + metafunc.parametrize("at", ("node", "link", "patch", "corner", "face", "cell")) + if "node_bc" in metafunc.fixturenames: + metafunc.parametrize("node_bc", list(_STATUS["node"].keys())) + if "link_bc" in metafunc.fixturenames: + metafunc.parametrize("link_bc", list(_STATUS["link"].keys())) +
Add parametrized fixture for at, node_bc, link_bc.
## Code Before: import pytest from landlab import NetworkModelGrid, RasterModelGrid @pytest.fixture def four_by_four_raster(): mg = RasterModelGrid((4, 4)) return mg @pytest.fixture def simple_network(): y_of_node = (0, 1, 2, 2) x_of_node = (0, 0, -1, 1) nodes_at_link = ((1, 0), (2, 1), (3, 1)) mg = NetworkModelGrid((y_of_node, x_of_node), nodes_at_link) return mg ## Instruction: Add parametrized fixture for at, node_bc, link_bc. ## Code After: import pytest from landlab import NetworkModelGrid, RasterModelGrid from landlab.values.synthetic import _STATUS @pytest.fixture def four_by_four_raster(): mg = RasterModelGrid((4, 4)) return mg @pytest.fixture def simple_network(): y_of_node = (0, 1, 2, 2) x_of_node = (0, 0, -1, 1) nodes_at_link = ((1, 0), (2, 1), (3, 1)) mg = NetworkModelGrid((y_of_node, x_of_node), nodes_at_link) return mg def pytest_generate_tests(metafunc): if "at" in metafunc.fixturenames: metafunc.parametrize("at", ("node", "link", "patch", "corner", "face", "cell")) if "node_bc" in metafunc.fixturenames: metafunc.parametrize("node_bc", list(_STATUS["node"].keys())) if "link_bc" in metafunc.fixturenames: metafunc.parametrize("link_bc", list(_STATUS["link"].keys()))
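Aside on the record above: `pytest_generate_tests` is pytest's hook for programmatic parametrization; any collected test that names one of the checked fixtures in its signature is expanded once per value. A self-contained two-file sketch of the mechanism with a hypothetical fixture name, independent of landlab:

```python
# conftest.py
def pytest_generate_tests(metafunc):
    if "element" in metafunc.fixturenames:
        metafunc.parametrize("element", ("node", "link", "cell"))

# test_elements.py
def test_element_is_known(element):
    # collected three times: test_element_is_known[node], [link], [cell]
    assert element in ("node", "link", "cell")
```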
bcde8104bd77f18d7061f7f4d4831ad49644a913
common/management/commands/build_index.py
common/management/commands/build_index.py
from django.core.management import BaseCommand from django.db.models import get_app, get_models from django.conf import settings from common.utilities.search_utils import index_instance class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( '--test', action='store_true', dest='test', default=False, help='Provide this if you want to create a test index') def handle(self, *args, **options): # optimize this to index in bulk apps_lists = settings.LOCAL_APPS for app_name in apps_lists: app = get_app(app_name) for model in get_models(app): all_instances = model.objects.all()[0:3] \ if options.get('test') else model.objects.all() [index_instance(obj) for obj in all_instances] message = "Indexed {} {}".format( all_instances.count(), model._meta.verbose_name_plural.capitalize()) self.stdout.write(message) self.stdout.write("Finished indexing")
from django.core.management import BaseCommand from django.db.models import get_app, get_models from django.conf import settings from common.utilities.search_utils import index_instance class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( '--test', action='store_true', dest='test', default=False, help='Provide this if you want to create a test index') def handle(self, *args, **options): # optimize this to index in bulk apps_lists = settings.LOCAL_APPS for app_name in apps_lists: app = get_app(app_name) for model in get_models(app): if model.__name__.lower() != 'testmodel': all_instances = model.objects.all()[0:3] \ if options.get('test') else model.objects.all() [index_instance(obj) for obj in all_instances] message = "Indexed {} {}".format( all_instances.count(), model._meta.verbose_name_plural.capitalize()) self.stdout.write(message) else: # relation "common_testmodel" does not exist # Will be fixed pass self.stdout.write("Finished indexing")
Check the model being indexed
Check the model being indexed
Python
mit
urandu/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,urandu/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,urandu/mfl_api,urandu/mfl_api
from django.core.management import BaseCommand from django.db.models import get_app, get_models from django.conf import settings from common.utilities.search_utils import index_instance class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( '--test', action='store_true', dest='test', default=False, help='Provide this if you want to create a test index') def handle(self, *args, **options): # optimize this to index in bulk apps_lists = settings.LOCAL_APPS for app_name in apps_lists: app = get_app(app_name) for model in get_models(app): + if model.__name__.lower() != 'testmodel': - all_instances = model.objects.all()[0:3] \ + all_instances = model.objects.all()[0:3] \ - if options.get('test') else model.objects.all() + if options.get('test') else model.objects.all() - [index_instance(obj) for obj in all_instances] + [index_instance(obj) for obj in all_instances] - message = "Indexed {} {}".format( + message = "Indexed {} {}".format( - all_instances.count(), + all_instances.count(), - model._meta.verbose_name_plural.capitalize()) + model._meta.verbose_name_plural.capitalize()) - self.stdout.write(message) + self.stdout.write(message) + else: + # relation "common_testmodel" does not exist + # Will be fixed + pass self.stdout.write("Finished indexing")
Check the model being indexed
## Code Before: from django.core.management import BaseCommand from django.db.models import get_app, get_models from django.conf import settings from common.utilities.search_utils import index_instance class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( '--test', action='store_true', dest='test', default=False, help='Provide this if you want to create a test index') def handle(self, *args, **options): # optimize this to index in bulk apps_lists = settings.LOCAL_APPS for app_name in apps_lists: app = get_app(app_name) for model in get_models(app): all_instances = model.objects.all()[0:3] \ if options.get('test') else model.objects.all() [index_instance(obj) for obj in all_instances] message = "Indexed {} {}".format( all_instances.count(), model._meta.verbose_name_plural.capitalize()) self.stdout.write(message) self.stdout.write("Finished indexing") ## Instruction: Check the model being indexed ## Code After: from django.core.management import BaseCommand from django.db.models import get_app, get_models from django.conf import settings from common.utilities.search_utils import index_instance class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( '--test', action='store_true', dest='test', default=False, help='Provide this if you want to create a test index') def handle(self, *args, **options): # optimize this to index in bulk apps_lists = settings.LOCAL_APPS for app_name in apps_lists: app = get_app(app_name) for model in get_models(app): if model.__name__.lower() != 'testmodel': all_instances = model.objects.all()[0:3] \ if options.get('test') else model.objects.all() [index_instance(obj) for obj in all_instances] message = "Indexed {} {}".format( all_instances.count(), model._meta.verbose_name_plural.capitalize()) self.stdout.write(message) else: # relation "common_testmodel" does not exist # Will be fixed pass self.stdout.write("Finished indexing")
ccb1759a205a4cdc8f5eb2c28adcf49503221135
ecpy/tasks/api.py
ecpy/tasks/api.py
from __future__ import (division, unicode_literals, print_function, absolute_import) import enaml from .base_tasks import BaseTask, SimpleTask, ComplexTask, RootTask from .task_interface import (InterfaceableTaskMixin, TaskInterface, InterfaceableInterfaceMixin, IInterface) from .manager.declarations import (Tasks, Task, Interfaces, Interface, TaskConfig) from .manager.filters import (TaskFilter, SubclassTaskFilter, GroupTaskFilter, MetadataTaskFilter) from .manager.configs.base_configs import BaseTaskConfig with enaml.imports(): from .manager.configs.base_config_views import BaseConfigView from .base_views import BaseTaskView __all__ = ['BaseTask', 'SimpleTask', 'ComplexTask', 'RootTask', 'BaseTaskView', 'InterfaceableTaskMixin', 'TaskInterface', 'InterfaceableInterfaceMixin', 'IInterface', 'Tasks', 'Task', 'Interfaces', 'Interface', 'TaskConfig', 'TaskFilter', 'SubclassTaskFilter', 'GroupTaskFilter', 'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView']
from __future__ import (division, unicode_literals, print_function, absolute_import) import enaml from .base_tasks import BaseTask, SimpleTask, ComplexTask, RootTask from .task_interface import (InterfaceableTaskMixin, TaskInterface, InterfaceableInterfaceMixin, IInterface) from .manager.declarations import (Tasks, Task, Interfaces, Interface, TaskConfig) from .manager.filters import (TaskFilter, SubclassTaskFilter, GroupTaskFilter, MetadataTaskFilter) from .manager.configs.base_configs import BaseTaskConfig from .manager.utils.building import build_task_from_config with enaml.imports(): from .manager.configs.base_config_views import BaseConfigView from .base_views import BaseTaskView __all__ = ['BaseTask', 'SimpleTask', 'ComplexTask', 'RootTask', 'BaseTaskView', 'InterfaceableTaskMixin', 'TaskInterface', 'InterfaceableInterfaceMixin', 'IInterface', 'Tasks', 'Task', 'Interfaces', 'Interface', 'TaskConfig', 'TaskFilter', 'SubclassTaskFilter', 'GroupTaskFilter', 'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView', 'build_task_from_config']
Add tasks/build_from_config to the public API.
Add tasks/build_from_config to the public API.
Python
bsd-3-clause
Ecpy/ecpy,Ecpy/ecpy
from __future__ import (division, unicode_literals, print_function, absolute_import) import enaml from .base_tasks import BaseTask, SimpleTask, ComplexTask, RootTask from .task_interface import (InterfaceableTaskMixin, TaskInterface, InterfaceableInterfaceMixin, IInterface) from .manager.declarations import (Tasks, Task, Interfaces, Interface, TaskConfig) from .manager.filters import (TaskFilter, SubclassTaskFilter, GroupTaskFilter, MetadataTaskFilter) from .manager.configs.base_configs import BaseTaskConfig + from .manager.utils.building import build_task_from_config + with enaml.imports(): from .manager.configs.base_config_views import BaseConfigView from .base_views import BaseTaskView __all__ = ['BaseTask', 'SimpleTask', 'ComplexTask', 'RootTask', 'BaseTaskView', 'InterfaceableTaskMixin', 'TaskInterface', 'InterfaceableInterfaceMixin', 'IInterface', 'Tasks', 'Task', 'Interfaces', 'Interface', 'TaskConfig', 'TaskFilter', 'SubclassTaskFilter', 'GroupTaskFilter', - 'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView'] + 'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView', + 'build_task_from_config']
Add tasks/build_from_config to the public API.
## Code Before: from __future__ import (division, unicode_literals, print_function, absolute_import) import enaml from .base_tasks import BaseTask, SimpleTask, ComplexTask, RootTask from .task_interface import (InterfaceableTaskMixin, TaskInterface, InterfaceableInterfaceMixin, IInterface) from .manager.declarations import (Tasks, Task, Interfaces, Interface, TaskConfig) from .manager.filters import (TaskFilter, SubclassTaskFilter, GroupTaskFilter, MetadataTaskFilter) from .manager.configs.base_configs import BaseTaskConfig with enaml.imports(): from .manager.configs.base_config_views import BaseConfigView from .base_views import BaseTaskView __all__ = ['BaseTask', 'SimpleTask', 'ComplexTask', 'RootTask', 'BaseTaskView', 'InterfaceableTaskMixin', 'TaskInterface', 'InterfaceableInterfaceMixin', 'IInterface', 'Tasks', 'Task', 'Interfaces', 'Interface', 'TaskConfig', 'TaskFilter', 'SubclassTaskFilter', 'GroupTaskFilter', 'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView'] ## Instruction: Add tasks/build_from_config to the public API. ## Code After: from __future__ import (division, unicode_literals, print_function, absolute_import) import enaml from .base_tasks import BaseTask, SimpleTask, ComplexTask, RootTask from .task_interface import (InterfaceableTaskMixin, TaskInterface, InterfaceableInterfaceMixin, IInterface) from .manager.declarations import (Tasks, Task, Interfaces, Interface, TaskConfig) from .manager.filters import (TaskFilter, SubclassTaskFilter, GroupTaskFilter, MetadataTaskFilter) from .manager.configs.base_configs import BaseTaskConfig from .manager.utils.building import build_task_from_config with enaml.imports(): from .manager.configs.base_config_views import BaseConfigView from .base_views import BaseTaskView __all__ = ['BaseTask', 'SimpleTask', 'ComplexTask', 'RootTask', 'BaseTaskView', 'InterfaceableTaskMixin', 'TaskInterface', 'InterfaceableInterfaceMixin', 'IInterface', 'Tasks', 'Task', 'Interfaces', 'Interface', 'TaskConfig', 'TaskFilter', 'SubclassTaskFilter', 'GroupTaskFilter', 'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView', 'build_task_from_config']
810a43c859264e3d5e1af8b43888bf89c06bee1d
ipybind/stream.py
ipybind/stream.py
import contextlib import sys try: import fcntl except ImportError: fcntl = None from ipybind.common import is_kernel from ipybind.ext.wurlitzer import Wurlitzer _fwd = None class Forwarder(Wurlitzer): def __init__(self, handler=None): self._data_handler = handler if handler is not None else lambda x: x super().__init__(stdout=sys.stdout, stderr=sys.stderr) def _handle_data(self, data, stream): data = self._data_handler(self._decode(data)) if data and stream: stream.write(data) def _handle_stdout(self, data): self._handle_data(data, self._stdout) def _handle_stderr(self, data): self._handle_data(data, self._stderr) @contextlib.contextmanager def suppress(): if fcntl: with Forwarder(handler=lambda _: None): yield else: yield @contextlib.contextmanager def forward(handler=None): global _fwd if _fwd is None and is_kernel() and fcntl: with Forwarder(handler=handler): yield else: yield def start_forwarding(handler=None): global _fwd if fcntl: if _fwd is None: _fwd = Forwarder(handler=handler) _fwd.__enter__() def stop_forwarding(handler=None): global _fwd if fcntl: if _fwd is not None: _fwd.__exit__(None, None, None) _fwd = None
import contextlib import sys try: import fcntl except ImportError: fcntl = None from ipybind.common import is_kernel from ipybind.ext.wurlitzer import Wurlitzer _fwd = None class Forwarder(Wurlitzer): def __init__(self, handler=None): self._data_handler = handler if handler is not None else lambda x: x super().__init__(stdout=sys.stdout, stderr=sys.stderr) def _handle_data(self, data, stream): data = self._data_handler(self._decode(data)) if data and stream: stream.write(data) def _handle_stdout(self, data): self._handle_data(data, self._stdout) def _handle_stderr(self, data): self._handle_data(data, self._stderr) @contextlib.contextmanager def forward(handler=None): global _fwd if _fwd is None and is_kernel() and fcntl: with Forwarder(handler=handler): yield else: yield def start_forwarding(handler=None): global _fwd if fcntl: if _fwd is None: _fwd = Forwarder(handler=handler) _fwd.__enter__() def stop_forwarding(handler=None): global _fwd if fcntl: if _fwd is not None: _fwd.__exit__(None, None, None) _fwd = None
Remove suppress() as it's no longer required
Remove suppress() as it's no longer required
Python
mit
aldanor/ipybind,aldanor/ipybind,aldanor/ipybind
import contextlib import sys try: import fcntl except ImportError: fcntl = None from ipybind.common import is_kernel from ipybind.ext.wurlitzer import Wurlitzer _fwd = None class Forwarder(Wurlitzer): def __init__(self, handler=None): self._data_handler = handler if handler is not None else lambda x: x super().__init__(stdout=sys.stdout, stderr=sys.stderr) def _handle_data(self, data, stream): data = self._data_handler(self._decode(data)) if data and stream: stream.write(data) def _handle_stdout(self, data): self._handle_data(data, self._stdout) def _handle_stderr(self, data): self._handle_data(data, self._stderr) @contextlib.contextmanager - def suppress(): - if fcntl: - with Forwarder(handler=lambda _: None): - yield - else: - yield - - - @contextlib.contextmanager def forward(handler=None): global _fwd if _fwd is None and is_kernel() and fcntl: with Forwarder(handler=handler): yield else: yield def start_forwarding(handler=None): global _fwd if fcntl: if _fwd is None: _fwd = Forwarder(handler=handler) _fwd.__enter__() def stop_forwarding(handler=None): global _fwd if fcntl: if _fwd is not None: _fwd.__exit__(None, None, None) _fwd = None
Remove suppress() as it's no longer required
## Code Before: import contextlib import sys try: import fcntl except ImportError: fcntl = None from ipybind.common import is_kernel from ipybind.ext.wurlitzer import Wurlitzer _fwd = None class Forwarder(Wurlitzer): def __init__(self, handler=None): self._data_handler = handler if handler is not None else lambda x: x super().__init__(stdout=sys.stdout, stderr=sys.stderr) def _handle_data(self, data, stream): data = self._data_handler(self._decode(data)) if data and stream: stream.write(data) def _handle_stdout(self, data): self._handle_data(data, self._stdout) def _handle_stderr(self, data): self._handle_data(data, self._stderr) @contextlib.contextmanager def suppress(): if fcntl: with Forwarder(handler=lambda _: None): yield else: yield @contextlib.contextmanager def forward(handler=None): global _fwd if _fwd is None and is_kernel() and fcntl: with Forwarder(handler=handler): yield else: yield def start_forwarding(handler=None): global _fwd if fcntl: if _fwd is None: _fwd = Forwarder(handler=handler) _fwd.__enter__() def stop_forwarding(handler=None): global _fwd if fcntl: if _fwd is not None: _fwd.__exit__(None, None, None) _fwd = None ## Instruction: Remove suppress() as it's no longer required ## Code After: import contextlib import sys try: import fcntl except ImportError: fcntl = None from ipybind.common import is_kernel from ipybind.ext.wurlitzer import Wurlitzer _fwd = None class Forwarder(Wurlitzer): def __init__(self, handler=None): self._data_handler = handler if handler is not None else lambda x: x super().__init__(stdout=sys.stdout, stderr=sys.stderr) def _handle_data(self, data, stream): data = self._data_handler(self._decode(data)) if data and stream: stream.write(data) def _handle_stdout(self, data): self._handle_data(data, self._stdout) def _handle_stderr(self, data): self._handle_data(data, self._stderr) @contextlib.contextmanager def forward(handler=None): global _fwd if _fwd is None and is_kernel() and fcntl: with Forwarder(handler=handler): yield else: yield def start_forwarding(handler=None): global _fwd if fcntl: if _fwd is None: _fwd = Forwarder(handler=handler) _fwd.__enter__() def stop_forwarding(handler=None): global _fwd if fcntl: if _fwd is not None: _fwd.__exit__(None, None, None) _fwd = None
db19dfa17261c3d04de0202b2809ba8abb70326b
tests/unit/test_moxstubout.py
tests/unit/test_moxstubout.py
from oslotest import base from oslotest import moxstubout class TestMoxStubout(base.BaseTestCase): def _stubable(self): pass def test_basic_stubout(self): f = self.useFixture(moxstubout.MoxStubout()) before = TestMoxStubout._stubable f.mox.StubOutWithMock(TestMoxStubout, '_stubable') after = TestMoxStubout._stubable self.assertNotEqual(before, after) f.cleanUp() after2 = TestMoxStubout._stubable self.assertEqual(before, after2)
from oslotest import base from oslotest import moxstubout class TestMoxStubout(base.BaseTestCase): def _stubable(self): pass def test_basic_stubout(self): f = self.useFixture(moxstubout.MoxStubout()) before = TestMoxStubout._stubable f.mox.StubOutWithMock(TestMoxStubout, '_stubable') after = TestMoxStubout._stubable self.assertNotEqual(before, after) f.cleanUp() after2 = TestMoxStubout._stubable self.assertEqual(before, after2) f._clear_cleanups()
Fix build break with Fixtures 1.3
Fix build break with Fixtures 1.3 Our explicit call to cleanUp messes things up in latest fixture, so we need to call _clear_cleanups to stop the test from breaking Change-Id: I8ce2309a94736b47fb347f37ab4027857e19c8a8
Python
apache-2.0
openstack/oslotest,openstack/oslotest
from oslotest import base from oslotest import moxstubout class TestMoxStubout(base.BaseTestCase): def _stubable(self): pass def test_basic_stubout(self): f = self.useFixture(moxstubout.MoxStubout()) before = TestMoxStubout._stubable f.mox.StubOutWithMock(TestMoxStubout, '_stubable') after = TestMoxStubout._stubable self.assertNotEqual(before, after) f.cleanUp() after2 = TestMoxStubout._stubable self.assertEqual(before, after2) + f._clear_cleanups()
Fix build break with Fixtures 1.3
## Code Before: from oslotest import base from oslotest import moxstubout class TestMoxStubout(base.BaseTestCase): def _stubable(self): pass def test_basic_stubout(self): f = self.useFixture(moxstubout.MoxStubout()) before = TestMoxStubout._stubable f.mox.StubOutWithMock(TestMoxStubout, '_stubable') after = TestMoxStubout._stubable self.assertNotEqual(before, after) f.cleanUp() after2 = TestMoxStubout._stubable self.assertEqual(before, after2) ## Instruction: Fix build break with Fixtures 1.3 ## Code After: from oslotest import base from oslotest import moxstubout class TestMoxStubout(base.BaseTestCase): def _stubable(self): pass def test_basic_stubout(self): f = self.useFixture(moxstubout.MoxStubout()) before = TestMoxStubout._stubable f.mox.StubOutWithMock(TestMoxStubout, '_stubable') after = TestMoxStubout._stubable self.assertNotEqual(before, after) f.cleanUp() after2 = TestMoxStubout._stubable self.assertEqual(before, after2) f._clear_cleanups()
5ac84c4e9d8d68b7e89ebf344d2c93a5f7ef4c4c
notebooks/galapagos_to_pandas.py
notebooks/galapagos_to_pandas.py
def galapagos_to_pandas(in_filename='/home/ppzsb1/quickdata/GAMA_9_all_combined_gama_only_bd6.fits', out_filename=None): """Convert a GALAPAGOS multi-band catalogue to a pandas-compatible HDF5 file""" from astropy.io import fits import pandas as pd import re import tempfile if out_filename is None: out_filename = re.sub('.fits$', '', in_filename)+'.h5' data = fits.getdata(in_filename, 1) with tempfile.NamedTemporaryFile() as tmp: with pd.get_store(tmp.name, mode='w') as tmpstore: for n in data.names: d = data[n] if len(d.shape) == 1: new_cols = pd.DataFrame(d, columns=[n]) else: new_cols = pd.DataFrame(d, columns=['{}_{}'.format(n,b) for b in 'RUGIZYJHK']) tmpstore[n] = new_cols with pd.get_store(out_filename, mode='w', complib='blosc', complevel=5) as store: # Use format='table' on next line to save as a pytables table store.put('data', pd.concat([tmpstore[n] for n in data.names], axis=1)) return pd.HDFStore(out_filename)
def galapagos_to_pandas(in_filename='/home/ppzsb1/quickdata/GAMA_9_all_combined_gama_only_bd6.fits', out_filename=None, bands='RUGIZYJHK'): """Convert a GALAPAGOS multi-band catalogue to a pandas-compatible HDF5 file""" from astropy.io import fits import pandas as pd import re import tempfile if out_filename is None: out_filename = re.sub('.fits$', '', in_filename)+'.h5' data = fits.getdata(in_filename, 1) with tempfile.NamedTemporaryFile() as tmp: with pd.get_store(tmp.name, mode='w') as tmpstore: for n in data.names: d = data[n] if len(d.shape) == 1: new_cols = pd.DataFrame(d, columns=[n]) else: new_cols = pd.DataFrame(d, columns=['{}_{}'.format(n,b) for b in bands]) tmpstore[n] = new_cols with pd.get_store(out_filename, mode='w', complib='blosc', complevel=5) as store: # Use format='table' on next line to save as a pytables table store.put('data', pd.concat([tmpstore[n] for n in data.names], axis=1)) return pd.HDFStore(out_filename)
Allow specification of GALAPAGOS bands
Allow specification of GALAPAGOS bands
Python
mit
MegaMorph/megamorph-analysis
def galapagos_to_pandas(in_filename='/home/ppzsb1/quickdata/GAMA_9_all_combined_gama_only_bd6.fits', - out_filename=None): + out_filename=None, bands='RUGIZYJHK'): """Convert a GALAPAGOS multi-band catalogue to a pandas-compatible HDF5 file""" from astropy.io import fits import pandas as pd import re import tempfile if out_filename is None: out_filename = re.sub('.fits$', '', in_filename)+'.h5' data = fits.getdata(in_filename, 1) with tempfile.NamedTemporaryFile() as tmp: with pd.get_store(tmp.name, mode='w') as tmpstore: for n in data.names: d = data[n] if len(d.shape) == 1: new_cols = pd.DataFrame(d, columns=[n]) else: - new_cols = pd.DataFrame(d, columns=['{}_{}'.format(n,b) for b in 'RUGIZYJHK']) + new_cols = pd.DataFrame(d, columns=['{}_{}'.format(n,b) for b in bands]) tmpstore[n] = new_cols with pd.get_store(out_filename, mode='w', complib='blosc', complevel=5) as store: # Use format='table' on next line to save as a pytables table store.put('data', pd.concat([tmpstore[n] for n in data.names], axis=1)) return pd.HDFStore(out_filename)
Allow specification of GALAPAGOS bands
## Code Before: def galapagos_to_pandas(in_filename='/home/ppzsb1/quickdata/GAMA_9_all_combined_gama_only_bd6.fits', out_filename=None): """Convert a GALAPAGOS multi-band catalogue to a pandas-compatible HDF5 file""" from astropy.io import fits import pandas as pd import re import tempfile if out_filename is None: out_filename = re.sub('.fits$', '', in_filename)+'.h5' data = fits.getdata(in_filename, 1) with tempfile.NamedTemporaryFile() as tmp: with pd.get_store(tmp.name, mode='w') as tmpstore: for n in data.names: d = data[n] if len(d.shape) == 1: new_cols = pd.DataFrame(d, columns=[n]) else: new_cols = pd.DataFrame(d, columns=['{}_{}'.format(n,b) for b in 'RUGIZYJHK']) tmpstore[n] = new_cols with pd.get_store(out_filename, mode='w', complib='blosc', complevel=5) as store: # Use format='table' on next line to save as a pytables table store.put('data', pd.concat([tmpstore[n] for n in data.names], axis=1)) return pd.HDFStore(out_filename) ## Instruction: Allow specification of GALAPAGOS bands ## Code After: def galapagos_to_pandas(in_filename='/home/ppzsb1/quickdata/GAMA_9_all_combined_gama_only_bd6.fits', out_filename=None, bands='RUGIZYJHK'): """Convert a GALAPAGOS multi-band catalogue to a pandas-compatible HDF5 file""" from astropy.io import fits import pandas as pd import re import tempfile if out_filename is None: out_filename = re.sub('.fits$', '', in_filename)+'.h5' data = fits.getdata(in_filename, 1) with tempfile.NamedTemporaryFile() as tmp: with pd.get_store(tmp.name, mode='w') as tmpstore: for n in data.names: d = data[n] if len(d.shape) == 1: new_cols = pd.DataFrame(d, columns=[n]) else: new_cols = pd.DataFrame(d, columns=['{}_{}'.format(n,b) for b in bands]) tmpstore[n] = new_cols with pd.get_store(out_filename, mode='w', complib='blosc', complevel=5) as store: # Use format='table' on next line to save as a pytables table store.put('data', pd.concat([tmpstore[n] for n in data.names], axis=1)) return pd.HDFStore(out_filename)
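Aside on the record above: the new `bands` parameter exists because 2-D FITS columns hold one value per band, and each band becomes its own DataFrame column named `<COL>_<band>`. A small runnable sketch of that expansion using stand-in data; the column and band names here are hypothetical:

```python
import numpy as np
import pandas as pd

bands = "UGR"  # one letter per band, matching the function's default-style string
mag = np.array([[20.1, 19.8, 19.5],
                [21.3, 21.0, 20.7]])  # 2 objects x 3 bands, stand-in for a FITS column

cols = pd.DataFrame(mag, columns=["MAG_{}".format(b) for b in bands])
print(list(cols.columns))  # ['MAG_U', 'MAG_G', 'MAG_R']
```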
3136f7e37b339252d4c1f5642974e180070c452d
kirppu/signals.py
kirppu/signals.py
from django.db.models.signals import pre_save, pre_delete from django.dispatch import receiver @receiver(pre_save) def save_handler(sender, instance, using, **kwargs): # noinspection PyProtectedMember if instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default": raise ValueError("Saving objects in non-default database should not happen") @receiver(pre_delete) def delete_handler(sender, instance, using, **kwargs): # noinspection PyProtectedMember if instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default": raise ValueError("Deleting objects from non-default database should not happen")
from django.db.models.signals import pre_migrate, post_migrate from django.dispatch import receiver ENABLE_CHECK = True @receiver(pre_migrate) def pre_migrate_handler(*args, **kwargs): global ENABLE_CHECK ENABLE_CHECK = False @receiver(post_migrate) def post_migrate_handler(*args, **kwargs): global ENABLE_CHECK ENABLE_CHECK = True def save_handler(sender, instance, using, **kwargs): # noinspection PyProtectedMember if ENABLE_CHECK and instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default": raise ValueError("Saving objects in non-default database should not happen") def delete_handler(sender, instance, using, **kwargs): # noinspection PyProtectedMember if ENABLE_CHECK and instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default": raise ValueError("Deleting objects from non-default database should not happen")
Allow migrations to be run on extra databases.
Allow migrations to be run on extra databases.

- Remove duplicate registration of save and delete signals. Already registered in apps.
Python
mit
jlaunonen/kirppu,jlaunonen/kirppu,jlaunonen/kirppu,jlaunonen/kirppu
Allow migrations to be run on extra databases.
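The commit message says the save/delete receivers are "already registered in apps", which is why the decorators were dropped above. A minimal sketch of what that AppConfig wiring might look like; the config class name is an assumption, not part of this record.

from django.apps import AppConfig
from django.db.models.signals import pre_save, pre_delete


class KirppuConfig(AppConfig):  # hypothetical name
    name = 'kirppu'

    def ready(self):
        from . import signals
        # Connect the now-undecorated handlers exactly once at startup.
        pre_save.connect(signals.save_handler)
        pre_delete.connect(signals.delete_handler)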
a5baa5f333625244c1e0935745dadedb7df444c3
setup.py
setup.py
## Code Before:
import os
from distutils.core import setup

def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()

setup(
    name='whack',
    version='0.3.0',
    description='Utility for installing binaries from source with a single command',
    long_description=read("README"),
    author='Michael Williamson',
    url='http://github.com/mwilliamson/whack',
    scripts=["scripts/whack"],
    packages=['whack'],
    install_requires=['blah>=0.1.10,<0.2', 'requests', "catchy==0.1.0"],
)

## Code After:
import os
from distutils.core import setup

def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()

setup(
    name='whack',
    version='0.3.0',
    description='Utility for installing binaries from source with a single command',
    long_description=read("README"),
    author='Michael Williamson',
    url='http://github.com/mwilliamson/whack',
    scripts=["scripts/whack"],
    packages=['whack'],
    install_requires=['blah>=0.1.10,<0.2', 'requests>=1,<2', "catchy>=0.1.0,<0.2"],
)
Update install_requires to be more accurate
Update install_requires to be more accurate
Python
bsd-2-clause
mwilliamson/whack
Update install_requires to be more accurate
fc6042cf57752ca139c52889ec5e00c02b618d0d
setup.py
setup.py
## Code Before:
from distutils.core import setup, Command

class PyTest(Command):
    user_options = []
    def initialize_options(self):
        pass
    def finalize_options(self):
        pass
    def run(self):
        import sys
        import subprocess
        errno = subprocess.call([sys.executable, 'runtests.py'])
        raise SystemExit(errno)

with open('README.rst') as file:
    long_description = file.read()

setup(
    name='webpay',
    packages=['webpay'],
    version='0.1.0',
    author='webpay',
    author_email='administrators@webpay.jp',
    url='https://github.com/webpay/webpay-python',
    description='WebPay Python bindings',
    cmdclass={'test': PyTest},
    long_description=long_description,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Operating System :: OS Independent',
        'License :: OSI Approved :: MIT License',
        'Intended Audience :: Developers',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: Implementation :: CPython',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
    requires=[
        'requests (== 2.0.1)'
    ]
)

## Code After:
from distutils.core import setup, Command

class PyTest(Command):
    user_options = []
    def initialize_options(self):
        pass
    def finalize_options(self):
        pass
    def run(self):
        import sys
        import subprocess
        errno = subprocess.call([sys.executable, 'runtests.py'])
        raise SystemExit(errno)

with open('README.rst') as file:
    long_description = file.read()

setup(
    name='webpay',
    packages=['webpay', 'webpay.api', 'webpay.model'],
    version='0.1.0',
    author='webpay',
    author_email='administrators@webpay.jp',
    url='https://github.com/webpay/webpay-python',
    description='WebPay Python bindings',
    cmdclass={'test': PyTest},
    long_description=long_description,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Operating System :: OS Independent',
        'License :: OSI Approved :: MIT License',
        'Intended Audience :: Developers',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: Implementation :: CPython',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
    requires=[
        'requests (== 2.0.1)'
    ]
)
Add api and model to packages
Add api and model to packages
Python
mit
yamaneko1212/webpay-python
Add api and model to packages
a4cacaba81dda523fb6e24f8a4382a334cc549a8
textinator.py
textinator.py
## Code Before:
from PIL import Image
from os import get_terminal_size

default_palette = list('░▒▓█')

print(get_terminal_size())

def scale(val, src, dst):
    """
    Scale the given value from the scale of src to the scale of dst.
    """
    return ((val - src[0]) / (src[1]-src[0])) * (dst[1]-dst[0]) + dst[0]

def value_to_char(value, palette=default_palette, value_range=(0, 256)):
    palette_range = (0, len(palette))
    mapped = int(scale(value, value_range, palette_range))
    return palette[mapped]

def convert_image(image_path):
    original = Image.open(image_path)
    width, height = original.size

    thumb = original.copy()
    thumb.thumbnail(get_terminal_size())
    bw = thumb.convert(mode="L")
    width, height = bw.size

    for y in range(height):
        line = ''
        for x in range(width):
            line += value_to_char(bw.getpixel((x, y)))
        print(line)

    bw.show()

if __name__ == '__main__':
    convert_image('doge.jpg')

## Code After:
import click
from PIL import Image

def scale(val, src, dst):
    """
    Scale the given value from the scale of src to the scale of dst.
    """
    return ((val - src[0]) / (src[1]-src[0])) * (dst[1]-dst[0]) + dst[0]

def value_to_char(value, palette, value_range=(0, 256)):
    palette_range = (0, len(palette))
    mapped = int(scale(value, value_range, palette_range))
    return palette[mapped]

@click.command()
@click.argument('image', type=click.File('rb'))
@click.argument('out', type=click.File('r'), default='-',
                required=False)
@click.option('-p', '--palette', default='█▓▒░ ',
              help="A custom palette for rendering images. Goes from dark to bright.")
@click.option('-w', '--width', type=click.INT,
              help="Width of output. If height is not given, the image will be proportionally scaled.")
@click.option('-h', '--height', type=click.INT,
              help="Height of output. If width is not given, the image will be proportionally scaled.")
def convert(image, palette, out, width, height):
    """
    Converts an input image to a text representation.
    Writes to stdout by default. Optionally takes another file as a second output.
    Supported filetypes: anything PIL supports. For JPEG etc., install the prerequisites.
    """

    original = Image.open(image)
    width, height = original.size

    thumb = original.copy()
    thumb.thumbnail(80)
    bw = thumb.convert(mode="L")
    width, height = bw.size

    for y in range(height):
        line = ''
        for x in range(width):
            pixel = bw.getpixel((x, y))
            line += value_to_char(pixel, palette)
        click.echo(line)
Add commandline interface with Click.
Add commandline interface with Click.
Python
mit
ijks/textinator
Add commandline interface with Click.
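A quick, hedged way to exercise the new command without a shell: CliRunner is Click's standard testing helper, and 'doge.jpg' is a placeholder input file. Note the recorded code still passes a bare int to Image.thumbnail, which expects a (width, height) tuple, so a real invocation would surface that as an error.

from click.testing import CliRunner
from textinator import convert  # assumes the module is importable as textinator

runner = CliRunner()
result = runner.invoke(convert, ['doge.jpg'])  # placeholder image file
print(result.exit_code, result.output)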
1e68f5f1fd565a812ef3fdf10c4c40649e3ef398
foundation/organisation/search_indexes.py
foundation/organisation/search_indexes.py
## Code Before:
from haystack import indexes

from .models import Person, Project, WorkingGroup, NetworkGroup


class PersonIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    twitter = indexes.CharField(model_attr='twitter')
    url = indexes.CharField(model_attr='url')

    def get_model(self):
        return Person


class ProjectIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    twitter = indexes.CharField(model_attr='twitter')
    homepage_url = indexes.CharField(model_attr='homepage_url')
    mailinglist_url = indexes.CharField(model_attr='mailinglist_url')
    sourcecode_url = indexes.CharField(model_attr='sourcecode_url')

    def get_model(self):
        return Project


class WorkingGroupIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    incubation = indexes.BooleanField(model_attr='incubation')

    def get_model(self):
        return WorkingGroup


class NetworkGroupIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    mailinglist = indexes.CharField(model_attr='mailinglist')
    homepage = indexes.CharField(model_attr='homepage')
    twitter = indexes.CharField(model_attr='twitter')

    def get_model(self):
        return NetworkGroup

## Code After:
from haystack import indexes

from .models import Person, Project, WorkingGroup, NetworkGroup


class PersonIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    twitter = indexes.CharField(model_attr='twitter')
    url = indexes.CharField(model_attr='url')

    def get_model(self):
        return Person


class ProjectIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    twitter = indexes.CharField(model_attr='twitter')
    homepage_url = indexes.CharField(model_attr='homepage_url')
    mailinglist_url = indexes.CharField(model_attr='mailinglist_url')
    sourcecode_url = indexes.CharField(model_attr='sourcecode_url')

    def get_model(self):
        return Project


class WorkingGroupIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    incubation = indexes.BooleanField(model_attr='incubation')

    def get_model(self):
        return WorkingGroup


class NetworkGroupIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    twitter = indexes.CharField(model_attr='twitter')
    homepage_url = indexes.CharField(model_attr='homepage_url')
    mailinglist_url = indexes.CharField(model_attr='mailinglist_url')

    def get_model(self):
        return NetworkGroup
Fix references to old model fields
organisation: Fix references to old model fields
Python
mit
okfn/foundation,okfn/foundation,okfn/foundation,okfn/website,MjAbuz/foundation,okfn/website,okfn/foundation,okfn/website,okfn/website,MjAbuz/foundation,MjAbuz/foundation,MjAbuz/foundation
Fix references to old model fields
0ee59d04cb2cbe93a3f4f87a34725fbcd1a66fc0
core/Reader.py
core/Reader.py
## Code Before:
from io import StringIO
from collections import deque


class StreamReader:
    def __init__(self, *args, stream_class=StringIO, **kwargs):
        self.streamClass = stream_class
        self.args = args
        self.kwargs = kwargs

    def read(self, parsing_pipeline):
        parsing_pipeline.reset()
        stream = self.streamClass(*self.args, **self.kwargs)
        min_position = parsing_pipeline.get_min_position()
        max_position = parsing_pipeline.get_max_position()
        length = max_position - min_position + 1
        current_position = -min_position
        ar_index = list()
        element = deque(stream.read(length))
        while True:
            result = parsing_pipeline.check(element, ref_position=-min_position)
            if result is not None and result[0]:
                ar_index.append((current_position, element[-min_position]))
            next_character = stream.read(1)
            current_position += 1
            if next_character and result is not None:
                element.popleft()
                element.append(next_character)
            else:
                break
        stream.close()
        return ar_index

## Code After:
from io import StringIO
from collections import deque


class StreamReader:
    def __init__(self, *args, stream_class=StringIO, **kwargs):
        self.streamClass = stream_class
        self.args = args
        self.kwargs = kwargs

    def read(self, parsing_pipeline):
        parsing_pipeline.reset()
        min_position = parsing_pipeline.get_min_position()
        max_position = parsing_pipeline.get_max_position()
        length = max_position - min_position + 1
        stream = self.streamClass(*self.args, **self.kwargs)
        current_position = -min_position
        ar_index = list()
        element = deque(stream.read(length))
        if len(element) == length:
            while True:
                result = parsing_pipeline.check(element, ref_position=-min_position)
                if result is not None and result[0]:
                    ar_index.append((current_position, element[-min_position]))
                next_character = stream.read(1)
                current_position += 1
                if next_character and result is not None:
                    element.popleft()
                    element.append(next_character)
                else:
                    break
            stream.close()
            return ar_index
        else:
            stream.close()
            raise ValueError("Not enough characters to parse : " + str(len(element)))
Add not enough characters condition
Add not enough characters condition
Python
mit
JCH222/matriochkas
Add not enough characters condition
6d32f609379febe2fdad690adc75a90e26b8d416
backend/backend/serializers.py
backend/backend/serializers.py
## Code Before:
from rest_framework import serializers

from .models import Animal


class AnimalSerializer(serializers.ModelSerializer):
    class Meta:
        model = Animal
        fields = ('id', 'name', 'dob', 'gender', 'active', 'own', 'father', 'mother')

## Code After:
from rest_framework import serializers

from .models import Animal


class AnimalSerializer(serializers.ModelSerializer):
    class Meta:
        model = Animal
        fields = ('id', 'name', 'dob', 'gender',
                  'active', 'own', 'father', 'mother')

    def validate_father(self, father):
        if (father.gender != Animal.MALE):
            raise serializers.ValidationError('The father has to be male.')

    def validate_mother(self, mother):
        if (mother.gender != Animal.FEMALE):
            raise serializers.ValidationError('The mother has to be female.')

    def validate_dob(self, dob):
        father_id = self.context['request'].data['father']
        if (father_id):
            father = Animal.objects.get(pk = father_id)
            if (father and father.dob > dob):
                raise serializers.ValidationError('Animal can\'t be older than it\'s father')

        mother_id = self.context['request'].data['mother']
        if (mother_id):
            mother = Animal.objects.get(pk = mother_id)
            if (mother and mother.dob > dob):
                raise serializers.ValidationError('Animal can\'t be older than it\'s mother')
Add validator that selected father is male and mother is female. Validate that the animal is younger than it's parents.
Add validator that selected father is male and mother is female. Validate that the animal is younger than it's parents.
Python
apache-2.0
mmlado/animal_pairing,mmlado/animal_pairing
Add validator that selected father is male and mother is female. Validate that the animal is younger than it's parents.
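One caveat worth flagging on this record: DRF field-level validators are expected to return the validated value, and these return None, which would null the fields after validation. The usual pattern looks like the sketch below; it is an editorial note, not part of the commit.

def validate_father(self, father):
    if father is not None and father.gender != Animal.MALE:
        raise serializers.ValidationError('The father has to be male.')
    return father  # returning the value keeps it in validated_data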
f2cd1d531a1cefdc5da4b418c866be0d76aa349b
basil_common/str_support.py
basil_common/str_support.py
## Code Before:
def as_int(value):
    try:
        return int(value)
    except ValueError:
        return None

## Code After:
def as_int(value):
    try:
        return int(value)
    except ValueError:
        return None


def urljoin(*parts):
    url = parts[0]
    for p in parts[1:]:
        if url[-1] != '/':
            url += '/'
        url += p
    return url
Add url join which serves our needs
Add url join which serves our needs

Existing functions in common libraries add extra slashes.
Python
apache-2.0
eve-basil/common
Add url join which serves our needs
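A few illustrative calls for the new helper, showing the "no extra slashes" behaviour the message describes. Note it only checks the accumulated URL's trailing slash, so a later part carrying its own leading slash would still double up.

print(urljoin('http://example.com', 'v1', 'items'))    # http://example.com/v1/items
print(urljoin('http://example.com/', 'v1/', 'items'))  # http://example.com/v1/items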
a40c617ea605bd667a9906f6c9400fc9562d7c0a
salt/daemons/flo/reactor.py
salt/daemons/flo/reactor.py
## Code Before:
'''
Start the reactor!
'''
# Import salt libs
import salt.utils.reactor
# Import ioflo libs
import ioflo.base.deeding


@ioflo.base.deeding.deedify(
    'SaltRaetReactorFork',
    ioinit={
        'opts': '.salt.opts',
        'proc_mgr': '.salt.usr.proc_mgr'})
def reactor_fork(self):
    '''
    Add a reactor object to the process manager
    '''
    self.proc_mgr.add_process(
        salt.utils.reactor.Reactor,
        args=(self.opts.value,))

## Code After:
'''
Start the reactor!
'''
# Import salt libs
import salt.utils.reactor
import salt.utils.event
# Import ioflo libs
import ioflo.base.deeding


@ioflo.base.deeding.deedify(
    'SaltRaetReactorFork',
    ioinit={
        'opts': '.salt.opts',
        'proc_mgr': '.salt.usr.proc_mgr'})
def reactor_fork(self):
    '''
    Add a reactor object to the process manager
    '''
    self.proc_mgr.add_process(
        salt.utils.reactor.Reactor,
        args=(self.opts.value,))


@ioflo.base.deeding.deedify(
    'SaltRaetEventReturnFork',
    ioinit={
        'opts': '.salt.opts',
        'proc_mgr': '.salt.usr.proc_mgr'})
def event_return_fork(self):
    '''
    Add a reactor object to the process manager
    '''
    self.proc_mgr.add_process(
        salt.utils.event.EventReturn,
        args=(self.opts.value,))
Add event return fork behavior
Add event return fork behavior
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
Add event return fork behavior
14e9bda5de10ef5a1c6dd96692d083f4e0f16025
python/ql/test/experimental/library-tests/frameworks/yaml/Decoding.py
python/ql/test/experimental/library-tests/frameworks/yaml/Decoding.py
## Code Before:
import yaml
from yaml import SafeLoader

yaml.load(payload)  # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load(payload, SafeLoader)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.load(payload, Loader=SafeLoader)  # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.load(payload, Loader=yaml.BaseLoader)  # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML

yaml.safe_load(payload)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.unsafe_load(payload)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.full_load(payload)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput

yaml.load_all(payload)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.safe_load_all(payload)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.unsafe_load_all(payload)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.full_load_all(payload)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput

## Code After:
import yaml

# Unsafe:
yaml.load(payload)  # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load(payload, yaml.Loader)  # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.unsafe_load(payload)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.full_load(payload)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput

# Safe
yaml.load(payload, yaml.SafeLoader)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.load(payload, Loader=yaml.SafeLoader)  # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.load(payload, yaml.BaseLoader)  # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.safe_load(payload)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML

# load_all variants
yaml.load_all(payload)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.safe_load_all(payload)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.unsafe_load_all(payload)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.full_load_all(payload)  # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
Refactor PyYAML tests a bit
Python: Refactor PyYAML tests a bit
Python
mit
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
Refactor PyYAML tests a bit
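For context on the decodeMayExecuteInput annotations in this record: PyYAML loaders other than SafeLoader/BaseLoader resolve Python-specific tags, so a crafted document can run code. A minimal demonstration; the payload is illustrative, and the unsafe call is left commented because it really would execute a shell command.

import yaml

payload = "!!python/object/apply:os.system ['echo pwned']"
try:
    yaml.safe_load(payload)  # SafeLoader rejects python/* tags
except yaml.constructor.ConstructorError as exc:
    print('safe loader refused:', exc)
# yaml.unsafe_load(payload) would actually run `echo pwned`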
0997055c591d7bd4ad4334874292f8977ba778bf
cashew/exceptions.py
cashew/exceptions.py
## Code Before:
class CashewException(Exception):
    pass

class InternalCashewException(CashewException):
    pass

class UserFeedback(CashewException):
    pass

class InactivePlugin(UserFeedback):
    def __init__(self, plugin_instance_or_alias):
        if isinstance(plugin_instance_or_alias, basestring):
            self.message = plugin_instance_or_alias
        else:
            self.message = plugin_instance_or_alias.alias

class NoPlugin(UserFeedback):
    pass

## Code After:
class CashewException(Exception):
    pass

class InternalCashewException(CashewException):
    pass

class UserFeedback(CashewException):
    pass

class InactivePlugin(UserFeedback):
    def __init__(self, plugin_instance_or_alias):
        if isinstance(plugin_instance_or_alias, basestring):
            self.alias = plugin_instance_or_alias
        else:
            self.alias = plugin_instance_or_alias.alias

    def __str__(self):
        return "%s is inactive. Some additional software might need to be installed." % (self.alias)

class NoPlugin(UserFeedback):
    pass
Improve error message when alias not available.
Improve error message when alias not available.
Python
mit
dexy/cashew
Improve error message when alias not available.
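The isinstance check in this record uses basestring, which exists only under Python 2; on Python 3 it raises NameError. If the project needed to straddle both, a common shim looks like this sketch, which is not part of the commit.

try:
    string_types = basestring  # Python 2
except NameError:
    string_types = str  # Python 3

def plugin_alias(plugin_or_alias):
    # Same dispatch as InactivePlugin.__init__, minus the Py2-only name.
    if isinstance(plugin_or_alias, string_types):
        return plugin_or_alias
    return plugin_or_alias.alias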
2d82280460c50d50f6be8d8c8405506b4706cd8a
securethenews/blog/tests.py
securethenews/blog/tests.py
## Code Before:
from django.test import TestCase

# Create your tests here.

## Code After:
import datetime

from django.test import TestCase
from wagtail.wagtailcore.models import Page

from .models import BlogIndexPage, BlogPost


class BlogTest(TestCase):
    def setUp(self):
        home_page = Page.objects.get(slug='home')

        blog_index_page = BlogIndexPage(
            title='Blog',
            slug='blog',
            show_in_menus=True
        )
        home_page.add_child(instance=blog_index_page)

        blog_posts = [
            BlogPost(title='First Blog Post',
                     slug='first-blog-post',
                     date=datetime.date.today(),
                     byline='Author'),
            BlogPost(title='Second Blog Post',
                     slug='second-blog-post',
                     date=datetime.date.today(),
                     byline='Author')
        ]

        for blog_post in blog_posts:
            blog_index_page.add_child(instance=blog_post)

    def test_ordering_of_same_day_blogs_on_index(self):
        """Verify that blog posts posted on the same day are ordered with the most recent at the top of the page."""

        blog_index = BlogIndexPage.objects.first()

        self.assertEqual(blog_index.posts[0].title, 'Second Blog Post')
        self.assertEqual(blog_index.posts[1].title, 'First Blog Post')
Add unit test to verify that blog posts are ordered by most recent
Add unit test to verify that blog posts are ordered by most recent

Verifies that blog posts are ordered by most recent first even if the blog posts are posted on the same day.
Python
agpl-3.0
freedomofpress/securethenews,freedomofpress/securethenews,freedomofpress/securethenews,freedomofpress/securethenews
Add unit test to verify that blog posts are ordered by most recent
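The test only passes if BlogIndexPage.posts breaks same-day ties deterministically. That property is not shown in this record; an ordering along these lines would satisfy the assertions, but the real implementation is an assumption.

# Hypothetical BlogIndexPage.posts; descending pk keeps the later-created
# post first when dates are equal, matching the test's expectation.
@property
def posts(self):
    return BlogPost.objects.live().descendant_of(self).order_by('-date', '-pk')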
42221c7b858951376ba59385fa42cac11d542fdd
plugin/script/sphinxexampleae.py
plugin/script/sphinxexampleae.py
## Code Before:
def process( node_name, handle ):

    handle.editorTemplate( beginScrollLayout=True )

    handle.editorTemplate( beginLayout="Float Attributes" )
    handle.editorTemplate( "floatAttr", addControl=True )
    handle.editorTemplate( endLayout=True )

    handle.editorTemplate( addExtraControls=True )
    handle.editorTemplate( endScrollLayout=True )

    handle.editorTemplate( suppress="caching" )
    handle.editorTemplate( suppress="nodeState" )

def ae_template( node_name ):

    from maya import cmds
    maya_handle = MayaHandle( cmds )
    process( node_name, maya_handle )

## Code After:
string_attr_help = """
This is the *annotation* for the stringAttr attribute
"""

def process( node_name, handle ):

    handle.editorTemplate( beginScrollLayout=True )

    handle.editorTemplate( beginLayout="Float Attributes" )
    handle.editorTemplate( "floatAttr", addControl=True, annotation=float_attr_help )
    handle.editorTemplate( endLayout=True )

    handle.editorTemplate( beginLayout="String Attributes" )
    handle.editorTemplate( "stringAttr", addControl=True, annotation=string_attr_help )
    handle.editorTemplate( endLayout=True )

    handle.editorTemplate( addExtraControls=True )
    handle.editorTemplate( endScrollLayout=True )

    handle.editorTemplate( suppress="caching" )
    handle.editorTemplate( suppress="nodeState" )

def ae_template( node_name ):

    from maya import cmds
    maya_handle = MayaHandle( cmds )
    process( node_name, maya_handle )
Add another attribute and some annotations
Add another attribute and some annotations

We write the annotations in rst for the moment.
Python
bsd-3-clause
michaeljones/sphinx-maya-node
Add another attribute and some annotations
## Code Before: def process( node_name, handle ):

    handle.editorTemplate( beginScrollLayout=True )

    handle.editorTemplate( beginLayout="Float Attributes" )
    handle.editorTemplate( "floatAttr", addControl=True )
    handle.editorTemplate( endLayout=True )

    handle.editorTemplate( addExtraControls=True )
    handle.editorTemplate( endScrollLayout=True )

    handle.editorTemplate( suppress="caching" )
    handle.editorTemplate( suppress="nodeState" )


def ae_template( node_name ):

    from maya import cmds
    maya_handle = MayaHandle( cmds )

    process( node_name, maya_handle )
## Instruction: Add another attribute and some annotations
## Code After: float_attr_help = """
This is the *annotation* for the floatAttr attribute
"""

string_attr_help = """
This is the *annotation* for the stringAttr attribute
"""


def process( node_name, handle ):

    handle.editorTemplate( beginScrollLayout=True )

    handle.editorTemplate( beginLayout="Float Attributes" )
    handle.editorTemplate( "floatAttr", addControl=True, annotation=float_attr_help )
    handle.editorTemplate( endLayout=True )

    handle.editorTemplate( beginLayout="String Attributes" )
    handle.editorTemplate( "stringAttr", addControl=True, annotation=string_attr_help )
    handle.editorTemplate( endLayout=True )

    handle.editorTemplate( addExtraControls=True )
    handle.editorTemplate( endScrollLayout=True )

    handle.editorTemplate( suppress="caching" )
    handle.editorTemplate( suppress="nodeState" )


def ae_template( node_name ):

    from maya import cmds
    maya_handle = MayaHandle( cmds )

    process( node_name, maya_handle )
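Two names in this record live outside the snippet: float_attr_help, assumed above to be defined just like string_attr_help, and the MayaHandle wrapper that ae_template instantiates. A minimal sketch of what such a wrapper could look like — the class body is an assumption for illustration, not the project's real implementation:

class MayaHandle(object):
    """Hypothetical thin wrapper around maya.cmds."""

    def __init__(self, cmds):
        self._cmds = cmds

    def editorTemplate(self, *args, **kwargs):
        # Forward every call to maya.cmds.editorTemplate unchanged, so
        # process() can be driven by a fake cmds object in tests as well
        # as by the real module inside Maya.
        return self._cmds.editorTemplate(*args, **kwargs)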
59030daa60a4d2006cae6192219071e2a8017364
test/conftest.py
test/conftest.py
from os.path import join, dirname, abspath default_base_dir = join(dirname(abspath(__file__)), 'completion') import run def pytest_addoption(parser): parser.addoption( "--base-dir", default=default_base_dir, help="Directory in which integration test case files locate.") parser.addoption( "--thirdparty", help="Include integration tests that requires third party modules.") def pytest_generate_tests(metafunc): """ :type metafunc: _pytest.python.Metafunc """ if 'case' in metafunc.fixturenames: base_dir = metafunc.config.option.base_dir test_files = {} thirdparty = metafunc.config.option.thirdparty metafunc.parametrize( 'case', run.collect_dir_tests(base_dir, test_files, thirdparty))
from os.path import join, dirname, abspath default_base_dir = join(dirname(abspath(__file__)), 'completion') import run def pytest_addoption(parser): parser.addoption( "--base-dir", default=default_base_dir, help="Directory in which integration test case files locate.") parser.addoption( "--test-files", "-T", default=[], action='append', help=( "Specify test files using FILE_NAME[:LINE[,LINE[,...]]]. " "For example: -T generators.py:10,13,19. " "Note that you can use -m to specify the test case by id.")) parser.addoption( "--thirdparty", help="Include integration tests that requires third party modules.") def parse_test_files_option(opt): """ Parse option passed to --test-files into a key-value pair. >>> parse_test_files_option('generators.py:10,13,19') ('generators.py', [10, 13, 19]) """ opt = str(opt) if ':' in opt: (f_name, rest) = opt.split(':', 1) return (f_name, list(map(int, rest.split(',')))) else: return (opt, []) def pytest_generate_tests(metafunc): """ :type metafunc: _pytest.python.Metafunc """ if 'case' in metafunc.fixturenames: base_dir = metafunc.config.option.base_dir test_files = dict(map(parse_test_files_option, metafunc.config.option.test_files)) thirdparty = metafunc.config.option.thirdparty metafunc.parametrize( 'case', run.collect_dir_tests(base_dir, test_files, thirdparty))
Add --test-files option to py.test
Add --test-files option to py.test At this point, py.test should be equivalent to test/run.py
Python
mit
tjwei/jedi,jonashaag/jedi,mfussenegger/jedi,jonashaag/jedi,dwillmer/jedi,WoLpH/jedi,tjwei/jedi,mfussenegger/jedi,dwillmer/jedi,flurischt/jedi,WoLpH/jedi,flurischt/jedi
from os.path import join, dirname, abspath default_base_dir = join(dirname(abspath(__file__)), 'completion') import run def pytest_addoption(parser): parser.addoption( "--base-dir", default=default_base_dir, help="Directory in which integration test case files locate.") parser.addoption( + "--test-files", "-T", default=[], action='append', + help=( + "Specify test files using FILE_NAME[:LINE[,LINE[,...]]]. " + "For example: -T generators.py:10,13,19. " + "Note that you can use -m to specify the test case by id.")) + parser.addoption( "--thirdparty", help="Include integration tests that requires third party modules.") + + + def parse_test_files_option(opt): + """ + Parse option passed to --test-files into a key-value pair. + + >>> parse_test_files_option('generators.py:10,13,19') + ('generators.py', [10, 13, 19]) + """ + opt = str(opt) + if ':' in opt: + (f_name, rest) = opt.split(':', 1) + return (f_name, list(map(int, rest.split(',')))) + else: + return (opt, []) def pytest_generate_tests(metafunc): """ :type metafunc: _pytest.python.Metafunc """ if 'case' in metafunc.fixturenames: base_dir = metafunc.config.option.base_dir - test_files = {} + test_files = dict(map(parse_test_files_option, + metafunc.config.option.test_files)) thirdparty = metafunc.config.option.thirdparty metafunc.parametrize( 'case', run.collect_dir_tests(base_dir, test_files, thirdparty))
Add --test-files option to py.test
## Code Before: from os.path import join, dirname, abspath default_base_dir = join(dirname(abspath(__file__)), 'completion') import run def pytest_addoption(parser): parser.addoption( "--base-dir", default=default_base_dir, help="Directory in which integration test case files locate.") parser.addoption( "--thirdparty", help="Include integration tests that requires third party modules.") def pytest_generate_tests(metafunc): """ :type metafunc: _pytest.python.Metafunc """ if 'case' in metafunc.fixturenames: base_dir = metafunc.config.option.base_dir test_files = {} thirdparty = metafunc.config.option.thirdparty metafunc.parametrize( 'case', run.collect_dir_tests(base_dir, test_files, thirdparty)) ## Instruction: Add --test-files option to py.test ## Code After: from os.path import join, dirname, abspath default_base_dir = join(dirname(abspath(__file__)), 'completion') import run def pytest_addoption(parser): parser.addoption( "--base-dir", default=default_base_dir, help="Directory in which integration test case files locate.") parser.addoption( "--test-files", "-T", default=[], action='append', help=( "Specify test files using FILE_NAME[:LINE[,LINE[,...]]]. " "For example: -T generators.py:10,13,19. " "Note that you can use -m to specify the test case by id.")) parser.addoption( "--thirdparty", help="Include integration tests that requires third party modules.") def parse_test_files_option(opt): """ Parse option passed to --test-files into a key-value pair. >>> parse_test_files_option('generators.py:10,13,19') ('generators.py', [10, 13, 19]) """ opt = str(opt) if ':' in opt: (f_name, rest) = opt.split(':', 1) return (f_name, list(map(int, rest.split(',')))) else: return (opt, []) def pytest_generate_tests(metafunc): """ :type metafunc: _pytest.python.Metafunc """ if 'case' in metafunc.fixturenames: base_dir = metafunc.config.option.base_dir test_files = dict(map(parse_test_files_option, metafunc.config.option.test_files)) thirdparty = metafunc.config.option.thirdparty metafunc.parametrize( 'case', run.collect_dir_tests(base_dir, test_files, thirdparty))
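A quick illustration of how repeated -T options collapse into the mapping handed to run.collect_dir_tests (file names invented):

opts = ['generators.py:10,13,19', 'basic.py']
test_files = dict(map(parse_test_files_option, opts))
assert test_files == {'generators.py': [10, 13, 19], 'basic.py': []}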
990008a6fb2788d25445ee9ec21375515527bdc8
nodeconductor/backup/utils.py
nodeconductor/backup/utils.py
import pkg_resources from django.utils import six from django.utils.lru_cache import lru_cache @lru_cache() def get_backup_strategies(): entry_points = pkg_resources.get_entry_map('nodeconductor').get('backup_strategies', {}) strategies = dict((name.upper(), entry_point.load()) for name, entry_point in entry_points.iteritems()) return strategies def has_object_backup_strategy(obj): strategies = get_backup_strategies() return obj.__class__.__name__.upper() in strategies def get_object_backup_strategy(obj): strategies = get_backup_strategies() return strategies[obj.__class__.__name__.upper()] def get_backupable_models(): strategies = get_backup_strategies() return [strategy.get_model() for strategy in six.itervalues(strategies)]
import pkg_resources from django.utils import six from django.utils.lru_cache import lru_cache @lru_cache() def get_backup_strategies(): entry_points = pkg_resources.get_entry_map('nodeconductor').get('backup_strategies', {}) strategies = {name.upper(): entry_point.load() for name, entry_point in six.iteritems(entry_points)} return strategies def has_object_backup_strategy(obj): strategies = get_backup_strategies() return obj.__class__.__name__.upper() in strategies def get_object_backup_strategy(obj): strategies = get_backup_strategies() return strategies[obj.__class__.__name__.upper()] def get_backupable_models(): strategies = get_backup_strategies() return [strategy.get_model() for strategy in six.itervalues(strategies)]
Use new comprehension syntax and six (nc-263)
Use new comprehension syntax and six (nc-263)
Python
mit
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
import pkg_resources from django.utils import six from django.utils.lru_cache import lru_cache @lru_cache() def get_backup_strategies(): entry_points = pkg_resources.get_entry_map('nodeconductor').get('backup_strategies', {}) - strategies = dict((name.upper(), entry_point.load()) for name, entry_point in entry_points.iteritems()) + strategies = {name.upper(): entry_point.load() for name, entry_point in six.iteritems(entry_points)} return strategies def has_object_backup_strategy(obj): strategies = get_backup_strategies() return obj.__class__.__name__.upper() in strategies def get_object_backup_strategy(obj): strategies = get_backup_strategies() return strategies[obj.__class__.__name__.upper()] def get_backupable_models(): strategies = get_backup_strategies() return [strategy.get_model() for strategy in six.itervalues(strategies)]
Use new comprehension syntax and six (nc-263)
## Code Before: import pkg_resources from django.utils import six from django.utils.lru_cache import lru_cache @lru_cache() def get_backup_strategies(): entry_points = pkg_resources.get_entry_map('nodeconductor').get('backup_strategies', {}) strategies = dict((name.upper(), entry_point.load()) for name, entry_point in entry_points.iteritems()) return strategies def has_object_backup_strategy(obj): strategies = get_backup_strategies() return obj.__class__.__name__.upper() in strategies def get_object_backup_strategy(obj): strategies = get_backup_strategies() return strategies[obj.__class__.__name__.upper()] def get_backupable_models(): strategies = get_backup_strategies() return [strategy.get_model() for strategy in six.itervalues(strategies)] ## Instruction: Use new comprehension syntax and six (nc-263) ## Code After: import pkg_resources from django.utils import six from django.utils.lru_cache import lru_cache @lru_cache() def get_backup_strategies(): entry_points = pkg_resources.get_entry_map('nodeconductor').get('backup_strategies', {}) strategies = {name.upper(): entry_point.load() for name, entry_point in six.iteritems(entry_points)} return strategies def has_object_backup_strategy(obj): strategies = get_backup_strategies() return obj.__class__.__name__.upper() in strategies def get_object_backup_strategy(obj): strategies = get_backup_strategies() return strategies[obj.__class__.__name__.upper()] def get_backupable_models(): strategies = get_backup_strategies() return [strategy.get_model() for strategy in six.itervalues(strategies)]
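The rewritten line pairs a dict comprehension with six.iteritems so the same code runs on Python 2 and 3. A self-contained demonstration of the pattern (sample data invented):

from django.utils import six

sample = {'instance': 'backup', 'volume': 'snapshot'}
upper = {name.upper(): value for name, value in six.iteritems(sample)}
assert upper == {'INSTANCE': 'backup', 'VOLUME': 'snapshot'}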
385d7a5734e91217e9d8c0464327dedb30a69621
profile_python.py
profile_python.py
import cProfile as profile import pstats import cairo from DrawTurksHead import TurksHead stats_filename = "profiling/profile_python.stats" img = cairo.ImageSurface(cairo.FORMAT_RGB24, 3200, 2400) ctx = cairo.Context(img) ctx.set_source_rgb(1, 1, 0xBF / 255.) ctx.paint() ctx.translate(1600, 1200) ctx.scale(1, -1) profile.run("TurksHead(24, 18, 190, 1190, 20).draw(ctx)", stats_filename) img.write_to_png("profiling/reference.png") p = pstats.Stats(stats_filename) p.strip_dirs().sort_stats("cumtime").print_stats().print_callees()
import cProfile as profile import pstats import cairo from DrawTurksHead import TurksHead stats_filename = "/tmp/profile.stats" img = cairo.ImageSurface(cairo.FORMAT_RGB24, 3200, 2400) ctx = cairo.Context(img) ctx.set_source_rgb(1, 1, 0xBF / 255.) ctx.paint() ctx.translate(1600, 1200) ctx.scale(1, -1) profile.run("TurksHead(24, 18, 190, 1190, 20).draw(ctx)", stats_filename) img.write_to_png("profiling/reference.png") p = pstats.Stats(stats_filename) p.strip_dirs().sort_stats("cumtime").print_stats().print_callees()
Change name of stats file
Change name of stats file
Python
mit
jacquev6/DrawTurksHead,jacquev6/DrawTurksHead,jacquev6/DrawTurksHead
import cProfile as profile import pstats import cairo from DrawTurksHead import TurksHead - stats_filename = "profiling/profile_python.stats" + stats_filename = "/tmp/profile.stats" img = cairo.ImageSurface(cairo.FORMAT_RGB24, 3200, 2400) ctx = cairo.Context(img) ctx.set_source_rgb(1, 1, 0xBF / 255.) ctx.paint() ctx.translate(1600, 1200) ctx.scale(1, -1) profile.run("TurksHead(24, 18, 190, 1190, 20).draw(ctx)", stats_filename) img.write_to_png("profiling/reference.png") p = pstats.Stats(stats_filename) p.strip_dirs().sort_stats("cumtime").print_stats().print_callees()
Change name of stats file
## Code Before: import cProfile as profile import pstats import cairo from DrawTurksHead import TurksHead stats_filename = "profiling/profile_python.stats" img = cairo.ImageSurface(cairo.FORMAT_RGB24, 3200, 2400) ctx = cairo.Context(img) ctx.set_source_rgb(1, 1, 0xBF / 255.) ctx.paint() ctx.translate(1600, 1200) ctx.scale(1, -1) profile.run("TurksHead(24, 18, 190, 1190, 20).draw(ctx)", stats_filename) img.write_to_png("profiling/reference.png") p = pstats.Stats(stats_filename) p.strip_dirs().sort_stats("cumtime").print_stats().print_callees() ## Instruction: Change name of stats file ## Code After: import cProfile as profile import pstats import cairo from DrawTurksHead import TurksHead stats_filename = "/tmp/profile.stats" img = cairo.ImageSurface(cairo.FORMAT_RGB24, 3200, 2400) ctx = cairo.Context(img) ctx.set_source_rgb(1, 1, 0xBF / 255.) ctx.paint() ctx.translate(1600, 1200) ctx.scale(1, -1) profile.run("TurksHead(24, 18, 190, 1190, 20).draw(ctx)", stats_filename) img.write_to_png("profiling/reference.png") p = pstats.Stats(stats_filename) p.strip_dirs().sort_stats("cumtime").print_stats().print_callees()
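Writing the stats to a file means they can be inspected again later without re-profiling; for example, sorted by a different key than the script itself uses:

import pstats

p = pstats.Stats('/tmp/profile.stats')
# Ten most expensive functions by time spent in the function body itself,
# instead of the cumulative ordering printed by the script above.
p.strip_dirs().sort_stats('tottime').print_stats(10)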
dcd36fab023ac2530cbfa17449e3ce8f61ad6bdc
ssl-cert-parse.py
ssl-cert-parse.py
import datetime import ssl import OpenSSL def GetCert(SiteName, Port): return ssl.get_server_certificate((SiteName, Port)) def ParseCert(CertRaw): Cert = OpenSSL.crypto.load_certificate( OpenSSL.crypto.FILETYPE_PEM, CertRaw) print(str(Cert.get_subject())[18:-2]) print(datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1], '%Y%m%d%H%M%SZ')) print(datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1], '%Y%m%d%H%M%SZ')) print(str(Cert.get_issuer())[18:-2]) CertRaw = GetCert('some.domain.tld', 443) print(CertRaw) ParseCert(CertRaw)
import datetime
import ssl
import OpenSSL


def GetCert(SiteName, Port):
    return ssl.get_server_certificate((SiteName, Port))


def ParseCert(CertRaw):
    Cert = OpenSSL.crypto.load_certificate(
        OpenSSL.crypto.FILETYPE_PEM, CertRaw)

    CertSubject = str(Cert.get_subject())[18:-2]
    CertStartDate = datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
                                               '%Y%m%d%H%M%SZ')
    CertEndDate = datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
                                             '%Y%m%d%H%M%SZ')
    CertIssuer = str(Cert.get_issuer())[18:-2]

    return {'CertSubject': CertSubject, 'CertStartDate': CertStartDate,
            'CertEndDate': CertEndDate, 'CertIssuer': CertIssuer}


CertRaw = GetCert('some.domain.tld', 443)

print(CertRaw)

Out = ParseCert(CertRaw)
print(Out)
print(Out['CertSubject'])
print(Out['CertStartDate'])
Fix ParseCert() function, add variables, add a return statement
Fix ParseCert() function, add variables, add a return statement
Python
apache-2.0
ivuk/ssl-cert-parse
  import datetime
  import ssl
  import OpenSSL
  
  
  def GetCert(SiteName, Port):
      return ssl.get_server_certificate((SiteName, Port))
  
  
  def ParseCert(CertRaw):
      Cert = OpenSSL.crypto.load_certificate(
          OpenSSL.crypto.FILETYPE_PEM, CertRaw)
+ 
-     print(str(Cert.get_subject())[18:-2])
+     CertSubject = str(Cert.get_subject())[18:-2]
-     print(datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
+     CertStartDate = datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
-                                       '%Y%m%d%H%M%SZ'))
+                                                '%Y%m%d%H%M%SZ')
-     print(datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
+     CertEndDate = datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
-                                       '%Y%m%d%H%M%SZ'))
+                                              '%Y%m%d%H%M%SZ')
-     print(str(Cert.get_issuer())[18:-2])
+     CertIssuer = str(Cert.get_issuer())[18:-2]
+ 
+     return {'CertSubject': CertSubject, 'CertStartDate': CertStartDate,
+             'CertEndDate': CertEndDate, 'CertIssuer': CertIssuer}
  
  
  CertRaw = GetCert('some.domain.tld', 443)
+ 
  print(CertRaw)
- ParseCert(CertRaw)
+ Out = ParseCert(CertRaw)
+ print(Out)
+ print(Out['CertSubject'])
+ print(Out['CertStartDate'])
+ 
Fix ParseCert() function, add variables, add a return statement
## Code Before: import datetime
import ssl
import OpenSSL


def GetCert(SiteName, Port):
    return ssl.get_server_certificate((SiteName, Port))


def ParseCert(CertRaw):
    Cert = OpenSSL.crypto.load_certificate(
        OpenSSL.crypto.FILETYPE_PEM, CertRaw)

    print(str(Cert.get_subject())[18:-2])
    print(datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
                                     '%Y%m%d%H%M%SZ'))
    print(datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
                                     '%Y%m%d%H%M%SZ'))
    print(str(Cert.get_issuer())[18:-2])


CertRaw = GetCert('some.domain.tld', 443)

print(CertRaw)

ParseCert(CertRaw)

## Instruction: Fix ParseCert() function, add variables, add a return statement
## Code After: import datetime
import ssl
import OpenSSL


def GetCert(SiteName, Port):
    return ssl.get_server_certificate((SiteName, Port))


def ParseCert(CertRaw):
    Cert = OpenSSL.crypto.load_certificate(
        OpenSSL.crypto.FILETYPE_PEM, CertRaw)

    CertSubject = str(Cert.get_subject())[18:-2]
    CertStartDate = datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
                                               '%Y%m%d%H%M%SZ')
    CertEndDate = datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
                                             '%Y%m%d%H%M%SZ')
    CertIssuer = str(Cert.get_issuer())[18:-2]

    return {'CertSubject': CertSubject, 'CertStartDate': CertStartDate,
            'CertEndDate': CertEndDate, 'CertIssuer': CertIssuer}


CertRaw = GetCert('some.domain.tld', 443)

print(CertRaw)

Out = ParseCert(CertRaw)
print(Out)
print(Out['CertSubject'])
print(Out['CertStartDate'])
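With ParseCert() returning a dict, a caller can, for example, compute the remaining validity of a certificate. A small usage sketch (the hostname is a placeholder):

import datetime

out = ParseCert(GetCert('example.com', 443))
days_left = (out['CertEndDate'] - datetime.datetime.utcnow()).days
print('%s expires in %d days' % (out['CertSubject'], days_left))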
572dca82aab583e91e5b8402d1334bae55244d16
hs_tracking/middleware.py
hs_tracking/middleware.py
from .models import Session class Tracking(object): """The default tracking middleware logs all successful responses as a 'visit' variable with the URL path as its value.""" def process_response(self, request, response): if response.status_code == 200: session = Session.objects.for_request(request) session.record("visit", request.path) return response
from .models import Session class Tracking(object): """The default tracking middleware logs all successful responses as a 'visit' variable with the URL path as its value.""" def process_response(self, request, response): if request.path.startswith('/heartbeat/'): return response if response.status_code == 200: session = Session.objects.for_request(request) session.record("visit", request.path) return response
Disable use tracking of all heartbeat app urls.
Disable use tracking of all heartbeat app urls.
Python
bsd-3-clause
RENCI/xDCIShare,FescueFungiShare/hydroshare,ResearchSoftwareInstitute/MyHPOM,RENCI/xDCIShare,FescueFungiShare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,FescueFungiShare/hydroshare,ResearchSoftwareInstitute/MyHPOM,FescueFungiShare/hydroshare,hydroshare/hydroshare,RENCI/xDCIShare,RENCI/xDCIShare,ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM,RENCI/xDCIShare,ResearchSoftwareInstitute/MyHPOM,hydroshare/hydroshare,FescueFungiShare/hydroshare,hydroshare/hydroshare
from .models import Session class Tracking(object): """The default tracking middleware logs all successful responses as a 'visit' variable with the URL path as its value.""" def process_response(self, request, response): + if request.path.startswith('/heartbeat/'): + return response + if response.status_code == 200: session = Session.objects.for_request(request) session.record("visit", request.path) return response
Disable use tracking of all heartbeat app urls.
## Code Before: from .models import Session class Tracking(object): """The default tracking middleware logs all successful responses as a 'visit' variable with the URL path as its value.""" def process_response(self, request, response): if response.status_code == 200: session = Session.objects.for_request(request) session.record("visit", request.path) return response ## Instruction: Disable use tracking of all heartbeat app urls. ## Code After: from .models import Session class Tracking(object): """The default tracking middleware logs all successful responses as a 'visit' variable with the URL path as its value.""" def process_response(self, request, response): if request.path.startswith('/heartbeat/'): return response if response.status_code == 200: session = Session.objects.for_request(request) session.record("visit", request.path) return response
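A regression test for the new early return could look roughly like this; the test class, URL and assertion are assumptions rather than part of the commit:

from django.test import TestCase

from hs_tracking.models import Session


class HeartbeatNotTrackedTestCase(TestCase):

    def test_heartbeat_url_is_not_recorded(self):
        # The middleware should bail out before any Session is touched.
        self.client.get('/heartbeat/')
        self.assertEqual(Session.objects.count(), 0)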
93d3a2f19cfb3ef9ae62d04ce24901df81bafc3e
luigi/rfam/families_csv.py
luigi/rfam/families_csv.py
import attr import luigi from rfam import utils from rfam.csv_writer import CsvWriter class FamiliesCSV(CsvWriter): headers = [ 'id', 'name', 'description', 'clan', 'seed_count', 'full_count', 'length', 'domain', 'is_supressed', 'rna_type', ] def data(self): for family in utils.load_families(): data = attr.asdict(family) data['is_suppressed'] = int(family.is_suppressed) data['rna_type'] = family.guess_insdc() yield data if __name__ == '__main__': luigi.run(main_task_cls=FamiliesCSV)
import attr import luigi from rfam import utils from rfam.csv_writer import CsvWriter class FamiliesCSV(CsvWriter): headers = [ 'id', 'name', 'description', 'clan', 'seed_count', 'full_count', 'length', 'domain', 'is_suppressed', 'rna_type', ] def data(self): for family in utils.load_families(): data = attr.asdict(family) data['name'] = family.pretty_name data['is_suppressed'] = int(family.is_suppressed) data['rna_type'] = family.guess_insdc() yield data if __name__ == '__main__': luigi.run(main_task_cls=FamiliesCSV)
Fix typo and use correct name
Fix typo and use correct name

We want to use the pretty name, not the standard one, for import. In
addition, I fix a typo in the name of the is_suppressed column.
Python
apache-2.0
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
import attr import luigi from rfam import utils from rfam.csv_writer import CsvWriter class FamiliesCSV(CsvWriter): headers = [ 'id', 'name', 'description', 'clan', 'seed_count', 'full_count', 'length', 'domain', - 'is_supressed', + 'is_suppressed', 'rna_type', ] def data(self): for family in utils.load_families(): data = attr.asdict(family) + data['name'] = family.pretty_name data['is_suppressed'] = int(family.is_suppressed) data['rna_type'] = family.guess_insdc() yield data if __name__ == '__main__': luigi.run(main_task_cls=FamiliesCSV)
Fix typo and use correct name
## Code Before: import attr import luigi from rfam import utils from rfam.csv_writer import CsvWriter class FamiliesCSV(CsvWriter): headers = [ 'id', 'name', 'description', 'clan', 'seed_count', 'full_count', 'length', 'domain', 'is_supressed', 'rna_type', ] def data(self): for family in utils.load_families(): data = attr.asdict(family) data['is_suppressed'] = int(family.is_suppressed) data['rna_type'] = family.guess_insdc() yield data if __name__ == '__main__': luigi.run(main_task_cls=FamiliesCSV) ## Instruction: Fix typo and use correct name ## Code After: import attr import luigi from rfam import utils from rfam.csv_writer import CsvWriter class FamiliesCSV(CsvWriter): headers = [ 'id', 'name', 'description', 'clan', 'seed_count', 'full_count', 'length', 'domain', 'is_suppressed', 'rna_type', ] def data(self): for family in utils.load_families(): data = attr.asdict(family) data['name'] = family.pretty_name data['is_suppressed'] = int(family.is_suppressed) data['rna_type'] = family.guess_insdc() yield data if __name__ == '__main__': luigi.run(main_task_cls=FamiliesCSV)
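The data() override leans on attr.asdict() and then patches two keys. The same shape can be reproduced with a toy attrs class (fields trimmed; the real family object carries more attributes plus a guess_insdc() method):

import attr


@attr.s
class Family(object):
    id = attr.ib()
    pretty_name = attr.ib()
    is_suppressed = attr.ib()


family = Family(id='RF00005', pretty_name='tRNA', is_suppressed=False)
data = attr.asdict(family)
data['name'] = family.pretty_name          # pretty name wins over the raw one
data['is_suppressed'] = int(family.is_suppressed)
assert data['name'] == 'tRNA' and data['is_suppressed'] == 0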
49155373b9eea3812c295c9d89c40a7c9c1c1c13
migrations/versions/20170214191843_pubmed_rename_identifiers_list_to_article_ids.py
migrations/versions/20170214191843_pubmed_rename_identifiers_list_to_article_ids.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from alembic import op # revision identifiers, used by Alembic. revision = '3dbb46f23ed7' down_revision = u'0087dc1eb534' branch_labels = None depends_on = None def upgrade(): op.alter_column('pubmed', 'identifiers_list', new_column_name='article_ids') def downgrade(): op.alter_column('pubmed', 'article_ids', new_column_name='identifiers_list')
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from alembic import op # revision identifiers, used by Alembic. revision = '3dbb46f23ed7' down_revision = u'b32475938a2d' branch_labels = None depends_on = None def upgrade(): op.alter_column('pubmed', 'identifiers_list', new_column_name='article_ids') def downgrade(): op.alter_column('pubmed', 'article_ids', new_column_name='identifiers_list')
Fix migrations to have a single path
Fix migrations to have a single path

As it took us a while to merge some PRs, the migrations ended up branching
into two paths. This commit fixes them to use a single path. It shouldn't
cause any issues, as we're only messing with the `down` migrations and the
migrations aren't dependent on each other.
Python
mit
opentrials/scraper,opentrials/collectors
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from alembic import op # revision identifiers, used by Alembic. revision = '3dbb46f23ed7' - down_revision = u'0087dc1eb534' + down_revision = u'b32475938a2d' branch_labels = None depends_on = None def upgrade(): op.alter_column('pubmed', 'identifiers_list', new_column_name='article_ids') def downgrade(): op.alter_column('pubmed', 'article_ids', new_column_name='identifiers_list')
Fix migrations to have a single path
## Code Before: from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from alembic import op # revision identifiers, used by Alembic. revision = '3dbb46f23ed7' down_revision = u'0087dc1eb534' branch_labels = None depends_on = None def upgrade(): op.alter_column('pubmed', 'identifiers_list', new_column_name='article_ids') def downgrade(): op.alter_column('pubmed', 'article_ids', new_column_name='identifiers_list') ## Instruction: Fix migrations to have a single path ## Code After: from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from alembic import op # revision identifiers, used by Alembic. revision = '3dbb46f23ed7' down_revision = u'b32475938a2d' branch_labels = None depends_on = None def upgrade(): op.alter_column('pubmed', 'identifiers_list', new_column_name='article_ids') def downgrade(): op.alter_column('pubmed', 'article_ids', new_column_name='identifiers_list')
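Any later migration chains onto the repaired head the same way, by naming it in down_revision. A skeleton of such a follow-up revision (the new revision id is invented):

revision = 'aaaa00000001'        # hypothetical next revision
down_revision = u'3dbb46f23ed7'  # chain onto the now-single head


def upgrade():
    pass


def downgrade():
    pass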
2cbffa60c0b12a268e0347a6a4ecfc6d5acb29e3
lamor_flexbe_states/src/lamor_flexbe_states/detect_person_state.py
lamor_flexbe_states/src/lamor_flexbe_states/detect_person_state.py
from flexbe_core import EventState, Logger from flexbe_core.proxy import ProxySubscriberCached from geometry_msgs.msg import PoseStamped class DetectPersonState(EventState): ''' Detects the nearest person and provides their pose. -- wait_timeout float Time (seconds) to wait for a person before giving up. #> person_pose PoseStamped Pose of the nearest person if one is detected, else None. <= detected Detected a person. <= not_detected No person detected, but time ran out. ''' def __init__(self, wait_timeout): super(MetricSweepState, self).__init__(outcomes = ['detected', 'not_detected'] output_keys = ['person_pose']) self._wait_timeout = rospy.Duration(wait_timeout) self._topic = '/people_tracker/pose' self._sub = ProxySubscriberCached({self._topic: PoseStamped}) self._start_waiting_time = None def execute(self, userdata): if rospy.Time.now() > self._start_waiting_time + self._wait_timeout: return 'not_detected' if self._sub.has_msgs(self._topic): userdata.person_pose = self._sub.get_last_msg(self._topic) return 'detected' def on_enter(self, userdata): self._start_waiting_time = rospy.Time.now()
from flexbe_core import EventState, Logger from flexbe_core.proxy import ProxySubscriberCached from geometry_msgs.msg import PoseStamped class DetectPersonState(EventState): ''' Detects the nearest person and provides their pose. -- wait_timeout float Time (seconds) to wait for a person before giving up. #> person_pose PoseStamped Pose of the nearest person if one is detected, else None. <= detected Detected a person. <= not_detected No person detected, but time ran out. ''' def __init__(self, wait_timeout): super(MetricSweepState, self).__init__(outcomes = ['detected', 'not_detected'], output_keys = ['person_pose']) self._wait_timeout = rospy.Duration(wait_timeout) self._topic = '/people_tracker/pose' self._sub = ProxySubscriberCached({self._topic: PoseStamped}) self._start_waiting_time = None def execute(self, userdata): if rospy.Time.now() > self._start_waiting_time + self._wait_timeout: userdata.person_pose = None return 'not_detected' if self._sub.has_msgs(self._topic): userdata.person_pose = self._sub.get_last_msg(self._topic) return 'detected' def on_enter(self, userdata): self._start_waiting_time = rospy.Time.now()
Set person pose to None if no person is present
[lamor_flexbe_state] Set person pose to None if no person is present
Python
mit
marinaKollmitz/lamor15,pschillinger/lamor15,pschillinger/lamor15,marinaKollmitz/lamor15,pschillinger/lamor15,marinaKollmitz/lamor15,pschillinger/lamor15,marinaKollmitz/lamor15,marinaKollmitz/lamor15,pschillinger/lamor15
from flexbe_core import EventState, Logger from flexbe_core.proxy import ProxySubscriberCached from geometry_msgs.msg import PoseStamped class DetectPersonState(EventState): ''' Detects the nearest person and provides their pose. -- wait_timeout float Time (seconds) to wait for a person before giving up. #> person_pose PoseStamped Pose of the nearest person if one is detected, else None. <= detected Detected a person. <= not_detected No person detected, but time ran out. ''' def __init__(self, wait_timeout): - super(MetricSweepState, self).__init__(outcomes = ['detected', 'not_detected'] + super(MetricSweepState, self).__init__(outcomes = ['detected', 'not_detected'], output_keys = ['person_pose']) self._wait_timeout = rospy.Duration(wait_timeout) self._topic = '/people_tracker/pose' self._sub = ProxySubscriberCached({self._topic: PoseStamped}) self._start_waiting_time = None def execute(self, userdata): if rospy.Time.now() > self._start_waiting_time + self._wait_timeout: + userdata.person_pose = None return 'not_detected' if self._sub.has_msgs(self._topic): userdata.person_pose = self._sub.get_last_msg(self._topic) return 'detected' def on_enter(self, userdata): self._start_waiting_time = rospy.Time.now()
Set person pose to None if no person is present
## Code Before: from flexbe_core import EventState, Logger from flexbe_core.proxy import ProxySubscriberCached from geometry_msgs.msg import PoseStamped class DetectPersonState(EventState): ''' Detects the nearest person and provides their pose. -- wait_timeout float Time (seconds) to wait for a person before giving up. #> person_pose PoseStamped Pose of the nearest person if one is detected, else None. <= detected Detected a person. <= not_detected No person detected, but time ran out. ''' def __init__(self, wait_timeout): super(MetricSweepState, self).__init__(outcomes = ['detected', 'not_detected'] output_keys = ['person_pose']) self._wait_timeout = rospy.Duration(wait_timeout) self._topic = '/people_tracker/pose' self._sub = ProxySubscriberCached({self._topic: PoseStamped}) self._start_waiting_time = None def execute(self, userdata): if rospy.Time.now() > self._start_waiting_time + self._wait_timeout: return 'not_detected' if self._sub.has_msgs(self._topic): userdata.person_pose = self._sub.get_last_msg(self._topic) return 'detected' def on_enter(self, userdata): self._start_waiting_time = rospy.Time.now() ## Instruction: Set person pose to None if no person is present ## Code After: from flexbe_core import EventState, Logger from flexbe_core.proxy import ProxySubscriberCached from geometry_msgs.msg import PoseStamped class DetectPersonState(EventState): ''' Detects the nearest person and provides their pose. -- wait_timeout float Time (seconds) to wait for a person before giving up. #> person_pose PoseStamped Pose of the nearest person if one is detected, else None. <= detected Detected a person. <= not_detected No person detected, but time ran out. ''' def __init__(self, wait_timeout): super(MetricSweepState, self).__init__(outcomes = ['detected', 'not_detected'], output_keys = ['person_pose']) self._wait_timeout = rospy.Duration(wait_timeout) self._topic = '/people_tracker/pose' self._sub = ProxySubscriberCached({self._topic: PoseStamped}) self._start_waiting_time = None def execute(self, userdata): if rospy.Time.now() > self._start_waiting_time + self._wait_timeout: userdata.person_pose = None return 'not_detected' if self._sub.has_msgs(self._topic): userdata.person_pose = self._sub.get_last_msg(self._topic) return 'detected' def on_enter(self, userdata): self._start_waiting_time = rospy.Time.now()
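Inside a FlexBE behavior the state would be wired into the state machine roughly as below; the labels, outcomes and trimmed-down arguments are assumptions for illustration:

from flexbe_core import OperatableStateMachine

sm = OperatableStateMachine(outcomes=['finished'])
with sm:
    OperatableStateMachine.add('Detect_Person',
                               DetectPersonState(wait_timeout=5.0),
                               transitions={'detected': 'finished',
                                            'not_detected': 'finished'},
                               remapping={'person_pose': 'person_pose'})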
206a59c838623aae5e0b0f91f8089ffc13e2cfd0
pipenv/vendor/pythonfinder/environment.py
pipenv/vendor/pythonfinder/environment.py
from __future__ import print_function, absolute_import import os import platform import sys def is_type_checking(): from typing import TYPE_CHECKING return TYPE_CHECKING PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool( os.environ.get("PYENV_ROOT") ) ASDF_INSTALLED = bool(os.environ.get("ASDF_DIR")) PYENV_ROOT = os.path.expanduser( os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv")) ) ASDF_DATA_DIR = os.path.expanduser( os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf")) ) IS_64BIT_OS = None SYSTEM_ARCH = platform.architecture()[0] if sys.maxsize > 2 ** 32: IS_64BIT_OS = platform.machine() == "AMD64" else: IS_64BIT_OS = False IGNORE_UNSUPPORTED = bool(os.environ.get("PYTHONFINDER_IGNORE_UNSUPPORTED", False)) MYPY_RUNNING = os.environ.get("MYPY_RUNNING", is_type_checking())
from __future__ import print_function, absolute_import import os import platform import sys def is_type_checking(): try: from typing import TYPE_CHECKING except ImportError: return False return TYPE_CHECKING PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool( os.environ.get("PYENV_ROOT") ) ASDF_INSTALLED = bool(os.environ.get("ASDF_DIR")) PYENV_ROOT = os.path.expanduser( os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv")) ) ASDF_DATA_DIR = os.path.expanduser( os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf")) ) IS_64BIT_OS = None SYSTEM_ARCH = platform.architecture()[0] if sys.maxsize > 2 ** 32: IS_64BIT_OS = platform.machine() == "AMD64" else: IS_64BIT_OS = False IGNORE_UNSUPPORTED = bool(os.environ.get("PYTHONFINDER_IGNORE_UNSUPPORTED", False)) MYPY_RUNNING = os.environ.get("MYPY_RUNNING", is_type_checking())
Fix typing check for pythonfinder
Fix typing check for pythonfinder Signed-off-by: Dan Ryan <2591e5f46f28d303f9dc027d475a5c60d8dea17a@danryan.co>
Python
mit
kennethreitz/pipenv
from __future__ import print_function, absolute_import import os import platform import sys def is_type_checking(): + try: - from typing import TYPE_CHECKING + from typing import TYPE_CHECKING + except ImportError: + return False return TYPE_CHECKING PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool( os.environ.get("PYENV_ROOT") ) ASDF_INSTALLED = bool(os.environ.get("ASDF_DIR")) PYENV_ROOT = os.path.expanduser( os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv")) ) ASDF_DATA_DIR = os.path.expanduser( os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf")) ) IS_64BIT_OS = None SYSTEM_ARCH = platform.architecture()[0] if sys.maxsize > 2 ** 32: IS_64BIT_OS = platform.machine() == "AMD64" else: IS_64BIT_OS = False IGNORE_UNSUPPORTED = bool(os.environ.get("PYTHONFINDER_IGNORE_UNSUPPORTED", False)) MYPY_RUNNING = os.environ.get("MYPY_RUNNING", is_type_checking())
Fix typing check for pythonfinder
## Code Before: from __future__ import print_function, absolute_import import os import platform import sys def is_type_checking(): from typing import TYPE_CHECKING return TYPE_CHECKING PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool( os.environ.get("PYENV_ROOT") ) ASDF_INSTALLED = bool(os.environ.get("ASDF_DIR")) PYENV_ROOT = os.path.expanduser( os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv")) ) ASDF_DATA_DIR = os.path.expanduser( os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf")) ) IS_64BIT_OS = None SYSTEM_ARCH = platform.architecture()[0] if sys.maxsize > 2 ** 32: IS_64BIT_OS = platform.machine() == "AMD64" else: IS_64BIT_OS = False IGNORE_UNSUPPORTED = bool(os.environ.get("PYTHONFINDER_IGNORE_UNSUPPORTED", False)) MYPY_RUNNING = os.environ.get("MYPY_RUNNING", is_type_checking()) ## Instruction: Fix typing check for pythonfinder ## Code After: from __future__ import print_function, absolute_import import os import platform import sys def is_type_checking(): try: from typing import TYPE_CHECKING except ImportError: return False return TYPE_CHECKING PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool( os.environ.get("PYENV_ROOT") ) ASDF_INSTALLED = bool(os.environ.get("ASDF_DIR")) PYENV_ROOT = os.path.expanduser( os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv")) ) ASDF_DATA_DIR = os.path.expanduser( os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf")) ) IS_64BIT_OS = None SYSTEM_ARCH = platform.architecture()[0] if sys.maxsize > 2 ** 32: IS_64BIT_OS = platform.machine() == "AMD64" else: IS_64BIT_OS = False IGNORE_UNSUPPORTED = bool(os.environ.get("PYTHONFINDER_IGNORE_UNSUPPORTED", False)) MYPY_RUNNING = os.environ.get("MYPY_RUNNING", is_type_checking())
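The MYPY_RUNNING flag built on is_type_checking() is typically used to keep typing-only imports off the runtime path, along these lines (the imported names are placeholders):

if MYPY_RUNNING:
    # Evaluated by a static type checker (or when the env var is set),
    # so interpreters without the typing module never hit this import.
    from typing import Dict, List, Optional  # noqa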
c8a1b25c1579eba5cb68c1a4cdd60116b3496429
pre_commit_robotframework_tidy/rf_tidy.py
pre_commit_robotframework_tidy/rf_tidy.py
from __future__ import print_function import argparse from robot.errors import DataError from robot.tidy import Tidy def main(argv=None): parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*', help='Filenames to run') parser.add_argument('--use-pipes', action='store_true', dest='use_pipes', default=False) parser.add_argument('--space-count', type=int, dest='space_count', default=4) args = parser.parse_args(argv) tidier = Tidy(use_pipes=args.use_pipes, space_count=args.space_count) for filename in args.filenames: try: tidier.inplace(filename) except DataError: pass return 0 if __name__ == '__main__': exit(main())
from __future__ import print_function import argparse from robot.errors import DataError from robot.tidy import Tidy def main(argv=None): parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*', help='Filenames to run') parser.add_argument('--use-pipes', action='store_true', dest='use_pipes', default=False) parser.add_argument('--space-count', type=int, dest='space_count', default=4) args = parser.parse_args(argv) tidier = Tidy(use_pipes=args.use_pipes, space_count=args.space_count, format='robot') for filename in args.filenames: try: tidier.inplace(filename) except DataError: pass return 0 if __name__ == '__main__': exit(main())
Format results as robot files
Format results as robot files
Python
mit
guykisel/pre-commit-robotframework-tidy
from __future__ import print_function import argparse from robot.errors import DataError from robot.tidy import Tidy def main(argv=None): parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*', help='Filenames to run') parser.add_argument('--use-pipes', action='store_true', dest='use_pipes', default=False) parser.add_argument('--space-count', type=int, dest='space_count', default=4) args = parser.parse_args(argv) - tidier = Tidy(use_pipes=args.use_pipes, space_count=args.space_count) + tidier = Tidy(use_pipes=args.use_pipes, + space_count=args.space_count, + format='robot') for filename in args.filenames: try: tidier.inplace(filename) except DataError: pass return 0 if __name__ == '__main__': exit(main())
Format results as robot files
## Code Before: from __future__ import print_function import argparse from robot.errors import DataError from robot.tidy import Tidy def main(argv=None): parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*', help='Filenames to run') parser.add_argument('--use-pipes', action='store_true', dest='use_pipes', default=False) parser.add_argument('--space-count', type=int, dest='space_count', default=4) args = parser.parse_args(argv) tidier = Tidy(use_pipes=args.use_pipes, space_count=args.space_count) for filename in args.filenames: try: tidier.inplace(filename) except DataError: pass return 0 if __name__ == '__main__': exit(main()) ## Instruction: Format results as robot files ## Code After: from __future__ import print_function import argparse from robot.errors import DataError from robot.tidy import Tidy def main(argv=None): parser = argparse.ArgumentParser() parser.add_argument('filenames', nargs='*', help='Filenames to run') parser.add_argument('--use-pipes', action='store_true', dest='use_pipes', default=False) parser.add_argument('--space-count', type=int, dest='space_count', default=4) args = parser.parse_args(argv) tidier = Tidy(use_pipes=args.use_pipes, space_count=args.space_count, format='robot') for filename in args.filenames: try: tidier.inplace(filename) except DataError: pass return 0 if __name__ == '__main__': exit(main())
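Because main() takes its argument list directly, the hook can be exercised without pre-commit (paths invented for the example):

exit_code = main(['suites/login.robot', 'suites/checkout.robot',
                  '--space-count', '4'])
# main() always returns 0; files Robot cannot parse are silently skipped.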
3bbe101f609349c2475079f052d5400e77822237
common/my_filters.py
common/my_filters.py
from google.appengine.ext import webapp import re # More info on custom Django template filters here: # https://docs.djangoproject.com/en/dev/howto/custom-template-tags/#registering-custom-filters register = webapp.template.create_template_register() @register.filter def digits(value): return re.sub('[^0-9]', '', value) @register.filter def mul(value, arg): return value * arg @register.filter def yt_start(value): return value.replace("?t=", "?start=")
from google.appengine.ext import webapp from helpers.youtube_video_helper import YouTubeVideoHelper import re # More info on custom Django template filters here: # https://docs.djangoproject.com/en/dev/howto/custom-template-tags/#registering-custom-filters register = webapp.template.create_template_register() @register.filter def digits(value): return re.sub('[^0-9]', '', value) @register.filter def mul(value, arg): return value * arg @register.filter def yt_start(value): if '?t=' in value: # Treat ?t= the same as #t= value = value.replace('?t=', '#t=') if '#t=' in value: sp = value.split('#t=') video_id = sp[0] old_ts = sp[1] total_seconds = YouTubeVideoHelper.time_to_seconds(old_ts) value = '%s?start=%i' % (video_id, total_seconds) return value
Fix video suggestion review showing wrong time
Fix video suggestion review showing wrong time
Python
mit
nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,verycumbersome/the-blue-alliance,the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance
from google.appengine.ext import webapp + from helpers.youtube_video_helper import YouTubeVideoHelper import re # More info on custom Django template filters here: # https://docs.djangoproject.com/en/dev/howto/custom-template-tags/#registering-custom-filters register = webapp.template.create_template_register() @register.filter def digits(value): return re.sub('[^0-9]', '', value) @register.filter def mul(value, arg): return value * arg @register.filter def yt_start(value): - return value.replace("?t=", "?start=") + if '?t=' in value: # Treat ?t= the same as #t= + value = value.replace('?t=', '#t=') + if '#t=' in value: + sp = value.split('#t=') + video_id = sp[0] + old_ts = sp[1] + total_seconds = YouTubeVideoHelper.time_to_seconds(old_ts) + value = '%s?start=%i' % (video_id, total_seconds) + return value +
Fix video suggestion review showing wrong time
## Code Before: from google.appengine.ext import webapp import re # More info on custom Django template filters here: # https://docs.djangoproject.com/en/dev/howto/custom-template-tags/#registering-custom-filters register = webapp.template.create_template_register() @register.filter def digits(value): return re.sub('[^0-9]', '', value) @register.filter def mul(value, arg): return value * arg @register.filter def yt_start(value): return value.replace("?t=", "?start=") ## Instruction: Fix video suggestion review showing wrong time ## Code After: from google.appengine.ext import webapp from helpers.youtube_video_helper import YouTubeVideoHelper import re # More info on custom Django template filters here: # https://docs.djangoproject.com/en/dev/howto/custom-template-tags/#registering-custom-filters register = webapp.template.create_template_register() @register.filter def digits(value): return re.sub('[^0-9]', '', value) @register.filter def mul(value, arg): return value * arg @register.filter def yt_start(value): if '?t=' in value: # Treat ?t= the same as #t= value = value.replace('?t=', '#t=') if '#t=' in value: sp = value.split('#t=') video_id = sp[0] old_ts = sp[1] total_seconds = YouTubeVideoHelper.time_to_seconds(old_ts) value = '%s?start=%i' % (video_id, total_seconds) return value
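Assuming YouTubeVideoHelper.time_to_seconds() accepts both plain seconds and '1m30s'-style stamps (its implementation is not shown here), the filter behaves like:

yt_start('dQw4w9WgXcQ#t=1m30s')  # -> 'dQw4w9WgXcQ?start=90'
yt_start('dQw4w9WgXcQ?t=45')     # ?t= is first normalised to #t= -> '...?start=45'
yt_start('dQw4w9WgXcQ')          # no timestamp, returned unchanged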
34575124ea6b16f7a7d4f2ae5e073a87709843d2
engine/meta.py
engine/meta.py
registered = {} class GObjectMeta(type): def __new__(cls, name, bases, dict): c = super().__new__(cls, name, bases, dict) qualname = '{}.{}'.format(c.__module__, c.__qualname__) if qualname in registered: print(cls, qualname) c = type(name, (registered[qualname], c), {}) return c def register(name): def decorator(cls): registered[name] = cls return cls return decorator
registered = {} created = {} class GObjectMeta(type): def __new__(cls, name, bases, dict): c = super().__new__(cls, name, bases, dict) # Do not handle classes that are already decorated if c.__module__.startswith('<meta>'): return c # Fullname of the class (base module + qualified name) fullname = '{}.{}'.format(c.__module__, c.__qualname__) # Decorate registered classes if fullname in registered: print(cls, fullname) c = type(name, (registered[fullname], c), {'__module__': '<meta>.{}'.format(fullname)}) # Set fullname, save class and return c.__fullname__ = fullname created[fullname] = c return c def register(name): def decorator(cls): registered[name] = cls return cls return decorator
Add __fullname__ attribute on all game classes
Add __fullname__ attribute on all game classes
Python
bsd-3-clause
entwanne/NAGM
registered = {} + created = {} class GObjectMeta(type): def __new__(cls, name, bases, dict): c = super().__new__(cls, name, bases, dict) + + # Do not handle classes that are already decorated + if c.__module__.startswith('<meta>'): + return c + + # Fullname of the class (base module + qualified name) - qualname = '{}.{}'.format(c.__module__, c.__qualname__) + fullname = '{}.{}'.format(c.__module__, c.__qualname__) + + # Decorate registered classes - if qualname in registered: + if fullname in registered: - print(cls, qualname) + print(cls, fullname) - c = type(name, (registered[qualname], c), {}) + c = type(name, + (registered[fullname], c), + {'__module__': '<meta>.{}'.format(fullname)}) + + # Set fullname, save class and return + c.__fullname__ = fullname + created[fullname] = c return c def register(name): def decorator(cls): registered[name] = cls return cls return decorator
Add __fullname__ attribute on all game classes
## Code Before: registered = {} class GObjectMeta(type): def __new__(cls, name, bases, dict): c = super().__new__(cls, name, bases, dict) qualname = '{}.{}'.format(c.__module__, c.__qualname__) if qualname in registered: print(cls, qualname) c = type(name, (registered[qualname], c), {}) return c def register(name): def decorator(cls): registered[name] = cls return cls return decorator ## Instruction: Add __fullname__ attribute on all game classes ## Code After: registered = {} created = {} class GObjectMeta(type): def __new__(cls, name, bases, dict): c = super().__new__(cls, name, bases, dict) # Do not handle classes that are already decorated if c.__module__.startswith('<meta>'): return c # Fullname of the class (base module + qualified name) fullname = '{}.{}'.format(c.__module__, c.__qualname__) # Decorate registered classes if fullname in registered: print(cls, fullname) c = type(name, (registered[fullname], c), {'__module__': '<meta>.{}'.format(fullname)}) # Set fullname, save class and return c.__fullname__ = fullname created[fullname] = c return c def register(name): def decorator(cls): registered[name] = cls return cls return decorator
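A minimal end-to-end illustration of the metaclass (class names invented): a mix-in registered under a class's fullname is spliced in front of that class when it is created, and the finished class carries __fullname__.

from engine.meta import GObjectMeta, register


@register('__main__.Door')
class DoorOverride(object):
    def open(self):
        return 'creak'


class Door(object, metaclass=GObjectMeta):
    pass


assert Door.__fullname__ == '__main__.Door'
assert Door().open() == 'creak'  # behaviour injected by the registered mix-in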
95ccab69cfff30c24932c4cd156983a29639435d
nginxauthdaemon/crowdauth.py
nginxauthdaemon/crowdauth.py
import crowd from auth import Authenticator class CrowdAuthenticator(Authenticator): """Atlassian Crowd authenticator. Requires configuration options CROWD_URL, CROWD_APP_NAME, CROWD_APP_PASSWORD""" def __init__(self, config): super(CrowdAuthenticator, self).__init__(config) app_url = config['CROWD_URL'] app_user = config['CROWD_APP_NAME'] app_pass = config['CROWD_APP_PASSWORD'] self._cs = crowd.CrowdServer(app_url, app_user, app_pass) def authenticate(self, username, password): result = self._cs.auth_user(username, password) return result.get('name') == username
import crowd from auth import Authenticator class CrowdAuthenticator(Authenticator): """Atlassian Crowd authenticator. Requires configuration options CROWD_URL, CROWD_APP_NAME, CROWD_APP_PASSWORD""" def __init__(self, config): super(CrowdAuthenticator, self).__init__(config) app_url = config['CROWD_URL'] app_user = config['CROWD_APP_NAME'] app_pass = config['CROWD_APP_PASSWORD'] self._cs = crowd.CrowdServer(app_url, app_user, app_pass) def authenticate(self, username, password): result = self._cs.auth_user(username, password) if result == None: # auth failed return False # auth succeeded return result.get('name') == username
Fix 500 error when Crowd auth fails
Fix 500 error when Crowd auth fails
Python
mit
akurdyukov/nginxauthdaemon,akurdyukov/nginxauthdaemon
import crowd from auth import Authenticator class CrowdAuthenticator(Authenticator): """Atlassian Crowd authenticator. Requires configuration options CROWD_URL, CROWD_APP_NAME, CROWD_APP_PASSWORD""" def __init__(self, config): super(CrowdAuthenticator, self).__init__(config) app_url = config['CROWD_URL'] app_user = config['CROWD_APP_NAME'] app_pass = config['CROWD_APP_PASSWORD'] self._cs = crowd.CrowdServer(app_url, app_user, app_pass) def authenticate(self, username, password): result = self._cs.auth_user(username, password) + if result == None: + # auth failed + return False + # auth succeeded return result.get('name') == username
Fix 500 error when Crowd auth fails
## Code Before: import crowd

from auth import Authenticator


class CrowdAuthenticator(Authenticator):
    """Atlassian Crowd authenticator. Requires configuration options CROWD_URL, CROWD_APP_NAME, CROWD_APP_PASSWORD"""
    def __init__(self, config):
        super(CrowdAuthenticator, self).__init__(config)
        app_url = config['CROWD_URL']
        app_user = config['CROWD_APP_NAME']
        app_pass = config['CROWD_APP_PASSWORD']
        self._cs = crowd.CrowdServer(app_url, app_user, app_pass)

    def authenticate(self, username, password):
        result = self._cs.auth_user(username, password)
        return result.get('name') == username

## Instruction: Fix 500 error when Crowd auth fails
## Code After: import crowd

from auth import Authenticator


class CrowdAuthenticator(Authenticator):
    """Atlassian Crowd authenticator. Requires configuration options CROWD_URL, CROWD_APP_NAME, CROWD_APP_PASSWORD"""
    def __init__(self, config):
        super(CrowdAuthenticator, self).__init__(config)
        app_url = config['CROWD_URL']
        app_user = config['CROWD_APP_NAME']
        app_pass = config['CROWD_APP_PASSWORD']
        self._cs = crowd.CrowdServer(app_url, app_user, app_pass)

    def authenticate(self, username, password):
        result = self._cs.auth_user(username, password)
        if result == None:
            # auth failed
            return False
        # auth succeeded
        return result.get('name') == username
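A usage sketch of the fixed authenticator (URL and credentials are placeholders). CrowdServer.auth_user() returns None on bad credentials, which previously crashed on .get() and surfaced as an HTTP 500:

config = {'CROWD_URL': 'https://crowd.example.com/crowd',
          'CROWD_APP_NAME': 'nginx-auth',
          'CROWD_APP_PASSWORD': 'app-password'}

authenticator = CrowdAuthenticator(config)
if not authenticator.authenticate('jdoe', 'wrong-password'):
    print('access denied')  # now a clean False instead of a 500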
6c2a154bf902b5f658b2c2cbf4b65c6ed33e6c1b
pywineds/utils.py
pywineds/utils.py
from contextlib import contextmanager import logging import timeit log = logging.getLogger("wineds") @contextmanager def time_it(task_desc): """ A context manager for timing chunks of code and logging it. Arguments: task_desc: task description for logging purposes """ start_time = timeit.default_timer() yield elapsed = timeit.default_timer() - start_time log.info("elapsed (%s): %.4f seconds" % (task_desc, elapsed))
from contextlib import contextmanager import logging import timeit REPORTING_TYPE_ALL = "" REPORTING_TYPE_ELD = "TC-Election Day Reporting" REPORTING_TYPE_VBM = "TC-VBM Reporting" REPORTING_KEYS_SIMPLE = (REPORTING_TYPE_ALL, ) REPORTING_KEYS_COMPLETE = (REPORTING_TYPE_ELD, REPORTING_TYPE_VBM) REPORTING_INDICES = { REPORTING_TYPE_ALL: 0, REPORTING_TYPE_ELD: 1, REPORTING_TYPE_VBM: 2, } log = logging.getLogger("wineds") @contextmanager def time_it(task_desc): """ A context manager for timing chunks of code and logging it. Arguments: task_desc: task description for logging purposes """ start_time = timeit.default_timer() yield elapsed = timeit.default_timer() - start_time log.info("elapsed (%s): %.4f seconds" % (task_desc, elapsed))
Add some reporting_type global variables.
Add some reporting_type global variables.
Python
bsd-3-clause
cjerdonek/wineds-converter
from contextlib import contextmanager import logging import timeit + + REPORTING_TYPE_ALL = "" + REPORTING_TYPE_ELD = "TC-Election Day Reporting" + REPORTING_TYPE_VBM = "TC-VBM Reporting" + + REPORTING_KEYS_SIMPLE = (REPORTING_TYPE_ALL, ) + REPORTING_KEYS_COMPLETE = (REPORTING_TYPE_ELD, REPORTING_TYPE_VBM) + + REPORTING_INDICES = { + REPORTING_TYPE_ALL: 0, + REPORTING_TYPE_ELD: 1, + REPORTING_TYPE_VBM: 2, + } log = logging.getLogger("wineds") @contextmanager def time_it(task_desc): """ A context manager for timing chunks of code and logging it. Arguments: task_desc: task description for logging purposes """ start_time = timeit.default_timer() yield elapsed = timeit.default_timer() - start_time log.info("elapsed (%s): %.4f seconds" % (task_desc, elapsed))
Add some reporting_type global variables.
## Code Before: from contextlib import contextmanager import logging import timeit log = logging.getLogger("wineds") @contextmanager def time_it(task_desc): """ A context manager for timing chunks of code and logging it. Arguments: task_desc: task description for logging purposes """ start_time = timeit.default_timer() yield elapsed = timeit.default_timer() - start_time log.info("elapsed (%s): %.4f seconds" % (task_desc, elapsed)) ## Instruction: Add some reporting_type global variables. ## Code After: from contextlib import contextmanager import logging import timeit REPORTING_TYPE_ALL = "" REPORTING_TYPE_ELD = "TC-Election Day Reporting" REPORTING_TYPE_VBM = "TC-VBM Reporting" REPORTING_KEYS_SIMPLE = (REPORTING_TYPE_ALL, ) REPORTING_KEYS_COMPLETE = (REPORTING_TYPE_ELD, REPORTING_TYPE_VBM) REPORTING_INDICES = { REPORTING_TYPE_ALL: 0, REPORTING_TYPE_ELD: 1, REPORTING_TYPE_VBM: 2, } log = logging.getLogger("wineds") @contextmanager def time_it(task_desc): """ A context manager for timing chunks of code and logging it. Arguments: task_desc: task description for logging purposes """ start_time = timeit.default_timer() yield elapsed = timeit.default_timer() - start_time log.info("elapsed (%s): %.4f seconds" % (task_desc, elapsed))
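A small sketch of how the constants are meant to be consumed, keeping per-reporting-type tallies in a list indexed through REPORTING_INDICES (counts invented):

totals = [0, 0, 0]  # slots: combined, election day, vote-by-mail

for reporting_type in REPORTING_KEYS_COMPLETE:
    totals[REPORTING_INDICES[reporting_type]] += 1

assert totals == [0, 1, 1]  # nothing recorded under the combined slot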
21e9254abeebb7260f74db9c94e480cc2b5bbcc9
tests/conftest.py
tests/conftest.py
import pytest @pytest.fixture(scope='session') def base_url(base_url, request): return base_url or 'https://developer.allizom.org'
import pytest VIEWPORT = { 'large': {'width': 1201, 'height': 1024}, # also nav-break-ends 'desktop': {'width': 1025, 'height': 1024}, 'tablet': {'width': 851, 'height': 1024}, # also nav-block-ends 'mobile': {'width': 481, 'height': 1024}, 'small': {'width': 320, 'height': 480}} @pytest.fixture(scope='session') def base_url(base_url, request): return base_url or 'https://developer.allizom.org' @pytest.fixture def selenium(request, selenium): viewport = VIEWPORT['large'] if request.keywords.get('viewport') is not None: viewport = VIEWPORT[request.keywords.get('viewport').args[0]] selenium.set_window_size(viewport['width'], viewport['height']) return selenium
Add viewport sizes fixture to tests.
Add viewport sizes fixture to tests.
Python
mpl-2.0
safwanrahman/kuma,Elchi3/kuma,mozilla/kuma,jwhitlock/kuma,SphinxKnight/kuma,SphinxKnight/kuma,Elchi3/kuma,mozilla/kuma,SphinxKnight/kuma,a2sheppy/kuma,safwanrahman/kuma,Elchi3/kuma,mozilla/kuma,yfdyh000/kuma,yfdyh000/kuma,SphinxKnight/kuma,safwanrahman/kuma,a2sheppy/kuma,yfdyh000/kuma,yfdyh000/kuma,safwanrahman/kuma,SphinxKnight/kuma,jwhitlock/kuma,safwanrahman/kuma,SphinxKnight/kuma,a2sheppy/kuma,a2sheppy/kuma,escattone/kuma,Elchi3/kuma,Elchi3/kuma,jwhitlock/kuma,jwhitlock/kuma,safwanrahman/kuma,escattone/kuma,escattone/kuma,a2sheppy/kuma,jwhitlock/kuma,yfdyh000/kuma,mozilla/kuma,mozilla/kuma,yfdyh000/kuma
import pytest + VIEWPORT = { + 'large': {'width': 1201, 'height': 1024}, # also nav-break-ends + 'desktop': {'width': 1025, 'height': 1024}, + 'tablet': {'width': 851, 'height': 1024}, # also nav-block-ends + 'mobile': {'width': 481, 'height': 1024}, + 'small': {'width': 320, 'height': 480}} @pytest.fixture(scope='session') def base_url(base_url, request): return base_url or 'https://developer.allizom.org' + @pytest.fixture + def selenium(request, selenium): + viewport = VIEWPORT['large'] + if request.keywords.get('viewport') is not None: + viewport = VIEWPORT[request.keywords.get('viewport').args[0]] + selenium.set_window_size(viewport['width'], viewport['height']) + return selenium
Add viewport sizes fixture to tests.
## Code Before: import pytest @pytest.fixture(scope='session') def base_url(base_url, request): return base_url or 'https://developer.allizom.org' ## Instruction: Add viewport sizes fixture to tests. ## Code After: import pytest VIEWPORT = { 'large': {'width': 1201, 'height': 1024}, # also nav-break-ends 'desktop': {'width': 1025, 'height': 1024}, 'tablet': {'width': 851, 'height': 1024}, # also nav-block-ends 'mobile': {'width': 481, 'height': 1024}, 'small': {'width': 320, 'height': 480}} @pytest.fixture(scope='session') def base_url(base_url, request): return base_url or 'https://developer.allizom.org' @pytest.fixture def selenium(request, selenium): viewport = VIEWPORT['large'] if request.keywords.get('viewport') is not None: viewport = VIEWPORT[request.keywords.get('viewport').args[0]] selenium.set_window_size(viewport['width'], viewport['height']) return selenium
534633d078fe6f81e67ead075ac31faac0c3c60d
tests/__init__.py
tests/__init__.py
import pycurl def setup_package(): print('Testing %s' % pycurl.version)
def setup_package(): # import here, not globally, so that running # python -m tests.appmanager # to launch the app manager is possible without having pycurl installed # (as the test app does not depend on pycurl) import pycurl print('Testing %s' % pycurl.version)
Make it possible to run test app without pycurl being installed
Make it possible to run test app without pycurl being installed
Python
lgpl-2.1
pycurl/pycurl,pycurl/pycurl,pycurl/pycurl
- import pycurl - def setup_package(): + # import here, not globally, so that running + # python -m tests.appmanager + # to launch the app manager is possible without having pycurl installed + # (as the test app does not depend on pycurl) + import pycurl + print('Testing %s' % pycurl.version)
Make it possible to run test app without pycurl being installed
## Code Before: import pycurl def setup_package(): print('Testing %s' % pycurl.version) ## Instruction: Make it possible to run test app without pycurl being installed ## Code After: def setup_package(): # import here, not globally, so that running # python -m tests.appmanager # to launch the app manager is possible without having pycurl installed # (as the test app does not depend on pycurl) import pycurl print('Testing %s' % pycurl.version)
1b9622cedecef0c6c45c11a84bd178adcff752e2
squadron/exthandlers/download.py
squadron/exthandlers/download.py
import urllib from extutils import get_filename from template import render import requests import yaml import jsonschema SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', 'description': 'Describes the extract extension handler input', 'type':'object', 'properties': { 'url': { 'description': 'Where to download the tarball/zip/etc from', 'type':'string' }, 'username': { 'description': 'Username to login with BASIC Auth', 'type':'string' }, 'password': { 'description': 'Password to use with BASIC Auth', 'type':'string' } }, 'required': ['url'] } def _download_file(url, handle, auth=None): r = requests.get(url, auth=auth, stream=True) for chunk in r.iter_content(chunk_size=4096): if chunk: # filter out keep-alive new chunks handle.write(chunk) handle.close() def ext_download(loader, inputhash, abs_source, dest, **kwargs): """ Downloads a ~download file""" contents = yaml.load(render(abs_source, inputhash, loader)) jsonschema.validate(contents, SCHEMA) finalfile = get_filename(dest) handle = open(finalfile, 'w') auth = None if 'username' in contents and 'password' in contents: auth = (contents['username'], contents['password']) _download_file(contents['url'], handle, auth) return finalfile
import urllib from extutils import get_filename from template import render import requests import yaml import jsonschema SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', 'description': 'Describes the extract extension handler input', 'type':'object', 'properties': { 'url': { 'description': 'Where to download the tarball/zip/etc from', 'type':'string' }, 'username': { 'description': 'Username to login with BASIC Auth', 'type':'string' }, 'password': { 'description': 'Password to use with BASIC Auth', 'type':'string' } }, 'required': ['url'] } def _download_file(url, handle, auth=None): r = requests.get(url, auth=auth, stream=True) r.raise_for_status() for chunk in r.iter_content(chunk_size=4096): if chunk: # filter out keep-alive new chunks handle.write(chunk) handle.close() def ext_download(loader, inputhash, abs_source, dest, **kwargs): """ Downloads a ~download file""" contents = yaml.load(render(abs_source, inputhash, loader)) jsonschema.validate(contents, SCHEMA) finalfile = get_filename(dest) handle = open(finalfile, 'w') auth = None if 'username' in contents and 'password' in contents: auth = (contents['username'], contents['password']) _download_file(contents['url'], handle, auth) return finalfile
Raise Exception when there's an HTTP error
Raise Exception when there's an HTTP error
Python
mit
gosquadron/squadron,gosquadron/squadron
import urllib from extutils import get_filename from template import render import requests import yaml import jsonschema SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', 'description': 'Describes the extract extension handler input', 'type':'object', 'properties': { 'url': { 'description': 'Where to download the tarball/zip/etc from', 'type':'string' }, 'username': { 'description': 'Username to login with BASIC Auth', 'type':'string' }, 'password': { 'description': 'Password to use with BASIC Auth', 'type':'string' } }, 'required': ['url'] } def _download_file(url, handle, auth=None): r = requests.get(url, auth=auth, stream=True) + r.raise_for_status() for chunk in r.iter_content(chunk_size=4096): if chunk: # filter out keep-alive new chunks handle.write(chunk) handle.close() def ext_download(loader, inputhash, abs_source, dest, **kwargs): """ Downloads a ~download file""" contents = yaml.load(render(abs_source, inputhash, loader)) jsonschema.validate(contents, SCHEMA) finalfile = get_filename(dest) handle = open(finalfile, 'w') auth = None if 'username' in contents and 'password' in contents: auth = (contents['username'], contents['password']) _download_file(contents['url'], handle, auth) return finalfile
Raise Exception when there's an HTTP error
## Code Before: import urllib from extutils import get_filename from template import render import requests import yaml import jsonschema SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', 'description': 'Describes the extract extension handler input', 'type':'object', 'properties': { 'url': { 'description': 'Where to download the tarball/zip/etc from', 'type':'string' }, 'username': { 'description': 'Username to login with BASIC Auth', 'type':'string' }, 'password': { 'description': 'Password to use with BASIC Auth', 'type':'string' } }, 'required': ['url'] } def _download_file(url, handle, auth=None): r = requests.get(url, auth=auth, stream=True) for chunk in r.iter_content(chunk_size=4096): if chunk: # filter out keep-alive new chunks handle.write(chunk) handle.close() def ext_download(loader, inputhash, abs_source, dest, **kwargs): """ Downloads a ~download file""" contents = yaml.load(render(abs_source, inputhash, loader)) jsonschema.validate(contents, SCHEMA) finalfile = get_filename(dest) handle = open(finalfile, 'w') auth = None if 'username' in contents and 'password' in contents: auth = (contents['username'], contents['password']) _download_file(contents['url'], handle, auth) return finalfile ## Instruction: Raise Exception when there's an HTTP error ## Code After: import urllib from extutils import get_filename from template import render import requests import yaml import jsonschema SCHEMA = { '$schema': 'http://json-schema.org/draft-04/schema#', 'description': 'Describes the extract extension handler input', 'type':'object', 'properties': { 'url': { 'description': 'Where to download the tarball/zip/etc from', 'type':'string' }, 'username': { 'description': 'Username to login with BASIC Auth', 'type':'string' }, 'password': { 'description': 'Password to use with BASIC Auth', 'type':'string' } }, 'required': ['url'] } def _download_file(url, handle, auth=None): r = requests.get(url, auth=auth, stream=True) r.raise_for_status() for chunk in r.iter_content(chunk_size=4096): if chunk: # filter out keep-alive new chunks handle.write(chunk) handle.close() def ext_download(loader, inputhash, abs_source, dest, **kwargs): """ Downloads a ~download file""" contents = yaml.load(render(abs_source, inputhash, loader)) jsonschema.validate(contents, SCHEMA) finalfile = get_filename(dest) handle = open(finalfile, 'w') auth = None if 'username' in contents and 'password' in contents: auth = (contents['username'], contents['password']) _download_file(contents['url'], handle, auth) return finalfile
675364683c5415f1db7a5599d8ad97f72f69aaf0
buckets/utils.py
buckets/utils.py
import string import random from django.conf import settings def validate_settings(): assert settings.AWS, \ "No AWS settings found" assert settings.AWS.get('ACCESS_KEY'), \ "AWS access key is not set in settings" assert settings.AWS.get('SECRET_KEY'), \ "AWS secret key is not set in settings" assert settings.AWS.get('BUCKET'), \ "AWS bucket name is not set in settings" ID_FIELD_LENGTH = 24 alphabet = string.ascii_lowercase + string.digits for loser in 'l1o0': i = alphabet.index(loser) alphabet = alphabet[:i] + alphabet[i + 1:] def byte_to_base32_chr(byte): return alphabet[byte & 31] def random_id(): rand_id = [random.randint(0, 0xFF) for i in range(ID_FIELD_LENGTH)] return ''.join(map(byte_to_base32_chr, rand_id))
import string import random from django.conf import settings def validate_settings(): assert settings.AWS, \ "No AWS settings found" assert settings.AWS.get('ACCESS_KEY'), \ "AWS access key is not set in settings" assert settings.AWS.get('SECRET_KEY'), \ "AWS secret key is not set in settings" assert settings.AWS.get('BUCKET'), \ "AWS bucket name is not set in settings" ID_FIELD_LENGTH = 24 alphabet = string.ascii_lowercase + string.digits alphabet0 = string.ascii_lowercase + string.ascii_lowercase for loser in 'l1o0': i = alphabet.index(loser) alphabet = alphabet[:i] + alphabet[i + 1:] for loser in 'lo': i = alphabet0.index(loser) alphabet0 = alphabet0[:i] + alphabet0[i + 1:] def byte_to_base32_chr(byte): return alphabet[byte & 31] def byte_to_letter(byte): return alphabet0[byte & 31] def random_id(): rand_id = [random.randint(0, 0xFF) for i in range(ID_FIELD_LENGTH)] return (byte_to_letter(rand_id[0]) + ''.join(map(byte_to_base32_chr, rand_id[1:])))
Make random IDs start with a letter
Make random IDs start with a letter
Python
agpl-3.0
Cadasta/django-buckets,Cadasta/django-buckets,Cadasta/django-buckets
import string import random from django.conf import settings def validate_settings(): assert settings.AWS, \ "No AWS settings found" assert settings.AWS.get('ACCESS_KEY'), \ "AWS access key is not set in settings" assert settings.AWS.get('SECRET_KEY'), \ "AWS secret key is not set in settings" assert settings.AWS.get('BUCKET'), \ "AWS bucket name is not set in settings" ID_FIELD_LENGTH = 24 alphabet = string.ascii_lowercase + string.digits + alphabet0 = string.ascii_lowercase + string.ascii_lowercase for loser in 'l1o0': i = alphabet.index(loser) alphabet = alphabet[:i] + alphabet[i + 1:] + for loser in 'lo': + i = alphabet0.index(loser) + alphabet0 = alphabet0[:i] + alphabet0[i + 1:] def byte_to_base32_chr(byte): return alphabet[byte & 31] + def byte_to_letter(byte): + return alphabet0[byte & 31] + + def random_id(): rand_id = [random.randint(0, 0xFF) for i in range(ID_FIELD_LENGTH)] + return (byte_to_letter(rand_id[0]) + - return ''.join(map(byte_to_base32_chr, rand_id)) + ''.join(map(byte_to_base32_chr, rand_id[1:])))
Make random IDs start with a letter
## Code Before: import string import random from django.conf import settings def validate_settings(): assert settings.AWS, \ "No AWS settings found" assert settings.AWS.get('ACCESS_KEY'), \ "AWS access key is not set in settings" assert settings.AWS.get('SECRET_KEY'), \ "AWS secret key is not set in settings" assert settings.AWS.get('BUCKET'), \ "AWS bucket name is not set in settings" ID_FIELD_LENGTH = 24 alphabet = string.ascii_lowercase + string.digits for loser in 'l1o0': i = alphabet.index(loser) alphabet = alphabet[:i] + alphabet[i + 1:] def byte_to_base32_chr(byte): return alphabet[byte & 31] def random_id(): rand_id = [random.randint(0, 0xFF) for i in range(ID_FIELD_LENGTH)] return ''.join(map(byte_to_base32_chr, rand_id)) ## Instruction: Make random IDs start with a letter ## Code After: import string import random from django.conf import settings def validate_settings(): assert settings.AWS, \ "No AWS settings found" assert settings.AWS.get('ACCESS_KEY'), \ "AWS access key is not set in settings" assert settings.AWS.get('SECRET_KEY'), \ "AWS secret key is not set in settings" assert settings.AWS.get('BUCKET'), \ "AWS bucket name is not set in settings" ID_FIELD_LENGTH = 24 alphabet = string.ascii_lowercase + string.digits alphabet0 = string.ascii_lowercase + string.ascii_lowercase for loser in 'l1o0': i = alphabet.index(loser) alphabet = alphabet[:i] + alphabet[i + 1:] for loser in 'lo': i = alphabet0.index(loser) alphabet0 = alphabet0[:i] + alphabet0[i + 1:] def byte_to_base32_chr(byte): return alphabet[byte & 31] def byte_to_letter(byte): return alphabet0[byte & 31] def random_id(): rand_id = [random.randint(0, 0xFF) for i in range(ID_FIELD_LENGTH)] return (byte_to_letter(rand_id[0]) + ''.join(map(byte_to_base32_chr, rand_id[1:])))
d06adea5117eb3ebfddd8592889346089c7391f7
dictlearn/wordnik_api_demo.py
dictlearn/wordnik_api_demo.py
from wordnik import swagger, WordApi, AccountApi client = swagger.ApiClient( 'dd3d32ae6b4709e1150040139c308fb77446e0a8ecc93db31', 'https://api.wordnik.com/v4') word_api = WordApi.WordApi(client) words = ['paint', 'mimic', 'mimics', 'francie', 'frolic', 'funhouse'] for word in words: print('=== {} ==='.format(word)) defs = word_api.getDefinitions(word) if not defs: print("no definitions") continue for def_ in defs: fmt_str = "{} --- {}" print(fmt_str.format(def_.sourceDictionary, def_.text.encode('utf-8'))) account_api = AccountApi.AccountApi(client) for i in range(5): print("Attempt {}".format(i)) status = account_api.getApiTokenStatus() print("Remaining_calls: {}".format(status.remainingCalls))
import nltk from wordnik import swagger, WordApi, AccountApi client = swagger.ApiClient( 'dd3d32ae6b4709e1150040139c308fb77446e0a8ecc93db31', 'https://api.wordnik.com/v4') word_api = WordApi.WordApi(client) toktok = nltk.ToktokTokenizer() words = ['paint', 'mimic', 'mimics', 'francie', 'frolic', 'funhouse'] for word in words: print('=== {} ==='.format(word)) defs = word_api.getDefinitions(word) if not defs: print("no definitions") continue for def_ in defs: fmt_str = "{} --- {}" tokenized_def = toktok.tokenize(def_.text.lower()) tokenized_def = [s.encode('utf-8') for s in tokenized_def] print(fmt_str.format(def_.sourceDictionary, tokenized_def)) account_api = AccountApi.AccountApi(client) for i in range(5): print("Attempt {}".format(i)) status = account_api.getApiTokenStatus() print("Remaining_calls: {}".format(status.remainingCalls))
Add tokenization to the WordNik demo
Add tokenization to the WordNik demo
Python
mit
tombosc/dict_based_learning,tombosc/dict_based_learning
+ import nltk from wordnik import swagger, WordApi, AccountApi client = swagger.ApiClient( 'dd3d32ae6b4709e1150040139c308fb77446e0a8ecc93db31', 'https://api.wordnik.com/v4') word_api = WordApi.WordApi(client) + toktok = nltk.ToktokTokenizer() words = ['paint', 'mimic', 'mimics', 'francie', 'frolic', 'funhouse'] for word in words: print('=== {} ==='.format(word)) defs = word_api.getDefinitions(word) if not defs: print("no definitions") continue for def_ in defs: fmt_str = "{} --- {}" + tokenized_def = toktok.tokenize(def_.text.lower()) + tokenized_def = [s.encode('utf-8') for s in tokenized_def] - print(fmt_str.format(def_.sourceDictionary, def_.text.encode('utf-8'))) + print(fmt_str.format(def_.sourceDictionary, + tokenized_def)) account_api = AccountApi.AccountApi(client) for i in range(5): print("Attempt {}".format(i)) status = account_api.getApiTokenStatus() print("Remaining_calls: {}".format(status.remainingCalls))
Add tokenization to the WordNik demo
## Code Before: from wordnik import swagger, WordApi, AccountApi client = swagger.ApiClient( 'dd3d32ae6b4709e1150040139c308fb77446e0a8ecc93db31', 'https://api.wordnik.com/v4') word_api = WordApi.WordApi(client) words = ['paint', 'mimic', 'mimics', 'francie', 'frolic', 'funhouse'] for word in words: print('=== {} ==='.format(word)) defs = word_api.getDefinitions(word) if not defs: print("no definitions") continue for def_ in defs: fmt_str = "{} --- {}" print(fmt_str.format(def_.sourceDictionary, def_.text.encode('utf-8'))) account_api = AccountApi.AccountApi(client) for i in range(5): print("Attempt {}".format(i)) status = account_api.getApiTokenStatus() print("Remaining_calls: {}".format(status.remainingCalls)) ## Instruction: Add tokenization to the WordNik demo ## Code After: import nltk from wordnik import swagger, WordApi, AccountApi client = swagger.ApiClient( 'dd3d32ae6b4709e1150040139c308fb77446e0a8ecc93db31', 'https://api.wordnik.com/v4') word_api = WordApi.WordApi(client) toktok = nltk.ToktokTokenizer() words = ['paint', 'mimic', 'mimics', 'francie', 'frolic', 'funhouse'] for word in words: print('=== {} ==='.format(word)) defs = word_api.getDefinitions(word) if not defs: print("no definitions") continue for def_ in defs: fmt_str = "{} --- {}" tokenized_def = toktok.tokenize(def_.text.lower()) tokenized_def = [s.encode('utf-8') for s in tokenized_def] print(fmt_str.format(def_.sourceDictionary, tokenized_def)) account_api = AccountApi.AccountApi(client) for i in range(5): print("Attempt {}".format(i)) status = account_api.getApiTokenStatus() print("Remaining_calls: {}".format(status.remainingCalls))
062e65a161f9c84e5cd18b85790b195eec947b99
social_website_django_angular/social_website_django_angular/urls.py
social_website_django_angular/social_website_django_angular/urls.py
from django.conf.urls import url from django.contrib import admin urlpatterns = [ url(r'^admin/', admin.site.urls), ]
from django.conf.urls import url from django.contrib import admin from social_website_django_angular.views import IndexView urlpatterns = [ url(r'^admin/', admin.site.urls), url('^.*$', IndexView.as_view(), name='index') ]
Set up url for index page
Set up url for index page
Python
mit
tomaszzacharczuk/social-website-django-angular,tomaszzacharczuk/social-website-django-angular,tomaszzacharczuk/social-website-django-angular
from django.conf.urls import url from django.contrib import admin + from social_website_django_angular.views import IndexView + urlpatterns = [ url(r'^admin/', admin.site.urls), + url('^.*$', IndexView.as_view(), name='index') ]
Set up url for index page
## Code Before: from django.conf.urls import url from django.contrib import admin urlpatterns = [ url(r'^admin/', admin.site.urls), ] ## Instruction: Set up url for index page ## Code After: from django.conf.urls import url from django.contrib import admin from social_website_django_angular.views import IndexView urlpatterns = [ url(r'^admin/', admin.site.urls), url('^.*$', IndexView.as_view(), name='index') ]
6fd1305f2a4a2e08b51c421b1c2cfdd33b407119
src/puzzle/problems/problem.py
src/puzzle/problems/problem.py
from data import meta class Problem(object): def __init__(self, name, lines): self.name = name self.lines = lines self._solutions = None self._constraints = [] @property def kind(self): return str(type(self)).strip("'<>").split('.').pop() @property def solution(self): return self.solutions().peek() def constrain(self, fn): self._constraints.append(fn) # Invalidate solutions. self._solutions = None def solutions(self): if self._solutions is None: self._solutions = meta.Meta( (k, v) for k, v in self._solve().items() if all( [fn(k, v) for fn in self._constraints] ) ) return self._solutions def _solve(self): """Solves Problem. Returns: dict Dict mapping solution to score. """ raise NotImplementedError() def __repr__(self): return '%s()' % self.__class__.__name__
from data import meta _THRESHOLD = 0.01 class Problem(object): def __init__(self, name, lines, threshold=_THRESHOLD): self.name = name self.lines = lines self._threshold = threshold self._solutions = None self._constraints = [ lambda k, v: v > self._threshold ] @property def kind(self): return str(type(self)).strip("'<>").split('.').pop() @property def solution(self): return self.solutions().peek() def constrain(self, fn): self._constraints.append(fn) # Invalidate solutions. self._solutions = None self._solutions_iter = None def solutions(self): if self._solutions is None: self._solutions_iter = self._solve_iter() results = [] for k, v in self._solutions_iter: if all(fn(k, v) for fn in self._constraints): results.append((k, v)) self._solutions = meta.Meta(results) return self._solutions def _solve_iter(self): return iter(self._solve().items()) def _solve(self): """Solves Problem. Returns: dict Dict mapping solution to score. """ raise NotImplementedError() def __repr__(self): return '%s()' % self.__class__.__name__
Set a threshold on Problem and enforce it.
Set a threshold on Problem and enforce it.
Python
mit
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
from data import meta + + _THRESHOLD = 0.01 class Problem(object): - def __init__(self, name, lines): + def __init__(self, name, lines, threshold=_THRESHOLD): self.name = name self.lines = lines + self._threshold = threshold self._solutions = None - self._constraints = [] + self._constraints = [ + lambda k, v: v > self._threshold + ] @property def kind(self): return str(type(self)).strip("'<>").split('.').pop() @property def solution(self): return self.solutions().peek() def constrain(self, fn): self._constraints.append(fn) # Invalidate solutions. self._solutions = None + self._solutions_iter = None def solutions(self): if self._solutions is None: + self._solutions_iter = self._solve_iter() + results = [] + for k, v in self._solutions_iter: + if all(fn(k, v) for fn in self._constraints): + results.append((k, v)) - self._solutions = meta.Meta( + self._solutions = meta.Meta(results) - (k, v) for k, v in self._solve().items() if all( - [fn(k, v) for fn in self._constraints] - ) - ) return self._solutions + + def _solve_iter(self): + return iter(self._solve().items()) def _solve(self): """Solves Problem. Returns: dict Dict mapping solution to score. """ raise NotImplementedError() def __repr__(self): return '%s()' % self.__class__.__name__
Set a threshold on Problem and enforce it.
## Code Before: from data import meta class Problem(object): def __init__(self, name, lines): self.name = name self.lines = lines self._solutions = None self._constraints = [] @property def kind(self): return str(type(self)).strip("'<>").split('.').pop() @property def solution(self): return self.solutions().peek() def constrain(self, fn): self._constraints.append(fn) # Invalidate solutions. self._solutions = None def solutions(self): if self._solutions is None: self._solutions = meta.Meta( (k, v) for k, v in self._solve().items() if all( [fn(k, v) for fn in self._constraints] ) ) return self._solutions def _solve(self): """Solves Problem. Returns: dict Dict mapping solution to score. """ raise NotImplementedError() def __repr__(self): return '%s()' % self.__class__.__name__ ## Instruction: Set a threshold on Problem and enforce it. ## Code After: from data import meta _THRESHOLD = 0.01 class Problem(object): def __init__(self, name, lines, threshold=_THRESHOLD): self.name = name self.lines = lines self._threshold = threshold self._solutions = None self._constraints = [ lambda k, v: v > self._threshold ] @property def kind(self): return str(type(self)).strip("'<>").split('.').pop() @property def solution(self): return self.solutions().peek() def constrain(self, fn): self._constraints.append(fn) # Invalidate solutions. self._solutions = None self._solutions_iter = None def solutions(self): if self._solutions is None: self._solutions_iter = self._solve_iter() results = [] for k, v in self._solutions_iter: if all(fn(k, v) for fn in self._constraints): results.append((k, v)) self._solutions = meta.Meta(results) return self._solutions def _solve_iter(self): return iter(self._solve().items()) def _solve(self): """Solves Problem. Returns: dict Dict mapping solution to score. """ raise NotImplementedError() def __repr__(self): return '%s()' % self.__class__.__name__
d44fee53020470e2d9a8cd2393f5f0125dbd1fab
python/client.py
python/client.py
import grpc import hello_pb2 import hello_pb2_grpc def run(): channel = grpc.insecure_channel('localhost:50051') stub = hello_pb2_grpc.HelloServiceStub(channel) # ideally, you should have try catch block here too response = stub.SayHello(hello_pb2.HelloReq(Name='Euler')) print(response.Result) try: response = stub.SayHelloStrict(hello_pb2.HelloReq( Name='Leonhard Euler')) except grpc.RpcError as e: # ouch! # lets print the gRPC error message # which is "Length of `Name` cannot be more than 10 characters" print(e.details()) # lets access the error code, which is `INVALID_ARGUMENT` # `type` of `status_code` is `grpc.StatusCode` status_code = e.code() # should print `INVALID_ARGUMENT` print(status_code.name) # should print `(3, 'invalid argument')` print(status_code.value) else: print(response.Result) if __name__ == '__main__': run()
import grpc import hello_pb2 import hello_pb2_grpc def run(): channel = grpc.insecure_channel('localhost:50051') stub = hello_pb2_grpc.HelloServiceStub(channel) # ideally, you should have try catch block here too response = stub.SayHello(hello_pb2.HelloReq(Name='Euler')) print(response.Result) try: response = stub.SayHelloStrict(hello_pb2.HelloReq( Name='Leonhard Euler')) except grpc.RpcError as e: # ouch! # lets print the gRPC error message # which is "Length of `Name` cannot be more than 10 characters" print(e.details()) # lets access the error code, which is `INVALID_ARGUMENT` # `type` of `status_code` is `grpc.StatusCode` status_code = e.code() # should print `INVALID_ARGUMENT` print(status_code.name) # should print `(3, 'invalid argument')` print(status_code.value) # want to do some specific action based on the error? if grpc.StatusCode.INVALID_ARGUMENT == status_code: # do your stuff here pass else: print(response.Result) if __name__ == '__main__': run()
Update python version for better error handling
Update python version for better error handling
Python
mit
avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors
import grpc import hello_pb2 import hello_pb2_grpc def run(): channel = grpc.insecure_channel('localhost:50051') stub = hello_pb2_grpc.HelloServiceStub(channel) # ideally, you should have try catch block here too response = stub.SayHello(hello_pb2.HelloReq(Name='Euler')) print(response.Result) try: response = stub.SayHelloStrict(hello_pb2.HelloReq( Name='Leonhard Euler')) except grpc.RpcError as e: # ouch! # lets print the gRPC error message # which is "Length of `Name` cannot be more than 10 characters" print(e.details()) # lets access the error code, which is `INVALID_ARGUMENT` # `type` of `status_code` is `grpc.StatusCode` status_code = e.code() # should print `INVALID_ARGUMENT` print(status_code.name) # should print `(3, 'invalid argument')` print(status_code.value) + # want to do some specific action based on the error? + if grpc.StatusCode.INVALID_ARGUMENT == status_code: + # do your stuff here + pass else: print(response.Result) if __name__ == '__main__': run()
Update python version for better error handling
## Code Before: import grpc import hello_pb2 import hello_pb2_grpc def run(): channel = grpc.insecure_channel('localhost:50051') stub = hello_pb2_grpc.HelloServiceStub(channel) # ideally, you should have try catch block here too response = stub.SayHello(hello_pb2.HelloReq(Name='Euler')) print(response.Result) try: response = stub.SayHelloStrict(hello_pb2.HelloReq( Name='Leonhard Euler')) except grpc.RpcError as e: # ouch! # lets print the gRPC error message # which is "Length of `Name` cannot be more than 10 characters" print(e.details()) # lets access the error code, which is `INVALID_ARGUMENT` # `type` of `status_code` is `grpc.StatusCode` status_code = e.code() # should print `INVALID_ARGUMENT` print(status_code.name) # should print `(3, 'invalid argument')` print(status_code.value) else: print(response.Result) if __name__ == '__main__': run() ## Instruction: Update python version for better error handling ## Code After: import grpc import hello_pb2 import hello_pb2_grpc def run(): channel = grpc.insecure_channel('localhost:50051') stub = hello_pb2_grpc.HelloServiceStub(channel) # ideally, you should have try catch block here too response = stub.SayHello(hello_pb2.HelloReq(Name='Euler')) print(response.Result) try: response = stub.SayHelloStrict(hello_pb2.HelloReq( Name='Leonhard Euler')) except grpc.RpcError as e: # ouch! # lets print the gRPC error message # which is "Length of `Name` cannot be more than 10 characters" print(e.details()) # lets access the error code, which is `INVALID_ARGUMENT` # `type` of `status_code` is `grpc.StatusCode` status_code = e.code() # should print `INVALID_ARGUMENT` print(status_code.name) # should print `(3, 'invalid argument')` print(status_code.value) # want to do some specific action based on the error? if grpc.StatusCode.INVALID_ARGUMENT == status_code: # do your stuff here pass else: print(response.Result) if __name__ == '__main__': run()
1aef29a64886522d81d2f6a15bd4e48419a66545
ziggy/__init__.py
ziggy/__init__.py
__title__ = 'ziggy' __version__ = '0.0.1' __build__ = 0 __author__ = 'Rhett Garber' __license__ = 'ISC' __copyright__ = 'Copyright 2012 Rhett Garber' import logging from . import utils from . import network from .context import Context, set, append, add from . import context as _context_mod from .errors import Error from .timer import timeit log = logging.getLogger(__name__) def configure(host, port, recorder=None): """Initialize ziggy This instructs the ziggy system where to send it's logging data. If ziggy is not configured, log data will be silently dropped. Currently we support logging through the network (and the configured host and port) to a ziggyd instances, or to the specified recorder function """ global _record_function if recorder: context._recorder_function = recorder elif host and port: network.init(host, port) context._recorder_function = network.send else: log.warning("Empty ziggy configuration")
__title__ = 'ziggy' __version__ = '0.0.1' __build__ = 0 __author__ = 'Rhett Garber' __license__ = 'ISC' __copyright__ = 'Copyright 2012 Rhett Garber' import logging from . import utils from . import network from .context import Context, set, append, add from . import context as _context_mod from .errors import Error from .timer import timeit log = logging.getLogger(__name__) def configure(host, port, recorder=None): """Initialize ziggy This instructs the ziggy system where to send it's logging data. If ziggy is not configured, log data will be silently dropped. Currently we support logging through the network (and the configured host and port) to a ziggyd instances, or to the specified recorder function """ global _record_function if recorder: context._recorder_function = recorder elif host and port: network.init(host, port) context._recorder_function = network.send else: log.warning("Empty ziggy configuration") context._recorder_function = None
Allow unsetting of configuration (for testing)
Allow unsetting of configuration (for testing)
Python
isc
rhettg/Ziggy,rhettg/BlueOx
__title__ = 'ziggy' __version__ = '0.0.1' __build__ = 0 __author__ = 'Rhett Garber' __license__ = 'ISC' __copyright__ = 'Copyright 2012 Rhett Garber' import logging from . import utils from . import network from .context import Context, set, append, add from . import context as _context_mod from .errors import Error from .timer import timeit log = logging.getLogger(__name__) def configure(host, port, recorder=None): """Initialize ziggy This instructs the ziggy system where to send it's logging data. If ziggy is not configured, log data will be silently dropped. Currently we support logging through the network (and the configured host and port) to a ziggyd instances, or to the specified recorder function """ global _record_function if recorder: context._recorder_function = recorder elif host and port: network.init(host, port) context._recorder_function = network.send else: log.warning("Empty ziggy configuration") + context._recorder_function = None
Allow unsetting of configuration (for testing)
## Code Before: __title__ = 'ziggy' __version__ = '0.0.1' __build__ = 0 __author__ = 'Rhett Garber' __license__ = 'ISC' __copyright__ = 'Copyright 2012 Rhett Garber' import logging from . import utils from . import network from .context import Context, set, append, add from . import context as _context_mod from .errors import Error from .timer import timeit log = logging.getLogger(__name__) def configure(host, port, recorder=None): """Initialize ziggy This instructs the ziggy system where to send it's logging data. If ziggy is not configured, log data will be silently dropped. Currently we support logging through the network (and the configured host and port) to a ziggyd instances, or to the specified recorder function """ global _record_function if recorder: context._recorder_function = recorder elif host and port: network.init(host, port) context._recorder_function = network.send else: log.warning("Empty ziggy configuration") ## Instruction: Allow unsetting of configuration (for testing) ## Code After: __title__ = 'ziggy' __version__ = '0.0.1' __build__ = 0 __author__ = 'Rhett Garber' __license__ = 'ISC' __copyright__ = 'Copyright 2012 Rhett Garber' import logging from . import utils from . import network from .context import Context, set, append, add from . import context as _context_mod from .errors import Error from .timer import timeit log = logging.getLogger(__name__) def configure(host, port, recorder=None): """Initialize ziggy This instructs the ziggy system where to send it's logging data. If ziggy is not configured, log data will be silently dropped. Currently we support logging through the network (and the configured host and port) to a ziggyd instances, or to the specified recorder function """ global _record_function if recorder: context._recorder_function = recorder elif host and port: network.init(host, port) context._recorder_function = network.send else: log.warning("Empty ziggy configuration") context._recorder_function = None
8c90485e5cab6294a38cfc9332eda6fe8ca15483
project/config.py
project/config.py
import os config = {} system_mongo_host = os.environ.get('MONGODB_PORT_27017_TCP_ADDR') system_elastic_host = os.environ.get('ELASTIC_PORT_9300_TCP_ADDR') config['HOST'] = '' config['PORT'] = 5000 config['MONGODB_HOST'] = system_mongo_host if system_mongo_host else 'localhost' config['MONGODB_PORT'] = 27017 config['ELASTIC_HOST'] = system_elastic_host if system_elastic_host else 'localhost' config['ELASTIC_PORT'] = 9200 config['ACCEPTED_ORIGINS'] = ['http://104.236.77.225', 'http://localhost:3000']
import os config = {} system_mongo_host = os.environ.get('MONGODB_PORT_27017_TCP_ADDR') system_elastic_host = os.environ.get('ELASTIC_PORT_9300_TCP_ADDR') config['HOST'] = '' config['PORT'] = 5000 config['MONGODB_HOST'] = system_mongo_host if system_mongo_host else 'localhost' config['MONGODB_PORT'] = 27017 config['ELASTIC_HOST'] = system_elastic_host if system_elastic_host else 'localhost' config['ELASTIC_PORT'] = 9200 config['ACCEPTED_ORIGINS'] = ['http://beta.founderati.io', 'http://beta.thehookemup.com', 'http://104.236.77.225', 'http://localhost:3000']
Add two new domains to whitelist for CORS.
Add two new domains to whitelist for CORS.
Python
apache-2.0
AustinStoneProjects/Founderati-Server,AustinStoneProjects/Founderati-Server
import os config = {} system_mongo_host = os.environ.get('MONGODB_PORT_27017_TCP_ADDR') system_elastic_host = os.environ.get('ELASTIC_PORT_9300_TCP_ADDR') config['HOST'] = '' config['PORT'] = 5000 config['MONGODB_HOST'] = system_mongo_host if system_mongo_host else 'localhost' config['MONGODB_PORT'] = 27017 config['ELASTIC_HOST'] = system_elastic_host if system_elastic_host else 'localhost' config['ELASTIC_PORT'] = 9200 - config['ACCEPTED_ORIGINS'] = ['http://104.236.77.225', 'http://localhost:3000'] + config['ACCEPTED_ORIGINS'] = ['http://beta.founderati.io', 'http://beta.thehookemup.com', 'http://104.236.77.225', 'http://localhost:3000']
Add two new domains to whitelist for CORS.
## Code Before: import os config = {} system_mongo_host = os.environ.get('MONGODB_PORT_27017_TCP_ADDR') system_elastic_host = os.environ.get('ELASTIC_PORT_9300_TCP_ADDR') config['HOST'] = '' config['PORT'] = 5000 config['MONGODB_HOST'] = system_mongo_host if system_mongo_host else 'localhost' config['MONGODB_PORT'] = 27017 config['ELASTIC_HOST'] = system_elastic_host if system_elastic_host else 'localhost' config['ELASTIC_PORT'] = 9200 config['ACCEPTED_ORIGINS'] = ['http://104.236.77.225', 'http://localhost:3000'] ## Instruction: Add two new domains to whitelist for CORS. ## Code After: import os config = {} system_mongo_host = os.environ.get('MONGODB_PORT_27017_TCP_ADDR') system_elastic_host = os.environ.get('ELASTIC_PORT_9300_TCP_ADDR') config['HOST'] = '' config['PORT'] = 5000 config['MONGODB_HOST'] = system_mongo_host if system_mongo_host else 'localhost' config['MONGODB_PORT'] = 27017 config['ELASTIC_HOST'] = system_elastic_host if system_elastic_host else 'localhost' config['ELASTIC_PORT'] = 9200 config['ACCEPTED_ORIGINS'] = ['http://beta.founderati.io', 'http://beta.thehookemup.com', 'http://104.236.77.225', 'http://localhost:3000']
616bd7c5ff8ba5fe5dd190a459b93980613a3ad4
myuw_mobile/restclients/dao_implementation/hfs.py
myuw_mobile/restclients/dao_implementation/hfs.py
from os.path import dirname from restclients.dao_implementation.mock import get_mockdata_url from restclients.dao_implementation.live import get_con_pool, get_live_url class File(object): """ This implementation returns mock/static content. Use this DAO with this configuration: RESTCLIENTS_HFS_DAO_CLASS = 'myuw_mobile.restclients.dao_implementation.hfs.File' """ def getURL(self, url, headers): """ Return the url for accessing the mock data in local file :param url: in the format of "hfs/servlet/hfservices?sn=<student number>" """ return get_mockdata_url("hfs", "file", url, headers, dir_base=dirname(__file__)) class Live(object): """ This DAO provides real data. Access is restricted to localhost. """ pool = None def getURL(self, url, headers): """ Return the absolute url for accessing live data :param url: in the format of "hfs/servlet/hfservices?sn=<student number>" """ host = 'http://localhost/' if Live.pool == None: Live.pool = get_con_pool(host, None, None) return get_live_url (Live.pool, 'GET', host, url, headers=headers)
from os.path import dirname from restclients.dao_implementation.mock import get_mockdata_url from restclients.dao_implementation.live import get_con_pool, get_live_url import logging from myuw_mobile.logger.logback import log_info class File(object): """ This implementation returns mock/static content. Use this DAO with this configuration: RESTCLIENTS_HFS_DAO_CLASS = 'myuw_mobile.restclients.dao_implementation.hfs.File' """ def getURL(self, url, headers): """ Return the url for accessing the mock data in local file :param url: in the format of "hfs/servlet/hfservices?sn=<student number>" """ return get_mockdata_url("hfs", "file", url, headers, dir_base=dirname(__file__)) class Live(object): """ This DAO provides real data. Access is restricted to localhost. """ logger = logging.getLogger('myuw_mobile.restclients.dao_implementation.hfs.Live') pool = None def getURL(self, url, headers): """ Return the absolute url for accessing live data :param url: in the format of "hfs/servlet/hfservices?sn=<student number>" """ host = 'http://localhost:80/' if Live.pool is None: Live.pool = get_con_pool(host, None, None, socket_timeout=5.0, max_pool_size=5) log_info(Live.logger, Live.pool) return get_live_url (Live.pool, 'GET', host, url, headers=headers)
Fix bug: must specify the port number.
Fix bug: must specify the port number.
Python
apache-2.0
uw-it-aca/myuw,uw-it-aca/myuw,fanglinfang/myuw,uw-it-aca/myuw,fanglinfang/myuw,fanglinfang/myuw,uw-it-aca/myuw
from os.path import dirname from restclients.dao_implementation.mock import get_mockdata_url from restclients.dao_implementation.live import get_con_pool, get_live_url + import logging + from myuw_mobile.logger.logback import log_info class File(object): """ This implementation returns mock/static content. Use this DAO with this configuration: RESTCLIENTS_HFS_DAO_CLASS = 'myuw_mobile.restclients.dao_implementation.hfs.File' """ def getURL(self, url, headers): """ Return the url for accessing the mock data in local file :param url: in the format of "hfs/servlet/hfservices?sn=<student number>" """ return get_mockdata_url("hfs", "file", url, headers, dir_base=dirname(__file__)) class Live(object): """ This DAO provides real data. Access is restricted to localhost. """ + logger = logging.getLogger('myuw_mobile.restclients.dao_implementation.hfs.Live') pool = None def getURL(self, url, headers): """ Return the absolute url for accessing live data :param url: in the format of "hfs/servlet/hfservices?sn=<student number>" """ - host = 'http://localhost/' + host = 'http://localhost:80/' - if Live.pool == None: + if Live.pool is None: - Live.pool = get_con_pool(host, None, None) + Live.pool = get_con_pool(host, None, None, + socket_timeout=5.0, + max_pool_size=5) + log_info(Live.logger, Live.pool) return get_live_url (Live.pool, 'GET', host, url, headers=headers)
Fix bug: must specify the port number.
## Code Before: from os.path import dirname from restclients.dao_implementation.mock import get_mockdata_url from restclients.dao_implementation.live import get_con_pool, get_live_url class File(object): """ This implementation returns mock/static content. Use this DAO with this configuration: RESTCLIENTS_HFS_DAO_CLASS = 'myuw_mobile.restclients.dao_implementation.hfs.File' """ def getURL(self, url, headers): """ Return the url for accessing the mock data in local file :param url: in the format of "hfs/servlet/hfservices?sn=<student number>" """ return get_mockdata_url("hfs", "file", url, headers, dir_base=dirname(__file__)) class Live(object): """ This DAO provides real data. Access is restricted to localhost. """ pool = None def getURL(self, url, headers): """ Return the absolute url for accessing live data :param url: in the format of "hfs/servlet/hfservices?sn=<student number>" """ host = 'http://localhost/' if Live.pool == None: Live.pool = get_con_pool(host, None, None) return get_live_url (Live.pool, 'GET', host, url, headers=headers) ## Instruction: Fix bug: must specify the port number. ## Code After: from os.path import dirname from restclients.dao_implementation.mock import get_mockdata_url from restclients.dao_implementation.live import get_con_pool, get_live_url import logging from myuw_mobile.logger.logback import log_info class File(object): """ This implementation returns mock/static content. Use this DAO with this configuration: RESTCLIENTS_HFS_DAO_CLASS = 'myuw_mobile.restclients.dao_implementation.hfs.File' """ def getURL(self, url, headers): """ Return the url for accessing the mock data in local file :param url: in the format of "hfs/servlet/hfservices?sn=<student number>" """ return get_mockdata_url("hfs", "file", url, headers, dir_base=dirname(__file__)) class Live(object): """ This DAO provides real data. Access is restricted to localhost. """ logger = logging.getLogger('myuw_mobile.restclients.dao_implementation.hfs.Live') pool = None def getURL(self, url, headers): """ Return the absolute url for accessing live data :param url: in the format of "hfs/servlet/hfservices?sn=<student number>" """ host = 'http://localhost:80/' if Live.pool is None: Live.pool = get_con_pool(host, None, None, socket_timeout=5.0, max_pool_size=5) log_info(Live.logger, Live.pool) return get_live_url (Live.pool, 'GET', host, url, headers=headers)
0e99654d606038098d45fb83cc40405742e43ae8
readthedocs/builds/filters.py
readthedocs/builds/filters.py
from django.utils.translation import ugettext_lazy as _ import django_filters from builds import constants from builds.models import Build, Version ANY_REPO = ( ('', _('Any')), ) BUILD_TYPES = ANY_REPO + constants.BUILD_TYPES class VersionFilter(django_filters.FilterSet): project = django_filters.CharFilter(name='project__name', lookup_type="icontains") slug= django_filters.CharFilter(label=_("Slug"), name='slug', lookup_type='icontains') class Meta: model = Version fields = ['project', 'slug'] class BuildFilter(django_filters.FilterSet): date = django_filters.DateRangeFilter(label=_("Build Date"), name="date") type = django_filters.ChoiceFilter(label=_("Build Type"), choices=BUILD_TYPES) class Meta: model = Build fields = ['type', 'date', 'version', 'success']
from django.utils.translation import ugettext_lazy as _ import django_filters from builds import constants from builds.models import Build, Version ANY_REPO = ( ('', _('Any')), ) BUILD_TYPES = ANY_REPO + constants.BUILD_TYPES class VersionFilter(django_filters.FilterSet): project = django_filters.CharFilter(name='project__name', lookup_type="icontains") slug= django_filters.CharFilter(label=_("Slug"), name='slug', lookup_type='icontains') class Meta: model = Version fields = ['project', 'slug'] class BuildFilter(django_filters.FilterSet): date = django_filters.DateRangeFilter(label=_("Build Date"), name="date") type = django_filters.ChoiceFilter(label=_("Build Type"), choices=BUILD_TYPES) class Meta: model = Build fields = ['type', 'date', 'success']
Remove version from Build filter.
Remove version from Build filter.
Python
mit
agjohnson/readthedocs.org,fujita-shintaro/readthedocs.org,GovReady/readthedocs.org,nyergler/pythonslides,Tazer/readthedocs.org,techtonik/readthedocs.org,takluyver/readthedocs.org,nyergler/pythonslides,GovReady/readthedocs.org,nikolas/readthedocs.org,gjtorikian/readthedocs.org,cgourlay/readthedocs.org,d0ugal/readthedocs.org,sid-kap/readthedocs.org,CedarLogic/readthedocs.org,gjtorikian/readthedocs.org,sils1297/readthedocs.org,singingwolfboy/readthedocs.org,kdkeyser/readthedocs.org,kenshinthebattosai/readthedocs.org,VishvajitP/readthedocs.org,hach-que/readthedocs.org,laplaceliu/readthedocs.org,KamranMackey/readthedocs.org,techtonik/readthedocs.org,kdkeyser/readthedocs.org,tddv/readthedocs.org,tddv/readthedocs.org,CedarLogic/readthedocs.org,safwanrahman/readthedocs.org,Carreau/readthedocs.org,wijerasa/readthedocs.org,SteveViss/readthedocs.org,singingwolfboy/readthedocs.org,Tazer/readthedocs.org,pombredanne/readthedocs.org,rtfd/readthedocs.org,techtonik/readthedocs.org,tddv/readthedocs.org,espdev/readthedocs.org,hach-que/readthedocs.org,Carreau/readthedocs.org,takluyver/readthedocs.org,dirn/readthedocs.org,d0ugal/readthedocs.org,sid-kap/readthedocs.org,fujita-shintaro/readthedocs.org,kdkeyser/readthedocs.org,tddv/readthedocs.org,espdev/readthedocs.org,asampat3090/readthedocs.org,LukasBoersma/readthedocs.org,kenshinthebattosai/readthedocs.org,nyergler/pythonslides,nikolas/readthedocs.org,emawind84/readthedocs.org,rtfd/readthedocs.org,wijerasa/readthedocs.org,jerel/readthedocs.org,cgourlay/readthedocs.org,sils1297/readthedocs.org,cgourlay/readthedocs.org,royalwang/readthedocs.org,sid-kap/readthedocs.org,emawind84/readthedocs.org,mrshoki/readthedocs.org,pombredanne/readthedocs.org,SteveViss/readthedocs.org,attakei/readthedocs-oauth,ojii/readthedocs.org,davidfischer/readthedocs.org,istresearch/readthedocs.org,hach-que/readthedocs.org,raven47git/readthedocs.org,fujita-shintaro/readthedocs.org,titiushko/readthedocs.org,LukasBoersma/readthedocs.org,singingwolfboy/readthedocs.org,royalwang/readthedocs.org,atsuyim/readthedocs.org,safwanrahman/readthedocs.org,VishvajitP/readthedocs.org,wanghaven/readthedocs.org,davidfischer/readthedocs.org,GovReady/readthedocs.org,clarkperkins/readthedocs.org,royalwang/readthedocs.org,atsuyim/readthedocs.org,dirn/readthedocs.org,sunnyzwh/readthedocs.org,stevepiercy/readthedocs.org,kdkeyser/readthedocs.org,gjtorikian/readthedocs.org,fujita-shintaro/readthedocs.org,sunnyzwh/readthedocs.org,wijerasa/readthedocs.org,attakei/readthedocs-oauth,michaelmcandrew/readthedocs.org,davidfischer/readthedocs.org,kenshinthebattosai/readthedocs.org,Tazer/readthedocs.org,atsuyim/readthedocs.org,KamranMackey/readthedocs.org,laplaceliu/readthedocs.org,pombredanne/readthedocs.org,CedarLogic/readthedocs.org,soulshake/readthedocs.org,cgourlay/readthedocs.org,d0ugal/readthedocs.org,emawind84/readthedocs.org,soulshake/readthedocs.org,espdev/readthedocs.org,kenwang76/readthedocs.org,espdev/readthedocs.org,raven47git/readthedocs.org,SteveViss/readthedocs.org,mhils/readthedocs.org,Carreau/readthedocs.org,laplaceliu/readthedocs.org,KamranMackey/readthedocs.org,sid-kap/readthedocs.org,soulshake/readthedocs.org,safwanrahman/readthedocs.org,espdev/readthedocs.org,gjtorikian/readthedocs.org,titiushko/readthedocs.org
from django.utils.translation import ugettext_lazy as _ import django_filters from builds import constants from builds.models import Build, Version ANY_REPO = ( ('', _('Any')), ) BUILD_TYPES = ANY_REPO + constants.BUILD_TYPES class VersionFilter(django_filters.FilterSet): project = django_filters.CharFilter(name='project__name', lookup_type="icontains") slug= django_filters.CharFilter(label=_("Slug"), name='slug', lookup_type='icontains') class Meta: model = Version fields = ['project', 'slug'] class BuildFilter(django_filters.FilterSet): date = django_filters.DateRangeFilter(label=_("Build Date"), name="date") type = django_filters.ChoiceFilter(label=_("Build Type"), choices=BUILD_TYPES) class Meta: model = Build - fields = ['type', 'date', 'version', 'success'] + fields = ['type', 'date', 'success'] -
Remove version from Build filter.
## Code Before: from django.utils.translation import ugettext_lazy as _ import django_filters from builds import constants from builds.models import Build, Version ANY_REPO = ( ('', _('Any')), ) BUILD_TYPES = ANY_REPO + constants.BUILD_TYPES class VersionFilter(django_filters.FilterSet): project = django_filters.CharFilter(name='project__name', lookup_type="icontains") slug= django_filters.CharFilter(label=_("Slug"), name='slug', lookup_type='icontains') class Meta: model = Version fields = ['project', 'slug'] class BuildFilter(django_filters.FilterSet): date = django_filters.DateRangeFilter(label=_("Build Date"), name="date") type = django_filters.ChoiceFilter(label=_("Build Type"), choices=BUILD_TYPES) class Meta: model = Build fields = ['type', 'date', 'version', 'success'] ## Instruction: Remove version from Build filter. ## Code After: from django.utils.translation import ugettext_lazy as _ import django_filters from builds import constants from builds.models import Build, Version ANY_REPO = ( ('', _('Any')), ) BUILD_TYPES = ANY_REPO + constants.BUILD_TYPES class VersionFilter(django_filters.FilterSet): project = django_filters.CharFilter(name='project__name', lookup_type="icontains") slug= django_filters.CharFilter(label=_("Slug"), name='slug', lookup_type='icontains') class Meta: model = Version fields = ['project', 'slug'] class BuildFilter(django_filters.FilterSet): date = django_filters.DateRangeFilter(label=_("Build Date"), name="date") type = django_filters.ChoiceFilter(label=_("Build Type"), choices=BUILD_TYPES) class Meta: model = Build fields = ['type', 'date', 'success']
0b7636422c632172dfc68ea2a5f21ec649248c8c
nimp/commands/vs_build.py
nimp/commands/vs_build.py
from nimp.commands._command import * from nimp.utilities.build import * #------------------------------------------------------------------------------- class VsBuildCommand(Command): def __init__(self): Command.__init__(self, 'vs-build', 'Builds a Visual Studio project') #--------------------------------------------------------------------------- def configure_arguments(self, env, parser): parser.add_argument('solution', help = 'Solution file', metavar = '<FILE>') parser.add_argument('project', help = 'Project', metavar = '<FILE>', default = 'None') parser.add_argument('--target', help = 'Target', metavar = '<TARGET>', default = 'Build') parser.add_argument('-c', '--configuration', help = 'configuration to build', metavar = '<configuration>', default = 'release') parser.add_argument('-p', '--platform', help = 'platform to build', metavar = '<platform>', default = 'Win64') parser.add_argument('--vs-version', help = 'VS version to use', metavar = '<VERSION>', default = '12') return True #--------------------------------------------------------------------------- def run(self, env): return vsbuild(env.solution, env.platform, env.configuration, env.project, env.vs_version, env.target)
from nimp.commands._command import * from nimp.utilities.build import * #------------------------------------------------------------------------------- class VsBuildCommand(Command): def __init__(self): Command.__init__(self, 'vs-build', 'Builds a Visual Studio project') #--------------------------------------------------------------------------- def configure_arguments(self, env, parser): parser.add_argument('solution', help = 'Solution file', metavar = '<FILE>') parser.add_argument('project', help = 'Project', metavar = '<FILE>', default = 'None') parser.add_argument('--target', help = 'Target', metavar = '<TARGET>', default = 'Build') parser.add_argument('-c', '--vs-configuration', help = 'configuration to build', metavar = '<vs-configuration>', default = 'release') parser.add_argument('-p', '--vs-platform', help = 'platform to build', metavar = '<vs-platform>', default = 'Win64') parser.add_argument('--vs-version', help = 'VS version to use', metavar = '<VERSION>', default = '12') return True #--------------------------------------------------------------------------- def run(self, env): return vsbuild(env.solution, env.vs_platform, env.vs_configuration, env.project, env.vs_version, env.target)
Use separate variable names for Visual Studio config/platform.
Use separate variable names for Visual Studio config/platform.
Python
mit
dontnod/nimp
from nimp.commands._command import * from nimp.utilities.build import * #------------------------------------------------------------------------------- class VsBuildCommand(Command): def __init__(self): Command.__init__(self, 'vs-build', 'Builds a Visual Studio project') #--------------------------------------------------------------------------- def configure_arguments(self, env, parser): parser.add_argument('solution', help = 'Solution file', metavar = '<FILE>') parser.add_argument('project', help = 'Project', metavar = '<FILE>', default = 'None') parser.add_argument('--target', help = 'Target', metavar = '<TARGET>', default = 'Build') parser.add_argument('-c', - '--configuration', + '--vs-configuration', help = 'configuration to build', - metavar = '<configuration>', + metavar = '<vs-configuration>', default = 'release') parser.add_argument('-p', - '--platform', + '--vs-platform', help = 'platform to build', - metavar = '<platform>', + metavar = '<vs-platform>', default = 'Win64') parser.add_argument('--vs-version', help = 'VS version to use', metavar = '<VERSION>', default = '12') return True #--------------------------------------------------------------------------- def run(self, env): - return vsbuild(env.solution, env.platform, env.configuration, env.project, env.vs_version, env.target) + return vsbuild(env.solution, env.vs_platform, env.vs_configuration, env.project, env.vs_version, env.target)
Use separate variable names for Visual Studio config/platform.
## Code Before: from nimp.commands._command import * from nimp.utilities.build import * #------------------------------------------------------------------------------- class VsBuildCommand(Command): def __init__(self): Command.__init__(self, 'vs-build', 'Builds a Visual Studio project') #--------------------------------------------------------------------------- def configure_arguments(self, env, parser): parser.add_argument('solution', help = 'Solution file', metavar = '<FILE>') parser.add_argument('project', help = 'Project', metavar = '<FILE>', default = 'None') parser.add_argument('--target', help = 'Target', metavar = '<TARGET>', default = 'Build') parser.add_argument('-c', '--configuration', help = 'configuration to build', metavar = '<configuration>', default = 'release') parser.add_argument('-p', '--platform', help = 'platform to build', metavar = '<platform>', default = 'Win64') parser.add_argument('--vs-version', help = 'VS version to use', metavar = '<VERSION>', default = '12') return True #--------------------------------------------------------------------------- def run(self, env): return vsbuild(env.solution, env.platform, env.configuration, env.project, env.vs_version, env.target) ## Instruction: Use separate variable names for Visual Studio config/platform. ## Code After: from nimp.commands._command import * from nimp.utilities.build import * #------------------------------------------------------------------------------- class VsBuildCommand(Command): def __init__(self): Command.__init__(self, 'vs-build', 'Builds a Visual Studio project') #--------------------------------------------------------------------------- def configure_arguments(self, env, parser): parser.add_argument('solution', help = 'Solution file', metavar = '<FILE>') parser.add_argument('project', help = 'Project', metavar = '<FILE>', default = 'None') parser.add_argument('--target', help = 'Target', metavar = '<TARGET>', default = 'Build') parser.add_argument('-c', '--vs-configuration', help = 'configuration to build', metavar = '<vs-configuration>', default = 'release') parser.add_argument('-p', '--vs-platform', help = 'platform to build', metavar = '<vs-platform>', default = 'Win64') parser.add_argument('--vs-version', help = 'VS version to use', metavar = '<VERSION>', default = '12') return True #--------------------------------------------------------------------------- def run(self, env): return vsbuild(env.solution, env.vs_platform, env.vs_configuration, env.project, env.vs_version, env.target)
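The rename works because argparse derives the attribute name from the first long option string, turning dashes into underscores — which is why the handler can read `env.vs_configuration` once the flag becomes `--vs-configuration`. A minimal, self-contained illustration:

```python
import argparse

parser = argparse.ArgumentParser()
# argparse maps '--vs-configuration' to the attribute 'vs_configuration'
# and '--vs-platform' to 'vs_platform'.
parser.add_argument('-c', '--vs-configuration', default='release')
parser.add_argument('-p', '--vs-platform', default='Win64')

args = parser.parse_args(['-c', 'debug'])
print(args.vs_configuration)  # 'debug'
print(args.vs_platform)       # 'Win64' (default)
```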
84b01f0015163dc016293162f1525be76329e602
pythonforandroid/recipes/cryptography/__init__.py
pythonforandroid/recipes/cryptography/__init__.py
from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe class CryptographyRecipe(CompiledComponentsPythonRecipe): name = 'cryptography' version = '2.4.2' url = 'https://github.com/pyca/cryptography/archive/{version}.tar.gz' depends = ['openssl', 'idna', 'asn1crypto', 'six', 'setuptools', 'enum34', 'ipaddress', 'cffi'] call_hostpython_via_targetpython = False def get_recipe_env(self, arch): env = super(CryptographyRecipe, self).get_recipe_env(arch) openssl_recipe = Recipe.get_recipe('openssl', self.ctx) env['CFLAGS'] += openssl_recipe.include_flags(arch) env['LDFLAGS'] += openssl_recipe.link_flags(arch) return env recipe = CryptographyRecipe()
from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe class CryptographyRecipe(CompiledComponentsPythonRecipe): name = 'cryptography' version = '2.4.2' url = 'https://github.com/pyca/cryptography/archive/{version}.tar.gz' depends = ['openssl', 'idna', 'asn1crypto', 'six', 'setuptools', 'enum34', 'ipaddress', 'cffi'] call_hostpython_via_targetpython = False def get_recipe_env(self, arch): env = super(CryptographyRecipe, self).get_recipe_env(arch) openssl_recipe = Recipe.get_recipe('openssl', self.ctx) env['CFLAGS'] += openssl_recipe.include_flags(arch) env['LDFLAGS'] += openssl_recipe.link_dirs_flags(arch) env['LIBS'] = openssl_recipe.link_libs_flags() return env recipe = CryptographyRecipe()
Move libraries from LDFLAGS to LIBS for cryptography recipe
Move libraries from LDFLAGS to LIBS for cryptography recipe Because this is how you are supposed to do it, you must use LDFLAGS for linker flags and LDLIBS (or the equivalent LOADLIBES) for the libraries
Python
mit
kronenpj/python-for-android,rnixx/python-for-android,PKRoma/python-for-android,germn/python-for-android,PKRoma/python-for-android,kronenpj/python-for-android,rnixx/python-for-android,germn/python-for-android,rnixx/python-for-android,kivy/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,germn/python-for-android,kronenpj/python-for-android,kivy/python-for-android,germn/python-for-android,kivy/python-for-android,kronenpj/python-for-android,PKRoma/python-for-android,kivy/python-for-android,germn/python-for-android,rnixx/python-for-android,kivy/python-for-android,germn/python-for-android,kronenpj/python-for-android,PKRoma/python-for-android,rnixx/python-for-android
from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe class CryptographyRecipe(CompiledComponentsPythonRecipe): name = 'cryptography' version = '2.4.2' url = 'https://github.com/pyca/cryptography/archive/{version}.tar.gz' depends = ['openssl', 'idna', 'asn1crypto', 'six', 'setuptools', 'enum34', 'ipaddress', 'cffi'] call_hostpython_via_targetpython = False def get_recipe_env(self, arch): env = super(CryptographyRecipe, self).get_recipe_env(arch) openssl_recipe = Recipe.get_recipe('openssl', self.ctx) env['CFLAGS'] += openssl_recipe.include_flags(arch) - env['LDFLAGS'] += openssl_recipe.link_flags(arch) + env['LDFLAGS'] += openssl_recipe.link_dirs_flags(arch) + env['LIBS'] = openssl_recipe.link_libs_flags() return env recipe = CryptographyRecipe()
Move libraries from LDFLAGS to LIBS for cryptography recipe
## Code Before: from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe class CryptographyRecipe(CompiledComponentsPythonRecipe): name = 'cryptography' version = '2.4.2' url = 'https://github.com/pyca/cryptography/archive/{version}.tar.gz' depends = ['openssl', 'idna', 'asn1crypto', 'six', 'setuptools', 'enum34', 'ipaddress', 'cffi'] call_hostpython_via_targetpython = False def get_recipe_env(self, arch): env = super(CryptographyRecipe, self).get_recipe_env(arch) openssl_recipe = Recipe.get_recipe('openssl', self.ctx) env['CFLAGS'] += openssl_recipe.include_flags(arch) env['LDFLAGS'] += openssl_recipe.link_flags(arch) return env recipe = CryptographyRecipe() ## Instruction: Move libraries from LDFLAGS to LIBS for cryptography recipe ## Code After: from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe class CryptographyRecipe(CompiledComponentsPythonRecipe): name = 'cryptography' version = '2.4.2' url = 'https://github.com/pyca/cryptography/archive/{version}.tar.gz' depends = ['openssl', 'idna', 'asn1crypto', 'six', 'setuptools', 'enum34', 'ipaddress', 'cffi'] call_hostpython_via_targetpython = False def get_recipe_env(self, arch): env = super(CryptographyRecipe, self).get_recipe_env(arch) openssl_recipe = Recipe.get_recipe('openssl', self.ctx) env['CFLAGS'] += openssl_recipe.include_flags(arch) env['LDFLAGS'] += openssl_recipe.link_dirs_flags(arch) env['LIBS'] = openssl_recipe.link_libs_flags() return env recipe = CryptographyRecipe()
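The split the commit message describes matters because of argument order on the link line: `-L` search paths (LDFLAGS) may come early, but `-l` libraries (LIBS/LDLIBS) must follow the object files that reference them. A rough sketch of the command such variables typically produce — the paths are placeholders, not the recipe's real flags:

```python
# Placeholder values standing in for what the recipe exports.
ldflags = "-L/path/to/openssl/lib"   # linker search dirs only
libs = "-lssl -lcrypto"              # libraries, resolved left to right

# A conventional link line keeps the libraries after the objects,
# so symbols referenced in main.o can be resolved from libssl/libcrypto.
link_cmd = "gcc {} main.o {} -o prog".format(ldflags, libs)
print(link_cmd)
```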
72301067306d6baf4aab0315a769c75dd585b46d
pi_setup/boot_config.py
pi_setup/boot_config.py
from utils import file_templates from utils.validation import is_valid_gpu_mem def main(): gpu_mem = 0 while gpu_mem == 0: user_input = raw_input("Enter GPU memory in MB (16/32/64/128/256): ") if is_valid_gpu_mem(user_input): gpu_mem = user_input else: print("Acceptable memory values are: 16/32/64/128/256") update_file('/boot/config.txt', gpu_mem) def update_file(path, gpu_mem): data = { 'gpu_mem': gpu_mem } template_name = path.split('/')[-1] new_file_data = file_templates.build(template_name, data) with open(path, 'w') as f: f.write(new_file_data) if __name__ == '__main__': main()
from utils import file_templates from utils.validation import is_valid_gpu_mem def main(): user_input = raw_input("Want to change the GPU memory split? (Y/N): ") if user_input == 'Y': gpu_mem = 0 while gpu_mem == 0: mem_split = raw_input("Enter GPU memory in MB (16/32/64/128/256): ") if is_valid_gpu_mem(mem_split): gpu_mem = mem_split else: print("Acceptable memory values are: 16/32/64/128/256") update_file('/boot/config.txt', gpu_mem) else: print("Skipping GPU memory split...") def update_file(path, gpu_mem): data = { 'gpu_mem': gpu_mem } template_name = path.split('/')[-1] new_file_data = file_templates.build(template_name, data) with open(path, 'w') as f: f.write(new_file_data) if __name__ == '__main__': main()
Make GPU mem split optional
Make GPU mem split optional
Python
mit
projectweekend/Pi-Setup,projectweekend/Pi-Setup
from utils import file_templates from utils.validation import is_valid_gpu_mem def main(): + user_input = raw_input("Want to change the GPU memory split? (Y/N): ") + if user_input == 'Y': - gpu_mem = 0 + gpu_mem = 0 - while gpu_mem == 0: + while gpu_mem == 0: - user_input = raw_input("Enter GPU memory in MB (16/32/64/128/256): ") + mem_split = raw_input("Enter GPU memory in MB (16/32/64/128/256): ") - if is_valid_gpu_mem(user_input): + if is_valid_gpu_mem(mem_split): - gpu_mem = user_input + gpu_mem = mem_split - else: + else: - print("Acceptable memory values are: 16/32/64/128/256") + print("Acceptable memory values are: 16/32/64/128/256") - update_file('/boot/config.txt', gpu_mem) + update_file('/boot/config.txt', gpu_mem) + else: + print("Skipping GPU memory split...") def update_file(path, gpu_mem): data = { 'gpu_mem': gpu_mem } template_name = path.split('/')[-1] new_file_data = file_templates.build(template_name, data) with open(path, 'w') as f: f.write(new_file_data) if __name__ == '__main__': main()
Make GPU mem split optional
## Code Before: from utils import file_templates from utils.validation import is_valid_gpu_mem def main(): gpu_mem = 0 while gpu_mem == 0: user_input = raw_input("Enter GPU memory in MB (16/32/64/128/256): ") if is_valid_gpu_mem(user_input): gpu_mem = user_input else: print("Acceptable memory values are: 16/32/64/128/256") update_file('/boot/config.txt', gpu_mem) def update_file(path, gpu_mem): data = { 'gpu_mem': gpu_mem } template_name = path.split('/')[-1] new_file_data = file_templates.build(template_name, data) with open(path, 'w') as f: f.write(new_file_data) if __name__ == '__main__': main() ## Instruction: Make GPU mem split optional ## Code After: from utils import file_templates from utils.validation import is_valid_gpu_mem def main(): user_input = raw_input("Want to change the GPU memory split? (Y/N): ") if user_input == 'Y': gpu_mem = 0 while gpu_mem == 0: mem_split = raw_input("Enter GPU memory in MB (16/32/64/128/256): ") if is_valid_gpu_mem(mem_split): gpu_mem = mem_split else: print("Acceptable memory values are: 16/32/64/128/256") update_file('/boot/config.txt', gpu_mem) else: print("Skipping GPU memory split...") def update_file(path, gpu_mem): data = { 'gpu_mem': gpu_mem } template_name = path.split('/')[-1] new_file_data = file_templates.build(template_name, data) with open(path, 'w') as f: f.write(new_file_data) if __name__ == '__main__': main()
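The pattern above — an outer Y/N gate plus an inner loop that repeats until the answer validates — can be factored into reusable prompt helpers. A Python 3 sketch (the recipe itself targets Python 2's `raw_input`, and this stricter variant also accepts lowercase answers, which the original does not):

```python
def ask_yes_no(question):
    # Re-prompt until the user types an explicit yes or no.
    while True:
        answer = input(question + " (Y/N): ").strip().upper()
        if answer in ("Y", "N"):
            return answer == "Y"
        print("Please answer Y or N.")

def ask_choice(question, choices):
    # Re-prompt until the answer is one of the allowed values.
    while True:
        answer = input(question + ": ").strip()
        if answer in choices:
            return answer
        print("Acceptable values are: " + "/".join(choices))

if ask_yes_no("Want to change the GPU memory split?"):
    gpu_mem = ask_choice("Enter GPU memory in MB",
                         ("16", "32", "64", "128", "256"))
    print("Selected %s MB" % gpu_mem)
else:
    print("Skipping GPU memory split...")
```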
2fe5f960f4998a0337bceabd7db930ac5d5a4fd1
qipipe/qiprofile/helpers.py
qipipe/qiprofile/helpers.py
import re from datetime import datetime TRAILING_NUM_REGEX = re.compile("(\d+)$") """A regular expression to extract the trailing number from a string.""" DATE_REGEX = re.compile("(0?\d|1[12])/(0?\d|[12]\d|3[12])/((19|20)?\d\d)$") class DateError(Exception): pass def trailing_number(s): """ :param s: the input string :return: the trailing number in the string, or None if there is none """ match = TRAILING_NUM_REGEX.search(s) if match: return int(match.group(1)) def default_parser(attribute): """ Retuns the default parser, determined as follows: * If the attribute ends in ``date``, then a MM/DD/YYYY datetime parser :param attribute: the row attribute :return: the function or lambda value parser, or None if none """ if attribute.endswith('date'): return _parse_date def _parse_date(s): """ :param s: the input date string :return: the parsed datetime :rtype: datetime """ match = DATE_REGEX.match(s) if not match: raise DateError("Date is not in a supported format: %s" % s) m, d, y = map(int, match.groups()[:3]) if y < 20: y += 2000 elif y < 100: y += 1900 return datetime(y, m, d)
import re from datetime import datetime TRAILING_NUM_REGEX = re.compile("(\d+)$") """A regular expression to extract the trailing number from a string.""" DATE_REGEX = re.compile("(0?\d|1[12])/(0?\d|[12]\d|3[12])/((19|20)?\d\d)$") class DateError(Exception): pass def trailing_number(s): """ :param s: the input string :return: the trailing number in the string, or None if there is none """ match = TRAILING_NUM_REGEX.search(s) if match: return int(match.group(1)) def default_parser(attribute): """ Retuns the default parser, determined as follows: * If the attribute ends in ``date``, then a MM/DD/YYYY datetime parser :param attribute: the row attribute :return: the value parser function, or None if none """ if attribute.endswith('date'): return _parse_date def _parse_date(s): """ :param s: the input date string :return: the parsed datetime :rtype: datetime """ match = DATE_REGEX.match(s) if not match: raise DateError("Date is not in a supported format: %s" % s) m, d, y = map(int, match.groups()[:3]) if y < 20: y += 2000 elif y < 100: y += 1900 return datetime(y, m, d)
Change lambda to function in doc.
Change lambda to function in doc.
Python
bsd-2-clause
ohsu-qin/qipipe
import re from datetime import datetime TRAILING_NUM_REGEX = re.compile("(\d+)$") """A regular expression to extract the trailing number from a string.""" DATE_REGEX = re.compile("(0?\d|1[12])/(0?\d|[12]\d|3[12])/((19|20)?\d\d)$") class DateError(Exception): pass def trailing_number(s): """ :param s: the input string :return: the trailing number in the string, or None if there is none """ match = TRAILING_NUM_REGEX.search(s) if match: return int(match.group(1)) def default_parser(attribute): """ Retuns the default parser, determined as follows: * If the attribute ends in ``date``, then a MM/DD/YYYY datetime parser :param attribute: the row attribute - :return: the function or lambda value parser, or None if none + :return: the value parser function, or None if none """ if attribute.endswith('date'): return _parse_date def _parse_date(s): """ :param s: the input date string :return: the parsed datetime :rtype: datetime """ match = DATE_REGEX.match(s) if not match: raise DateError("Date is not in a supported format: %s" % s) m, d, y = map(int, match.groups()[:3]) if y < 20: y += 2000 elif y < 100: y += 1900 return datetime(y, m, d)
Change lambda to function in doc.
## Code Before: import re from datetime import datetime TRAILING_NUM_REGEX = re.compile("(\d+)$") """A regular expression to extract the trailing number from a string.""" DATE_REGEX = re.compile("(0?\d|1[12])/(0?\d|[12]\d|3[12])/((19|20)?\d\d)$") class DateError(Exception): pass def trailing_number(s): """ :param s: the input string :return: the trailing number in the string, or None if there is none """ match = TRAILING_NUM_REGEX.search(s) if match: return int(match.group(1)) def default_parser(attribute): """ Retuns the default parser, determined as follows: * If the attribute ends in ``date``, then a MM/DD/YYYY datetime parser :param attribute: the row attribute :return: the function or lambda value parser, or None if none """ if attribute.endswith('date'): return _parse_date def _parse_date(s): """ :param s: the input date string :return: the parsed datetime :rtype: datetime """ match = DATE_REGEX.match(s) if not match: raise DateError("Date is not in a supported format: %s" % s) m, d, y = map(int, match.groups()[:3]) if y < 20: y += 2000 elif y < 100: y += 1900 return datetime(y, m, d) ## Instruction: Change lambda to function in doc. ## Code After: import re from datetime import datetime TRAILING_NUM_REGEX = re.compile("(\d+)$") """A regular expression to extract the trailing number from a string.""" DATE_REGEX = re.compile("(0?\d|1[12])/(0?\d|[12]\d|3[12])/((19|20)?\d\d)$") class DateError(Exception): pass def trailing_number(s): """ :param s: the input string :return: the trailing number in the string, or None if there is none """ match = TRAILING_NUM_REGEX.search(s) if match: return int(match.group(1)) def default_parser(attribute): """ Retuns the default parser, determined as follows: * If the attribute ends in ``date``, then a MM/DD/YYYY datetime parser :param attribute: the row attribute :return: the value parser function, or None if none """ if attribute.endswith('date'): return _parse_date def _parse_date(s): """ :param s: the input date string :return: the parsed datetime :rtype: datetime """ match = DATE_REGEX.match(s) if not match: raise DateError("Date is not in a supported format: %s" % s) m, d, y = map(int, match.groups()[:3]) if y < 20: y += 2000 elif y < 100: y += 1900 return datetime(y, m, d)
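A condensed, runnable version of the parser above, reproducing the record's regex verbatim. Two quirks of the pattern as written are worth knowing before reusing it: the month alternation `(0?\d|1[12])` rejects 10, and the day alternation `(0?\d|[12]\d|3[12])` rejects 30 while accepting 32.

```python
import re
from datetime import datetime

DATE_REGEX = re.compile(r"(0?\d|1[12])/(0?\d|[12]\d|3[12])/((19|20)?\d\d)$")

def parse_date(s):
    match = DATE_REGEX.match(s)
    if not match:
        raise ValueError("Date is not in a supported format: %s" % s)
    m, d, y = map(int, match.groups()[:3])
    if y < 20:        # two-digit years 00-19 land in the 2000s
        y += 2000
    elif y < 100:     # two-digit years 20-99 land in the 1900s
        y += 1900
    return datetime(y, m, d)

print(parse_date("3/7/15"))    # 2015-03-07 00:00:00
print(parse_date("12/31/99"))  # 1999-12-31 00:00:00
```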
2f360d9986c13adaaf670b80b27dad995823b849
bandstructure/system/tightbindingsystem.py
bandstructure/system/tightbindingsystem.py
import numpy as np from .system import System class TightBindingSystem(System): def setDefaultParams(self): self.params.setdefault('t', 1) # nearest neighbor tunneling strength self.params.setdefault('t2', 0) # next-nearest neighbor .. def tunnelingRate(self, dr): t = self.get("t") t2 = self.get("t2") # Nearest neighbors: # Only with newest numpy version: # nn = np.linalg.norm(dr, axis=3) == 1 # TODO! get the real nearest neighbor distance # nnn = np.linalg.norm(dr, axis=3) == 2 # TODO! nn = np.sqrt(np.sum(dr ** 2, axis=3)) == 1 # TODO! get the real nearest neighbor distance nnn = np.sqrt(np.sum(dr ** 2, axis=3)) == 2 # TODO # Orbital matrix m = np.array([[1, 0], [0, -1]]) # m = np.array([-t]) return t * m * nn[:, :, :, None, None] + t2 * m * nnn[:, :, :, None, None]
import numpy as np from .system import System class TightBindingSystem(System): def setDefaultParams(self): self.params.setdefault('t', 1) # nearest neighbor tunneling strength self.params.setdefault('t2', 0) # next-nearest neighbor .. def tunnelingRate(self, dr): t = self.get("t") t2 = self.get("t2") # Orbital matrix m = np.array([[1, 0], [0, -1]]) # m = np.array([-t]) nn = dr.getNeighborsMask(1) nnn = dr.getNeighborsMask(2) return t * m * nn[:, :, :, None, None] + t2 * m * nnn[:, :, :, None, None]
Use new functions for getting (next) nearest neighbors
Use new functions for getting (next) nearest neighbors
Python
mit
sharkdp/bandstructure,sharkdp/bandstructure
import numpy as np from .system import System class TightBindingSystem(System): def setDefaultParams(self): self.params.setdefault('t', 1) # nearest neighbor tunneling strength self.params.setdefault('t2', 0) # next-nearest neighbor .. def tunnelingRate(self, dr): t = self.get("t") t2 = self.get("t2") - # Nearest neighbors: - - # Only with newest numpy version: - # nn = np.linalg.norm(dr, axis=3) == 1 # TODO! get the real nearest neighbor distance - # nnn = np.linalg.norm(dr, axis=3) == 2 # TODO! - - nn = np.sqrt(np.sum(dr ** 2, axis=3)) == 1 # TODO! get the real nearest neighbor distance - nnn = np.sqrt(np.sum(dr ** 2, axis=3)) == 2 # TODO - # Orbital matrix m = np.array([[1, 0], [0, -1]]) # m = np.array([-t]) + nn = dr.getNeighborsMask(1) + nnn = dr.getNeighborsMask(2) return t * m * nn[:, :, :, None, None] + t2 * m * nnn[:, :, :, None, None]
Use new functions for getting (next) nearest neighbors
## Code Before: import numpy as np from .system import System class TightBindingSystem(System): def setDefaultParams(self): self.params.setdefault('t', 1) # nearest neighbor tunneling strength self.params.setdefault('t2', 0) # next-nearest neighbor .. def tunnelingRate(self, dr): t = self.get("t") t2 = self.get("t2") # Nearest neighbors: # Only with newest numpy version: # nn = np.linalg.norm(dr, axis=3) == 1 # TODO! get the real nearest neighbor distance # nnn = np.linalg.norm(dr, axis=3) == 2 # TODO! nn = np.sqrt(np.sum(dr ** 2, axis=3)) == 1 # TODO! get the real nearest neighbor distance nnn = np.sqrt(np.sum(dr ** 2, axis=3)) == 2 # TODO # Orbital matrix m = np.array([[1, 0], [0, -1]]) # m = np.array([-t]) return t * m * nn[:, :, :, None, None] + t2 * m * nnn[:, :, :, None, None] ## Instruction: Use new functions for getting (next) nearest neighbors ## Code After: import numpy as np from .system import System class TightBindingSystem(System): def setDefaultParams(self): self.params.setdefault('t', 1) # nearest neighbor tunneling strength self.params.setdefault('t2', 0) # next-nearest neighbor .. def tunnelingRate(self, dr): t = self.get("t") t2 = self.get("t2") # Orbital matrix m = np.array([[1, 0], [0, -1]]) # m = np.array([-t]) nn = dr.getNeighborsMask(1) nnn = dr.getNeighborsMask(2) return t * m * nn[:, :, :, None, None] + t2 * m * nnn[:, :, :, None, None]
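`getNeighborsMask` belongs to the project's own `dr` object, so its exact behavior isn't shown in this record; a plausible stand-in (hypothetical, and with a floating-point tolerance the hard-coded `== 1` comparison it replaces never had) selects displacement vectors by length:

```python
import numpy as np

def neighbors_mask(dr, distance, tol=1e-9):
    # True where the displacement vector's norm matches the target
    # neighbor distance, up to floating-point tolerance.
    norms = np.sqrt(np.sum(dr ** 2, axis=-1))
    return np.abs(norms - distance) < tol

dr = np.array([[1.0, 0.0], [0.0, 2.0], [1.0, 1.0]])
print(neighbors_mask(dr, 1.0))  # [ True False False]
print(neighbors_mask(dr, 2.0))  # [False  True False]
```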
611f95b0c72e436ebf056329349216625c61e133
wagtail/tests/testapp/migrations/0009_defaultstreampage.py
wagtail/tests/testapp/migrations/0009_defaultstreampage.py
from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wagtail.wagtailcore.blocks import wagtail.wagtailcore.fields import wagtail.wagtailimages.blocks class Migration(migrations.Migration): dependencies = [ ('wagtailcore', '0030_index_on_pagerevision_created_at'), ('tests', '0008_inlinestreampage_inlinestreampagesection'), ] operations = [ migrations.CreateModel( name='DefaultStreamPage', fields=[ ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')), ('body', wagtail.wagtailcore.fields.StreamField((('text', wagtail.wagtailcore.blocks.CharBlock()), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock())), default='')), ], options={ 'abstract': False, }, bases=('wagtailcore.page',), ), ]
from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wagtail.wagtailcore.blocks import wagtail.wagtailcore.fields import wagtail.wagtailimages.blocks class Migration(migrations.Migration): dependencies = [ ('wagtailcore', '0029_unicode_slugfield_dj19'), ('tests', '0008_inlinestreampage_inlinestreampagesection'), ] operations = [ migrations.CreateModel( name='DefaultStreamPage', fields=[ ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')), ('body', wagtail.wagtailcore.fields.StreamField((('text', wagtail.wagtailcore.blocks.CharBlock()), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock())), default='')), ], options={ 'abstract': False, }, bases=('wagtailcore.page',), ), ]
Adjust testapp migration dependency to be valid on 1.6.x
Adjust testapp migration dependency to be valid on 1.6.x
Python
bsd-3-clause
mixxorz/wagtail,nutztherookie/wagtail,rsalmaso/wagtail,torchbox/wagtail,chrxr/wagtail,gasman/wagtail,zerolab/wagtail,Toshakins/wagtail,takeflight/wagtail,FlipperPA/wagtail,iansprice/wagtail,takeflight/wagtail,wagtail/wagtail,nilnvoid/wagtail,nealtodd/wagtail,FlipperPA/wagtail,nilnvoid/wagtail,chrxr/wagtail,takeflight/wagtail,wagtail/wagtail,iansprice/wagtail,iansprice/wagtail,thenewguy/wagtail,nimasmi/wagtail,gasman/wagtail,Toshakins/wagtail,torchbox/wagtail,nealtodd/wagtail,mixxorz/wagtail,wagtail/wagtail,mixxorz/wagtail,nimasmi/wagtail,jnns/wagtail,nutztherookie/wagtail,timorieber/wagtail,kaedroho/wagtail,rsalmaso/wagtail,kaedroho/wagtail,mixxorz/wagtail,chrxr/wagtail,FlipperPA/wagtail,nealtodd/wagtail,rsalmaso/wagtail,torchbox/wagtail,nilnvoid/wagtail,jnns/wagtail,nimasmi/wagtail,zerolab/wagtail,gasman/wagtail,mikedingjan/wagtail,Toshakins/wagtail,rsalmaso/wagtail,timorieber/wagtail,jnns/wagtail,nealtodd/wagtail,iansprice/wagtail,torchbox/wagtail,nimasmi/wagtail,mixxorz/wagtail,mikedingjan/wagtail,nutztherookie/wagtail,kaedroho/wagtail,Toshakins/wagtail,zerolab/wagtail,kaedroho/wagtail,thenewguy/wagtail,thenewguy/wagtail,timorieber/wagtail,chrxr/wagtail,mikedingjan/wagtail,gasman/wagtail,rsalmaso/wagtail,zerolab/wagtail,gasman/wagtail,nutztherookie/wagtail,wagtail/wagtail,zerolab/wagtail,kaedroho/wagtail,wagtail/wagtail,FlipperPA/wagtail,jnns/wagtail,nilnvoid/wagtail,mikedingjan/wagtail,timorieber/wagtail,thenewguy/wagtail,takeflight/wagtail,thenewguy/wagtail
from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wagtail.wagtailcore.blocks import wagtail.wagtailcore.fields import wagtail.wagtailimages.blocks class Migration(migrations.Migration): dependencies = [ - ('wagtailcore', '0030_index_on_pagerevision_created_at'), + ('wagtailcore', '0029_unicode_slugfield_dj19'), ('tests', '0008_inlinestreampage_inlinestreampagesection'), ] operations = [ migrations.CreateModel( name='DefaultStreamPage', fields=[ ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')), ('body', wagtail.wagtailcore.fields.StreamField((('text', wagtail.wagtailcore.blocks.CharBlock()), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock())), default='')), ], options={ 'abstract': False, }, bases=('wagtailcore.page',), ), ]
Adjust testapp migration dependency to be valid on 1.6.x
## Code Before: from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wagtail.wagtailcore.blocks import wagtail.wagtailcore.fields import wagtail.wagtailimages.blocks class Migration(migrations.Migration): dependencies = [ ('wagtailcore', '0030_index_on_pagerevision_created_at'), ('tests', '0008_inlinestreampage_inlinestreampagesection'), ] operations = [ migrations.CreateModel( name='DefaultStreamPage', fields=[ ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')), ('body', wagtail.wagtailcore.fields.StreamField((('text', wagtail.wagtailcore.blocks.CharBlock()), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock())), default='')), ], options={ 'abstract': False, }, bases=('wagtailcore.page',), ), ] ## Instruction: Adjust testapp migration dependency to be valid on 1.6.x ## Code After: from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wagtail.wagtailcore.blocks import wagtail.wagtailcore.fields import wagtail.wagtailimages.blocks class Migration(migrations.Migration): dependencies = [ ('wagtailcore', '0029_unicode_slugfield_dj19'), ('tests', '0008_inlinestreampage_inlinestreampagesection'), ] operations = [ migrations.CreateModel( name='DefaultStreamPage', fields=[ ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')), ('body', wagtail.wagtailcore.fields.StreamField((('text', wagtail.wagtailcore.blocks.CharBlock()), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock())), default='')), ], options={ 'abstract': False, }, bases=('wagtailcore.page',), ), ]
c5946e378147f6d4d42c7a3e531388e6203f29e4
fantasyStocks/static/stockCleaner.py
fantasyStocks/static/stockCleaner.py
import json with open("stocks.json") as f:
from pprint import pprint import json import re REGEXP = re.compile("(?P<symbol>[A-Z]{1,4}).*") with open("stocks.json") as f: l = json.loads(f.read()) out = [] for i in l: if not "^" in i["symbol"]: out.append(i) with open("newStocks.json", "w") as w: w.write(json.dumps(out))
Write script to remove duplicates from stocks.json
Write script to remove duplicates from stocks.json
Python
apache-2.0
ddsnowboard/FantasyStocks,ddsnowboard/FantasyStocks,ddsnowboard/FantasyStocks
+ from pprint import pprint import json + import re + REGEXP = re.compile("(?P<symbol>[A-Z]{1,4}).*") with open("stocks.json") as f: - + l = json.loads(f.read()) + out = [] + for i in l: + if not "^" in i["symbol"]: + out.append(i) + with open("newStocks.json", "w") as w: + w.write(json.dumps(out))
Write script to remove duplicates from stocks.json
## Code Before: import json with open("stocks.json") as f: ## Instruction: Write script to remove duplicates from stocks.json ## Code After: from pprint import pprint import json import re REGEXP = re.compile("(?P<symbol>[A-Z]{1,4}).*") with open("stocks.json") as f: l = json.loads(f.read()) out = [] for i in l: if not "^" in i["symbol"]: out.append(i) with open("newStocks.json", "w") as w: w.write(json.dumps(out))
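Despite the commit title, the loop keeps every entry whose symbol lacks a caret (index-style tickers such as `^DJI`), and the compiled `REGEXP` is defined but never used. The core of the script, runnable with inline data instead of `stocks.json`:

```python
import json

stocks = [{"symbol": "AAPL"}, {"symbol": "^DJI"}, {"symbol": "GE"}]
# Keep only plain tickers; caret-prefixed symbols denote indices.
cleaned = [entry for entry in stocks if "^" not in entry["symbol"]]
print(json.dumps(cleaned))  # [{"symbol": "AAPL"}, {"symbol": "GE"}]
```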
2c8b60569d20a350b33f3c5e8ba00bdc3d9bbee4
ask_sweden/lambda_function.py
ask_sweden/lambda_function.py
import logging logger = logging.getLogger() logger.setLevel(logging.INFO) from ask import alexa def lambda_handler(request_obj, context=None): return alexa.route_request(request_obj) @alexa.default def default_handler(request): logger.info('default_handler') return alexa.respond('There were 42 accidents in 2016.') @alexa.request("LaunchRequest") def launch_request_handler(request): logger.info('launch_request_handler') return alexa.create_response(message='You can ask me about car accidents.') @alexa.request("SessionEndedRequest") def session_ended_request_handler(request): logger.info('session_ended_request_handler') return alexa.create_response(message="Goodbye!") @alexa.intent('AMAZON.CancelIntent') def cancel_intent_handler(request): logger.info('cancel_intent_handler') return alexa.create_response(message='ok', end_session=True) @alexa.intent('AMAZON.HelpIntent') def help_intent_handler(request): logger.info('help_intent_handler') return alexa.create_response(message='You can ask me about car accidents.') @alexa.intent('AMAZON.StopIntent') def stop_intent_handler(request): logger.info('stop_intent_handler') return alexa.create_response(message='ok', end_session=True)
import logging logger = logging.getLogger() logger.setLevel(logging.INFO) from ask import alexa def lambda_handler(request_obj, context=None): return alexa.route_request(request_obj) @alexa.default def default_handler(request): logger.info('default_handler') return alexa.respond('There were 42 accidents in 2016.') @alexa.request("LaunchRequest") def launch_request_handler(request): logger.info('launch_request_handler') return alexa.respond('You can ask me about car accidents.') @alexa.request("SessionEndedRequest") def session_ended_request_handler(request): logger.info('session_ended_request_handler') return alexa.respond('Goodbye.') @alexa.intent('AMAZON.CancelIntent') def cancel_intent_handler(request): logger.info('cancel_intent_handler') return alexa.respond('Okay.', end_session=True) @alexa.intent('AMAZON.HelpIntent') def help_intent_handler(request): logger.info('help_intent_handler') return alexa.respond('You can ask me about car accidents.') @alexa.intent('AMAZON.StopIntent') def stop_intent_handler(request): logger.info('stop_intent_handler') return alexa.respond('Okay.', end_session=True)
Use respond instead of create_response
Use respond instead of create_response
Python
mit
geoaxis/ask-sweden,geoaxis/ask-sweden
import logging logger = logging.getLogger() logger.setLevel(logging.INFO) from ask import alexa def lambda_handler(request_obj, context=None): return alexa.route_request(request_obj) @alexa.default def default_handler(request): logger.info('default_handler') return alexa.respond('There were 42 accidents in 2016.') @alexa.request("LaunchRequest") def launch_request_handler(request): logger.info('launch_request_handler') - return alexa.create_response(message='You can ask me about car accidents.') + return alexa.respond('You can ask me about car accidents.') @alexa.request("SessionEndedRequest") def session_ended_request_handler(request): logger.info('session_ended_request_handler') - return alexa.create_response(message="Goodbye!") + return alexa.respond('Goodbye.') @alexa.intent('AMAZON.CancelIntent') def cancel_intent_handler(request): logger.info('cancel_intent_handler') - return alexa.create_response(message='ok', end_session=True) + return alexa.respond('Okay.', end_session=True) @alexa.intent('AMAZON.HelpIntent') def help_intent_handler(request): logger.info('help_intent_handler') - return alexa.create_response(message='You can ask me about car accidents.') + return alexa.respond('You can ask me about car accidents.') @alexa.intent('AMAZON.StopIntent') def stop_intent_handler(request): logger.info('stop_intent_handler') - return alexa.create_response(message='ok', end_session=True) + return alexa.respond('Okay.', end_session=True)
Use respond instead of create_response
## Code Before: import logging logger = logging.getLogger() logger.setLevel(logging.INFO) from ask import alexa def lambda_handler(request_obj, context=None): return alexa.route_request(request_obj) @alexa.default def default_handler(request): logger.info('default_handler') return alexa.respond('There were 42 accidents in 2016.') @alexa.request("LaunchRequest") def launch_request_handler(request): logger.info('launch_request_handler') return alexa.create_response(message='You can ask me about car accidents.') @alexa.request("SessionEndedRequest") def session_ended_request_handler(request): logger.info('session_ended_request_handler') return alexa.create_response(message="Goodbye!") @alexa.intent('AMAZON.CancelIntent') def cancel_intent_handler(request): logger.info('cancel_intent_handler') return alexa.create_response(message='ok', end_session=True) @alexa.intent('AMAZON.HelpIntent') def help_intent_handler(request): logger.info('help_intent_handler') return alexa.create_response(message='You can ask me about car accidents.') @alexa.intent('AMAZON.StopIntent') def stop_intent_handler(request): logger.info('stop_intent_handler') return alexa.create_response(message='ok', end_session=True) ## Instruction: Use respond instead of create_response ## Code After: import logging logger = logging.getLogger() logger.setLevel(logging.INFO) from ask import alexa def lambda_handler(request_obj, context=None): return alexa.route_request(request_obj) @alexa.default def default_handler(request): logger.info('default_handler') return alexa.respond('There were 42 accidents in 2016.') @alexa.request("LaunchRequest") def launch_request_handler(request): logger.info('launch_request_handler') return alexa.respond('You can ask me about car accidents.') @alexa.request("SessionEndedRequest") def session_ended_request_handler(request): logger.info('session_ended_request_handler') return alexa.respond('Goodbye.') @alexa.intent('AMAZON.CancelIntent') def cancel_intent_handler(request): logger.info('cancel_intent_handler') return alexa.respond('Okay.', end_session=True) @alexa.intent('AMAZON.HelpIntent') def help_intent_handler(request): logger.info('help_intent_handler') return alexa.respond('You can ask me about car accidents.') @alexa.intent('AMAZON.StopIntent') def stop_intent_handler(request): logger.info('stop_intent_handler') return alexa.respond('Okay.', end_session=True)
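`alexa.respond` comes from the third-party `ask` wrapper used above, so nothing beyond what the diff shows is assumed about its signature. A simplified stand-in illustrating roughly what such a helper returns — the standard Alexa response envelope, to which the real library adds further fields:

```python
def respond(message, end_session=False):
    # Minimal Alexa-style response payload (simplified sketch).
    return {
        "version": "1.0",
        "response": {
            "outputSpeech": {"type": "PlainText", "text": message},
            "shouldEndSession": end_session,
        },
    }

print(respond("Okay.", end_session=True))
```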
aa82f91d220e8985c7f6dc68433ad65e70a71d15
froide/foirequest/tests/test_mail.py
froide/foirequest/tests/test_mail.py
from __future__ import with_statement from django.test import TestCase from foirequest.tasks import _process_mail from foirequest.models import FoiRequest class MailTest(TestCase): fixtures = ['publicbodies.json', "foirequest.json"] def test_working(self): with file("foirequest/tests/test_mail_01.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("s.wehrmeyer+axb4afh@fragdenstaat.de") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) def test_working_with_attachment(self): with file("foirequest/tests/test_mail_02.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("s.wehrmeyer+axb4afh@fragdenstaat.de") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) self.assertEqual(messages[1].subject, u"Fwd: Informationsfreiheitsgesetz des Bundes, Antragsvordruck für Open Data") self.assertEqual(len(message[1].attachments), 1)
from __future__ import with_statement from django.test import TestCase from foirequest.tasks import _process_mail from foirequest.models import FoiRequest class MailTest(TestCase): fixtures = ['publicbodies.json', "foirequest.json"] def test_working(self): with file("foirequest/tests/test_mail_01.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("s.wehrmeyer+axb4afh@fragdenstaat.de") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) def test_working_with_attachment(self): with file("foirequest/tests/test_mail_02.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("s.wehrmeyer+axb4afh@fragdenstaat.de") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) self.assertEqual(messages[1].subject, u"Fwd: Informationsfreiheitsgesetz des Bundes, Antragsvordruck für Open Data") self.assertEqual(len(messages[1].attachments), 1) self.assertEqual(messages[1].attachments[0].name, u"TI - IFG-Antrag, Vordruck.docx")
Test for attachment in mail test
Test for attachment in mail test
Python
mit
catcosmo/froide,okfse/froide,fin/froide,stefanw/froide,catcosmo/froide,fin/froide,LilithWittmann/froide,LilithWittmann/froide,okfse/froide,LilithWittmann/froide,ryankanno/froide,stefanw/froide,LilithWittmann/froide,catcosmo/froide,catcosmo/froide,ryankanno/froide,CodeforHawaii/froide,okfse/froide,ryankanno/froide,ryankanno/froide,CodeforHawaii/froide,okfse/froide,ryankanno/froide,stefanw/froide,okfse/froide,CodeforHawaii/froide,stefanw/froide,CodeforHawaii/froide,CodeforHawaii/froide,LilithWittmann/froide,fin/froide,stefanw/froide,catcosmo/froide,fin/froide
from __future__ import with_statement from django.test import TestCase from foirequest.tasks import _process_mail from foirequest.models import FoiRequest class MailTest(TestCase): fixtures = ['publicbodies.json', "foirequest.json"] def test_working(self): with file("foirequest/tests/test_mail_01.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("s.wehrmeyer+axb4afh@fragdenstaat.de") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) def test_working_with_attachment(self): with file("foirequest/tests/test_mail_02.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("s.wehrmeyer+axb4afh@fragdenstaat.de") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) self.assertEqual(messages[1].subject, u"Fwd: Informationsfreiheitsgesetz des Bundes, Antragsvordruck für Open Data") - self.assertEqual(len(message[1].attachments), 1) + self.assertEqual(len(messages[1].attachments), 1) + self.assertEqual(messages[1].attachments[0].name, u"TI - IFG-Antrag, Vordruck.docx")
Test for attachment in mail test
## Code Before: from __future__ import with_statement from django.test import TestCase from foirequest.tasks import _process_mail from foirequest.models import FoiRequest class MailTest(TestCase): fixtures = ['publicbodies.json', "foirequest.json"] def test_working(self): with file("foirequest/tests/test_mail_01.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("s.wehrmeyer+axb4afh@fragdenstaat.de") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) def test_working_with_attachment(self): with file("foirequest/tests/test_mail_02.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("s.wehrmeyer+axb4afh@fragdenstaat.de") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) self.assertEqual(messages[1].subject, u"Fwd: Informationsfreiheitsgesetz des Bundes, Antragsvordruck für Open Data") self.assertEqual(len(message[1].attachments), 1) ## Instruction: Test for attachment in mail test ## Code After: from __future__ import with_statement from django.test import TestCase from foirequest.tasks import _process_mail from foirequest.models import FoiRequest class MailTest(TestCase): fixtures = ['publicbodies.json', "foirequest.json"] def test_working(self): with file("foirequest/tests/test_mail_01.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("s.wehrmeyer+axb4afh@fragdenstaat.de") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) def test_working_with_attachment(self): with file("foirequest/tests/test_mail_02.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("s.wehrmeyer+axb4afh@fragdenstaat.de") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) self.assertEqual(messages[1].subject, u"Fwd: Informationsfreiheitsgesetz des Bundes, Antragsvordruck für Open Data") self.assertEqual(len(messages[1].attachments), 1) self.assertEqual(messages[1].attachments[0].name, u"TI - IFG-Antrag, Vordruck.docx")
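The fixed test indexes `messages[1]` (the `message[1]` typo in the before-state would raise a `NameError`) and then asserts on both attachment count and filename. The same kind of check, written against the standard library's `email` package (Python 3) rather than froide's own models:

```python
import email
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

# Build a two-part message: a text body plus one named attachment.
msg = MIMEMultipart()
msg.attach(MIMEText("body text"))
part = MIMEApplication(b"binary payload")
part.add_header("Content-Disposition", "attachment", filename="report.docx")
msg.attach(part)

# Round-trip through the parser, then assert on the attachments.
parsed = email.message_from_string(msg.as_string())
attachments = [p for p in parsed.walk()
               if p.get_content_disposition() == "attachment"]
assert len(attachments) == 1
assert attachments[0].get_filename() == "report.docx"
```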
20f0d90f5c64322864ad5fda4b4c9314e6c1cb11
run.py
run.py
import sys from kitchen.text.converters import getwriter from utils.log import getLogger, open_log, close_log from utils.misc import output_exception from system.factory_manager import Manager sys.stdout = getwriter('utf-8')(sys.stdout) sys.stderr = getwriter('utf-8')(sys.stderr) open_log("output.log") logger = getLogger("System") logger.info("Starting up..") try: manager = Manager() except Exception: logger.critical("Runtime error - process cannot continue!") output_exception(logger) finally: close_log("output.log") try: raw_input("Press enter to exit.") except: pass
import os import sys from kitchen.text.converters import getwriter from utils.log import getLogger, open_log, close_log from utils.misc import output_exception from system.factory_manager import Manager sys.stdout = getwriter('utf-8')(sys.stdout) sys.stderr = getwriter('utf-8')(sys.stderr) if not os.path.exists("logs"): os.mkdir("logs") open_log("output.log") logger = getLogger("System") logger.info("Starting up..") try: manager = Manager() except Exception: logger.critical("Runtime error - process cannot continue!") output_exception(logger) finally: close_log("output.log") try: raw_input("Press enter to exit.") except: pass
Create logs folder if it doesn't exist (to prevent errors)
Create logs folder if it doesn't exist (to prevent errors)
Python
artistic-2.0
UltrosBot/Ultros,UltrosBot/Ultros
+ import os import sys from kitchen.text.converters import getwriter from utils.log import getLogger, open_log, close_log from utils.misc import output_exception from system.factory_manager import Manager sys.stdout = getwriter('utf-8')(sys.stdout) sys.stderr = getwriter('utf-8')(sys.stderr) + + if not os.path.exists("logs"): + os.mkdir("logs") open_log("output.log") logger = getLogger("System") logger.info("Starting up..") try: manager = Manager() except Exception: logger.critical("Runtime error - process cannot continue!") output_exception(logger) finally: close_log("output.log") try: raw_input("Press enter to exit.") except: pass
Create logs folder if it doesn't exist (to prevent errors)
## Code Before: import sys from kitchen.text.converters import getwriter from utils.log import getLogger, open_log, close_log from utils.misc import output_exception from system.factory_manager import Manager sys.stdout = getwriter('utf-8')(sys.stdout) sys.stderr = getwriter('utf-8')(sys.stderr) open_log("output.log") logger = getLogger("System") logger.info("Starting up..") try: manager = Manager() except Exception: logger.critical("Runtime error - process cannot continue!") output_exception(logger) finally: close_log("output.log") try: raw_input("Press enter to exit.") except: pass ## Instruction: Create logs folder if it doesn't exist (to prevent errors) ## Code After: import os import sys from kitchen.text.converters import getwriter from utils.log import getLogger, open_log, close_log from utils.misc import output_exception from system.factory_manager import Manager sys.stdout = getwriter('utf-8')(sys.stdout) sys.stderr = getwriter('utf-8')(sys.stderr) if not os.path.exists("logs"): os.mkdir("logs") open_log("output.log") logger = getLogger("System") logger.info("Starting up..") try: manager = Manager() except Exception: logger.critical("Runtime error - process cannot continue!") output_exception(logger) finally: close_log("output.log") try: raw_input("Press enter to exit.") except: pass
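On Python 3 the check-then-create pair collapses into one call that tolerates the directory already existing and avoids the race where another process creates it between the `exists` check and the `mkdir`; the original keeps the explicit test because it targets Python 2:

```python
import os

# Equivalent to: if not os.path.exists("logs"): os.mkdir("logs")
# but safe against concurrent creation (Python 3.2+).
os.makedirs("logs", exist_ok=True)
```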
ba983dea1e20409d403a86d62c300ea3d257b58a
parserscripts/phage.py
parserscripts/phage.py
import re class Phage: supported_databases = { # European Nucleotide Archive phage database "ENA": r"^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$", # National Center for Biotechnology Information phage database "NCBI": r"^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$", # Actinobacteriophage Database "AD": r"^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$" } def __init__(self, raw_text, phage_finder): self.raw = raw_text.strip() self.refseq = None self.name = None self.db = None self._parse_phage(raw_text, phage_finder) def _parse_phage(self, raw_text, phage_finder): for db, regex in Phage.supported_databases.items(): match = re.search(regex, raw_text) if match is not None: if db is not "AD": self.name = match.group(2) self.refseq = match.group(1) else: short_name = match.group(1) cluster = match.group(2) self.name = "Mycobacteriophage " + short_name self.refseq = phage_finder.find_by_phage(short_name, cluster) self.db = db
import re class Phage: SUPPORTED_DATABASES = { # European Nucleotide Archive phage database "ENA": r"^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$", # National Center for Biotechnology Information phage database "NCBI": r"^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$", # Actinobacteriophage Database "AD": r"^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$" } def __init__(self, raw_text, phage_finder): self.raw = raw_text.strip() self.refseq = None self.name = None self.db = None self._parse_phage(raw_text, phage_finder) def _parse_phage(self, raw_text, phage_finder): for db, regex in Phage.SUPPORTED_DATABASES.items(): match = re.search(regex, raw_text) if match is not None: if db is not "AD": self.name = match.group(2) self.refseq = match.group(1) else: short_name = match.group(1) cluster = match.group(2) self.name = "Mycobacteriophage " + short_name self.refseq = phage_finder.find_by_phage(short_name, cluster) self.db = db
Rename to follow constant naming conventions
Rename to follow constant naming conventions
Python
mit
mbonsma/phageParser,mbonsma/phageParser,phageParser/phageParser,mbonsma/phageParser,phageParser/phageParser,goyalsid/phageParser,goyalsid/phageParser,phageParser/phageParser,phageParser/phageParser,mbonsma/phageParser,goyalsid/phageParser
import re class Phage: - supported_databases = { + SUPPORTED_DATABASES = { # European Nucleotide Archive phage database "ENA": r"^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$", # National Center for Biotechnology Information phage database "NCBI": r"^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$", # Actinobacteriophage Database "AD": r"^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$" } def __init__(self, raw_text, phage_finder): self.raw = raw_text.strip() self.refseq = None self.name = None self.db = None self._parse_phage(raw_text, phage_finder) def _parse_phage(self, raw_text, phage_finder): - for db, regex in Phage.supported_databases.items(): + for db, regex in Phage.SUPPORTED_DATABASES.items(): match = re.search(regex, raw_text) if match is not None: if db is not "AD": self.name = match.group(2) self.refseq = match.group(1) else: short_name = match.group(1) cluster = match.group(2) self.name = "Mycobacteriophage " + short_name self.refseq = phage_finder.find_by_phage(short_name, cluster) self.db = db
Rename to follow constant naming conventions
## Code Before: import re class Phage: supported_databases = { # European Nucleotide Archive phage database "ENA": r"^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$", # National Center for Biotechnology Information phage database "NCBI": r"^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$", # Actinobacteriophage Database "AD": r"^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$" } def __init__(self, raw_text, phage_finder): self.raw = raw_text.strip() self.refseq = None self.name = None self.db = None self._parse_phage(raw_text, phage_finder) def _parse_phage(self, raw_text, phage_finder): for db, regex in Phage.supported_databases.items(): match = re.search(regex, raw_text) if match is not None: if db is not "AD": self.name = match.group(2) self.refseq = match.group(1) else: short_name = match.group(1) cluster = match.group(2) self.name = "Mycobacteriophage " + short_name self.refseq = phage_finder.find_by_phage(short_name, cluster) self.db = db ## Instruction: Rename to follow constant naming conventions ## Code After: import re class Phage: SUPPORTED_DATABASES = { # European Nucleotide Archive phage database "ENA": r"^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$", # National Center for Biotechnology Information phage database "NCBI": r"^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$", # Actinobacteriophage Database "AD": r"^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$" } def __init__(self, raw_text, phage_finder): self.raw = raw_text.strip() self.refseq = None self.name = None self.db = None self._parse_phage(raw_text, phage_finder) def _parse_phage(self, raw_text, phage_finder): for db, regex in Phage.SUPPORTED_DATABASES.items(): match = re.search(regex, raw_text) if match is not None: if db is not "AD": self.name = match.group(2) self.refseq = match.group(1) else: short_name = match.group(1) cluster = match.group(2) self.name = "Mycobacteriophage " + short_name self.refseq = phage_finder.find_by_phage(short_name, cluster) self.db = db
8c05cb85c47db892dd13abbd91b3948c09b9a954
statsmodels/tools/__init__.py
statsmodels/tools/__init__.py
from tools import add_constant, categorical from datautils import Dataset from statsmodels import NoseWrapper as Tester test = Tester().test
from tools import add_constant, categorical from statsmodels import NoseWrapper as Tester test = Tester().test
Remove import of moved file
REF: Remove import of moved file
Python
bsd-3-clause
josef-pkt/statsmodels,adammenges/statsmodels,saketkc/statsmodels,DonBeo/statsmodels,edhuckle/statsmodels,saketkc/statsmodels,wkfwkf/statsmodels,wzbozon/statsmodels,huongttlan/statsmodels,kiyoto/statsmodels,astocko/statsmodels,musically-ut/statsmodels,bsipocz/statsmodels,wwf5067/statsmodels,jstoxrocky/statsmodels,cbmoore/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,bzero/statsmodels,yl565/statsmodels,ChadFulton/statsmodels,nguyentu1602/statsmodels,saketkc/statsmodels,astocko/statsmodels,Averroes/statsmodels,gef756/statsmodels,edhuckle/statsmodels,jseabold/statsmodels,bavardage/statsmodels,wzbozon/statsmodels,YihaoLu/statsmodels,bavardage/statsmodels,wwf5067/statsmodels,bsipocz/statsmodels,edhuckle/statsmodels,statsmodels/statsmodels,nvoron23/statsmodels,adammenges/statsmodels,wwf5067/statsmodels,yl565/statsmodels,alekz112/statsmodels,waynenilsen/statsmodels,bert9bert/statsmodels,detrout/debian-statsmodels,alekz112/statsmodels,wzbozon/statsmodels,jseabold/statsmodels,rgommers/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,YihaoLu/statsmodels,ChadFulton/statsmodels,bavardage/statsmodels,musically-ut/statsmodels,nguyentu1602/statsmodels,bsipocz/statsmodels,waynenilsen/statsmodels,nvoron23/statsmodels,huongttlan/statsmodels,bashtage/statsmodels,Averroes/statsmodels,adammenges/statsmodels,hlin117/statsmodels,wwf5067/statsmodels,gef756/statsmodels,cbmoore/statsmodels,hainm/statsmodels,wdurhamh/statsmodels,wdurhamh/statsmodels,josef-pkt/statsmodels,phobson/statsmodels,alekz112/statsmodels,bsipocz/statsmodels,saketkc/statsmodels,wdurhamh/statsmodels,musically-ut/statsmodels,kiyoto/statsmodels,phobson/statsmodels,wdurhamh/statsmodels,gef756/statsmodels,kiyoto/statsmodels,statsmodels/statsmodels,rgommers/statsmodels,josef-pkt/statsmodels,hainm/statsmodels,edhuckle/statsmodels,hlin117/statsmodels,gef756/statsmodels,josef-pkt/statsmodels,cbmoore/statsmodels,gef756/statsmodels,YihaoLu/statsmodels,statsmodels/statsmodels,hlin117/statsmodels,bert9bert/statsmodels,edhuckle/statsmodels,hlin117/statsmodels,jstoxrocky/statsmodels,bzero/statsmodels,yarikoptic/pystatsmodels,yl565/statsmodels,saketkc/statsmodels,kiyoto/statsmodels,huongttlan/statsmodels,Averroes/statsmodels,nvoron23/statsmodels,astocko/statsmodels,wzbozon/statsmodels,bzero/statsmodels,detrout/debian-statsmodels,wkfwkf/statsmodels,bzero/statsmodels,ChadFulton/statsmodels,bert9bert/statsmodels,yl565/statsmodels,bashtage/statsmodels,jseabold/statsmodels,kiyoto/statsmodels,phobson/statsmodels,bzero/statsmodels,cbmoore/statsmodels,musically-ut/statsmodels,ChadFulton/statsmodels,DonBeo/statsmodels,yarikoptic/pystatsmodels,nguyentu1602/statsmodels,jstoxrocky/statsmodels,alekz112/statsmodels,adammenges/statsmodels,bavardage/statsmodels,wkfwkf/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,jstoxrocky/statsmodels,yl565/statsmodels,bashtage/statsmodels,astocko/statsmodels,nguyentu1602/statsmodels,wkfwkf/statsmodels,Averroes/statsmodels,nvoron23/statsmodels,DonBeo/statsmodels,wzbozon/statsmodels,rgommers/statsmodels,YihaoLu/statsmodels,rgommers/statsmodels,ChadFulton/statsmodels,josef-pkt/statsmodels,bavardage/statsmodels,DonBeo/statsmodels,bert9bert/statsmodels,statsmodels/statsmodels,hainm/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,wkfwkf/statsmodels,cbmoore/statsmodels,waynenilsen/statsmodels,detrout/debian-statsmodels,phobson/statsmodels,wdurhamh/statsmodels
from tools import add_constant, categorical - from datautils import Dataset from statsmodels import NoseWrapper as Tester test = Tester().test
Remove import of moved file
## Code Before: from tools import add_constant, categorical from datautils import Dataset from statsmodels import NoseWrapper as Tester test = Tester().test ## Instruction: Remove import of moved file ## Code After: from tools import add_constant, categorical from statsmodels import NoseWrapper as Tester test = Tester().test
b090c7ae0f5407562e3adc818d2f65ccd4ea7e02
src/arc_utilities/listener.py
src/arc_utilities/listener.py
from copy import deepcopy from threading import Lock import rospy from arc_utilities.ros_helpers import wait_for class Listener: def __init__(self, topic_name, topic_type, wait_for_data=False): """ Listener is a wrapper around a subscriber where the callback simply records the latest msg. Listener does not consume the message (for consuming behavior, use the standard ros callback pattern) Listener does not check timestamps of message headers Parameters: topic_name (str): name of topic to subscribe to topic_type (msg_type): type of message received on topic wait_for_data (bool): block constructor until a message has been received """ self.data = None self.lock = Lock() self.topic_name = topic_name self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback) self.get(wait_for_data) def callback(self, msg): with self.lock: self.data = msg def get(self, block_until_data=True): """ Returns the latest msg from the subscribed topic Parameters: block_until_data (bool): block if no message has been received yet. Guarantees a msg is returned (not None) """ wait_for(lambda: not (block_until_data and self.data is None), 10, f"Listener({self.topic_name})") with self.lock: return deepcopy(self.data)
from copy import deepcopy from threading import Lock import rospy from arc_utilities.ros_helpers import wait_for class Listener: def __init__(self, topic_name, topic_type, wait_for_data=False, callback=None): """ Listener is a wrapper around a subscriber where the callback simply records the latest msg. Listener does not consume the message (for consuming behavior, use the standard ros callback pattern) Listener does not check timestamps of message headers Parameters: topic_name (str): name of topic to subscribe to topic_type (msg_type): type of message received on topic wait_for_data (bool): block constructor until a message has been received callback (function taking msg_type): optional callback to be called on the data as we receive it """ self.data = None self.lock = Lock() self.topic_name = topic_name self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback) self.custom_callback = callback self.get(wait_for_data) def callback(self, msg): with self.lock: self.data = msg if self.custom_callback is not None: self.custom_callback(self.data) def get(self, block_until_data=True): """ Returns the latest msg from the subscribed topic Parameters: block_until_data (bool): block if no message has been received yet. Guarantees a msg is returned (not None) """ wait_for(lambda: not (block_until_data and self.data is None), 10, f"Listener({self.topic_name})") with self.lock: return deepcopy(self.data)
Allow optional callbacks for Listeners
Allow optional callbacks for Listeners
Python
bsd-2-clause
WPI-ARC/arc_utilities,UM-ARM-Lab/arc_utilities,UM-ARM-Lab/arc_utilities,UM-ARM-Lab/arc_utilities,WPI-ARC/arc_utilities,WPI-ARC/arc_utilities
from copy import deepcopy from threading import Lock import rospy from arc_utilities.ros_helpers import wait_for class Listener: - def __init__(self, topic_name, topic_type, wait_for_data=False): + def __init__(self, topic_name, topic_type, wait_for_data=False, callback=None): """ Listener is a wrapper around a subscriber where the callback simply records the latest msg. Listener does not consume the message (for consuming behavior, use the standard ros callback pattern) Listener does not check timestamps of message headers Parameters: topic_name (str): name of topic to subscribe to topic_type (msg_type): type of message received on topic wait_for_data (bool): block constructor until a message has been received + callback (function taking msg_type): optional callback to be called on the data as we receive it """ self.data = None self.lock = Lock() self.topic_name = topic_name self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback) + self.custom_callback = callback self.get(wait_for_data) def callback(self, msg): with self.lock: self.data = msg + if self.custom_callback is not None: + self.custom_callback(self.data) def get(self, block_until_data=True): """ Returns the latest msg from the subscribed topic Parameters: block_until_data (bool): block if no message has been received yet. Guarantees a msg is returned (not None) """ wait_for(lambda: not (block_until_data and self.data is None), 10, f"Listener({self.topic_name})") with self.lock: return deepcopy(self.data)
Allow optional callbacks for Listeners
## Code Before: from copy import deepcopy from threading import Lock import rospy from arc_utilities.ros_helpers import wait_for class Listener: def __init__(self, topic_name, topic_type, wait_for_data=False): """ Listener is a wrapper around a subscriber where the callback simply records the latest msg. Listener does not consume the message (for consuming behavior, use the standard ros callback pattern) Listener does not check timestamps of message headers Parameters: topic_name (str): name of topic to subscribe to topic_type (msg_type): type of message received on topic wait_for_data (bool): block constructor until a message has been received """ self.data = None self.lock = Lock() self.topic_name = topic_name self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback) self.get(wait_for_data) def callback(self, msg): with self.lock: self.data = msg def get(self, block_until_data=True): """ Returns the latest msg from the subscribed topic Parameters: block_until_data (bool): block if no message has been received yet. Guarantees a msg is returned (not None) """ wait_for(lambda: not (block_until_data and self.data is None), 10, f"Listener({self.topic_name})") with self.lock: return deepcopy(self.data) ## Instruction: Allow optional callbacks for Listeners ## Code After: from copy import deepcopy from threading import Lock import rospy from arc_utilities.ros_helpers import wait_for class Listener: def __init__(self, topic_name, topic_type, wait_for_data=False, callback=None): """ Listener is a wrapper around a subscriber where the callback simply records the latest msg. Listener does not consume the message (for consuming behavior, use the standard ros callback pattern) Listener does not check timestamps of message headers Parameters: topic_name (str): name of topic to subscribe to topic_type (msg_type): type of message received on topic wait_for_data (bool): block constructor until a message has been received callback (function taking msg_type): optional callback to be called on the data as we receive it """ self.data = None self.lock = Lock() self.topic_name = topic_name self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback) self.custom_callback = callback self.get(wait_for_data) def callback(self, msg): with self.lock: self.data = msg if self.custom_callback is not None: self.custom_callback(self.data) def get(self, block_until_data=True): """ Returns the latest msg from the subscribed topic Parameters: block_until_data (bool): block if no message has been received yet. Guarantees a msg is returned (not None) """ wait_for(lambda: not (block_until_data and self.data is None), 10, f"Listener({self.topic_name})") with self.lock: return deepcopy(self.data)
7fc62edee40ecedc49b0529e17ac04e4d7bf6865
door/models.py
door/models.py
from django.db import models from django.utils import timezone class DoorStatus(models.Model): datetime = models.DateTimeField() status = models.BooleanField(default=False) name = models.CharField(max_length=20) def __str__(self): return self.name @staticmethod def get_door_by_name(name): # Creates the object if it does not exist try: door = DoorStatus.objects.get(name=name) return door except DoorStatus.DoesNotExist: door = DoorStatus.objects.create(name=name, datetime=timezone.now()) return door class OpenData(models.Model): opened = models.DateTimeField() closed = models.DateTimeField() def __str__(self): return str(self.opened)
from django.db import models from django.utils import timezone class DoorStatus(models.Model): datetime = models.DateTimeField() status = models.BooleanField(default=False) name = models.CharField(max_length=20) def __str__(self): return self.name @staticmethod def get_door_by_name(name): # Creates the object if it does not exist try: door = DoorStatus.objects.get(name=name) return door except DoorStatus.DoesNotExist: door = DoorStatus.objects.create(name=name, datetime=timezone.now()) return door class Meta: verbose_name_plural = "Door Statuses" class OpenData(models.Model): opened = models.DateTimeField() closed = models.DateTimeField() def __str__(self): return str(self.opened)
Change plural name of DoorStatus model
Change plural name of DoorStatus model
Python
mit
hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website
from django.db import models from django.utils import timezone class DoorStatus(models.Model): datetime = models.DateTimeField() status = models.BooleanField(default=False) name = models.CharField(max_length=20) def __str__(self): return self.name @staticmethod def get_door_by_name(name): # Creates the object if it does not exist try: door = DoorStatus.objects.get(name=name) return door except DoorStatus.DoesNotExist: door = DoorStatus.objects.create(name=name, datetime=timezone.now()) return door + class Meta: + verbose_name_plural = "Door Statuses" + class OpenData(models.Model): opened = models.DateTimeField() closed = models.DateTimeField() def __str__(self): return str(self.opened)
Change plural name of DoorStatus model
## Code Before: from django.db import models from django.utils import timezone class DoorStatus(models.Model): datetime = models.DateTimeField() status = models.BooleanField(default=False) name = models.CharField(max_length=20) def __str__(self): return self.name @staticmethod def get_door_by_name(name): # Creates the object if it does not exist try: door = DoorStatus.objects.get(name=name) return door except DoorStatus.DoesNotExist: door = DoorStatus.objects.create(name=name, datetime=timezone.now()) return door class OpenData(models.Model): opened = models.DateTimeField() closed = models.DateTimeField() def __str__(self): return str(self.opened) ## Instruction: Change plural name of DoorStatus model ## Code After: from django.db import models from django.utils import timezone class DoorStatus(models.Model): datetime = models.DateTimeField() status = models.BooleanField(default=False) name = models.CharField(max_length=20) def __str__(self): return self.name @staticmethod def get_door_by_name(name): # Creates the object if it does not exist try: door = DoorStatus.objects.get(name=name) return door except DoorStatus.DoesNotExist: door = DoorStatus.objects.create(name=name, datetime=timezone.now()) return door class Meta: verbose_name_plural = "Door Statuses" class OpenData(models.Model): opened = models.DateTimeField() closed = models.DateTimeField() def __str__(self): return str(self.opened)
fae3e55b1c472cd314676431a34fe6e160418626
tests/test_command_line.py
tests/test_command_line.py
import os import subprocess class TestCommandLine(object): def setup(self): """Set up the environment by moving to the demos directory.""" os.chdir("demos") def teardown(self): os.chdir("..") def add(self, *args): self.db.add_all(args) self.db.commit() def test_dallinger_help(self): output = subprocess.check_output("dallinger", shell=True) assert("Usage: dallinger [OPTIONS] COMMAND [ARGS]" in output)
import os import subprocess from dallinger.command_line import heroku_id class TestCommandLine(object): def setup(self): """Set up the environment by moving to the demos directory.""" os.chdir("demos") def teardown(self): os.chdir("..") def add(self, *args): self.db.add_all(args) self.db.commit() def test_dallinger_help(self): output = subprocess.check_output("dallinger", shell=True) assert("Usage: dallinger [OPTIONS] COMMAND [ARGS]" in output) def test_heroku_app_id(self): id = "8fbe62f5-2e33-4274-8aeb-40fc3dd621a0" assert(len(heroku_id(id)) < 30)
Test for Heroku app name length
Test for Heroku app name length
Python
mit
jcpeterson/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger
import os import subprocess + from dallinger.command_line import heroku_id class TestCommandLine(object): def setup(self): """Set up the environment by moving to the demos directory.""" os.chdir("demos") def teardown(self): os.chdir("..") def add(self, *args): self.db.add_all(args) self.db.commit() def test_dallinger_help(self): output = subprocess.check_output("dallinger", shell=True) assert("Usage: dallinger [OPTIONS] COMMAND [ARGS]" in output) + def test_heroku_app_id(self): + id = "8fbe62f5-2e33-4274-8aeb-40fc3dd621a0" + assert(len(heroku_id(id)) < 30) +
Test for Heroku app name length
## Code Before: import os import subprocess class TestCommandLine(object): def setup(self): """Set up the environment by moving to the demos directory.""" os.chdir("demos") def teardown(self): os.chdir("..") def add(self, *args): self.db.add_all(args) self.db.commit() def test_dallinger_help(self): output = subprocess.check_output("dallinger", shell=True) assert("Usage: dallinger [OPTIONS] COMMAND [ARGS]" in output) ## Instruction: Test for Heroku app name length ## Code After: import os import subprocess from dallinger.command_line import heroku_id class TestCommandLine(object): def setup(self): """Set up the environment by moving to the demos directory.""" os.chdir("demos") def teardown(self): os.chdir("..") def add(self, *args): self.db.add_all(args) self.db.commit() def test_dallinger_help(self): output = subprocess.check_output("dallinger", shell=True) assert("Usage: dallinger [OPTIONS] COMMAND [ARGS]" in output) def test_heroku_app_id(self): id = "8fbe62f5-2e33-4274-8aeb-40fc3dd621a0" assert(len(heroku_id(id)) < 30)
ccc6c983411f951ef3906d55d6a0946c7ef93c75
app/brief_utils.py
app/brief_utils.py
from flask import abort from .models import Service from .validation import get_validation_errors from .service_utils import filter_services def validate_brief_data(brief, enforce_required=True, required_fields=None): errs = get_validation_errors( 'briefs-{}-{}'.format(brief.framework.slug, brief.lot.slug), brief.data, enforce_required=enforce_required, required_fields=required_fields ) if errs: abort(400, errs) def is_supplier_eligible_for_brief(supplier, brief): services = filter_services( framework_slugs=[brief.framework.slug], statuses=["published"], lot_slug=brief.lot.slug, location=brief.data["location"], role=brief.data["specialistRole"] if brief.lot.slug == "digital-specialists" else None ) services = services.filter(Service.supplier_id == supplier.supplier_id) return services.count() > 0
from flask import abort from .models import Service from .validation import get_validation_errors from .service_utils import filter_services def validate_brief_data(brief, enforce_required=True, required_fields=None): errs = get_validation_errors( 'briefs-{}-{}'.format(brief.framework.slug, brief.lot.slug), brief.data, enforce_required=enforce_required, required_fields=required_fields ) criteria_weighting_keys = ['technicalWeighting', 'culturalWeighting', 'priceWeighting'] # Only check total if all weightings are set if all(key in brief.data for key in criteria_weighting_keys): criteria_weightings = sum(brief.data[key] for key in criteria_weighting_keys) if criteria_weightings != 100: for key in set(criteria_weighting_keys) - set(errs): errs[key] = 'total_should_be_100' if errs: abort(400, errs) def is_supplier_eligible_for_brief(supplier, brief): services = filter_services( framework_slugs=[brief.framework.slug], statuses=["published"], lot_slug=brief.lot.slug, location=brief.data["location"], role=brief.data["specialistRole"] if brief.lot.slug == "digital-specialists" else None ) services = services.filter(Service.supplier_id == supplier.supplier_id) return services.count() > 0
Add criteria weighting 100% total validation
Add criteria weighting 100% total validation Checks the criteria weighting sum if all criteria fields are set. This relies on all three fields being required. If the fields don't add up to a 100 an error is added for each field that doesn't have any other validation errors.
Python
mit
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
from flask import abort from .models import Service from .validation import get_validation_errors from .service_utils import filter_services def validate_brief_data(brief, enforce_required=True, required_fields=None): errs = get_validation_errors( 'briefs-{}-{}'.format(brief.framework.slug, brief.lot.slug), brief.data, enforce_required=enforce_required, required_fields=required_fields ) + + criteria_weighting_keys = ['technicalWeighting', 'culturalWeighting', 'priceWeighting'] + # Only check total if all weightings are set + if all(key in brief.data for key in criteria_weighting_keys): + criteria_weightings = sum(brief.data[key] for key in criteria_weighting_keys) + if criteria_weightings != 100: + for key in set(criteria_weighting_keys) - set(errs): + errs[key] = 'total_should_be_100' if errs: abort(400, errs) def is_supplier_eligible_for_brief(supplier, brief): services = filter_services( framework_slugs=[brief.framework.slug], statuses=["published"], lot_slug=brief.lot.slug, location=brief.data["location"], role=brief.data["specialistRole"] if brief.lot.slug == "digital-specialists" else None ) services = services.filter(Service.supplier_id == supplier.supplier_id) return services.count() > 0
Add criteria weighting 100% total validation
## Code Before: from flask import abort from .models import Service from .validation import get_validation_errors from .service_utils import filter_services def validate_brief_data(brief, enforce_required=True, required_fields=None): errs = get_validation_errors( 'briefs-{}-{}'.format(brief.framework.slug, brief.lot.slug), brief.data, enforce_required=enforce_required, required_fields=required_fields ) if errs: abort(400, errs) def is_supplier_eligible_for_brief(supplier, brief): services = filter_services( framework_slugs=[brief.framework.slug], statuses=["published"], lot_slug=brief.lot.slug, location=brief.data["location"], role=brief.data["specialistRole"] if brief.lot.slug == "digital-specialists" else None ) services = services.filter(Service.supplier_id == supplier.supplier_id) return services.count() > 0 ## Instruction: Add criteria weighting 100% total validation ## Code After: from flask import abort from .models import Service from .validation import get_validation_errors from .service_utils import filter_services def validate_brief_data(brief, enforce_required=True, required_fields=None): errs = get_validation_errors( 'briefs-{}-{}'.format(brief.framework.slug, brief.lot.slug), brief.data, enforce_required=enforce_required, required_fields=required_fields ) criteria_weighting_keys = ['technicalWeighting', 'culturalWeighting', 'priceWeighting'] # Only check total if all weightings are set if all(key in brief.data for key in criteria_weighting_keys): criteria_weightings = sum(brief.data[key] for key in criteria_weighting_keys) if criteria_weightings != 100: for key in set(criteria_weighting_keys) - set(errs): errs[key] = 'total_should_be_100' if errs: abort(400, errs) def is_supplier_eligible_for_brief(supplier, brief): services = filter_services( framework_slugs=[brief.framework.slug], statuses=["published"], lot_slug=brief.lot.slug, location=brief.data["location"], role=brief.data["specialistRole"] if brief.lot.slug == "digital-specialists" else None ) services = services.filter(Service.supplier_id == supplier.supplier_id) return services.count() > 0
e378902b85bf865e0b020bd4afe0e12d593a95a8
github-keys-check.py
github-keys-check.py
import urllib.request import argparse import pwd import sys def key_for_user(user): url = 'https://github.com/%s.keys' % user with urllib.request.urlopen(url) as f: return f.read().decode('utf-8') def validate_user(username, min_uid): """ Validates that a given username is: 1. A valid, existing user 2. Has uid > min_uid """ user = pwd.getpwnam(username) return user.pw_uid > min_uid if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('username') parser.add_argument( '--min-uid', type=int, default=999, help='uid must be > this to be allowed ssh access. \ Helps keep system users non-sshable' ) args = parser.parse_args() if validate_user(args.username, args.min_uid): print(key_for_user(args.username)) else: print("Not a valid user") sys.exit(1)
import urllib.request import argparse import pwd import grp import sys def key_for_user(user): url = 'https://github.com/%s.keys' % user with urllib.request.urlopen(url) as f: return f.read().decode('utf-8') def validate_user(username, min_uid, in_group): """ Validates that a given username is: 1. A valid, existing user 2. Is a member of the group in_group 3. Has uid > min_uid """ user = pwd.getpwnam(username) if in_group is None or username in grp.getgrnam(in_group).gr_mem: return user.pw_uid > min_uid if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('username') parser.add_argument( '--min-uid', type=int, default=999, help='uid must be > this to be allowed ssh access. \ Helps keep system users non-sshable' ) parser.add_argument( '--in-group', default=None, help='Only users in this group can login via github keys' ) args = parser.parse_args() if validate_user(args.username, args.min_uid, args.in_group): print(key_for_user(args.username)) else: print("Not a valid user") sys.exit(1)
Add --in-group parameter to validate users
Add --in-group parameter to validate users Allows github login only for users in a certain group. This can be used to whitelist users who are allowed to ssh in
Python
apache-2.0
yuvipanda/github-ssh-auth
import urllib.request import argparse import pwd + import grp import sys def key_for_user(user): url = 'https://github.com/%s.keys' % user with urllib.request.urlopen(url) as f: return f.read().decode('utf-8') - def validate_user(username, min_uid): + def validate_user(username, min_uid, in_group): """ Validates that a given username is: 1. A valid, existing user + 2. Is a member of the group in_group - 2. Has uid > min_uid + 3. Has uid > min_uid """ user = pwd.getpwnam(username) + if in_group is None or username in grp.getgrnam(in_group).gr_mem: - return user.pw_uid > min_uid + return user.pw_uid > min_uid if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('username') parser.add_argument( '--min-uid', type=int, default=999, help='uid must be > this to be allowed ssh access. \ Helps keep system users non-sshable' ) + parser.add_argument( + '--in-group', default=None, + help='Only users in this group can login via github keys' + ) args = parser.parse_args() - if validate_user(args.username, args.min_uid): + if validate_user(args.username, args.min_uid, args.in_group): print(key_for_user(args.username)) else: print("Not a valid user") sys.exit(1)
Add --in-group parameter to validate users
## Code Before: import urllib.request import argparse import pwd import sys def key_for_user(user): url = 'https://github.com/%s.keys' % user with urllib.request.urlopen(url) as f: return f.read().decode('utf-8') def validate_user(username, min_uid): """ Validates that a given username is: 1. A valid, existing user 2. Has uid > min_uid """ user = pwd.getpwnam(username) return user.pw_uid > min_uid if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('username') parser.add_argument( '--min-uid', type=int, default=999, help='uid must be > this to be allowed ssh access. \ Helps keep system users non-sshable' ) args = parser.parse_args() if validate_user(args.username, args.min_uid): print(key_for_user(args.username)) else: print("Not a valid user") sys.exit(1) ## Instruction: Add --in-group parameter to validate users ## Code After: import urllib.request import argparse import pwd import grp import sys def key_for_user(user): url = 'https://github.com/%s.keys' % user with urllib.request.urlopen(url) as f: return f.read().decode('utf-8') def validate_user(username, min_uid, in_group): """ Validates that a given username is: 1. A valid, existing user 2. Is a member of the group in_group 3. Has uid > min_uid """ user = pwd.getpwnam(username) if in_group is None or username in grp.getgrnam(in_group).gr_mem: return user.pw_uid > min_uid if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('username') parser.add_argument( '--min-uid', type=int, default=999, help='uid must be > this to be allowed ssh access. \ Helps keep system users non-sshable' ) parser.add_argument( '--in-group', default=None, help='Only users in this group can login via github keys' ) args = parser.parse_args() if validate_user(args.username, args.min_uid, args.in_group): print(key_for_user(args.username)) else: print("Not a valid user") sys.exit(1)
d5a2a11d23b9f5393b0b39ca2f90978276311f52
app/slot/routes.py
app/slot/routes.py
from app import app from app.slot import controller as con import config from auth import requires_auth from flask import render_template from flask.ext.login import login_required @app.route('/dashboard') # @requires_auth @login_required def index(): return con.index() @app.route('/new', methods=['GET', 'POST']) @requires_auth def render_new_procedure_form(): return con.render_new_procedure_form() @app.route('/sms', methods=['POST']) @requires_auth def receive_sms(): return con.receive_sms()
from app import app from app.slot import controller as con import config from auth import requires_auth from flask import render_template from flask.ext.login import login_required @app.route('/') @app.route('/dashboard') @login_required def index(): return con.index() @app.route('/new', methods=['GET', 'POST']) @requires_auth def render_new_procedure_form(): return con.render_new_procedure_form() @app.route('/sms', methods=['POST']) @requires_auth def receive_sms(): return con.receive_sms()
Add / route to index. Remove old requires_auth decorator.
Add / route to index. Remove old requires_auth decorator.
Python
mit
nhshd-slot/SLOT,nhshd-slot/SLOT,nhshd-slot/SLOT
from app import app from app.slot import controller as con import config from auth import requires_auth from flask import render_template from flask.ext.login import login_required + @app.route('/') @app.route('/dashboard') - # @requires_auth @login_required def index(): return con.index() @app.route('/new', methods=['GET', 'POST']) @requires_auth def render_new_procedure_form(): return con.render_new_procedure_form() @app.route('/sms', methods=['POST']) @requires_auth def receive_sms(): return con.receive_sms()
Add / route to index. Remove old requires_auth decorator.
## Code Before: from app import app from app.slot import controller as con import config from auth import requires_auth from flask import render_template from flask.ext.login import login_required @app.route('/dashboard') # @requires_auth @login_required def index(): return con.index() @app.route('/new', methods=['GET', 'POST']) @requires_auth def render_new_procedure_form(): return con.render_new_procedure_form() @app.route('/sms', methods=['POST']) @requires_auth def receive_sms(): return con.receive_sms() ## Instruction: Add / route to index. Remove old requires_auth decorator. ## Code After: from app import app from app.slot import controller as con import config from auth import requires_auth from flask import render_template from flask.ext.login import login_required @app.route('/') @app.route('/dashboard') @login_required def index(): return con.index() @app.route('/new', methods=['GET', 'POST']) @requires_auth def render_new_procedure_form(): return con.render_new_procedure_form() @app.route('/sms', methods=['POST']) @requires_auth def receive_sms(): return con.receive_sms()
ecc816295154a3756e87349b4cff397ebd17b95f
sipa/base.py
sipa/base.py
from flask import request, session from flask_login import AnonymousUserMixin, LoginManager from werkzeug.routing import IntegerConverter as BaseIntegerConverter from sipa.model import backends login_manager = LoginManager() class IntegerConverter(BaseIntegerConverter): """IntegerConverter supporting negative values This is a Modification of the standard IntegerConverter which does not support negative values. See the corresponding `werkzeug documentation <http://werkzeug.pocoo.org/docs/0.10/routing/#werkzeug.routing.IntegerConverter>`_. """ regex = r'-?\d+' @login_manager.user_loader def load_user(username): """Loads a User object from/into the session at every request """ if request.blueprint == "documents" or request.endpoint == "static": return AnonymousUserMixin() dormitory = backends.get_dormitory(session.get('dormitory', None)) if dormitory: return dormitory.datasource.user_class.get(username) else: return AnonymousUserMixin()
from flask import request, session from flask_login import AnonymousUserMixin, LoginManager from flask_babel import gettext from werkzeug.routing import IntegerConverter as BaseIntegerConverter from sipa.model import backends login_manager = LoginManager() login_manager.login_view = "generic.login" login_manager.localize_callback = gettext login_manager.login_message = "Bitte melde Dich an, um die Seite zu sehen." class IntegerConverter(BaseIntegerConverter): """IntegerConverter supporting negative values This is a Modification of the standard IntegerConverter which does not support negative values. See the corresponding `werkzeug documentation <http://werkzeug.pocoo.org/docs/0.10/routing/#werkzeug.routing.IntegerConverter>`_. """ regex = r'-?\d+' @login_manager.user_loader def load_user(username): """Loads a User object from/into the session at every request """ if request.blueprint == "documents" or request.endpoint == "static": return AnonymousUserMixin() dormitory = backends.get_dormitory(session.get('dormitory', None)) if dormitory: return dormitory.datasource.user_class.get(username) else: return AnonymousUserMixin()
Set up flask to handle login redirects.
Set up flask to handle login redirects. Fix #147.
Python
mit
lukasjuhrich/sipa,agdsn/sipa,agdsn/sipa,lukasjuhrich/sipa,lukasjuhrich/sipa,agdsn/sipa,agdsn/sipa,lukasjuhrich/sipa,MarauderXtreme/sipa,MarauderXtreme/sipa,MarauderXtreme/sipa
from flask import request, session from flask_login import AnonymousUserMixin, LoginManager + from flask_babel import gettext from werkzeug.routing import IntegerConverter as BaseIntegerConverter from sipa.model import backends login_manager = LoginManager() + login_manager.login_view = "generic.login" + login_manager.localize_callback = gettext + login_manager.login_message = "Bitte melde Dich an, um die Seite zu sehen." class IntegerConverter(BaseIntegerConverter): """IntegerConverter supporting negative values This is a Modification of the standard IntegerConverter which does not support negative values. See the corresponding `werkzeug documentation <http://werkzeug.pocoo.org/docs/0.10/routing/#werkzeug.routing.IntegerConverter>`_. """ regex = r'-?\d+' @login_manager.user_loader def load_user(username): """Loads a User object from/into the session at every request """ if request.blueprint == "documents" or request.endpoint == "static": return AnonymousUserMixin() dormitory = backends.get_dormitory(session.get('dormitory', None)) if dormitory: return dormitory.datasource.user_class.get(username) else: return AnonymousUserMixin()
Set up flask to handle login redirects.
## Code Before: from flask import request, session from flask_login import AnonymousUserMixin, LoginManager from werkzeug.routing import IntegerConverter as BaseIntegerConverter from sipa.model import backends login_manager = LoginManager() class IntegerConverter(BaseIntegerConverter): """IntegerConverter supporting negative values This is a Modification of the standard IntegerConverter which does not support negative values. See the corresponding `werkzeug documentation <http://werkzeug.pocoo.org/docs/0.10/routing/#werkzeug.routing.IntegerConverter>`_. """ regex = r'-?\d+' @login_manager.user_loader def load_user(username): """Loads a User object from/into the session at every request """ if request.blueprint == "documents" or request.endpoint == "static": return AnonymousUserMixin() dormitory = backends.get_dormitory(session.get('dormitory', None)) if dormitory: return dormitory.datasource.user_class.get(username) else: return AnonymousUserMixin() ## Instruction: Set up flask to handle login redirects. ## Code After: from flask import request, session from flask_login import AnonymousUserMixin, LoginManager from flask_babel import gettext from werkzeug.routing import IntegerConverter as BaseIntegerConverter from sipa.model import backends login_manager = LoginManager() login_manager.login_view = "generic.login" login_manager.localize_callback = gettext login_manager.login_message = "Bitte melde Dich an, um die Seite zu sehen." class IntegerConverter(BaseIntegerConverter): """IntegerConverter supporting negative values This is a Modification of the standard IntegerConverter which does not support negative values. See the corresponding `werkzeug documentation <http://werkzeug.pocoo.org/docs/0.10/routing/#werkzeug.routing.IntegerConverter>`_. """ regex = r'-?\d+' @login_manager.user_loader def load_user(username): """Loads a User object from/into the session at every request """ if request.blueprint == "documents" or request.endpoint == "static": return AnonymousUserMixin() dormitory = backends.get_dormitory(session.get('dormitory', None)) if dormitory: return dormitory.datasource.user_class.get(username) else: return AnonymousUserMixin()
b9ac30b0e428038986de64e069954ee340b991a9
integration/group.py
integration/group.py
from spec import Spec, eq_ from fabric import ThreadingGroup as Group class Group_(Spec): def simple_command_on_multiple_hosts(self): """ Run command on localhost...twice! """ group = Group('localhost', 'localhost') result = group.run('echo foo', hide=True) # NOTE: currently, the result will only be 1 object, because both of # them will end up as the same key. Derp. eq_(result[group[0]].stdout, "foo\n")
from spec import Spec, eq_ from fabric import ThreadingGroup as Group class Group_(Spec): def simple_command(self): group = Group('localhost', '127.0.0.1') result = group.run('echo foo', hide=True) eq_( [x.stdout.strip() for x in result.values()], ['foo', 'foo'], )
Tidy up existing integration test
Tidy up existing integration test
Python
bsd-2-clause
fabric/fabric
from spec import Spec, eq_ from fabric import ThreadingGroup as Group class Group_(Spec): - def simple_command_on_multiple_hosts(self): + def simple_command(self): - """ - Run command on localhost...twice! - """ - group = Group('localhost', 'localhost') + group = Group('localhost', '127.0.0.1') result = group.run('echo foo', hide=True) - # NOTE: currently, the result will only be 1 object, because both of - # them will end up as the same key. Derp. - eq_(result[group[0]].stdout, "foo\n") + eq_( + [x.stdout.strip() for x in result.values()], + ['foo', 'foo'], + )
Tidy up existing integration test
## Code Before: from spec import Spec, eq_ from fabric import ThreadingGroup as Group class Group_(Spec): def simple_command_on_multiple_hosts(self): """ Run command on localhost...twice! """ group = Group('localhost', 'localhost') result = group.run('echo foo', hide=True) # NOTE: currently, the result will only be 1 object, because both of # them will end up as the same key. Derp. eq_(result[group[0]].stdout, "foo\n") ## Instruction: Tidy up existing integration test ## Code After: from spec import Spec, eq_ from fabric import ThreadingGroup as Group class Group_(Spec): def simple_command(self): group = Group('localhost', '127.0.0.1') result = group.run('echo foo', hide=True) eq_( [x.stdout.strip() for x in result.values()], ['foo', 'foo'], )
1ed040f9d64e12adf964e9f86cc1e18bd8d21593
scripts/rename.py
scripts/rename.py
import logging from scripts.util import documents from scrapi import settings from scrapi.linter import RawDocument from scrapi.processing.elasticsearch import es from scrapi.tasks import normalize, process_normalized, process_raw logger = logging.getLogger(__name__) def rename(source, target, dry=True): assert source != target, "Can't rename {} to {}, names are the same".format(source, target) count = 0 exceptions = [] for doc in documents(source): count += 1 try: raw = RawDocument({ 'doc': doc.doc, 'docID': doc.docID, 'source': target, 'filetype': doc.filetype, 'timestamps': doc.timestamps, 'versions': doc.versions }) if not dry: process_raw(raw) process_normalized(normalize(raw, raw['source']), raw) logger.info('Processed document from {} with id {}'.format(source, raw['docID'])) except Exception as e: logger.exception(e) exceptions.append(e) else: if not dry: doc.delete() es.delete(index=settings.ELASTIC_INDEX, doc_type=source, id=raw['docID'], ignore=[404]) logger.info('Deleted document from {} with id {}'.format(source, raw['docID'])) if dry: logger.info('Dry run complete') for ex in exceptions: logger.exception(e) logger.info('{} documents processed, with {} exceptions'.format(count, len(exceptions)))
import logging from scripts.util import documents from scrapi import settings from scrapi.linter import RawDocument from scrapi.processing.elasticsearch import es from scrapi.tasks import normalize, process_normalized, process_raw logger = logging.getLogger(__name__) def rename(source, target, dry=True): assert source != target, "Can't rename {} to {}, names are the same".format(source, target) count = 0 exceptions = [] for doc in documents(source): count += 1 try: raw = RawDocument({ 'doc': doc.doc, 'docID': doc.docID, 'source': target, 'filetype': doc.filetype, 'timestamps': doc.timestamps, 'versions': doc.versions }) if not dry: process_raw(raw) process_normalized(normalize(raw, raw['source']), raw) logger.info('Processed document from {} with id {}'.format(source, raw['docID'])) except Exception as e: logger.exception(e) exceptions.append(e) else: if not dry: # doc.delete() es.delete(index=settings.ELASTIC_INDEX, doc_type=source, id=raw['docID'], ignore=[404]) es.delete(index='share_v1', doc_type=source, id=raw['docID'], ignore=[404]) logger.info('Deleted document from {} with id {}'.format(source, raw['docID'])) if dry: logger.info('Dry run complete') for ex in exceptions: logger.exception(e) logger.info('{} documents processed, with {} exceptions'.format(count, len(exceptions)))
Stop cassandra from deleting documents, delete documents from old index as well
Stop cassandra from deleting documents, delete documents from old index as well
Python
apache-2.0
erinspace/scrapi,mehanig/scrapi,alexgarciac/scrapi,felliott/scrapi,fabianvf/scrapi,icereval/scrapi,jeffreyliu3230/scrapi,CenterForOpenScience/scrapi,erinspace/scrapi,mehanig/scrapi,CenterForOpenScience/scrapi,ostwald/scrapi,fabianvf/scrapi,felliott/scrapi
import logging from scripts.util import documents from scrapi import settings from scrapi.linter import RawDocument from scrapi.processing.elasticsearch import es from scrapi.tasks import normalize, process_normalized, process_raw logger = logging.getLogger(__name__) def rename(source, target, dry=True): assert source != target, "Can't rename {} to {}, names are the same".format(source, target) count = 0 exceptions = [] for doc in documents(source): count += 1 try: raw = RawDocument({ 'doc': doc.doc, 'docID': doc.docID, 'source': target, 'filetype': doc.filetype, 'timestamps': doc.timestamps, 'versions': doc.versions }) if not dry: process_raw(raw) process_normalized(normalize(raw, raw['source']), raw) logger.info('Processed document from {} with id {}'.format(source, raw['docID'])) except Exception as e: logger.exception(e) exceptions.append(e) else: if not dry: - doc.delete() + # doc.delete() es.delete(index=settings.ELASTIC_INDEX, doc_type=source, id=raw['docID'], ignore=[404]) + es.delete(index='share_v1', doc_type=source, id=raw['docID'], ignore=[404]) logger.info('Deleted document from {} with id {}'.format(source, raw['docID'])) if dry: logger.info('Dry run complete') for ex in exceptions: logger.exception(e) logger.info('{} documents processed, with {} exceptions'.format(count, len(exceptions)))
Stop cassandra from deleting documents, delete documents from old index as well
## Code Before: import logging from scripts.util import documents from scrapi import settings from scrapi.linter import RawDocument from scrapi.processing.elasticsearch import es from scrapi.tasks import normalize, process_normalized, process_raw logger = logging.getLogger(__name__) def rename(source, target, dry=True): assert source != target, "Can't rename {} to {}, names are the same".format(source, target) count = 0 exceptions = [] for doc in documents(source): count += 1 try: raw = RawDocument({ 'doc': doc.doc, 'docID': doc.docID, 'source': target, 'filetype': doc.filetype, 'timestamps': doc.timestamps, 'versions': doc.versions }) if not dry: process_raw(raw) process_normalized(normalize(raw, raw['source']), raw) logger.info('Processed document from {} with id {}'.format(source, raw['docID'])) except Exception as e: logger.exception(e) exceptions.append(e) else: if not dry: doc.delete() es.delete(index=settings.ELASTIC_INDEX, doc_type=source, id=raw['docID'], ignore=[404]) logger.info('Deleted document from {} with id {}'.format(source, raw['docID'])) if dry: logger.info('Dry run complete') for ex in exceptions: logger.exception(e) logger.info('{} documents processed, with {} exceptions'.format(count, len(exceptions))) ## Instruction: Stop cassandra from deleting documents, delete documents from old index as well ## Code After: import logging from scripts.util import documents from scrapi import settings from scrapi.linter import RawDocument from scrapi.processing.elasticsearch import es from scrapi.tasks import normalize, process_normalized, process_raw logger = logging.getLogger(__name__) def rename(source, target, dry=True): assert source != target, "Can't rename {} to {}, names are the same".format(source, target) count = 0 exceptions = [] for doc in documents(source): count += 1 try: raw = RawDocument({ 'doc': doc.doc, 'docID': doc.docID, 'source': target, 'filetype': doc.filetype, 'timestamps': doc.timestamps, 'versions': doc.versions }) if not dry: process_raw(raw) process_normalized(normalize(raw, raw['source']), raw) logger.info('Processed document from {} with id {}'.format(source, raw['docID'])) except Exception as e: logger.exception(e) exceptions.append(e) else: if not dry: # doc.delete() es.delete(index=settings.ELASTIC_INDEX, doc_type=source, id=raw['docID'], ignore=[404]) es.delete(index='share_v1', doc_type=source, id=raw['docID'], ignore=[404]) logger.info('Deleted document from {} with id {}'.format(source, raw['docID'])) if dry: logger.info('Dry run complete') for ex in exceptions: logger.exception(e) logger.info('{} documents processed, with {} exceptions'.format(count, len(exceptions)))
624276b80b6d69b788b2f48691941cd89847237b
software/Pi/ui.py
software/Pi/ui.py
import RPi.GPIO as gpio import time ledPin = 16 #GPIO23 #Set up RPi GPIO def setup(): gpio.setmode(gpio.BOARD) gpio.setup(ledPin, gpio.OUT) def blink(n): for i in range(0, n): gpio.output(ledPin, True) time.sleep(0.5) gpio.output(ledPin, False) time.sleep(0.5)
import RPi.GPIO as gpio import time ledPin = 16 #GPIO23 #Set up RPi GPIO def setup(): gpio.setmode(gpio.BOARD) gpio.setwarnings(False) gpio.setup(ledPin, gpio.OUT) def blink(n): for i in range(0, n): gpio.output(ledPin, True) time.sleep(0.5) gpio.output(ledPin, False) time.sleep(0.5)
Disable warnings for GPIO channels...
Disable warnings for GPIO channels...
Python
mit
AdlerFarHorizons/eclipse-tracking,AdlerFarHorizons/eclipse-tracking,AdlerFarHorizons/eclipse-tracking,AdlerFarHorizons/eclipse-tracking
import RPi.GPIO as gpio import time ledPin = 16 #GPIO23 #Set up RPi GPIO def setup(): gpio.setmode(gpio.BOARD) + gpio.setwarnings(False) gpio.setup(ledPin, gpio.OUT) def blink(n): for i in range(0, n): gpio.output(ledPin, True) time.sleep(0.5) gpio.output(ledPin, False) time.sleep(0.5)
Disable warnings for GPIO channels...
## Code Before: import RPi.GPIO as gpio import time ledPin = 16 #GPIO23 #Set up RPi GPIO def setup(): gpio.setmode(gpio.BOARD) gpio.setup(ledPin, gpio.OUT) def blink(n): for i in range(0, n): gpio.output(ledPin, True) time.sleep(0.5) gpio.output(ledPin, False) time.sleep(0.5) ## Instruction: Disable warnings for GPIO channels... ## Code After: import RPi.GPIO as gpio import time ledPin = 16 #GPIO23 #Set up RPi GPIO def setup(): gpio.setmode(gpio.BOARD) gpio.setwarnings(False) gpio.setup(ledPin, gpio.OUT) def blink(n): for i in range(0, n): gpio.output(ledPin, True) time.sleep(0.5) gpio.output(ledPin, False) time.sleep(0.5)
c266fbd7a3478d582dc0d6c88fc5e3d8b7a8f62f
survey/views/survey_result.py
survey/views/survey_result.py
import datetime import os from django.http.response import HttpResponse from django.shortcuts import get_object_or_404 from survey.management.survey2csv import Survey2CSV from survey.models import Survey def serve_result_csv(request, pk): survey = get_object_or_404(Survey, pk=pk) try: latest_answer = survey.latest_answer_date() csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey)) csv_time = datetime.datetime.fromtimestamp(csv_modification_time) csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo) if latest_answer > csv_time: # If the file was generated before the last answer, generate it. Survey2CSV.generate_file(survey) except OSError: # If the file do not exist, generate it. Survey2CSV.generate_file(survey) with open(Survey2CSV.file_name(survey), 'r') as f: response = HttpResponse(f.read(), content_type='text/csv') response['mimetype='] = 'application/force-download' cd = u'attachment; filename="{}.csv"'.format(survey.name) response['Content-Disposition'] = cd return response
import datetime import os from django.http.response import HttpResponse from django.shortcuts import get_object_or_404 from survey.management.survey2csv import Survey2CSV from survey.models import Survey def serve_result_csv(request, pk): survey = get_object_or_404(Survey, pk=pk) try: latest_answer = survey.latest_answer_date() csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey)) csv_time = datetime.datetime.fromtimestamp(csv_modification_time) csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo) if latest_answer > csv_time: # If the file was generated before the last answer, generate it. Survey2CSV.generate_file(survey) except OSError: # If the file do not exist, generate it. Survey2CSV.generate_file(survey) with open(Survey2CSV.file_name(survey), 'r') as f: response = HttpResponse(f.read(), content_type='text/csv') cd = u'attachment; filename="{}.csv"'.format(survey.name) response['Content-Disposition'] = cd return response
Fix - Apache error AH02429
Fix - Apache error AH02429 Response header name 'mimetype=' contains invalid characters, aborting request
Python
agpl-3.0
Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey
import datetime import os from django.http.response import HttpResponse from django.shortcuts import get_object_or_404 from survey.management.survey2csv import Survey2CSV from survey.models import Survey def serve_result_csv(request, pk): survey = get_object_or_404(Survey, pk=pk) try: latest_answer = survey.latest_answer_date() csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey)) csv_time = datetime.datetime.fromtimestamp(csv_modification_time) csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo) if latest_answer > csv_time: # If the file was generated before the last answer, generate it. Survey2CSV.generate_file(survey) except OSError: # If the file do not exist, generate it. Survey2CSV.generate_file(survey) with open(Survey2CSV.file_name(survey), 'r') as f: response = HttpResponse(f.read(), content_type='text/csv') - response['mimetype='] = 'application/force-download' cd = u'attachment; filename="{}.csv"'.format(survey.name) response['Content-Disposition'] = cd return response
Fix - Apache error AH02429
## Code Before: import datetime import os from django.http.response import HttpResponse from django.shortcuts import get_object_or_404 from survey.management.survey2csv import Survey2CSV from survey.models import Survey def serve_result_csv(request, pk): survey = get_object_or_404(Survey, pk=pk) try: latest_answer = survey.latest_answer_date() csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey)) csv_time = datetime.datetime.fromtimestamp(csv_modification_time) csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo) if latest_answer > csv_time: # If the file was generated before the last answer, generate it. Survey2CSV.generate_file(survey) except OSError: # If the file do not exist, generate it. Survey2CSV.generate_file(survey) with open(Survey2CSV.file_name(survey), 'r') as f: response = HttpResponse(f.read(), content_type='text/csv') response['mimetype='] = 'application/force-download' cd = u'attachment; filename="{}.csv"'.format(survey.name) response['Content-Disposition'] = cd return response ## Instruction: Fix - Apache error AH02429 ## Code After: import datetime import os from django.http.response import HttpResponse from django.shortcuts import get_object_or_404 from survey.management.survey2csv import Survey2CSV from survey.models import Survey def serve_result_csv(request, pk): survey = get_object_or_404(Survey, pk=pk) try: latest_answer = survey.latest_answer_date() csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey)) csv_time = datetime.datetime.fromtimestamp(csv_modification_time) csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo) if latest_answer > csv_time: # If the file was generated before the last answer, generate it. Survey2CSV.generate_file(survey) except OSError: # If the file do not exist, generate it. Survey2CSV.generate_file(survey) with open(Survey2CSV.file_name(survey), 'r') as f: response = HttpResponse(f.read(), content_type='text/csv') cd = u'attachment; filename="{}.csv"'.format(survey.name) response['Content-Disposition'] = cd return response
9a121f309ded039f770339d51b43d0933a98d982
app/main/views.py
app/main/views.py
from flask import render_template, current_app, flash, redirect, url_for from . import main from forms import ContactForm from ..email import send_email @main.route('/') def index(): return render_template('index.html') @main.route('/about') def about(): return render_template('about.html') @main.route('/menu') def menu(): return render_template('menu.html') @main.route('/hours-and-directions') def hours(): return render_template('hours-and-directions.html') @main.route('/contact', methods=['GET', 'POST']) def contact(): contact_form = ContactForm() if contact_form.validate_on_submit(): name = contact_form.name.data email = contact_form.email.data phone = contact_form.phone.data message = contact_form.message.data send_email(current_app.config['MAIL_USERNAME'], 'Robata Grill Inquiry', 'mail/message', name=name, email=email, phone=phone, message=message) flash('Your message has been sent. We will be in contact with you shortly.') return redirect(url_for('main.contact')) return render_template('contact.html', contact_form = contact_form) @main.route('/imageScroll') def imageScroll(): return render_template('imageScroll.html')
from flask import render_template, current_app, flash, redirect, url_for, send_from_directory from . import main from forms import ContactForm from ..email import send_email @main.route('/<path:filename>') def static_from_root(filename): return send_from_directory(current_app.static_folder, filename) @main.route('/') def index(): return render_template('index.html') @main.route('/about') def about(): return render_template('about.html') @main.route('/menu') def menu(): return render_template('menu.html') @main.route('/hours-and-directions') def hours(): return render_template('hours-and-directions.html') @main.route('/contact', methods=['GET', 'POST']) def contact(): contact_form = ContactForm() if contact_form.validate_on_submit(): name = contact_form.name.data email = contact_form.email.data phone = contact_form.phone.data message = contact_form.message.data send_email(current_app.config['MAIL_USERNAME'], 'Robata Grill Inquiry', 'mail/message', name=name, email=email, phone=phone, message=message) flash('Your message has been sent. We will be in contact with you shortly.') return redirect(url_for('main.contact')) return render_template('contact.html', contact_form = contact_form) @main.route('/imageScroll') def imageScroll(): return render_template('imageScroll.html')
Add additional view for sitemap.xml
Add additional view for sitemap.xml
Python
mit
jordandietch/workforsushi,jordandietch/workforsushi,jordandietch/workforsushi,jordandietch/workforsushi
- from flask import render_template, current_app, flash, redirect, url_for + from flask import render_template, current_app, flash, redirect, url_for, send_from_directory from . import main from forms import ContactForm from ..email import send_email + + @main.route('/<path:filename>') + def static_from_root(filename): + return send_from_directory(current_app.static_folder, filename) @main.route('/') def index(): return render_template('index.html') @main.route('/about') def about(): return render_template('about.html') @main.route('/menu') def menu(): return render_template('menu.html') @main.route('/hours-and-directions') def hours(): return render_template('hours-and-directions.html') @main.route('/contact', methods=['GET', 'POST']) def contact(): contact_form = ContactForm() if contact_form.validate_on_submit(): name = contact_form.name.data email = contact_form.email.data phone = contact_form.phone.data message = contact_form.message.data send_email(current_app.config['MAIL_USERNAME'], 'Robata Grill Inquiry', 'mail/message', name=name, email=email, phone=phone, message=message) flash('Your message has been sent. We will be in contact with you shortly.') return redirect(url_for('main.contact')) return render_template('contact.html', contact_form = contact_form) @main.route('/imageScroll') def imageScroll(): return render_template('imageScroll.html')
Add additional view for sitemap.xml
## Code Before: from flask import render_template, current_app, flash, redirect, url_for from . import main from forms import ContactForm from ..email import send_email @main.route('/') def index(): return render_template('index.html') @main.route('/about') def about(): return render_template('about.html') @main.route('/menu') def menu(): return render_template('menu.html') @main.route('/hours-and-directions') def hours(): return render_template('hours-and-directions.html') @main.route('/contact', methods=['GET', 'POST']) def contact(): contact_form = ContactForm() if contact_form.validate_on_submit(): name = contact_form.name.data email = contact_form.email.data phone = contact_form.phone.data message = contact_form.message.data send_email(current_app.config['MAIL_USERNAME'], 'Robata Grill Inquiry', 'mail/message', name=name, email=email, phone=phone, message=message) flash('Your message has been sent. We will be in contact with you shortly.') return redirect(url_for('main.contact')) return render_template('contact.html', contact_form = contact_form) @main.route('/imageScroll') def imageScroll(): return render_template('imageScroll.html') ## Instruction: Add additional view for sitemap.xml ## Code After: from flask import render_template, current_app, flash, redirect, url_for, send_from_directory from . import main from forms import ContactForm from ..email import send_email @main.route('/<path:filename>') def static_from_root(filename): return send_from_directory(current_app.static_folder, filename) @main.route('/') def index(): return render_template('index.html') @main.route('/about') def about(): return render_template('about.html') @main.route('/menu') def menu(): return render_template('menu.html') @main.route('/hours-and-directions') def hours(): return render_template('hours-and-directions.html') @main.route('/contact', methods=['GET', 'POST']) def contact(): contact_form = ContactForm() if contact_form.validate_on_submit(): name = contact_form.name.data email = contact_form.email.data phone = contact_form.phone.data message = contact_form.message.data send_email(current_app.config['MAIL_USERNAME'], 'Robata Grill Inquiry', 'mail/message', name=name, email=email, phone=phone, message=message) flash('Your message has been sent. We will be in contact with you shortly.') return redirect(url_for('main.contact')) return render_template('contact.html', contact_form = contact_form) @main.route('/imageScroll') def imageScroll(): return render_template('imageScroll.html')
36bde060bbdb4cf9d0396719b8b82952a73bf2b5
bucky/collector.py
bucky/collector.py
import time
import multiprocessing


try:
    from setproctitle import setproctitle
except ImportError:
    def setproctitle(title):
        pass


class StatsCollector(multiprocessing.Process):
    def __init__(self, queue):
        super(StatsCollector, self).__init__()
        self.queue = queue

    def close(self):
        pass

    def run(self):
        setproctitle("bucky: %s" % self.__class__.__name__)
        err = 0
        while True:
            start_timestamp = time.time()
            if not self.collect():
                err = min(err + 1, 2)
            else:
                err = 0
            stop_timestamp = time.time()
            sleep_time = (err + 1) * self.interval - (stop_timestamp - start_timestamp)
            if sleep_time > 0.1:
                time.sleep(sleep_time)

    def collect(self):
        raise NotImplementedError()

    def add_stat(self, name, value, timestamp, **metadata):
        if metadata:
            if self.metadata:
                metadata.update(self.metadata)
        else:
            metadata = self.metadata
        if metadata:
            self.queue.put((None, name, value, timestamp, metadata))
        else:
            self.queue.put((None, name, value, timestamp))

    def merge_dicts(self, *dicts):
        ret = {}
        for d in dicts:
            if d:
                ret.update(d)
        return ret

import time
import multiprocessing


try:
    from setproctitle import setproctitle
except ImportError:
    def setproctitle(title):
        pass


class StatsCollector(multiprocessing.Process):
    def __init__(self, queue):
        super(StatsCollector, self).__init__()
        self.queue = queue

    def close(self):
        pass

    def run(self):
        setproctitle("bucky: %s" % self.__class__.__name__)
        interval = self.interval
        while True:
            start_timestamp = time.time()
            interval = self.interval if self.collect() else interval+interval
            stop_timestamp = time.time()
            interval = min(interval, 300)
            interval = interval - (stop_timestamp - start_timestamp)
            if interval > 0.1:
                time.sleep(interval)

    def collect(self):
        raise NotImplementedError()

    def add_stat(self, name, value, timestamp, **metadata):
        if metadata:
            if self.metadata:
                metadata.update(self.metadata)
        else:
            metadata = self.metadata
        if metadata:
            self.queue.put((None, name, value, timestamp, metadata))
        else:
            self.queue.put((None, name, value, timestamp))

    def merge_dicts(self, *dicts):
        ret = {}
        for d in dicts:
            if d:
                ret.update(d)
        return ret
Change the back-off algo for failures
Change the back-off algo for failures
Python
apache-2.0
jsiembida/bucky3
import time
import multiprocessing


try:
    from setproctitle import setproctitle
except ImportError:
    def setproctitle(title):
        pass


class StatsCollector(multiprocessing.Process):
    def __init__(self, queue):
        super(StatsCollector, self).__init__()
        self.queue = queue

    def close(self):
        pass

    def run(self):
        setproctitle("bucky: %s" % self.__class__.__name__)
-         err = 0
+         interval = self.interval
        while True:
            start_timestamp = time.time()
+             interval = self.interval if self.collect() else interval+interval
-             if not self.collect():
-                 err = min(err + 1, 2)
-             else:
-                 err = 0
            stop_timestamp = time.time()
-             sleep_time = (err + 1) * self.interval - (stop_timestamp - start_timestamp)
-             if sleep_time > 0.1:
+             interval = min(interval, 300)
+             interval = interval - (stop_timestamp - start_timestamp)
+             if interval > 0.1:
-                 time.sleep(sleep_time)
+                 time.sleep(interval)

    def collect(self):
        raise NotImplementedError()

    def add_stat(self, name, value, timestamp, **metadata):
        if metadata:
            if self.metadata:
                metadata.update(self.metadata)
        else:
            metadata = self.metadata
        if metadata:
            self.queue.put((None, name, value, timestamp, metadata))
        else:
            self.queue.put((None, name, value, timestamp))

    def merge_dicts(self, *dicts):
        ret = {}
        for d in dicts:
            if d:
                ret.update(d)
        return ret
Change the back-off algo for failures
## Code Before:
import time
import multiprocessing


try:
    from setproctitle import setproctitle
except ImportError:
    def setproctitle(title):
        pass


class StatsCollector(multiprocessing.Process):
    def __init__(self, queue):
        super(StatsCollector, self).__init__()
        self.queue = queue

    def close(self):
        pass

    def run(self):
        setproctitle("bucky: %s" % self.__class__.__name__)
        err = 0
        while True:
            start_timestamp = time.time()
            if not self.collect():
                err = min(err + 1, 2)
            else:
                err = 0
            stop_timestamp = time.time()
            sleep_time = (err + 1) * self.interval - (stop_timestamp - start_timestamp)
            if sleep_time > 0.1:
                time.sleep(sleep_time)

    def collect(self):
        raise NotImplementedError()

    def add_stat(self, name, value, timestamp, **metadata):
        if metadata:
            if self.metadata:
                metadata.update(self.metadata)
        else:
            metadata = self.metadata
        if metadata:
            self.queue.put((None, name, value, timestamp, metadata))
        else:
            self.queue.put((None, name, value, timestamp))

    def merge_dicts(self, *dicts):
        ret = {}
        for d in dicts:
            if d:
                ret.update(d)
        return ret

## Instruction:
Change the back-off algo for failures

## Code After:
import time
import multiprocessing


try:
    from setproctitle import setproctitle
except ImportError:
    def setproctitle(title):
        pass


class StatsCollector(multiprocessing.Process):
    def __init__(self, queue):
        super(StatsCollector, self).__init__()
        self.queue = queue

    def close(self):
        pass

    def run(self):
        setproctitle("bucky: %s" % self.__class__.__name__)
        interval = self.interval
        while True:
            start_timestamp = time.time()
            interval = self.interval if self.collect() else interval+interval
            stop_timestamp = time.time()
            interval = min(interval, 300)
            interval = interval - (stop_timestamp - start_timestamp)
            if interval > 0.1:
                time.sleep(interval)

    def collect(self):
        raise NotImplementedError()

    def add_stat(self, name, value, timestamp, **metadata):
        if metadata:
            if self.metadata:
                metadata.update(self.metadata)
        else:
            metadata = self.metadata
        if metadata:
            self.queue.put((None, name, value, timestamp, metadata))
        else:
            self.queue.put((None, name, value, timestamp))

    def merge_dicts(self, *dicts):
        ret = {}
        for d in dicts:
            if d:
                ret.update(d)
        return ret
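The rewritten run() loop swaps a fixed three-step penalty for geometric back-off: the sleep interval doubles after every failed collect(), is clamped at 300 seconds, and snaps back to self.interval on the first success. A standalone sketch of just that arithmetic; the names are illustrative, and the elapsed-time subtraction from the real loop is omitted here:

# Illustrative back-off schedule: double on failure, cap at 300s,
# reset to the base interval on success.
def next_interval(base, current, success, cap=300):
    interval = base if success else current + current
    return min(interval, cap)

interval = base = 10
for ok in (False, False, False, False, False, False, True):
    interval = next_interval(base, interval, ok)
    print(interval)  # prints 20, 40, 80, 160, 300, 300, then 10 after the success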
9f005120c6d408e8cf3097dd74d5dada24305c88
src/jsonlogger.py
src/jsonlogger.py
import logging
import json
import re
from datetime import datetime


class JsonFormatter(logging.Formatter):
    """A custom formatter to format logging records as json objects"""

    def parse(self):
        standard_formatters = re.compile(r'\((.*?)\)', re.IGNORECASE)
        return standard_formatters.findall(self._fmt)

    def format(self, record):
        """Formats a log record and serializes to json"""
        mappings = {
            'asctime': create_timestamp,
            'message': lambda r: r.msg,
        }

        formatters = self.parse()

        log_record = {}
        for formatter in formatters:
            try:
                log_record[formatter] = mappings[formatter](record)
            except KeyError:
                log_record[formatter] = record.__dict__[formatter]

        return json.dumps(log_record)


def create_timestamp(record):
    """Creates a human readable timestamp for a log records created date"""

    timestamp = datetime.fromtimestamp(record.created)
    return timestamp.strftime("%y-%m-%d %H:%M:%S,%f"),

import logging
import json
import re


class JsonFormatter(logging.Formatter):
    """A custom formatter to format logging records as json objects"""

    def parse(self):
        standard_formatters = re.compile(r'\((.*?)\)', re.IGNORECASE)
        return standard_formatters.findall(self._fmt)

    def format(self, record):
        """Formats a log record and serializes to json"""
        formatters = self.parse()

        record.message = record.getMessage()
        # only format time if needed
        if "asctime" in formatters:
            record.asctime = self.formatTime(record, self.datefmt)

        log_record = {}
        for formatter in formatters:
            log_record[formatter] = record.__dict__[formatter]

        return json.dumps(log_record)
Use the same logic to format message and asctime as the standard library.
Use the same logic to format message and asctime as the standard library.

This way we produce better message text in some circumstances when not
logging a string, and use the date formatter from the base class, which
uses the date format configured from a file or a dict.
Python
bsd-2-clause
madzak/python-json-logger,bbc/python-json-logger
import logging
import json
import re
- from datetime import datetime
+


class JsonFormatter(logging.Formatter):
    """A custom formatter to format logging records as json objects"""

    def parse(self):
        standard_formatters = re.compile(r'\((.*?)\)', re.IGNORECASE)
        return standard_formatters.findall(self._fmt)

    def format(self, record):
        """Formats a log record and serializes to json"""
-         mappings = {
-             'asctime': create_timestamp,
-             'message': lambda r: r.msg,
-         }

        formatters = self.parse()

+         record.message = record.getMessage()
+         # only format time if needed
+         if "asctime" in formatters:
+             record.asctime = self.formatTime(record, self.datefmt)
+
        log_record = {}
        for formatter in formatters:
-             try:
-                 log_record[formatter] = mappings[formatter](record)
-             except KeyError:
-                 log_record[formatter] = record.__dict__[formatter]
+             log_record[formatter] = record.__dict__[formatter]

        return json.dumps(log_record)

- def create_timestamp(record):
-     """Creates a human readable timestamp for a log records created date"""
-
-     timestamp = datetime.fromtimestamp(record.created)
-     return timestamp.strftime("%y-%m-%d %H:%M:%S,%f"),
-
Use the same logic to format message and asctime as the standard library.
## Code Before:
import logging
import json
import re
from datetime import datetime


class JsonFormatter(logging.Formatter):
    """A custom formatter to format logging records as json objects"""

    def parse(self):
        standard_formatters = re.compile(r'\((.*?)\)', re.IGNORECASE)
        return standard_formatters.findall(self._fmt)

    def format(self, record):
        """Formats a log record and serializes to json"""
        mappings = {
            'asctime': create_timestamp,
            'message': lambda r: r.msg,
        }

        formatters = self.parse()

        log_record = {}
        for formatter in formatters:
            try:
                log_record[formatter] = mappings[formatter](record)
            except KeyError:
                log_record[formatter] = record.__dict__[formatter]

        return json.dumps(log_record)


def create_timestamp(record):
    """Creates a human readable timestamp for a log records created date"""

    timestamp = datetime.fromtimestamp(record.created)
    return timestamp.strftime("%y-%m-%d %H:%M:%S,%f"),

## Instruction:
Use the same logic to format message and asctime as the standard library.

## Code After:
import logging
import json
import re


class JsonFormatter(logging.Formatter):
    """A custom formatter to format logging records as json objects"""

    def parse(self):
        standard_formatters = re.compile(r'\((.*?)\)', re.IGNORECASE)
        return standard_formatters.findall(self._fmt)

    def format(self, record):
        """Formats a log record and serializes to json"""
        formatters = self.parse()

        record.message = record.getMessage()
        # only format time if needed
        if "asctime" in formatters:
            record.asctime = self.formatTime(record, self.datefmt)

        log_record = {}
        for formatter in formatters:
            log_record[formatter] = record.__dict__[formatter]

        return json.dumps(log_record)
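Since format() now delegates to record.getMessage() and self.formatTime(), %-style argument interpolation and the datefmt setting behave exactly as they do with the stock logging.Formatter. A usage sketch; the import path mirrors src/jsonlogger.py, and the handler wiring is plain stdlib logging:

import logging

from jsonlogger import JsonFormatter

handler = logging.StreamHandler()
handler.setFormatter(JsonFormatter('%(asctime)s %(levelname)s %(message)s',
                                   datefmt='%Y-%m-%d %H:%M:%S'))

log = logging.getLogger('demo')
log.addHandler(handler)
log.setLevel(logging.INFO)

# Lazy %-interpolation now works because format() calls record.getMessage():
log.info('user %s logged in', 'ada')
# -> {"asctime": "2016-01-01 12:00:00", "levelname": "INFO", "message": "user ada logged in"}
# (timestamp shown is illustrative)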
937fd7c07dfe98a086a9af07f0f7b316a6f2f6d8
invoke/main.py
invoke/main.py
from ._version import __version__
from .program import Program


program = Program(name="Invoke", binary='inv[oke]', version=__version__)

from . import __version__, Program


program = Program(
    name="Invoke",
    binary='inv[oke]',
    version=__version__,
)
Clean up binstub a bit
Clean up binstub a bit
Python
bsd-2-clause
frol/invoke,frol/invoke,pyinvoke/invoke,mkusz/invoke,mattrobenolt/invoke,pfmoore/invoke,pyinvoke/invoke,mkusz/invoke,mattrobenolt/invoke,pfmoore/invoke
+ from . import __version__, Program
- from ._version import __version__
- from .program import Program

+ program = Program(
+     name="Invoke",
+     binary='inv[oke]',
+     version=__version__,
+ )
- program = Program(name="Invoke", binary='inv[oke]', version=__version__)
-
Clean up binstub a bit
## Code Before:
from ._version import __version__
from .program import Program


program = Program(name="Invoke", binary='inv[oke]', version=__version__)

## Instruction:
Clean up binstub a bit

## Code After:
from . import __version__, Program


program = Program(
    name="Invoke",
    binary='inv[oke]',
    version=__version__,
)
0b1587a484bd63632dbddfe5f0a4fe3c898e4fb0
awacs/dynamodb.py
awacs/dynamodb.py
from aws import Action

service_name = 'Amazon DynamoDB'
prefix = 'dynamodb'

BatchGetItem = Action(prefix, 'BatchGetItem')
CreateTable = Action(prefix, 'CreateTable')
DeleteItem = Action(prefix, 'DeleteItem')
DeleteTable = Action(prefix, 'DeleteTable')
DescribeTable = Action(prefix, 'DescribeTable')
GetItem = Action(prefix, 'GetItem')
ListTables = Action(prefix, 'ListTables')
PutItem = Action(prefix, 'PutItem')
Query = Action(prefix, 'Query')
Scan = Action(prefix, 'Scan')
UpdateItem = Action(prefix, 'UpdateItem')
UpdateTable = Action(prefix, 'UpdateTable')

from aws import Action
from aws import ARN as BASE_ARN

service_name = 'Amazon DynamoDB'
prefix = 'dynamodb'


class ARN(BASE_ARN):
    def __init__(self, region, account, table=None, index=None):
        sup = super(ARN, self)
        resource = '*'
        if table:
            resource = 'table/' + table
            if index:
                resource += '/index/' + index
        sup.__init__(prefix, region=region, account=account, resource=resource)


BatchGetItem = Action(prefix, 'BatchGetItem')
CreateTable = Action(prefix, 'CreateTable')
DeleteItem = Action(prefix, 'DeleteItem')
DeleteTable = Action(prefix, 'DeleteTable')
DescribeTable = Action(prefix, 'DescribeTable')
GetItem = Action(prefix, 'GetItem')
ListTables = Action(prefix, 'ListTables')
PutItem = Action(prefix, 'PutItem')
Query = Action(prefix, 'Query')
Scan = Action(prefix, 'Scan')
UpdateItem = Action(prefix, 'UpdateItem')
UpdateTable = Action(prefix, 'UpdateTable')
Add logic for DynamoDB ARNs
Add logic for DynamoDB ARNs

See:
http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/UsingIAMWithDDB.html

I also decided not to name the ARN object 'DynamoDB_ARN' or anything like
that, and instead went with just 'ARN' since the class is already stored in
the dynamodb module. Kind of waffling on whether or not that was the right
thing to do, since it's not how this is handled elsewhere, but it seems like
it makes sense. If you're going to deal with multiple ARNs, say in SDB &
Dynamo, then it seems like you should be doing:

    from awacs.sdb import ARN as SDB_ARN
    from awacs.dynamodb import ARN as DynamoDB_ARN

Let me know what you guys think about that.
Python
bsd-2-clause
craigbruce/awacs,cloudtools/awacs
from aws import Action
+ from aws import ARN as BASE_ARN

service_name = 'Amazon DynamoDB'
prefix = 'dynamodb'
+
+
+ class ARN(BASE_ARN):
+     def __init__(self, region, account, table=None, index=None):
+         sup = super(ARN, self)
+         resource = '*'
+         if table:
+             resource = 'table/' + table
+             if index:
+                 resource += '/index/' + index
+         sup.__init__(prefix, region=region, account=account, resource=resource)
+

BatchGetItem = Action(prefix, 'BatchGetItem')
CreateTable = Action(prefix, 'CreateTable')
DeleteItem = Action(prefix, 'DeleteItem')
DeleteTable = Action(prefix, 'DeleteTable')
DescribeTable = Action(prefix, 'DescribeTable')
GetItem = Action(prefix, 'GetItem')
ListTables = Action(prefix, 'ListTables')
PutItem = Action(prefix, 'PutItem')
Query = Action(prefix, 'Query')
Scan = Action(prefix, 'Scan')
UpdateItem = Action(prefix, 'UpdateItem')
UpdateTable = Action(prefix, 'UpdateTable')
Add logic for DynamoDB ARNs
## Code Before:
from aws import Action

service_name = 'Amazon DynamoDB'
prefix = 'dynamodb'

BatchGetItem = Action(prefix, 'BatchGetItem')
CreateTable = Action(prefix, 'CreateTable')
DeleteItem = Action(prefix, 'DeleteItem')
DeleteTable = Action(prefix, 'DeleteTable')
DescribeTable = Action(prefix, 'DescribeTable')
GetItem = Action(prefix, 'GetItem')
ListTables = Action(prefix, 'ListTables')
PutItem = Action(prefix, 'PutItem')
Query = Action(prefix, 'Query')
Scan = Action(prefix, 'Scan')
UpdateItem = Action(prefix, 'UpdateItem')
UpdateTable = Action(prefix, 'UpdateTable')

## Instruction:
Add logic for DynamoDB ARNs

## Code After:
from aws import Action
from aws import ARN as BASE_ARN

service_name = 'Amazon DynamoDB'
prefix = 'dynamodb'


class ARN(BASE_ARN):
    def __init__(self, region, account, table=None, index=None):
        sup = super(ARN, self)
        resource = '*'
        if table:
            resource = 'table/' + table
            if index:
                resource += '/index/' + index
        sup.__init__(prefix, region=region, account=account, resource=resource)


BatchGetItem = Action(prefix, 'BatchGetItem')
CreateTable = Action(prefix, 'CreateTable')
DeleteItem = Action(prefix, 'DeleteItem')
DeleteTable = Action(prefix, 'DeleteTable')
DescribeTable = Action(prefix, 'DescribeTable')
GetItem = Action(prefix, 'GetItem')
ListTables = Action(prefix, 'ListTables')
PutItem = Action(prefix, 'PutItem')
Query = Action(prefix, 'Query')
Scan = Action(prefix, 'Scan')
UpdateItem = Action(prefix, 'UpdateItem')
UpdateTable = Action(prefix, 'UpdateTable')
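Given that constructor, the resource part comes out as '*', 'table/<name>', or 'table/<name>/index/<index>'. A quick sketch of the ARNs it should produce; the exact string rendering depends on the base ARN class in awacs, so treat the printed values as the expected shape rather than verified output:

from awacs.dynamodb import ARN

# Whole-table ARN:
print(ARN('us-east-1', '123456789012', table='Books'))
# expected shape: arn:aws:dynamodb:us-east-1:123456789012:table/Books

# Secondary-index ARN (index only applies when a table is given):
print(ARN('us-east-1', '123456789012', table='Books', index='ByAuthor'))
# expected shape: arn:aws:dynamodb:us-east-1:123456789012:table/Books/index/ByAuthor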
f996755665c9e55af5139a473b859aa0eb507515
back2back/wsgi.py
back2back/wsgi.py
import os

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "back2back.settings")

from django.core.wsgi import get_wsgi_application
from dj_static import Cling, MediaCling

application = Cling(MediaCling(get_wsgi_application()))

import os

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "back2back.settings")

from django.core.wsgi import get_wsgi_application
from dj_static import Cling

application = Cling(get_wsgi_application())
Remove MediaCling as there isn't any.
Remove MediaCling as there isn't any.
Python
bsd-2-clause
mjtamlyn/back2back,mjtamlyn/back2back,mjtamlyn/back2back,mjtamlyn/back2back
import os

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "back2back.settings")

from django.core.wsgi import get_wsgi_application
- from dj_static import Cling, MediaCling
+ from dj_static import Cling

- application = Cling(MediaCling(get_wsgi_application()))
+ application = Cling(get_wsgi_application())
Remove MediaCling as there isn't any.
## Code Before:
import os

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "back2back.settings")

from django.core.wsgi import get_wsgi_application
from dj_static import Cling, MediaCling

application = Cling(MediaCling(get_wsgi_application()))

## Instruction:
Remove MediaCling as there isn't any.

## Code After:
import os

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "back2back.settings")

from django.core.wsgi import get_wsgi_application
from dj_static import Cling

application = Cling(get_wsgi_application())
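Cling serves the collected STATIC_ROOT files from the WSGI process itself, while MediaCling does the same for MEDIA_ROOT, so dropping the latter is safe precisely because this project has no user-uploaded media. Should uploads appear later, the wrapper can be layered back on, following dj_static's documented pattern:

# Only worth reinstating if the site starts serving user-uploaded media:
from dj_static import Cling, MediaCling

application = Cling(MediaCling(get_wsgi_application()))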
3e98ed8801d380b6ab40156b1f20a1f9fe23a755
books/views.py
books/views.py
from rest_framework import viewsets

from books.models import BookPage
from books.serializers import BookPageSerializer


class BookPageViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows BookPages to be viewed or edited.
    """
    queryset = BookPage.objects.all()
    serializer_class = BookPageSerializer

from rest_framework import viewsets

from books.models import BookPage
from books.serializers import BookPageSerializer


class BookPageViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows BookPages to be viewed or edited.
    """
    queryset = BookPage.objects.order_by('page_number')
    serializer_class = BookPageSerializer
Order book pages by page number.
Order book pages by page number.
Python
mit
Pepedou/Famas
from rest_framework import viewsets

from books.models import BookPage
from books.serializers import BookPageSerializer


class BookPageViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows BookPages to be viewed or edited.
    """
-     queryset = BookPage.objects.all()
+     queryset = BookPage.objects.order_by('page_number')
    serializer_class = BookPageSerializer
Order book pages by page number.
## Code Before:
from rest_framework import viewsets

from books.models import BookPage
from books.serializers import BookPageSerializer


class BookPageViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows BookPages to be viewed or edited.
    """
    queryset = BookPage.objects.all()
    serializer_class = BookPageSerializer

## Instruction:
Order book pages by page number.

## Code After:
from rest_framework import viewsets

from books.models import BookPage
from books.serializers import BookPageSerializer


class BookPageViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows BookPages to be viewed or edited.
    """
    queryset = BookPage.objects.order_by('page_number')
    serializer_class = BookPageSerializer
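Ordering the queryset in the view pins the page order for this one endpoint. A common Django alternative is to declare the ordering on the model's Meta, which then covers every queryset in the project. A sketch, with the field type assumed since the model itself isn't shown in this record:

from django.db import models


class BookPage(models.Model):
    page_number = models.IntegerField()  # field type assumed for illustration
    # ... other fields ...

    class Meta:
        ordering = ['page_number']  # default order for BookPage.objects.all()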
fe7ab3060c43d509f995cc64998139a623b21a4a
bot/cogs/owner.py
bot/cogs/owner.py
import discord
from discord.ext import commands


class Owner:
    """Admin-only commands that make the bot dynamic."""

    def __init__(self, bot):
        self.bot = bot

    @commands.command()
    @commands.is_owner()
    async def close(self, ctx: commands.Context):
        """Closes the bot safely. Can only be used by the owner."""
        await self.bot.logout()

    @commands.command()
    @commands.is_owner()
    async def status(self, ctx: commands.Context, *, status: str):
        """Changes the bot's status. Can only be used by the owner."""
        await self.bot.change_presence(activity=discord.Game(name=status))

    @commands.command(name="reload")
    @commands.is_owner()
    async def _reload(self, ctx, *, ext: str = None):
        """Reloads a module. Can only be used by the owner."""
        if ext:
            self.bot.unload_extension(ext)
            self.bot.load_extension(ext)
        else:
            for m in self.bot.initial_extensions:
                self.bot.unload_extension(m)
                self.bot.load_extension(m)


def setup(bot):
    bot.add_cog(Owner(bot))

import discord
from discord.ext import commands


class Owner:
    """Admin-only commands that make the bot dynamic."""

    def __init__(self, bot):
        self.bot = bot

    @commands.command()
    @commands.is_owner()
    async def close(self, ctx: commands.Context):
        """Closes the bot safely. Can only be used by the owner."""
        await self.bot.logout()

    @commands.command()
    @commands.is_owner()
    async def status(self, ctx: commands.Context, *, status: str):
        """Changes the bot's status. Can only be used by the owner."""
        await self.bot.change_presence(activity=discord.Game(name=status))

    @commands.command(name="reload")
    @commands.is_owner()
    async def _reload(self, ctx, *, ext: str = None):
        """Reloads a module. Can only be used by the owner."""
        if ext:
            self.bot.unload_extension(ext)
            self.bot.load_extension(ext)
        else:
            for m in self.bot.initial_extensions:
                self.bot.unload_extension(m)
                self.bot.load_extension(m)

        await ctx.message.add_reaction(self.bot.emoji_rustok)


def setup(bot):
    bot.add_cog(Owner(bot))
Add OK reaction to reload command
Add OK reaction to reload command
Python
mit
ivandardi/RustbotPython,ivandardi/RustbotPython
import discord
from discord.ext import commands


class Owner:
    """Admin-only commands that make the bot dynamic."""

    def __init__(self, bot):
        self.bot = bot

    @commands.command()
    @commands.is_owner()
    async def close(self, ctx: commands.Context):
        """Closes the bot safely. Can only be used by the owner."""
        await self.bot.logout()

    @commands.command()
    @commands.is_owner()
    async def status(self, ctx: commands.Context, *, status: str):
        """Changes the bot's status. Can only be used by the owner."""
        await self.bot.change_presence(activity=discord.Game(name=status))

    @commands.command(name="reload")
    @commands.is_owner()
    async def _reload(self, ctx, *, ext: str = None):
        """Reloads a module. Can only be used by the owner."""
        if ext:
            self.bot.unload_extension(ext)
            self.bot.load_extension(ext)
        else:
            for m in self.bot.initial_extensions:
                self.bot.unload_extension(m)
                self.bot.load_extension(m)

+         await ctx.message.add_reaction(self.bot.emoji_rustok)
+

def setup(bot):
    bot.add_cog(Owner(bot))
Add OK reaction to reload command
## Code Before:
import discord
from discord.ext import commands


class Owner:
    """Admin-only commands that make the bot dynamic."""

    def __init__(self, bot):
        self.bot = bot

    @commands.command()
    @commands.is_owner()
    async def close(self, ctx: commands.Context):
        """Closes the bot safely. Can only be used by the owner."""
        await self.bot.logout()

    @commands.command()
    @commands.is_owner()
    async def status(self, ctx: commands.Context, *, status: str):
        """Changes the bot's status. Can only be used by the owner."""
        await self.bot.change_presence(activity=discord.Game(name=status))

    @commands.command(name="reload")
    @commands.is_owner()
    async def _reload(self, ctx, *, ext: str = None):
        """Reloads a module. Can only be used by the owner."""
        if ext:
            self.bot.unload_extension(ext)
            self.bot.load_extension(ext)
        else:
            for m in self.bot.initial_extensions:
                self.bot.unload_extension(m)
                self.bot.load_extension(m)


def setup(bot):
    bot.add_cog(Owner(bot))

## Instruction:
Add OK reaction to reload command

## Code After:
import discord
from discord.ext import commands


class Owner:
    """Admin-only commands that make the bot dynamic."""

    def __init__(self, bot):
        self.bot = bot

    @commands.command()
    @commands.is_owner()
    async def close(self, ctx: commands.Context):
        """Closes the bot safely. Can only be used by the owner."""
        await self.bot.logout()

    @commands.command()
    @commands.is_owner()
    async def status(self, ctx: commands.Context, *, status: str):
        """Changes the bot's status. Can only be used by the owner."""
        await self.bot.change_presence(activity=discord.Game(name=status))

    @commands.command(name="reload")
    @commands.is_owner()
    async def _reload(self, ctx, *, ext: str = None):
        """Reloads a module. Can only be used by the owner."""
        if ext:
            self.bot.unload_extension(ext)
            self.bot.load_extension(ext)
        else:
            for m in self.bot.initial_extensions:
                self.bot.unload_extension(m)
                self.bot.load_extension(m)

        await ctx.message.add_reaction(self.bot.emoji_rustok)


def setup(bot):
    bot.add_cog(Owner(bot))
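Nothing in this cog defines emoji_rustok; it is evidently an attribute the bot sets up elsewhere. One plausible way such an attribute could be populated at startup, shown as a hypothetical sketch using discord.py's standard lookup helper (the emoji name and the hook placement are guesses, and `bot` is assumed to be the commands.Bot instance):

# Hypothetical startup hook; 'rustok' as the emoji name is a guess.
import discord


async def on_ready():
    bot.emoji_rustok = (discord.utils.get(bot.emojis, name='rustok')
                        or '\N{OK HAND SIGN}')  # unicode fallback if the custom emoji is missing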
f26a59aae33fd1afef919427e0c36e744cb904fc
test/test_normalizedString.py
test/test_normalizedString.py
from rdflib import *
import unittest


class test_normalisedString(unittest.TestCase):

    def test1(self):
        lit2 = Literal("\two\nw", datatype=XSD.normalizedString)
        lit = Literal("\two\nw", datatype=XSD.string)
        self.assertEqual(lit == lit2, False)

    def test2(self):
        lit = Literal("\tBeing a Doctor Is\n\ta Full-Time Job\r", datatype=XSD.normalizedString)
        st = Literal(" Being a Doctor Is a Full-Time Job ", datatype=XSD.string)
        self.assertFalse(Literal.eq(st,lit))

    def test3(self):
        lit=Literal("hey\nthere", datatype=XSD.normalizedString).n3()
        print(lit)
        self.assertTrue(lit=="\"hey there\"^^<http://www.w3.org/2001/XMLSchema#normalizedString>")


if __name__ == "__main__":
    unittest.main()

from rdflib import Literal
from rdflib.namespace import XSD
import unittest


class test_normalisedString(unittest.TestCase):

    def test1(self):
        lit2 = Literal("\two\nw", datatype=XSD.normalizedString)
        lit = Literal("\two\nw", datatype=XSD.string)
        self.assertEqual(lit == lit2, False)

    def test2(self):
        lit = Literal("\tBeing a Doctor Is\n\ta Full-Time Job\r", datatype=XSD.normalizedString)
        st = Literal(" Being a Doctor Is a Full-Time Job ", datatype=XSD.string)
        self.assertFalse(Literal.eq(st,lit))

    def test3(self):
        lit = Literal("hey\nthere", datatype=XSD.normalizedString).n3()
        self.assertTrue(lit=="\"hey there\"^^<http://www.w3.org/2001/XMLSchema#normalizedString>")

    def test4(self):
        lit = Literal("hey\nthere\ta tab\rcarriage return", datatype=XSD.normalizedString)
        expected = Literal("""hey there a tab carriage return""", datatype=XSD.string)
        self.assertEqual(str(lit), str(expected))


if __name__ == "__main__":
    unittest.main()
Add a new test to test all chars that are getting replaced
Add a new test to test all chars that are getting replaced
Python
bsd-3-clause
RDFLib/rdflib,RDFLib/rdflib,RDFLib/rdflib,RDFLib/rdflib
- from rdflib import *
+ from rdflib import Literal
+ from rdflib.namespace import XSD
import unittest

+
class test_normalisedString(unittest.TestCase):

    def test1(self):
        lit2 = Literal("\two\nw", datatype=XSD.normalizedString)
        lit = Literal("\two\nw", datatype=XSD.string)
        self.assertEqual(lit == lit2, False)

    def test2(self):
        lit = Literal("\tBeing a Doctor Is\n\ta Full-Time Job\r", datatype=XSD.normalizedString)
        st = Literal(" Being a Doctor Is a Full-Time Job ", datatype=XSD.string)
        self.assertFalse(Literal.eq(st,lit))

    def test3(self):
-         lit=Literal("hey\nthere", datatype=XSD.normalizedString).n3()
+         lit = Literal("hey\nthere", datatype=XSD.normalizedString).n3()
-         print(lit)
        self.assertTrue(lit=="\"hey there\"^^<http://www.w3.org/2001/XMLSchema#normalizedString>")

+     def test4(self):
+         lit = Literal("hey\nthere\ta tab\rcarriage return", datatype=XSD.normalizedString)
+         expected = Literal("""hey there a tab carriage return""", datatype=XSD.string)
+         self.assertEqual(str(lit), str(expected))

if __name__ == "__main__":
    unittest.main()
+
Add a new test to test all chars that are getting replaced
## Code Before:
from rdflib import *
import unittest


class test_normalisedString(unittest.TestCase):

    def test1(self):
        lit2 = Literal("\two\nw", datatype=XSD.normalizedString)
        lit = Literal("\two\nw", datatype=XSD.string)
        self.assertEqual(lit == lit2, False)

    def test2(self):
        lit = Literal("\tBeing a Doctor Is\n\ta Full-Time Job\r", datatype=XSD.normalizedString)
        st = Literal(" Being a Doctor Is a Full-Time Job ", datatype=XSD.string)
        self.assertFalse(Literal.eq(st,lit))

    def test3(self):
        lit=Literal("hey\nthere", datatype=XSD.normalizedString).n3()
        print(lit)
        self.assertTrue(lit=="\"hey there\"^^<http://www.w3.org/2001/XMLSchema#normalizedString>")


if __name__ == "__main__":
    unittest.main()

## Instruction:
Add a new test to test all chars that are getting replaced

## Code After:
from rdflib import Literal
from rdflib.namespace import XSD
import unittest


class test_normalisedString(unittest.TestCase):

    def test1(self):
        lit2 = Literal("\two\nw", datatype=XSD.normalizedString)
        lit = Literal("\two\nw", datatype=XSD.string)
        self.assertEqual(lit == lit2, False)

    def test2(self):
        lit = Literal("\tBeing a Doctor Is\n\ta Full-Time Job\r", datatype=XSD.normalizedString)
        st = Literal(" Being a Doctor Is a Full-Time Job ", datatype=XSD.string)
        self.assertFalse(Literal.eq(st,lit))

    def test3(self):
        lit = Literal("hey\nthere", datatype=XSD.normalizedString).n3()
        self.assertTrue(lit=="\"hey there\"^^<http://www.w3.org/2001/XMLSchema#normalizedString>")

    def test4(self):
        lit = Literal("hey\nthere\ta tab\rcarriage return", datatype=XSD.normalizedString)
        expected = Literal("""hey there a tab carriage return""", datatype=XSD.string)
        self.assertEqual(str(lit), str(expected))


if __name__ == "__main__":
    unittest.main()
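test4 pins down the behaviour xsd:normalizedString is named for: tab, newline and carriage return are each replaced by a single space when the literal is constructed. The same effect in isolation, assuming an rdflib build that includes this normalization:

from rdflib import Literal
from rdflib.namespace import XSD

lit = Literal("hey\nthere\ta tab\rcarriage return", datatype=XSD.normalizedString)
print(str(lit))  # 'hey there a tab carriage return': each \n, \t, \r became a space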
543fc894120db6e8d854e746d631c87cc53f622b
website/noveltorpedo/tests.py
website/noveltorpedo/tests.py
from django.test import TestCase
from django.test import Client
from noveltorpedo.models import *
import unittest
from django.utils import timezone

client = Client()


class SearchTests(TestCase):

    def test_that_the_front_page_loads_properly(self):
        response = client.get('/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'NovelTorpedo Search')

    def test_insertion_and_querying_of_data(self):
        author = Author()
        author.name = "Jack Frost"
        author.save()

        story = Story()
        story.title = "The Big One"
        story.save()

        story.authors.add(author)

        segment = StorySegment()
        segment.published = timezone.now()
        segment.story = story
        segment.title = "Chapter One"
        segment.contents = "This is how it all went down..."
        segment.save()

from django.test import TestCase
from django.test import Client
from noveltorpedo.models import *
from django.utils import timezone
from django.core.management import call_command

client = Client()


class SearchTests(TestCase):

    def test_that_the_front_page_loads_properly(self):
        response = client.get('/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'NovelTorpedo Search')

    def test_insertion_and_querying_of_data(self):
        # Create a new story in the database.
        author = Author()
        author.name = 'Jack Frost'
        author.save()

        story = Story()
        story.title = 'The Big One'
        story.save()

        story.authors.add(author)

        segment = StorySegment()
        segment.published = timezone.now()
        segment.story = story
        segment.title = 'Chapter Three'
        segment.contents = 'This is how it all went down...'
        segment.save()

        # Index the new story.
        call_command('update_index')

        # Query via author name.
        response = client.get('/', {'q': 'Jack Frost'})
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Jack Frost')
        self.assertContains(response, 'The Big One')
        self.assertContains(response, 'Chapter Three')
        self.assertContains(response, 'This is how it all went down...')

        # Query via story name.
        response = client.get('/', {'q': 'The Big One'})
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Jack Frost')
        self.assertContains(response, 'The Big One')
        self.assertContains(response, 'Chapter Three')
        self.assertContains(response, 'This is how it all went down...')

        # Query via segment contents.
        response = client.get('/', {'q': 'Chapter Three'})
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Jack Frost')
        self.assertContains(response, 'The Big One')
        self.assertContains(response, 'Chapter Three')
        self.assertContains(response, 'This is how it all went down...')
Rebuild index and test variety of queries
Rebuild index and test variety of queries
Python
mit
NovelTorpedo/noveltorpedo,NovelTorpedo/noveltorpedo,NovelTorpedo/noveltorpedo,NovelTorpedo/noveltorpedo
from django.test import TestCase
from django.test import Client
from noveltorpedo.models import *
- import unittest
from django.utils import timezone
+ from django.core.management import call_command

client = Client()


class SearchTests(TestCase):

    def test_that_the_front_page_loads_properly(self):
        response = client.get('/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'NovelTorpedo Search')

    def test_insertion_and_querying_of_data(self):
+         # Create a new story in the database.
        author = Author()
-         author.name = "Jack Frost"
+         author.name = 'Jack Frost'
        author.save()

        story = Story()
-         story.title = "The Big One"
+         story.title = 'The Big One'
        story.save()

        story.authors.add(author)

        segment = StorySegment()
        segment.published = timezone.now()
        segment.story = story
-         segment.title = "Chapter One"
+         segment.title = 'Chapter Three'
-         segment.contents = "This is how it all went down..."
+         segment.contents = 'This is how it all went down...'
        segment.save()
+
+         # Index the new story.
+         call_command('update_index')
+
+         # Query via author name.
+         response = client.get('/', {'q': 'Jack Frost'})
+         self.assertEqual(response.status_code, 200)
+         self.assertContains(response, 'Jack Frost')
+         self.assertContains(response, 'The Big One')
+         self.assertContains(response, 'Chapter Three')
+         self.assertContains(response, 'This is how it all went down...')
+
+         # Query via story name.
+         response = client.get('/', {'q': 'The Big One'})
+         self.assertEqual(response.status_code, 200)
+         self.assertContains(response, 'Jack Frost')
+         self.assertContains(response, 'The Big One')
+         self.assertContains(response, 'Chapter Three')
+         self.assertContains(response, 'This is how it all went down...')
+
+         # Query via segment contents.
+         response = client.get('/', {'q': 'Chapter Three'})
+         self.assertEqual(response.status_code, 200)
+         self.assertContains(response, 'Jack Frost')
+         self.assertContains(response, 'The Big One')
+         self.assertContains(response, 'Chapter Three')
+         self.assertContains(response, 'This is how it all went down...')
+
Rebuild index and test variety of queries
## Code Before:
from django.test import TestCase
from django.test import Client
from noveltorpedo.models import *
import unittest
from django.utils import timezone

client = Client()


class SearchTests(TestCase):

    def test_that_the_front_page_loads_properly(self):
        response = client.get('/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'NovelTorpedo Search')

    def test_insertion_and_querying_of_data(self):
        author = Author()
        author.name = "Jack Frost"
        author.save()

        story = Story()
        story.title = "The Big One"
        story.save()

        story.authors.add(author)

        segment = StorySegment()
        segment.published = timezone.now()
        segment.story = story
        segment.title = "Chapter One"
        segment.contents = "This is how it all went down..."
        segment.save()

## Instruction:
Rebuild index and test variety of queries

## Code After:
from django.test import TestCase
from django.test import Client
from noveltorpedo.models import *
from django.utils import timezone
from django.core.management import call_command

client = Client()


class SearchTests(TestCase):

    def test_that_the_front_page_loads_properly(self):
        response = client.get('/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'NovelTorpedo Search')

    def test_insertion_and_querying_of_data(self):
        # Create a new story in the database.
        author = Author()
        author.name = 'Jack Frost'
        author.save()

        story = Story()
        story.title = 'The Big One'
        story.save()

        story.authors.add(author)

        segment = StorySegment()
        segment.published = timezone.now()
        segment.story = story
        segment.title = 'Chapter Three'
        segment.contents = 'This is how it all went down...'
        segment.save()

        # Index the new story.
        call_command('update_index')

        # Query via author name.
        response = client.get('/', {'q': 'Jack Frost'})
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Jack Frost')
        self.assertContains(response, 'The Big One')
        self.assertContains(response, 'Chapter Three')
        self.assertContains(response, 'This is how it all went down...')

        # Query via story name.
        response = client.get('/', {'q': 'The Big One'})
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Jack Frost')
        self.assertContains(response, 'The Big One')
        self.assertContains(response, 'Chapter Three')
        self.assertContains(response, 'This is how it all went down...')

        # Query via segment contents.
        response = client.get('/', {'q': 'Chapter Three'})
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Jack Frost')
        self.assertContains(response, 'The Big One')
        self.assertContains(response, 'Chapter Three')
        self.assertContains(response, 'This is how it all went down...')
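call_command('update_index') pushes the three new rows into the Haystack backend before the view is exercised, so the assertions run against a real index. Failures can sometimes be localized more easily by querying the index directly instead of going through the view; a sketch using Haystack's standard query API, assuming Story is the indexed model:

from haystack.query import SearchQuerySet

# Hits the search backend directly, bypassing the Django view:
results = SearchQuerySet().filter(content='Jack Frost')
assert results.count() == 1
assert results[0].object.title == 'The Big One'  # assumes Story is what's indexed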