Datasets:

commit
stringlengths
40
40
old_file
stringlengths
4
118
new_file
stringlengths
4
118
old_contents
stringlengths
10
2.94k
new_contents
stringlengths
21
3.18k
subject
stringlengths
16
444
message
stringlengths
17
2.63k
lang
stringclasses
1 value
license
stringclasses
13 values
repos
stringlengths
5
43k
ndiff
stringlengths
51
3.32k
instruction
stringlengths
16
444
content
stringlengths
133
4.32k
d0aba6489a96003c9a746bd38818cffa717d1469
akatsuki/bib2html.py
akatsuki/bib2html.py
from __future__ import unicode_literals from akatsuki.exporter import export_html from akatsuki.parser import load_bibtex_file from akatsuki.utils import sort_by_date def main(bibtex_file, html_file): """Load BibTeX file and export to HTML file""" entries = load_bibtex_file(bibtex_file) entries = sort_by_date(entries, reverse=True) export_html(html_file, entries)
from __future__ import unicode_literals from akatsuki.exporter import export_html from akatsuki.parser import load_bibtex_file from akatsuki.utils import pmid_to_url, sort_by_date def main(bibtex_file, html_file): """Load BibTeX file and export to HTML file""" entries = load_bibtex_file(bibtex_file) entries = pmid_to_url(entries) entries = sort_by_date(entries, reverse=True) export_html(html_file, entries)
Add pmid to url convertion
Add pmid to url convertion
Python
mit
403JFW/akatsuki
from __future__ import unicode_literals from akatsuki.exporter import export_html from akatsuki.parser import load_bibtex_file - from akatsuki.utils import sort_by_date + from akatsuki.utils import pmid_to_url, sort_by_date def main(bibtex_file, html_file): """Load BibTeX file and export to HTML file""" entries = load_bibtex_file(bibtex_file) + entries = pmid_to_url(entries) entries = sort_by_date(entries, reverse=True) export_html(html_file, entries)
Add pmid to url convertion
## Code Before: from __future__ import unicode_literals from akatsuki.exporter import export_html from akatsuki.parser import load_bibtex_file from akatsuki.utils import sort_by_date def main(bibtex_file, html_file): """Load BibTeX file and export to HTML file""" entries = load_bibtex_file(bibtex_file) entries = sort_by_date(entries, reverse=True) export_html(html_file, entries) ## Instruction: Add pmid to url convertion ## Code After: from __future__ import unicode_literals from akatsuki.exporter import export_html from akatsuki.parser import load_bibtex_file from akatsuki.utils import pmid_to_url, sort_by_date def main(bibtex_file, html_file): """Load BibTeX file and export to HTML file""" entries = load_bibtex_file(bibtex_file) entries = pmid_to_url(entries) entries = sort_by_date(entries, reverse=True) export_html(html_file, entries)
ea48d59c4e4073de940b394d2bc99e411cfbd3fb
example_of_usage.py
example_of_usage.py
import urllib.request from pprint import pprint from html_table_parser import HTMLTableParser def url_get_contents(url): """ Opens a website and read its binary contents (HTTP Response Body) """ req = urllib.request.Request(url=url) f = urllib.request.urlopen(req) return f.read() def main(): url = 'http://www.twitter.com' xhtml = url_get_contents(url).decode('utf-8') p = HTMLTableParser() p.feed(xhtml) pprint(p.tables) if __name__ == '__main__': main()
import urllib.request from pprint import pprint from html_table_parser import HTMLTableParser def url_get_contents(url): """ Opens a website and read its binary contents (HTTP Response Body) """ req = urllib.request.Request(url=url) f = urllib.request.urlopen(req) return f.read() def main(): url = 'https://w3schools.com/html/html_tables.asp' xhtml = url_get_contents(url).decode('utf-8') p = HTMLTableParser() p.feed(xhtml) # Get all tables pprint(p.tables) # Get tables with id attribute pprint(p.named_tables) if __name__ == '__main__': main()
Add named tables to the examples
Add named tables to the examples
Python
agpl-3.0
schmijos/html-table-parser-python3,schmijos/html-table-parser-python3
import urllib.request from pprint import pprint from html_table_parser import HTMLTableParser def url_get_contents(url): """ Opens a website and read its binary contents (HTTP Response Body) """ req = urllib.request.Request(url=url) f = urllib.request.urlopen(req) return f.read() def main(): - url = 'http://www.twitter.com' + url = 'https://w3schools.com/html/html_tables.asp' xhtml = url_get_contents(url).decode('utf-8') p = HTMLTableParser() p.feed(xhtml) + + # Get all tables pprint(p.tables) + + # Get tables with id attribute + pprint(p.named_tables) if __name__ == '__main__': main()
Add named tables to the examples
## Code Before: import urllib.request from pprint import pprint from html_table_parser import HTMLTableParser def url_get_contents(url): """ Opens a website and read its binary contents (HTTP Response Body) """ req = urllib.request.Request(url=url) f = urllib.request.urlopen(req) return f.read() def main(): url = 'http://www.twitter.com' xhtml = url_get_contents(url).decode('utf-8') p = HTMLTableParser() p.feed(xhtml) pprint(p.tables) if __name__ == '__main__': main() ## Instruction: Add named tables to the examples ## Code After: import urllib.request from pprint import pprint from html_table_parser import HTMLTableParser def url_get_contents(url): """ Opens a website and read its binary contents (HTTP Response Body) """ req = urllib.request.Request(url=url) f = urllib.request.urlopen(req) return f.read() def main(): url = 'https://w3schools.com/html/html_tables.asp' xhtml = url_get_contents(url).decode('utf-8') p = HTMLTableParser() p.feed(xhtml) # Get all tables pprint(p.tables) # Get tables with id attribute pprint(p.named_tables) if __name__ == '__main__': main()
020e48affc34162676193ab97dad7f8ffbdaaaa6
jupyter_kernel/magics/shell_magic.py
jupyter_kernel/magics/shell_magic.py
from jupyter_kernel import Magic import subprocess class ShellMagic(Magic): def line_shell(self, *args): """%shell COMMAND - run the line as a shell command""" command = " ".join(args) try: process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) retval, error = process.communicate() if error: self.kernel.Error(error) except Exception as e: self.kernel.Error(e.message) retval = None if retval: self.kernel.Print(retval) def cell_shell(self): """%%shell - run the contents of the cell as shell commands""" self.line_shell(self.code) self.evaluate = False def register_magics(kernel): kernel.register_magics(ShellMagic)
from jupyter_kernel import Magic import subprocess class ShellMagic(Magic): def line_shell(self, *args): """%shell COMMAND - run the line as a shell command""" command = " ".join(args) try: process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) retval, error = process.communicate() if error: self.kernel.Error(error) except Exception as e: self.kernel.Error(e.message) retval = None if retval: retval = retval.decode('utf-8') self.kernel.Print(retval) def cell_shell(self): """%%shell - run the contents of the cell as shell commands""" self.line_shell(self.code) self.evaluate = False def register_magics(kernel): kernel.register_magics(ShellMagic)
Fix bytes problem on python 3.
Fix bytes problem on python 3.
Python
bsd-3-clause
Calysto/metakernel
from jupyter_kernel import Magic import subprocess class ShellMagic(Magic): def line_shell(self, *args): """%shell COMMAND - run the line as a shell command""" command = " ".join(args) try: process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) retval, error = process.communicate() if error: self.kernel.Error(error) except Exception as e: self.kernel.Error(e.message) retval = None if retval: + retval = retval.decode('utf-8') self.kernel.Print(retval) def cell_shell(self): """%%shell - run the contents of the cell as shell commands""" self.line_shell(self.code) self.evaluate = False def register_magics(kernel): kernel.register_magics(ShellMagic)
Fix bytes problem on python 3.
## Code Before: from jupyter_kernel import Magic import subprocess class ShellMagic(Magic): def line_shell(self, *args): """%shell COMMAND - run the line as a shell command""" command = " ".join(args) try: process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) retval, error = process.communicate() if error: self.kernel.Error(error) except Exception as e: self.kernel.Error(e.message) retval = None if retval: self.kernel.Print(retval) def cell_shell(self): """%%shell - run the contents of the cell as shell commands""" self.line_shell(self.code) self.evaluate = False def register_magics(kernel): kernel.register_magics(ShellMagic) ## Instruction: Fix bytes problem on python 3. ## Code After: from jupyter_kernel import Magic import subprocess class ShellMagic(Magic): def line_shell(self, *args): """%shell COMMAND - run the line as a shell command""" command = " ".join(args) try: process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) retval, error = process.communicate() if error: self.kernel.Error(error) except Exception as e: self.kernel.Error(e.message) retval = None if retval: retval = retval.decode('utf-8') self.kernel.Print(retval) def cell_shell(self): """%%shell - run the contents of the cell as shell commands""" self.line_shell(self.code) self.evaluate = False def register_magics(kernel): kernel.register_magics(ShellMagic)
15f482fbb7b1b98b48545f6e5ab3986859c38e55
watchman/main.py
watchman/main.py
from __future__ import print_function import sys import os from sh import cd, hg def _get_subdirectories(current_dir): return [directory for directory in os.listdir(current_dir) if os.path.isdir(os.path.join(current_dir, directory)) and directory[0] != '.'] def check(): current_working_directory = os.getcwd() child_dirs = _get_subdirectories(current_working_directory) for child in child_dirs: try: change_dir = '%s/%s' % (current_working_directory, child) cd(change_dir); current_branch = hg('branch') output = '%-25s is on branch: %s' % (child, current_branch) print(output, end=''); cd('..') # print and step back one dir except Exception: continue def main(): arguments = sys.argv if 'check' == arguments[1]: check() else: print("type watchman help for, you know, help.") if __name__ == '__main__': main()
from __future__ import print_function import sys import os from sh import cd, hg def _get_subdirectories(current_dir): return [directory for directory in os.listdir(current_dir) if os.path.isdir(os.path.join(current_dir, directory)) and directory[0] != '.'] def check(): current_working_directory = os.getcwd() child_dirs = _get_subdirectories(current_working_directory) for child in child_dirs: try: current_branch = hg('branch', '-R', './%s' % child) output = '%-25s is on branch: %s' % (child, current_branch) print(output, end='') except Exception as e: continue def main(): arguments = sys.argv if 'check' == arguments[1]: check() else: print("type watchman help for, you know, help.") if __name__ == '__main__': main()
Remove change dir commands and now it sends directly.
Remove change dir commands and now it sends directly.
Python
mit
alephmelo/watchman
from __future__ import print_function import sys import os from sh import cd, hg def _get_subdirectories(current_dir): return [directory for directory in os.listdir(current_dir) if os.path.isdir(os.path.join(current_dir, directory)) and directory[0] != '.'] def check(): current_working_directory = os.getcwd() child_dirs = _get_subdirectories(current_working_directory) for child in child_dirs: try: - change_dir = '%s/%s' % (current_working_directory, child) - cd(change_dir); current_branch = hg('branch') + current_branch = hg('branch', '-R', './%s' % child) + output = '%-25s is on branch: %s' % (child, current_branch) + print(output, end='') - output = '%-25s is on branch: %s' % (child, current_branch) - - print(output, end=''); cd('..') # print and step back one dir - except Exception: + except Exception as e: continue def main(): arguments = sys.argv if 'check' == arguments[1]: check() else: print("type watchman help for, you know, help.") if __name__ == '__main__': main()
Remove change dir commands and now it sends directly.
## Code Before: from __future__ import print_function import sys import os from sh import cd, hg def _get_subdirectories(current_dir): return [directory for directory in os.listdir(current_dir) if os.path.isdir(os.path.join(current_dir, directory)) and directory[0] != '.'] def check(): current_working_directory = os.getcwd() child_dirs = _get_subdirectories(current_working_directory) for child in child_dirs: try: change_dir = '%s/%s' % (current_working_directory, child) cd(change_dir); current_branch = hg('branch') output = '%-25s is on branch: %s' % (child, current_branch) print(output, end=''); cd('..') # print and step back one dir except Exception: continue def main(): arguments = sys.argv if 'check' == arguments[1]: check() else: print("type watchman help for, you know, help.") if __name__ == '__main__': main() ## Instruction: Remove change dir commands and now it sends directly. ## Code After: from __future__ import print_function import sys import os from sh import cd, hg def _get_subdirectories(current_dir): return [directory for directory in os.listdir(current_dir) if os.path.isdir(os.path.join(current_dir, directory)) and directory[0] != '.'] def check(): current_working_directory = os.getcwd() child_dirs = _get_subdirectories(current_working_directory) for child in child_dirs: try: current_branch = hg('branch', '-R', './%s' % child) output = '%-25s is on branch: %s' % (child, current_branch) print(output, end='') except Exception as e: continue def main(): arguments = sys.argv if 'check' == arguments[1]: check() else: print("type watchman help for, you know, help.") if __name__ == '__main__': main()
ef11a6388dabd07afb3d11f7b097226e68fdf243
project/estimation/models.py
project/estimation/models.py
from .. import db class Question(db.Model): id = db.Column(db.Integer, primary_key=True) text = db.Column(db.String(240), unique=True, index=True) answer = db.Column(db.Numeric)
from .. import db class Question(db.Model): id = db.Column(db.Integer, primary_key=True) text = db.Column(db.String(240), unique=True, index=True) answer = db.Column(db.Numeric) class Estimate(db.Model): id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id')) question_id = db.Column(db.Integer, db.ForeignKey('question.id')) lowerbound = db.Column(db.Numeric) upperbound = db.Column(db.Numeric) created_on = db.Column(db.DateTime, default=db.func.now())
Add model to keep track of users' estimates.
Add model to keep track of users' estimates.
Python
mit
rahimnathwani/measure-anything
from .. import db class Question(db.Model): - id = db.Column(db.Integer, primary_key=True) + id = db.Column(db.Integer, primary_key=True) - text = db.Column(db.String(240), unique=True, index=True) + text = db.Column(db.String(240), unique=True, index=True) - answer = db.Column(db.Numeric) + answer = db.Column(db.Numeric) + + class Estimate(db.Model): + id = db.Column(db.Integer, primary_key=True) + user_id = db.Column(db.Integer, db.ForeignKey('user.id')) + question_id = db.Column(db.Integer, db.ForeignKey('question.id')) + lowerbound = db.Column(db.Numeric) + upperbound = db.Column(db.Numeric) + created_on = db.Column(db.DateTime, default=db.func.now())
Add model to keep track of users' estimates.
## Code Before: from .. import db class Question(db.Model): id = db.Column(db.Integer, primary_key=True) text = db.Column(db.String(240), unique=True, index=True) answer = db.Column(db.Numeric) ## Instruction: Add model to keep track of users' estimates. ## Code After: from .. import db class Question(db.Model): id = db.Column(db.Integer, primary_key=True) text = db.Column(db.String(240), unique=True, index=True) answer = db.Column(db.Numeric) class Estimate(db.Model): id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id')) question_id = db.Column(db.Integer, db.ForeignKey('question.id')) lowerbound = db.Column(db.Numeric) upperbound = db.Column(db.Numeric) created_on = db.Column(db.DateTime, default=db.func.now())
be7c5fc964ce3386df2bf246f12838e4ba2a2cb6
saleor/core/utils/filters.py
saleor/core/utils/filters.py
from __future__ import unicode_literals def get_sort_by_choices(filter): return [(choice[0], choice[1].lower()) for choice in filter.filters['sort_by'].field.choices[1::2]] def get_now_sorted_by(filter, fields): sort_by = filter.form.cleaned_data.get('sort_by') if sort_by: sort_by = fields[sort_by[0].strip('-')] else: sort_by = fields['name'] return sort_by
from __future__ import unicode_literals def get_sort_by_choices(filter): return [(choice[0], choice[1].lower()) for choice in filter.filters['sort_by'].field.choices[1::2]] def get_now_sorted_by(filter, fields, default_sort='name'): sort_by = filter.form.cleaned_data.get('sort_by') if sort_by: sort_by = fields[sort_by[0].strip('-')] else: sort_by = fields[default_sort] return sort_by
Add default_sort param to get_now_sorting_by
Add default_sort param to get_now_sorting_by
Python
bsd-3-clause
UITools/saleor,UITools/saleor,UITools/saleor,maferelo/saleor,mociepka/saleor,maferelo/saleor,mociepka/saleor,mociepka/saleor,UITools/saleor,maferelo/saleor,UITools/saleor
from __future__ import unicode_literals def get_sort_by_choices(filter): return [(choice[0], choice[1].lower()) for choice in filter.filters['sort_by'].field.choices[1::2]] - def get_now_sorted_by(filter, fields): + def get_now_sorted_by(filter, fields, default_sort='name'): sort_by = filter.form.cleaned_data.get('sort_by') if sort_by: sort_by = fields[sort_by[0].strip('-')] else: - sort_by = fields['name'] + sort_by = fields[default_sort] return sort_by
Add default_sort param to get_now_sorting_by
## Code Before: from __future__ import unicode_literals def get_sort_by_choices(filter): return [(choice[0], choice[1].lower()) for choice in filter.filters['sort_by'].field.choices[1::2]] def get_now_sorted_by(filter, fields): sort_by = filter.form.cleaned_data.get('sort_by') if sort_by: sort_by = fields[sort_by[0].strip('-')] else: sort_by = fields['name'] return sort_by ## Instruction: Add default_sort param to get_now_sorting_by ## Code After: from __future__ import unicode_literals def get_sort_by_choices(filter): return [(choice[0], choice[1].lower()) for choice in filter.filters['sort_by'].field.choices[1::2]] def get_now_sorted_by(filter, fields, default_sort='name'): sort_by = filter.form.cleaned_data.get('sort_by') if sort_by: sort_by = fields[sort_by[0].strip('-')] else: sort_by = fields[default_sort] return sort_by
2ad47f6ce00246cbf54639438d9279b8a7fa9b29
python/tests/t_envoy_logs.py
python/tests/t_envoy_logs.py
import pytest, re from kat.utils import ShellCommand from abstract_tests import AmbassadorTest, ServiceType, HTTP access_log_entry_regex = re.compile('^ACCESS \\[.*?\\] \\\"GET \\/ambassador') class EnvoyLogPathTest(AmbassadorTest): target: ServiceType log_path: str def init(self): self.target = HTTP() self.log_path = '/tmp/ambassador/ambassador.log' def config(self): yield self, self.format(""" --- apiVersion: ambassador/v1 kind: Module name: ambassador ambassador_id: {self.ambassador_id} config: envoy_log_path: {self.log_path} """) def check(self): cmd = ShellCommand("kubectl", "exec", self.path.k8s, "cat", self.log_path) if not cmd.check("check envoy access log"): pytest.exit("envoy access log does not exist") for line in cmd.stdout.splitlines(): assert access_log_entry_regex.match(line)
import pytest, re from kat.utils import ShellCommand from abstract_tests import AmbassadorTest, ServiceType, HTTP access_log_entry_regex = re.compile('^MY_REQUEST 200 .*') class EnvoyLogTest(AmbassadorTest): target: ServiceType log_path: str def init(self): self.target = HTTP() self.log_path = '/tmp/ambassador/ambassador.log' self.log_format = 'MY_REQUEST %RESPONSE_CODE% \"%REQ(:AUTHORITY)%\" \"%REQ(USER-AGENT)%\" \"%REQ(X-REQUEST-ID)%\" \"%UPSTREAM_HOST%\"' def config(self): yield self, self.format(""" --- apiVersion: ambassador/v1 kind: Module name: ambassador ambassador_id: {self.ambassador_id} config: envoy_log_path: {self.log_path} envoy_log_format: {self.log_format} """) def check(self): cmd = ShellCommand("kubectl", "exec", self.path.k8s, "cat", self.log_path) if not cmd.check("check envoy access log"): pytest.exit("envoy access log does not exist") for line in cmd.stdout.splitlines(): assert access_log_entry_regex.match(line), f"{line} does not match {access_log_entry_regex}"
Test for Envoy logs format
Test for Envoy logs format Signed-off-by: Alvaro Saurin <5b2d0c210c4a9fd6aeaf2eaedf8273be993c90c2@datawire.io>
Python
apache-2.0
datawire/ambassador,datawire/ambassador,datawire/ambassador,datawire/ambassador,datawire/ambassador
import pytest, re from kat.utils import ShellCommand from abstract_tests import AmbassadorTest, ServiceType, HTTP - access_log_entry_regex = re.compile('^ACCESS \\[.*?\\] \\\"GET \\/ambassador') + access_log_entry_regex = re.compile('^MY_REQUEST 200 .*') - class EnvoyLogPathTest(AmbassadorTest): + class EnvoyLogTest(AmbassadorTest): target: ServiceType log_path: str def init(self): self.target = HTTP() self.log_path = '/tmp/ambassador/ambassador.log' + self.log_format = 'MY_REQUEST %RESPONSE_CODE% \"%REQ(:AUTHORITY)%\" \"%REQ(USER-AGENT)%\" \"%REQ(X-REQUEST-ID)%\" \"%UPSTREAM_HOST%\"' def config(self): yield self, self.format(""" --- apiVersion: ambassador/v1 kind: Module name: ambassador ambassador_id: {self.ambassador_id} config: envoy_log_path: {self.log_path} + envoy_log_format: {self.log_format} """) def check(self): cmd = ShellCommand("kubectl", "exec", self.path.k8s, "cat", self.log_path) if not cmd.check("check envoy access log"): pytest.exit("envoy access log does not exist") for line in cmd.stdout.splitlines(): - assert access_log_entry_regex.match(line) + assert access_log_entry_regex.match(line), f"{line} does not match {access_log_entry_regex}"
Test for Envoy logs format
## Code Before: import pytest, re from kat.utils import ShellCommand from abstract_tests import AmbassadorTest, ServiceType, HTTP access_log_entry_regex = re.compile('^ACCESS \\[.*?\\] \\\"GET \\/ambassador') class EnvoyLogPathTest(AmbassadorTest): target: ServiceType log_path: str def init(self): self.target = HTTP() self.log_path = '/tmp/ambassador/ambassador.log' def config(self): yield self, self.format(""" --- apiVersion: ambassador/v1 kind: Module name: ambassador ambassador_id: {self.ambassador_id} config: envoy_log_path: {self.log_path} """) def check(self): cmd = ShellCommand("kubectl", "exec", self.path.k8s, "cat", self.log_path) if not cmd.check("check envoy access log"): pytest.exit("envoy access log does not exist") for line in cmd.stdout.splitlines(): assert access_log_entry_regex.match(line) ## Instruction: Test for Envoy logs format ## Code After: import pytest, re from kat.utils import ShellCommand from abstract_tests import AmbassadorTest, ServiceType, HTTP access_log_entry_regex = re.compile('^MY_REQUEST 200 .*') class EnvoyLogTest(AmbassadorTest): target: ServiceType log_path: str def init(self): self.target = HTTP() self.log_path = '/tmp/ambassador/ambassador.log' self.log_format = 'MY_REQUEST %RESPONSE_CODE% \"%REQ(:AUTHORITY)%\" \"%REQ(USER-AGENT)%\" \"%REQ(X-REQUEST-ID)%\" \"%UPSTREAM_HOST%\"' def config(self): yield self, self.format(""" --- apiVersion: ambassador/v1 kind: Module name: ambassador ambassador_id: {self.ambassador_id} config: envoy_log_path: {self.log_path} envoy_log_format: {self.log_format} """) def check(self): cmd = ShellCommand("kubectl", "exec", self.path.k8s, "cat", self.log_path) if not cmd.check("check envoy access log"): pytest.exit("envoy access log does not exist") for line in cmd.stdout.splitlines(): assert access_log_entry_regex.match(line), f"{line} does not match {access_log_entry_regex}"
7adfe4822bf75d1df2dc2a566b3b26c9fd494431
rest_framework_jwt/compat.py
rest_framework_jwt/compat.py
from distutils.version import StrictVersion import rest_framework from rest_framework import serializers from django.forms import widgets if StrictVersion(rest_framework.VERSION) < StrictVersion('3.0.0'): class Serializer(serializers.Serializer): pass class PasswordField(serializers.CharField): widget = widgets.PasswordInput else: class Serializer(serializers.Serializer): @property def object(self): return self.validated_data class PasswordField(serializers.CharField): def __init__(self, *args, **kwargs): if 'style' not in kwargs: kwargs['style'] = {'input_type': 'password'} else: kwargs['style']['input_type'] = 'password' super(PasswordField, self).__init__(*args, **kwargs) def get_user_model(): try: from django.contrib.auth import get_user_model except ImportError: # Django < 1.5 from django.contrib.auth.models import User else: User = get_user_model() return User def get_username_field(): try: username_field = get_user_model().USERNAME_FIELD except: username_field = 'username' return username_field def get_username(user): try: username = user.get_username() except AttributeError: username = user.username return username def get_request_data(request): if getattr(request, 'data', None): data = request.data else: # DRF < 3.2 data = request.DATA return data
from distutils.version import StrictVersion import rest_framework from rest_framework import serializers from django.forms import widgets DRF_VERSION_INFO = StrictVersion(rest_framework.VERSION).version DRF2 = DRF_VERSION_INFO[0] == 2 DRF3 = DRF_VERSION_INFO[0] == 3 if DRF2: class Serializer(serializers.Serializer): pass class PasswordField(serializers.CharField): widget = widgets.PasswordInput else: class Serializer(serializers.Serializer): @property def object(self): return self.validated_data class PasswordField(serializers.CharField): def __init__(self, *args, **kwargs): if 'style' not in kwargs: kwargs['style'] = {'input_type': 'password'} else: kwargs['style']['input_type'] = 'password' super(PasswordField, self).__init__(*args, **kwargs) def get_user_model(): try: from django.contrib.auth import get_user_model except ImportError: # Django < 1.5 from django.contrib.auth.models import User else: User = get_user_model() return User def get_username_field(): try: username_field = get_user_model().USERNAME_FIELD except: username_field = 'username' return username_field def get_username(user): try: username = user.get_username() except AttributeError: username = user.username return username def get_request_data(request): if DRF2: data = request.DATA else: data = request.data return data
Use request.data in DRF >= 3
Use request.data in DRF >= 3
Python
mit
orf/django-rest-framework-jwt,shanemgrey/django-rest-framework-jwt,GetBlimp/django-rest-framework-jwt,blaklites/django-rest-framework-jwt,plentific/django-rest-framework-jwt,ArabellaTech/django-rest-framework-jwt
from distutils.version import StrictVersion import rest_framework from rest_framework import serializers from django.forms import widgets - if StrictVersion(rest_framework.VERSION) < StrictVersion('3.0.0'): + DRF_VERSION_INFO = StrictVersion(rest_framework.VERSION).version + DRF2 = DRF_VERSION_INFO[0] == 2 + DRF3 = DRF_VERSION_INFO[0] == 3 + + + if DRF2: class Serializer(serializers.Serializer): pass class PasswordField(serializers.CharField): widget = widgets.PasswordInput else: class Serializer(serializers.Serializer): @property def object(self): return self.validated_data class PasswordField(serializers.CharField): def __init__(self, *args, **kwargs): if 'style' not in kwargs: kwargs['style'] = {'input_type': 'password'} else: kwargs['style']['input_type'] = 'password' super(PasswordField, self).__init__(*args, **kwargs) def get_user_model(): try: from django.contrib.auth import get_user_model except ImportError: # Django < 1.5 from django.contrib.auth.models import User else: User = get_user_model() return User def get_username_field(): try: username_field = get_user_model().USERNAME_FIELD except: username_field = 'username' return username_field def get_username(user): try: username = user.get_username() except AttributeError: username = user.username return username def get_request_data(request): - if getattr(request, 'data', None): + if DRF2: + data = request.DATA + else: data = request.data - else: - # DRF < 3.2 - data = request.DATA - return data
Use request.data in DRF >= 3
## Code Before: from distutils.version import StrictVersion import rest_framework from rest_framework import serializers from django.forms import widgets if StrictVersion(rest_framework.VERSION) < StrictVersion('3.0.0'): class Serializer(serializers.Serializer): pass class PasswordField(serializers.CharField): widget = widgets.PasswordInput else: class Serializer(serializers.Serializer): @property def object(self): return self.validated_data class PasswordField(serializers.CharField): def __init__(self, *args, **kwargs): if 'style' not in kwargs: kwargs['style'] = {'input_type': 'password'} else: kwargs['style']['input_type'] = 'password' super(PasswordField, self).__init__(*args, **kwargs) def get_user_model(): try: from django.contrib.auth import get_user_model except ImportError: # Django < 1.5 from django.contrib.auth.models import User else: User = get_user_model() return User def get_username_field(): try: username_field = get_user_model().USERNAME_FIELD except: username_field = 'username' return username_field def get_username(user): try: username = user.get_username() except AttributeError: username = user.username return username def get_request_data(request): if getattr(request, 'data', None): data = request.data else: # DRF < 3.2 data = request.DATA return data ## Instruction: Use request.data in DRF >= 3 ## Code After: from distutils.version import StrictVersion import rest_framework from rest_framework import serializers from django.forms import widgets DRF_VERSION_INFO = StrictVersion(rest_framework.VERSION).version DRF2 = DRF_VERSION_INFO[0] == 2 DRF3 = DRF_VERSION_INFO[0] == 3 if DRF2: class Serializer(serializers.Serializer): pass class PasswordField(serializers.CharField): widget = widgets.PasswordInput else: class Serializer(serializers.Serializer): @property def object(self): return self.validated_data class PasswordField(serializers.CharField): def __init__(self, *args, **kwargs): if 'style' not in kwargs: kwargs['style'] = {'input_type': 
'password'} else: kwargs['style']['input_type'] = 'password' super(PasswordField, self).__init__(*args, **kwargs) def get_user_model(): try: from django.contrib.auth import get_user_model except ImportError: # Django < 1.5 from django.contrib.auth.models import User else: User = get_user_model() return User def get_username_field(): try: username_field = get_user_model().USERNAME_FIELD except: username_field = 'username' return username_field def get_username(user): try: username = user.get_username() except AttributeError: username = user.username return username def get_request_data(request): if DRF2: data = request.DATA else: data = request.data return data
21f209b618850d15734c476bd3c1b359b9a7426e
infosystem/queue.py
infosystem/queue.py
import flask from pika import BlockingConnection, PlainCredentials, ConnectionParameters class RabbitMQ: def __init__(self): self.url = flask.current_app.config['ORMENU_QUEUE_URL'] self.port = flask.current_app.config['ORMENU_QUEUE_PORT'] self.virtual_host = \ flask.current_app.config['ORMENU_QUEUE_VIRTUAL_HOST'] self.username = flask.current_app.config['ORMENU_QUEUE_USERNAME'] self.password = flask.current_app.config['ORMENU_QUEUE_PASSWORD'] credentials = PlainCredentials(self.username, self.password) self.params = ConnectionParameters( self.url, self.port, self.virtual_host, credentials) def connect(self): try: return BlockingConnection(self.params) except Exception as e: raise class ProducerQueue: def __init__(self, exchange, exchange_type): rabbitMQ = RabbitMQ() self.connection = rabbitMQ.connect() self.exchange = exchange self.channel = self.connection.channel() self.channel.exchange_declare( exchange=exchange, exchange_type=exchange_type, durable=True) def publish(self, routing_key): body = "" self.channel.basic_publish( exchange=self.exchange, routing_key=routing_key, body=body) self.close() def close(self): self.channel.close() self.connection.close()
import flask from pika import BlockingConnection, PlainCredentials, ConnectionParameters class RabbitMQ: def __init__(self): self.url = flask.current_app.config['INFOSYSTEM_QUEUE_URL'] self.port = flask.current_app.config['INFOSYSTEM_QUEUE_PORT'] self.virtual_host = \ flask.current_app.config['INFOSYSTEM_QUEUE_VIRTUAL_HOST'] self.username = flask.current_app.config['INFOSYSTEM_QUEUE_USERNAME'] self.password = flask.current_app.config['INFOSYSTEM_QUEUE_PASSWORD'] credentials = PlainCredentials(self.username, self.password) self.params = ConnectionParameters( self.url, self.port, self.virtual_host, credentials) def connect(self): try: return BlockingConnection(self.params) except Exception as e: raise class ProducerQueue: def __init__(self, exchange, exchange_type): rabbitMQ = RabbitMQ() self.connection = rabbitMQ.connect() self.exchange = exchange self.channel = self.connection.channel() self.channel.exchange_declare( exchange=exchange, exchange_type=exchange_type, durable=True) def publish(self, routing_key): body = "" self.channel.basic_publish( exchange=self.exchange, routing_key=routing_key, body=body) self.close() def close(self): self.channel.close() self.connection.close()
Use INFOSYSTEM enviroment for Queue
Use INFOSYSTEM enviroment for Queue
Python
apache-2.0
samueldmq/infosystem
import flask from pika import BlockingConnection, PlainCredentials, ConnectionParameters class RabbitMQ: def __init__(self): - self.url = flask.current_app.config['ORMENU_QUEUE_URL'] + self.url = flask.current_app.config['INFOSYSTEM_QUEUE_URL'] - self.port = flask.current_app.config['ORMENU_QUEUE_PORT'] + self.port = flask.current_app.config['INFOSYSTEM_QUEUE_PORT'] self.virtual_host = \ - flask.current_app.config['ORMENU_QUEUE_VIRTUAL_HOST'] + flask.current_app.config['INFOSYSTEM_QUEUE_VIRTUAL_HOST'] - self.username = flask.current_app.config['ORMENU_QUEUE_USERNAME'] + self.username = flask.current_app.config['INFOSYSTEM_QUEUE_USERNAME'] - self.password = flask.current_app.config['ORMENU_QUEUE_PASSWORD'] + self.password = flask.current_app.config['INFOSYSTEM_QUEUE_PASSWORD'] credentials = PlainCredentials(self.username, self.password) self.params = ConnectionParameters( self.url, self.port, self.virtual_host, credentials) def connect(self): try: return BlockingConnection(self.params) except Exception as e: raise class ProducerQueue: def __init__(self, exchange, exchange_type): rabbitMQ = RabbitMQ() self.connection = rabbitMQ.connect() self.exchange = exchange self.channel = self.connection.channel() self.channel.exchange_declare( exchange=exchange, exchange_type=exchange_type, durable=True) def publish(self, routing_key): body = "" self.channel.basic_publish( exchange=self.exchange, routing_key=routing_key, body=body) self.close() def close(self): self.channel.close() self.connection.close()
Use INFOSYSTEM enviroment for Queue
## Code Before: import flask from pika import BlockingConnection, PlainCredentials, ConnectionParameters class RabbitMQ: def __init__(self): self.url = flask.current_app.config['ORMENU_QUEUE_URL'] self.port = flask.current_app.config['ORMENU_QUEUE_PORT'] self.virtual_host = \ flask.current_app.config['ORMENU_QUEUE_VIRTUAL_HOST'] self.username = flask.current_app.config['ORMENU_QUEUE_USERNAME'] self.password = flask.current_app.config['ORMENU_QUEUE_PASSWORD'] credentials = PlainCredentials(self.username, self.password) self.params = ConnectionParameters( self.url, self.port, self.virtual_host, credentials) def connect(self): try: return BlockingConnection(self.params) except Exception as e: raise class ProducerQueue: def __init__(self, exchange, exchange_type): rabbitMQ = RabbitMQ() self.connection = rabbitMQ.connect() self.exchange = exchange self.channel = self.connection.channel() self.channel.exchange_declare( exchange=exchange, exchange_type=exchange_type, durable=True) def publish(self, routing_key): body = "" self.channel.basic_publish( exchange=self.exchange, routing_key=routing_key, body=body) self.close() def close(self): self.channel.close() self.connection.close() ## Instruction: Use INFOSYSTEM enviroment for Queue ## Code After: import flask from pika import BlockingConnection, PlainCredentials, ConnectionParameters class RabbitMQ: def __init__(self): self.url = flask.current_app.config['INFOSYSTEM_QUEUE_URL'] self.port = flask.current_app.config['INFOSYSTEM_QUEUE_PORT'] self.virtual_host = \ flask.current_app.config['INFOSYSTEM_QUEUE_VIRTUAL_HOST'] self.username = flask.current_app.config['INFOSYSTEM_QUEUE_USERNAME'] self.password = flask.current_app.config['INFOSYSTEM_QUEUE_PASSWORD'] credentials = PlainCredentials(self.username, self.password) self.params = ConnectionParameters( self.url, self.port, self.virtual_host, credentials) def connect(self): try: return BlockingConnection(self.params) except Exception as e: raise class ProducerQueue: def 
__init__(self, exchange, exchange_type): rabbitMQ = RabbitMQ() self.connection = rabbitMQ.connect() self.exchange = exchange self.channel = self.connection.channel() self.channel.exchange_declare( exchange=exchange, exchange_type=exchange_type, durable=True) def publish(self, routing_key): body = "" self.channel.basic_publish( exchange=self.exchange, routing_key=routing_key, body=body) self.close() def close(self): self.channel.close() self.connection.close()
305ba7ee3fff41a7d866968c5332394301c0e83f
digi/wagtail_hooks.py
digi/wagtail_hooks.py
from wagtail.contrib.modeladmin.options import \ ModelAdmin, ModelAdminGroup, modeladmin_register from .models import Indicator, FooterLinkSection class IndicatorAdmin(ModelAdmin): model = Indicator menu_icon = 'user' class FooterLinkSectionAdmin(ModelAdmin): model = FooterLinkSection menu_icon = 'redirect' class DigiHelAdminGroup(ModelAdminGroup): label = "DigiHel" items = (IndicatorAdmin, FooterLinkSectionAdmin) modeladmin_register(DigiHelAdminGroup)
from wagtail.contrib.modeladmin.options import \ ModelAdmin, ModelAdminGroup, modeladmin_register from .models import Indicator, FooterLinkSection from django.utils.html import format_html from wagtail.wagtailcore import hooks class IndicatorAdmin(ModelAdmin): model = Indicator menu_icon = 'user' class FooterLinkSectionAdmin(ModelAdmin): model = FooterLinkSection menu_icon = 'redirect' class DigiHelAdminGroup(ModelAdminGroup): label = "DigiHel" items = (IndicatorAdmin, FooterLinkSectionAdmin) modeladmin_register(DigiHelAdminGroup) # Enable editing of raw HTML @hooks.register('insert_editor_js') def enable_source_editing(): return format_html( """ <script> registerHalloPlugin('hallohtml'); </script> """ )
Enable HTML source editing in the content editor
Enable HTML source editing in the content editor
Python
mit
terotic/digihel,City-of-Helsinki/digihel,terotic/digihel,City-of-Helsinki/digihel,City-of-Helsinki/digihel,terotic/digihel,City-of-Helsinki/digihel
from wagtail.contrib.modeladmin.options import \ ModelAdmin, ModelAdminGroup, modeladmin_register from .models import Indicator, FooterLinkSection + from django.utils.html import format_html + from wagtail.wagtailcore import hooks class IndicatorAdmin(ModelAdmin): model = Indicator menu_icon = 'user' class FooterLinkSectionAdmin(ModelAdmin): model = FooterLinkSection menu_icon = 'redirect' class DigiHelAdminGroup(ModelAdminGroup): label = "DigiHel" items = (IndicatorAdmin, FooterLinkSectionAdmin) modeladmin_register(DigiHelAdminGroup) + + # Enable editing of raw HTML + @hooks.register('insert_editor_js') + def enable_source_editing(): + return format_html( + """ + <script> + registerHalloPlugin('hallohtml'); + </script> + """ + ) +
Enable HTML source editing in the content editor
## Code Before: from wagtail.contrib.modeladmin.options import \ ModelAdmin, ModelAdminGroup, modeladmin_register from .models import Indicator, FooterLinkSection class IndicatorAdmin(ModelAdmin): model = Indicator menu_icon = 'user' class FooterLinkSectionAdmin(ModelAdmin): model = FooterLinkSection menu_icon = 'redirect' class DigiHelAdminGroup(ModelAdminGroup): label = "DigiHel" items = (IndicatorAdmin, FooterLinkSectionAdmin) modeladmin_register(DigiHelAdminGroup) ## Instruction: Enable HTML source editing in the content editor ## Code After: from wagtail.contrib.modeladmin.options import \ ModelAdmin, ModelAdminGroup, modeladmin_register from .models import Indicator, FooterLinkSection from django.utils.html import format_html from wagtail.wagtailcore import hooks class IndicatorAdmin(ModelAdmin): model = Indicator menu_icon = 'user' class FooterLinkSectionAdmin(ModelAdmin): model = FooterLinkSection menu_icon = 'redirect' class DigiHelAdminGroup(ModelAdminGroup): label = "DigiHel" items = (IndicatorAdmin, FooterLinkSectionAdmin) modeladmin_register(DigiHelAdminGroup) # Enable editing of raw HTML @hooks.register('insert_editor_js') def enable_source_editing(): return format_html( """ <script> registerHalloPlugin('hallohtml'); </script> """ )
14ea472acfce8b5317a8c8c970db901501ea34c0
_tests/macro_testing/runner.py
_tests/macro_testing/runner.py
import os, os.path import sys import unittest from macrotest import JSONSpecMacroTestCaseFactory def JSONTestCaseLoader(tests_path, recursive=False): """ Load JSON specifications for Jinja2 macro test cases from the given path and returns the resulting test classes. This function will create a MacroTestCase subclass (using JSONSpecMacrosTestCaseFactory) for each JSON file in the given path. If `recursive` is True, it will also look in subdirectories. This is not yet supported. """ json_files = [f for f in os.listdir(tests_path) if f.endswith('.json')] for json_file in json_files: # Create a camelcased name for the test. This is a minor thing, but I # think it's nice. name, extension = os.path.splitext(json_file) class_name = ''.join(x for x in name.title() if x not in ' _-') + 'TestCase' # Get the full path to the file and create a test class json_file_path = os.path.join(tests_path, json_file) test_class = JSONSpecMacroTestCaseFactory(class_name, json_file_path) # Add the test class to globals() so that unittest.main() picks it up globals()[class_name] = test_class if __name__ == '__main__': JSONTestCaseLoader('./tests/') unittest.main()
import os, os.path import sys import unittest from macrotest import JSONSpecMacroTestCaseFactory def JSONTestCaseLoader(tests_path, recursive=False): """ Load JSON specifications for Jinja2 macro test cases from the given path and returns the resulting test classes. This function will create a MacroTestCase subclass (using JSONSpecMacrosTestCaseFactory) for each JSON file in the given path. If `recursive` is True, it will also look in subdirectories. This is not yet supported. """ path = os.path.abspath(os.path.join(os.path.dirname( __file__ ), tests_path)) json_files = [f for f in os.listdir(path) if f.endswith('.json')] for json_file in json_files: # Create a camelcased name for the test. This is a minor thing, but I # think it's nice. name, extension = os.path.splitext(json_file) class_name = ''.join(x for x in name.title() if x not in ' _-') + 'TestCase' # Get the full path to the file and create a test class json_file_path = os.path.join(path, json_file) test_class = JSONSpecMacroTestCaseFactory(class_name, json_file_path) # Add the test class to globals() so that unittest.main() picks it up globals()[class_name] = test_class if __name__ == '__main__': JSONTestCaseLoader('./tests/') unittest.main()
Make the paths not relative, so tests can be run from anywhere.
Make the paths not relative, so tests can be run from anywhere.
Python
cc0-1.0
kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh
import os, os.path import sys import unittest from macrotest import JSONSpecMacroTestCaseFactory def JSONTestCaseLoader(tests_path, recursive=False): """ Load JSON specifications for Jinja2 macro test cases from the given path and returns the resulting test classes. This function will create a MacroTestCase subclass (using JSONSpecMacrosTestCaseFactory) for each JSON file in the given path. If `recursive` is True, it will also look in subdirectories. This is not yet supported. """ + path = os.path.abspath(os.path.join(os.path.dirname( __file__ ), tests_path)) - json_files = [f for f in os.listdir(tests_path) if f.endswith('.json')] + json_files = [f for f in os.listdir(path) if f.endswith('.json')] for json_file in json_files: # Create a camelcased name for the test. This is a minor thing, but I # think it's nice. name, extension = os.path.splitext(json_file) class_name = ''.join(x for x in name.title() if x not in ' _-') + 'TestCase' # Get the full path to the file and create a test class - json_file_path = os.path.join(tests_path, json_file) + json_file_path = os.path.join(path, json_file) test_class = JSONSpecMacroTestCaseFactory(class_name, json_file_path) # Add the test class to globals() so that unittest.main() picks it up globals()[class_name] = test_class if __name__ == '__main__': JSONTestCaseLoader('./tests/') unittest.main()
Make the paths not relative, so tests can be run from anywhere.
## Code Before: import os, os.path import sys import unittest from macrotest import JSONSpecMacroTestCaseFactory def JSONTestCaseLoader(tests_path, recursive=False): """ Load JSON specifications for Jinja2 macro test cases from the given path and returns the resulting test classes. This function will create a MacroTestCase subclass (using JSONSpecMacrosTestCaseFactory) for each JSON file in the given path. If `recursive` is True, it will also look in subdirectories. This is not yet supported. """ json_files = [f for f in os.listdir(tests_path) if f.endswith('.json')] for json_file in json_files: # Create a camelcased name for the test. This is a minor thing, but I # think it's nice. name, extension = os.path.splitext(json_file) class_name = ''.join(x for x in name.title() if x not in ' _-') + 'TestCase' # Get the full path to the file and create a test class json_file_path = os.path.join(tests_path, json_file) test_class = JSONSpecMacroTestCaseFactory(class_name, json_file_path) # Add the test class to globals() so that unittest.main() picks it up globals()[class_name] = test_class if __name__ == '__main__': JSONTestCaseLoader('./tests/') unittest.main() ## Instruction: Make the paths not relative, so tests can be run from anywhere. ## Code After: import os, os.path import sys import unittest from macrotest import JSONSpecMacroTestCaseFactory def JSONTestCaseLoader(tests_path, recursive=False): """ Load JSON specifications for Jinja2 macro test cases from the given path and returns the resulting test classes. This function will create a MacroTestCase subclass (using JSONSpecMacrosTestCaseFactory) for each JSON file in the given path. If `recursive` is True, it will also look in subdirectories. This is not yet supported. """ path = os.path.abspath(os.path.join(os.path.dirname( __file__ ), tests_path)) json_files = [f for f in os.listdir(path) if f.endswith('.json')] for json_file in json_files: # Create a camelcased name for the test. 
This is a minor thing, but I # think it's nice. name, extension = os.path.splitext(json_file) class_name = ''.join(x for x in name.title() if x not in ' _-') + 'TestCase' # Get the full path to the file and create a test class json_file_path = os.path.join(path, json_file) test_class = JSONSpecMacroTestCaseFactory(class_name, json_file_path) # Add the test class to globals() so that unittest.main() picks it up globals()[class_name] = test_class if __name__ == '__main__': JSONTestCaseLoader('./tests/') unittest.main()
90699f4fa6c1ce2b02e81a8fef9bfafd2175fa7f
kmapper/__init__.py
kmapper/__init__.py
from .kmapper import KeplerMapper from .kmapper import cluster from .kmapper import Cover from .kmapper import GraphNerve
from .kmapper import KeplerMapper from .kmapper import cluster from .cover import Cover from .nerve import GraphNerve import pkg_resources __version__ = pkg_resources.get_distribution('kmapper').version
Add __version__ variable to package
Add __version__ variable to package
Python
mit
MLWave/kepler-mapper,MLWave/kepler-mapper,MLWave/kepler-mapper
from .kmapper import KeplerMapper from .kmapper import cluster - from .kmapper import Cover + from .cover import Cover - from .kmapper import GraphNerve + from .nerve import GraphNerve + import pkg_resources + __version__ = pkg_resources.get_distribution('kmapper').version
Add __version__ variable to package
## Code Before: from .kmapper import KeplerMapper from .kmapper import cluster from .kmapper import Cover from .kmapper import GraphNerve ## Instruction: Add __version__ variable to package ## Code After: from .kmapper import KeplerMapper from .kmapper import cluster from .cover import Cover from .nerve import GraphNerve import pkg_resources __version__ = pkg_resources.get_distribution('kmapper').version
a8bb719061a68b5d322868768203476c4ee1e9b9
gnocchi/cli.py
gnocchi/cli.py
from oslo.config import cfg from gnocchi.indexer import sqlalchemy as sql_db from gnocchi.rest import app from gnocchi import service def storage_dbsync(): service.prepare_service() indexer = sql_db.SQLAlchemyIndexer(cfg.CONF) indexer.upgrade() def api(): service.prepare_service() app.build_server()
from oslo.config import cfg from gnocchi.indexer import sqlalchemy as sql_db from gnocchi.rest import app from gnocchi import service def storage_dbsync(): service.prepare_service() indexer = sql_db.SQLAlchemyIndexer(cfg.CONF) indexer.connect() indexer.upgrade() def api(): service.prepare_service() app.build_server()
Connect to database before upgrading it
Connect to database before upgrading it This change ensure we are connected to the database before we upgrade it. Change-Id: Ia0be33892a99897ff294d004f4d935f3753e6200
Python
apache-2.0
idegtiarov/gnocchi-rep,leandroreox/gnocchi,sileht/gnocchi,idegtiarov/gnocchi-rep,gnocchixyz/gnocchi,sileht/gnocchi,idegtiarov/gnocchi-rep,gnocchixyz/gnocchi,leandroreox/gnocchi
from oslo.config import cfg from gnocchi.indexer import sqlalchemy as sql_db from gnocchi.rest import app from gnocchi import service def storage_dbsync(): service.prepare_service() indexer = sql_db.SQLAlchemyIndexer(cfg.CONF) + indexer.connect() indexer.upgrade() def api(): service.prepare_service() app.build_server()
Connect to database before upgrading it
## Code Before: from oslo.config import cfg from gnocchi.indexer import sqlalchemy as sql_db from gnocchi.rest import app from gnocchi import service def storage_dbsync(): service.prepare_service() indexer = sql_db.SQLAlchemyIndexer(cfg.CONF) indexer.upgrade() def api(): service.prepare_service() app.build_server() ## Instruction: Connect to database before upgrading it ## Code After: from oslo.config import cfg from gnocchi.indexer import sqlalchemy as sql_db from gnocchi.rest import app from gnocchi import service def storage_dbsync(): service.prepare_service() indexer = sql_db.SQLAlchemyIndexer(cfg.CONF) indexer.connect() indexer.upgrade() def api(): service.prepare_service() app.build_server()
fec7885d2632b887002f0071f4898faf52dd927c
chainerx/__init__.py
chainerx/__init__.py
import sys if sys.version_info[0] < 3: _available = False else: try: from chainerx import _core _available = True except Exception: _available = False if _available: from numpy import dtype, bool_, int8, int16, int32, int64, uint8, float32, float64 # NOQA from chainerx._core import * # NOQA from builtins import bool, int, float # NOQA from chainerx.creation.from_data import asanyarray # NOQA from chainerx.creation.from_data import fromfile # NOQA from chainerx.creation.from_data import fromfunction # NOQA from chainerx.creation.from_data import fromiter # NOQA from chainerx.creation.from_data import fromstring # NOQA from chainerx.creation.from_data import loadtxt # NOQA _global_context = _core.Context() _core.set_global_default_context(_global_context) # Add workaround implementation for NumPy-compatible functions from chainerx import _numpy_compat_workarounds _numpy_compat_workarounds.populate() else: class ndarray(object): pass # for type testing def is_available(): return _available
import sys if sys.version_info[0] < 3: _available = False else: try: from chainerx import _core _available = True except Exception: _available = False if _available: from numpy import dtype, bool_, int8, int16, int32, int64, uint8, float32, float64 # NOQA from chainerx._core import * # NOQA from builtins import bool, int, float # NOQA from chainerx.creation.from_data import asanyarray # NOQA from chainerx.creation.from_data import fromfile # NOQA from chainerx.creation.from_data import fromfunction # NOQA from chainerx.creation.from_data import fromiter # NOQA from chainerx.creation.from_data import fromstring # NOQA from chainerx.creation.from_data import loadtxt # NOQA _global_context = _core.Context() _core.set_global_default_context(_global_context) # Add workaround implementation for NumPy-compatible functions from chainerx import _numpy_compat_workarounds _numpy_compat_workarounds.populate() else: class ndarray(object): """Dummy class for type testing.""" def __init__(self, *args, **kwargs): raise RuntimeError('chainerx is not available.') def is_available(): return _available
Raise an error on dummy class init
Raise an error on dummy class init
Python
mit
okuta/chainer,jnishi/chainer,chainer/chainer,ktnyt/chainer,ktnyt/chainer,okuta/chainer,hvy/chainer,niboshi/chainer,ktnyt/chainer,chainer/chainer,ktnyt/chainer,hvy/chainer,wkentaro/chainer,jnishi/chainer,jnishi/chainer,okuta/chainer,wkentaro/chainer,keisuke-umezawa/chainer,niboshi/chainer,chainer/chainer,keisuke-umezawa/chainer,tkerola/chainer,hvy/chainer,jnishi/chainer,pfnet/chainer,wkentaro/chainer,keisuke-umezawa/chainer,niboshi/chainer,keisuke-umezawa/chainer,hvy/chainer,wkentaro/chainer,chainer/chainer,niboshi/chainer,okuta/chainer
import sys if sys.version_info[0] < 3: _available = False else: try: from chainerx import _core _available = True except Exception: _available = False if _available: from numpy import dtype, bool_, int8, int16, int32, int64, uint8, float32, float64 # NOQA from chainerx._core import * # NOQA from builtins import bool, int, float # NOQA from chainerx.creation.from_data import asanyarray # NOQA from chainerx.creation.from_data import fromfile # NOQA from chainerx.creation.from_data import fromfunction # NOQA from chainerx.creation.from_data import fromiter # NOQA from chainerx.creation.from_data import fromstring # NOQA from chainerx.creation.from_data import loadtxt # NOQA _global_context = _core.Context() _core.set_global_default_context(_global_context) # Add workaround implementation for NumPy-compatible functions from chainerx import _numpy_compat_workarounds _numpy_compat_workarounds.populate() else: class ndarray(object): - pass # for type testing + + """Dummy class for type testing.""" + + def __init__(self, *args, **kwargs): + raise RuntimeError('chainerx is not available.') def is_available(): return _available
Raise an error on dummy class init
## Code Before: import sys if sys.version_info[0] < 3: _available = False else: try: from chainerx import _core _available = True except Exception: _available = False if _available: from numpy import dtype, bool_, int8, int16, int32, int64, uint8, float32, float64 # NOQA from chainerx._core import * # NOQA from builtins import bool, int, float # NOQA from chainerx.creation.from_data import asanyarray # NOQA from chainerx.creation.from_data import fromfile # NOQA from chainerx.creation.from_data import fromfunction # NOQA from chainerx.creation.from_data import fromiter # NOQA from chainerx.creation.from_data import fromstring # NOQA from chainerx.creation.from_data import loadtxt # NOQA _global_context = _core.Context() _core.set_global_default_context(_global_context) # Add workaround implementation for NumPy-compatible functions from chainerx import _numpy_compat_workarounds _numpy_compat_workarounds.populate() else: class ndarray(object): pass # for type testing def is_available(): return _available ## Instruction: Raise an error on dummy class init ## Code After: import sys if sys.version_info[0] < 3: _available = False else: try: from chainerx import _core _available = True except Exception: _available = False if _available: from numpy import dtype, bool_, int8, int16, int32, int64, uint8, float32, float64 # NOQA from chainerx._core import * # NOQA from builtins import bool, int, float # NOQA from chainerx.creation.from_data import asanyarray # NOQA from chainerx.creation.from_data import fromfile # NOQA from chainerx.creation.from_data import fromfunction # NOQA from chainerx.creation.from_data import fromiter # NOQA from chainerx.creation.from_data import fromstring # NOQA from chainerx.creation.from_data import loadtxt # NOQA _global_context = _core.Context() _core.set_global_default_context(_global_context) # Add workaround implementation for NumPy-compatible functions from chainerx import _numpy_compat_workarounds _numpy_compat_workarounds.populate() 
else: class ndarray(object): """Dummy class for type testing.""" def __init__(self, *args, **kwargs): raise RuntimeError('chainerx is not available.') def is_available(): return _available
f16c8f696a282da6c04de6b7530f1d0316eda88b
providers/edu/harvarddataverse/normalizer.py
providers/edu/harvarddataverse/normalizer.py
import arrow import dateparser from share.normalize import * class Person(Parser): given_name = ParseName(ctx).first family_name = ParseName(ctx).last additional_name = ParseName(ctx).middle suffix = ParseName(ctx).suffix class Contributor(Parser): person = Delegate(Person, ctx) cited_name = ctx order_cited = ctx('index') class Link(Parser): url = ctx type = RunPython('get_link_type', ctx) def get_link_type(self, link): if 'dx.doi.org' in link: return 'doi' elif 'dataverse.harvard.edu' in link: return 'provider' return 'misc' class ThroughLinks(Parser): link = Delegate(Link, ctx) class CreativeWork(Parser): title = ctx.name description = ctx.description contributors = Map(Delegate(Contributor), ctx.authors) date_published = ParseDate(ctx.published_at) links = Concat( Delegate(ThroughLinks, ctx.url), Delegate(ThroughLinks, ctx.image_url), )
import arrow import dateparser from share.normalize import * class Person(Parser): given_name = ParseName(ctx).first family_name = ParseName(ctx).last additional_name = ParseName(ctx).middle suffix = ParseName(ctx).suffix class Contributor(Parser): person = Delegate(Person, ctx) cited_name = ctx order_cited = ctx('index') class Link(Parser): url = ctx type = RunPython('get_link_type', ctx) def get_link_type(self, link): if 'dx.doi.org' in link: return 'doi' elif 'dataverse.harvard.edu' in link: return 'provider' return 'misc' class ThroughLinks(Parser): link = Delegate(Link, ctx) class CreativeWork(Parser): title = ctx.name description = Try(ctx.description) contributors = Map(Delegate(Contributor), Try(ctx.authors)) date_published = ParseDate(ctx.published_at) links = Concat( Delegate(ThroughLinks, ctx.url), Delegate(ThroughLinks, ctx.image_url), )
Handle missing fields in dataverse
Handle missing fields in dataverse
Python
apache-2.0
CenterForOpenScience/SHARE,laurenbarker/SHARE,aaxelb/SHARE,aaxelb/SHARE,laurenbarker/SHARE,zamattiac/SHARE,zamattiac/SHARE,CenterForOpenScience/SHARE,laurenbarker/SHARE,CenterForOpenScience/SHARE,zamattiac/SHARE,aaxelb/SHARE
import arrow import dateparser from share.normalize import * class Person(Parser): given_name = ParseName(ctx).first family_name = ParseName(ctx).last additional_name = ParseName(ctx).middle suffix = ParseName(ctx).suffix class Contributor(Parser): person = Delegate(Person, ctx) cited_name = ctx order_cited = ctx('index') class Link(Parser): url = ctx type = RunPython('get_link_type', ctx) def get_link_type(self, link): if 'dx.doi.org' in link: return 'doi' elif 'dataverse.harvard.edu' in link: return 'provider' return 'misc' class ThroughLinks(Parser): link = Delegate(Link, ctx) class CreativeWork(Parser): title = ctx.name - description = ctx.description + description = Try(ctx.description) - contributors = Map(Delegate(Contributor), ctx.authors) + contributors = Map(Delegate(Contributor), Try(ctx.authors)) date_published = ParseDate(ctx.published_at) links = Concat( Delegate(ThroughLinks, ctx.url), Delegate(ThroughLinks, ctx.image_url), )
Handle missing fields in dataverse
## Code Before: import arrow import dateparser from share.normalize import * class Person(Parser): given_name = ParseName(ctx).first family_name = ParseName(ctx).last additional_name = ParseName(ctx).middle suffix = ParseName(ctx).suffix class Contributor(Parser): person = Delegate(Person, ctx) cited_name = ctx order_cited = ctx('index') class Link(Parser): url = ctx type = RunPython('get_link_type', ctx) def get_link_type(self, link): if 'dx.doi.org' in link: return 'doi' elif 'dataverse.harvard.edu' in link: return 'provider' return 'misc' class ThroughLinks(Parser): link = Delegate(Link, ctx) class CreativeWork(Parser): title = ctx.name description = ctx.description contributors = Map(Delegate(Contributor), ctx.authors) date_published = ParseDate(ctx.published_at) links = Concat( Delegate(ThroughLinks, ctx.url), Delegate(ThroughLinks, ctx.image_url), ) ## Instruction: Handle missing fields in dataverse ## Code After: import arrow import dateparser from share.normalize import * class Person(Parser): given_name = ParseName(ctx).first family_name = ParseName(ctx).last additional_name = ParseName(ctx).middle suffix = ParseName(ctx).suffix class Contributor(Parser): person = Delegate(Person, ctx) cited_name = ctx order_cited = ctx('index') class Link(Parser): url = ctx type = RunPython('get_link_type', ctx) def get_link_type(self, link): if 'dx.doi.org' in link: return 'doi' elif 'dataverse.harvard.edu' in link: return 'provider' return 'misc' class ThroughLinks(Parser): link = Delegate(Link, ctx) class CreativeWork(Parser): title = ctx.name description = Try(ctx.description) contributors = Map(Delegate(Contributor), Try(ctx.authors)) date_published = ParseDate(ctx.published_at) links = Concat( Delegate(ThroughLinks, ctx.url), Delegate(ThroughLinks, ctx.image_url), )
3327c204f34a725a2d070beb24a7a5a66d414930
migrations/versions/538eeb160af6_.py
migrations/versions/538eeb160af6_.py
# revision identifiers, used by Alembic. revision = '538eeb160af6' down_revision = '1727fb4309d8' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('role', sa.String(length=30), nullable=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'role') ### end Alembic commands ###
# revision identifiers, used by Alembic. revision = '538eeb160af6' down_revision = '6b9d673d8e30' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('role', sa.String(length=30), nullable=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'role') ### end Alembic commands ###
Update alembic order for merging
Update alembic order for merging
Python
apache-2.0
bunjiboys/security_monkey,stackArmor/security_monkey,markofu/security_monkey,bunjiboys/security_monkey,bunjiboys/security_monkey,markofu/security_monkey,markofu/security_monkey,Netflix/security_monkey,stackArmor/security_monkey,Netflix/security_monkey,Netflix/security_monkey,stackArmor/security_monkey,Netflix/security_monkey,bunjiboys/security_monkey,Netflix/security_monkey,bunjiboys/security_monkey,stackArmor/security_monkey,stackArmor/security_monkey,markofu/security_monkey,markofu/security_monkey
# revision identifiers, used by Alembic. revision = '538eeb160af6' - down_revision = '1727fb4309d8' + down_revision = '6b9d673d8e30' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('role', sa.String(length=30), nullable=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'role') ### end Alembic commands ###
Update alembic order for merging
## Code Before: # revision identifiers, used by Alembic. revision = '538eeb160af6' down_revision = '1727fb4309d8' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('role', sa.String(length=30), nullable=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'role') ### end Alembic commands ### ## Instruction: Update alembic order for merging ## Code After: # revision identifiers, used by Alembic. revision = '538eeb160af6' down_revision = '6b9d673d8e30' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('role', sa.String(length=30), nullable=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'role') ### end Alembic commands ###
d2d822a9fb60bbc8ded7f9e3c70d91cf25f794b2
src/volunteers/models.py
src/volunteers/models.py
from django.db import models from django.utils.translation import ugettext_lazy as _ from django.core.validators import MinValueValidator class Volunteer(models.Model): first_name = models.CharField(_('First name'), max_length=100) last_name = models.CharField(_('Last name'), max_length=100) age = models.PositiveIntegerField(_('Age')) phone = models.CharField(_('Phone'), max_length=100) email = models.EmailField(_('E-mail'), unique=True) is_group = models.BooleanField(_('Is group representative'), default=False) group_name = models.CharField(_('Group/organization name'), max_length=100, blank=True) participant_count = models.PositiveIntegerField(_('Participant count'), default=1, validators=[MinValueValidator(1)]) class Meta: verbose_name = _('Volunteer') verbose_name_plural = _('Volunteers') @property def name(self): template = u'{first_name} {last_name}' if self.is_group: template += u' (grupp, {participant_count} osalejat)' return template.format(**self.__dict__) def __unicode__(self): return self.name
from django.db import models from django.utils.translation import ugettext_lazy as _ from django.core.validators import MinValueValidator class Volunteer(models.Model): first_name = models.CharField(_('First name'), max_length=100) last_name = models.CharField(_('Last name'), max_length=100) age = models.PositiveIntegerField(_('Age')) phone = models.CharField(_('Phone'), max_length=100) email = models.EmailField(_('E-mail'), unique=True) is_group = models.BooleanField(_('Is group representative'), default=False) group_name = models.CharField(_('Group/organization name'), max_length=100, blank=True) participant_count = models.PositiveIntegerField(_('Participant count'), default=1, validators=[MinValueValidator(1)]) class Meta: verbose_name = _('Volunteer') verbose_name_plural = _('Volunteers') @property def name(self): template = u'{first_name} {last_name}' if self.is_group: template += u' ({group_name} grupp, {participant_count} osalejat)' return template.format(**self.__dict__) def __unicode__(self): return self.name
Add group name to volunteer string representation
Add group name to volunteer string representation
Python
mit
mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign
from django.db import models from django.utils.translation import ugettext_lazy as _ from django.core.validators import MinValueValidator class Volunteer(models.Model): first_name = models.CharField(_('First name'), max_length=100) last_name = models.CharField(_('Last name'), max_length=100) age = models.PositiveIntegerField(_('Age')) phone = models.CharField(_('Phone'), max_length=100) email = models.EmailField(_('E-mail'), unique=True) is_group = models.BooleanField(_('Is group representative'), default=False) group_name = models.CharField(_('Group/organization name'), max_length=100, blank=True) participant_count = models.PositiveIntegerField(_('Participant count'), default=1, validators=[MinValueValidator(1)]) class Meta: verbose_name = _('Volunteer') verbose_name_plural = _('Volunteers') @property def name(self): template = u'{first_name} {last_name}' if self.is_group: - template += u' (grupp, {participant_count} osalejat)' + template += u' ({group_name} grupp, {participant_count} osalejat)' return template.format(**self.__dict__) def __unicode__(self): return self.name
Add group name to volunteer string representation
## Code Before: from django.db import models from django.utils.translation import ugettext_lazy as _ from django.core.validators import MinValueValidator class Volunteer(models.Model): first_name = models.CharField(_('First name'), max_length=100) last_name = models.CharField(_('Last name'), max_length=100) age = models.PositiveIntegerField(_('Age')) phone = models.CharField(_('Phone'), max_length=100) email = models.EmailField(_('E-mail'), unique=True) is_group = models.BooleanField(_('Is group representative'), default=False) group_name = models.CharField(_('Group/organization name'), max_length=100, blank=True) participant_count = models.PositiveIntegerField(_('Participant count'), default=1, validators=[MinValueValidator(1)]) class Meta: verbose_name = _('Volunteer') verbose_name_plural = _('Volunteers') @property def name(self): template = u'{first_name} {last_name}' if self.is_group: template += u' (grupp, {participant_count} osalejat)' return template.format(**self.__dict__) def __unicode__(self): return self.name ## Instruction: Add group name to volunteer string representation ## Code After: from django.db import models from django.utils.translation import ugettext_lazy as _ from django.core.validators import MinValueValidator class Volunteer(models.Model): first_name = models.CharField(_('First name'), max_length=100) last_name = models.CharField(_('Last name'), max_length=100) age = models.PositiveIntegerField(_('Age')) phone = models.CharField(_('Phone'), max_length=100) email = models.EmailField(_('E-mail'), unique=True) is_group = models.BooleanField(_('Is group representative'), default=False) group_name = models.CharField(_('Group/organization name'), max_length=100, blank=True) participant_count = models.PositiveIntegerField(_('Participant count'), default=1, validators=[MinValueValidator(1)]) class Meta: verbose_name = _('Volunteer') verbose_name_plural = _('Volunteers') @property def name(self): template = u'{first_name} {last_name}' if 
self.is_group: template += u' ({group_name} grupp, {participant_count} osalejat)' return template.format(**self.__dict__) def __unicode__(self): return self.name
c89abd6a285225313c91ba03c0fd8ab2cfed399d
setup.py
setup.py
import os import urllib import zipfile script_path = os.path.dirname(os.path.realpath(__file__)) packer_archive_path = script_path + "/packer.zip" bin_path = script_path + "/bin" if not os.path.isfile(bin_path + "/packer"): if not os.path.exists(bin_path): os.makedirs(bin_path) try: urllib.urlretrieve("https://dl.bintray.com/mitchellh/packer/packer_0.8.6_linux_amd64.zip", packer_archive_path) with zipfile.ZipFile(packer_archive_path, "r") as packer_archive: packer_archive.extractall(path=bin_path) finally: os.remove(packer_archive_path) for root, subdirectories, files in os.walk(bin_path): for f in files: os.chmod("%s/%s" % (root, f), 0755)
import os import urllib import zipfile script_path = os.path.dirname(os.path.realpath(__file__)) packer_archive_path = script_path + "/packer.zip" bin_path = script_path + "/bin" if not os.path.isfile(bin_path + "/packer"): if not os.path.exists(bin_path): os.makedirs(bin_path) try: urllib.urlretrieve("https://dl.bintray.com/mitchellh/packer/packer_0.8.6_linux_amd64.zip", packer_archive_path) with zipfile.ZipFile(packer_archive_path, "r") as packer_archive: packer_archive.extractall(path=bin_path) finally: os.remove(packer_archive_path) for root, subdirectories, files in os.walk(bin_path): for f in files: os.chmod(root + "/" + f, 755)
Fix false positive octal syntax warning
Fix false positive octal syntax warning
Python
unlicense
dharmab/centos-vagrant
import os import urllib import zipfile script_path = os.path.dirname(os.path.realpath(__file__)) packer_archive_path = script_path + "/packer.zip" bin_path = script_path + "/bin" if not os.path.isfile(bin_path + "/packer"): if not os.path.exists(bin_path): os.makedirs(bin_path) try: urllib.urlretrieve("https://dl.bintray.com/mitchellh/packer/packer_0.8.6_linux_amd64.zip", packer_archive_path) with zipfile.ZipFile(packer_archive_path, "r") as packer_archive: packer_archive.extractall(path=bin_path) finally: os.remove(packer_archive_path) for root, subdirectories, files in os.walk(bin_path): for f in files: - os.chmod("%s/%s" % (root, f), 0755) + os.chmod(root + "/" + f, 755)
Fix false positive octal syntax warning
## Code Before: import os import urllib import zipfile script_path = os.path.dirname(os.path.realpath(__file__)) packer_archive_path = script_path + "/packer.zip" bin_path = script_path + "/bin" if not os.path.isfile(bin_path + "/packer"): if not os.path.exists(bin_path): os.makedirs(bin_path) try: urllib.urlretrieve("https://dl.bintray.com/mitchellh/packer/packer_0.8.6_linux_amd64.zip", packer_archive_path) with zipfile.ZipFile(packer_archive_path, "r") as packer_archive: packer_archive.extractall(path=bin_path) finally: os.remove(packer_archive_path) for root, subdirectories, files in os.walk(bin_path): for f in files: os.chmod("%s/%s" % (root, f), 0755) ## Instruction: Fix false positive octal syntax warning ## Code After: import os import urllib import zipfile script_path = os.path.dirname(os.path.realpath(__file__)) packer_archive_path = script_path + "/packer.zip" bin_path = script_path + "/bin" if not os.path.isfile(bin_path + "/packer"): if not os.path.exists(bin_path): os.makedirs(bin_path) try: urllib.urlretrieve("https://dl.bintray.com/mitchellh/packer/packer_0.8.6_linux_amd64.zip", packer_archive_path) with zipfile.ZipFile(packer_archive_path, "r") as packer_archive: packer_archive.extractall(path=bin_path) finally: os.remove(packer_archive_path) for root, subdirectories, files in os.walk(bin_path): for f in files: os.chmod(root + "/" + f, 755)
ee2d27eca45768a07a562405cf4431cb8d2b09bf
setup.py
setup.py
from distutils.core import setup setup(name='pyresttest', version='0.1', description='Python Rest Testing', maintainer='Naveen Malik', maintainer_email='jewzaam@gmail.com', url='https://github.com/svanoort/pyresttest', py_modules=['resttest','pycurl_benchmark','test_resttest'], license='Apache License, Version 2.0' )
from distutils.core import setup setup(name='pyresttest', version='0.1', description='Python Rest Testing', maintainer='Sam Van Oort', maintainer_email='acetonespam@gmail.com', url='https://github.com/svanoort/pyresttest', py_modules=['resttest','test_resttest'], license='Apache License, Version 2.0', requires=['argparse','yaml','pycurl'] )
Set maintainer and add dependencies to distutils config
Set maintainer and add dependencies to distutils config
Python
apache-2.0
sunyanhui/pyresttest,satish-suradkar/pyresttest,suvarnaraju/pyresttest,wirewit/pyresttest,netjunki/pyresttest,MorrisJobke/pyresttest,wirewit/pyresttest,suvarnaraju/pyresttest,svanoort/pyresttest,alazaro/pyresttest,sunyanhui/pyresttest,TimYi/pyresttest,MorrisJobke/pyresttest,holdenweb/pyresttest,TimYi/pyresttest,alazaro/pyresttest,janusnic/pyresttest,janusnic/pyresttest,holdenweb/pyresttest,netjunki/pyresttest,svanoort/pyresttest,satish-suradkar/pyresttest
from distutils.core import setup setup(name='pyresttest', version='0.1', description='Python Rest Testing', - maintainer='Naveen Malik', + maintainer='Sam Van Oort', - maintainer_email='jewzaam@gmail.com', + maintainer_email='acetonespam@gmail.com', url='https://github.com/svanoort/pyresttest', - py_modules=['resttest','pycurl_benchmark','test_resttest'], + py_modules=['resttest','test_resttest'], - license='Apache License, Version 2.0' + license='Apache License, Version 2.0', + requires=['argparse','yaml','pycurl'] )
Set maintainer and add dependencies to distutils config
## Code Before: from distutils.core import setup setup(name='pyresttest', version='0.1', description='Python Rest Testing', maintainer='Naveen Malik', maintainer_email='jewzaam@gmail.com', url='https://github.com/svanoort/pyresttest', py_modules=['resttest','pycurl_benchmark','test_resttest'], license='Apache License, Version 2.0' ) ## Instruction: Set maintainer and add dependencies to distutils config ## Code After: from distutils.core import setup setup(name='pyresttest', version='0.1', description='Python Rest Testing', maintainer='Sam Van Oort', maintainer_email='acetonespam@gmail.com', url='https://github.com/svanoort/pyresttest', py_modules=['resttest','test_resttest'], license='Apache License, Version 2.0', requires=['argparse','yaml','pycurl'] )
8fea58292e41352b0b58947f4182dd32ff4f225d
opps/fields/models.py
opps/fields/models.py
from django.db import models from django.utils.translation import ugettext_lazy as _ from opps.boxes.models import OPPS_APPS FIELD_TYPE = ( ('checkbox', _('CheckBox')), ('radio', _('Radio')), ('text', _('Text')), ('textarea', _('TextArea')), ) class Field(models.Model): name = models.CharField(_('Name'), max_length=100) slug = models.SlugField(_('Slug'), max_length=255) application = models.CharField(_('Application'), max_length=255, choices=OPPS_APPS, db_index=True) type = models.CharField(_("Type"), max_length=15, choices=FIELD_TYPE, db_index=True) def __unicode__(self): return u"{} - {}".format(self.application, self.name)
from django.db import models from django.utils.translation import ugettext_lazy as _ from opps.boxes.models import OPPS_APPS FIELD_TYPE = ( ('checkbox', _('CheckBox')), ('radio', _('Radio')), ('text', _('Text')), ('textarea', _('TextArea')), ) class Field(models.Model): name = models.CharField(_('Name'), max_length=100) slug = models.SlugField(_('Slug'), max_length=255) application = models.CharField(_('Application'), max_length=255, choices=OPPS_APPS, db_index=True) type = models.CharField(_("Type"), max_length=15, choices=FIELD_TYPE, db_index=True) def __unicode__(self): return u"{} - {}".format(self.application, self.name) class Option(models.Model): field = models.ForeignKey('fields.Field') name = models.CharField(_('Name'), max_length=100) slug = models.SlugField(_('Slug'), max_length=140) value = models.CharField(_('Value'), max_length=255) def __unicode__(self): return u"{} - {}".format(self.field.slug, self.name) class FieldOption(models.Model): field = models.ForeignKey('fields.Field') option = models.ForeignKey('fields.Option') order = models.PositiveIntegerField(_(u'Order'), default=0) def __unicode__(self): return u"{} - {}".format(self.field.slug, self.option.slug) class Meta: ordering = ['-order']
Add new model option to add field options if exist (radio/checkbox)
Add new model option to add field options if exist (radio/checkbox)
Python
mit
williamroot/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,williamroot/opps,opps/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,opps/opps,williamroot/opps,YACOWS/opps,opps/opps
from django.db import models from django.utils.translation import ugettext_lazy as _ from opps.boxes.models import OPPS_APPS FIELD_TYPE = ( ('checkbox', _('CheckBox')), ('radio', _('Radio')), ('text', _('Text')), ('textarea', _('TextArea')), ) class Field(models.Model): name = models.CharField(_('Name'), max_length=100) slug = models.SlugField(_('Slug'), max_length=255) application = models.CharField(_('Application'), max_length=255, choices=OPPS_APPS, db_index=True) type = models.CharField(_("Type"), max_length=15, choices=FIELD_TYPE, db_index=True) def __unicode__(self): return u"{} - {}".format(self.application, self.name) + + class Option(models.Model): + field = models.ForeignKey('fields.Field') + name = models.CharField(_('Name'), max_length=100) + slug = models.SlugField(_('Slug'), max_length=140) + value = models.CharField(_('Value'), max_length=255) + + def __unicode__(self): + return u"{} - {}".format(self.field.slug, self.name) + + + + class FieldOption(models.Model): + field = models.ForeignKey('fields.Field') + option = models.ForeignKey('fields.Option') + order = models.PositiveIntegerField(_(u'Order'), default=0) + + def __unicode__(self): + return u"{} - {}".format(self.field.slug, self.option.slug) + + class Meta: + ordering = ['-order'] +
Add new model option to add field options if exist (radio/checkbox)
## Code Before: from django.db import models from django.utils.translation import ugettext_lazy as _ from opps.boxes.models import OPPS_APPS FIELD_TYPE = ( ('checkbox', _('CheckBox')), ('radio', _('Radio')), ('text', _('Text')), ('textarea', _('TextArea')), ) class Field(models.Model): name = models.CharField(_('Name'), max_length=100) slug = models.SlugField(_('Slug'), max_length=255) application = models.CharField(_('Application'), max_length=255, choices=OPPS_APPS, db_index=True) type = models.CharField(_("Type"), max_length=15, choices=FIELD_TYPE, db_index=True) def __unicode__(self): return u"{} - {}".format(self.application, self.name) ## Instruction: Add new model option to add field options if exist (radio/checkbox) ## Code After: from django.db import models from django.utils.translation import ugettext_lazy as _ from opps.boxes.models import OPPS_APPS FIELD_TYPE = ( ('checkbox', _('CheckBox')), ('radio', _('Radio')), ('text', _('Text')), ('textarea', _('TextArea')), ) class Field(models.Model): name = models.CharField(_('Name'), max_length=100) slug = models.SlugField(_('Slug'), max_length=255) application = models.CharField(_('Application'), max_length=255, choices=OPPS_APPS, db_index=True) type = models.CharField(_("Type"), max_length=15, choices=FIELD_TYPE, db_index=True) def __unicode__(self): return u"{} - {}".format(self.application, self.name) class Option(models.Model): field = models.ForeignKey('fields.Field') name = models.CharField(_('Name'), max_length=100) slug = models.SlugField(_('Slug'), max_length=140) value = models.CharField(_('Value'), max_length=255) def __unicode__(self): return u"{} - {}".format(self.field.slug, self.name) class FieldOption(models.Model): field = models.ForeignKey('fields.Field') option = models.ForeignKey('fields.Option') order = models.PositiveIntegerField(_(u'Order'), default=0) def __unicode__(self): return u"{} - {}".format(self.field.slug, self.option.slug) class Meta: ordering = ['-order']
6f83fb7dd071786dc01a015addbdb541e7eaf7db
meinberlin/apps/documents/migrations/0002_rename_document_to_chapter.py
meinberlin/apps/documents/migrations/0002_rename_document_to_chapter.py
from __future__ import unicode_literals from django.db import migrations from django.db import models class Migration(migrations.Migration): dependencies = [ ('meinberlin_documents', '0001_initial'), ] operations = [ migrations.RenameModel( old_name='Document', new_name='Chapter', ), migrations.RenameField( model_name='paragraph', old_name='document', new_name='chapter', ), ]
from __future__ import unicode_literals from django.db import migrations from django.db import models class Migration(migrations.Migration): atomic=False dependencies = [ ('meinberlin_documents', '0001_initial'), ] operations = [ migrations.RenameModel( old_name='Document', new_name='Chapter', ), migrations.RenameField( model_name='paragraph', old_name='document', new_name='chapter', ), ]
Work around a migration issue in sqlite
apps/documents: Work around a migration issue in sqlite
Python
agpl-3.0
liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin
from __future__ import unicode_literals from django.db import migrations from django.db import models class Migration(migrations.Migration): + atomic=False dependencies = [ ('meinberlin_documents', '0001_initial'), ] operations = [ migrations.RenameModel( old_name='Document', new_name='Chapter', ), migrations.RenameField( model_name='paragraph', old_name='document', new_name='chapter', ), ]
Work around a migration issue in sqlite
## Code Before: from __future__ import unicode_literals from django.db import migrations from django.db import models class Migration(migrations.Migration): dependencies = [ ('meinberlin_documents', '0001_initial'), ] operations = [ migrations.RenameModel( old_name='Document', new_name='Chapter', ), migrations.RenameField( model_name='paragraph', old_name='document', new_name='chapter', ), ] ## Instruction: Work around a migration issue in sqlite ## Code After: from __future__ import unicode_literals from django.db import migrations from django.db import models class Migration(migrations.Migration): atomic=False dependencies = [ ('meinberlin_documents', '0001_initial'), ] operations = [ migrations.RenameModel( old_name='Document', new_name='Chapter', ), migrations.RenameField( model_name='paragraph', old_name='document', new_name='chapter', ), ]
3d2f19ff097cf144efd9135c52e4d584193f9ddb
tohu/v7/custom_generator/tohu_items_class.py
tohu/v7/custom_generator/tohu_items_class.py
import attr __all__ = ["make_tohu_items_class"] def make_tohu_items_class(clsname, field_names): """ Parameters ---------- clsname: string Name of the class to be created. field_names: list of strings Names of the field attributes of the class to be created. """ item_cls = attr.make_class(clsname, {name: attr.ib() for name in field_names}, repr=True, cmp=True, frozen=True) func_eq_orig = item_cls.__eq__ def func_eq_new(self, other): """ Custom __eq__() method which also allows comparisons with tuples and dictionaries. This is mostly for convenience during testing. """ if isinstance(other, self.__class__): return func_eq_orig(self, other) else: if isinstance(other, tuple): return attr.astuple(self) == other elif isinstance(other, dict): return attr.asdict(self) == other else: raise TypeError( f"Tohu items have types that cannot be compared: " "{self.__class__.__name__}, {other.__class__.__name__}" ) item_cls.__eq__ = func_eq_new item_cls.field_names = field_names item_cls.as_dict = lambda self: attr.asdict(self) item_cls.as_tuple = lambda self: attr.astuple(self) return item_cls
import attr __all__ = ["make_tohu_items_class"] def make_tohu_items_class(clsname, field_names): """ Parameters ---------- clsname: string Name of the class to be created. field_names: list of strings Names of the field attributes of the class to be created. """ item_cls = attr.make_class(clsname, {name: attr.ib() for name in field_names}, repr=True, cmp=True, frozen=True) func_eq_orig = item_cls.__eq__ def func_eq_new(self, other): """ Custom __eq__() method which also allows comparisons with tuples and dictionaries. This is mostly for convenience during testing. """ if isinstance(other, self.__class__): return func_eq_orig(self, other) else: if isinstance(other, tuple): return attr.astuple(self) == other elif isinstance(other, dict): return attr.asdict(self) == other else: raise TypeError( f"Tohu items have types that cannot be compared: " "{self.__class__.__name__}, {other.__class__.__name__}" ) item_cls.__eq__ = func_eq_new item_cls.field_names = field_names item_cls.as_dict = lambda self: attr.asdict(self) item_cls.as_tuple = lambda self: attr.astuple(self) item_cls.is_unset = False return item_cls
Add attribute 'is_unset' so that the interface is consistent with MissingTohuItemsCls
Add attribute 'is_unset' so that the interface is consistent with MissingTohuItemsCls
Python
mit
maxalbert/tohu
import attr __all__ = ["make_tohu_items_class"] def make_tohu_items_class(clsname, field_names): """ Parameters ---------- clsname: string Name of the class to be created. field_names: list of strings Names of the field attributes of the class to be created. """ item_cls = attr.make_class(clsname, {name: attr.ib() for name in field_names}, repr=True, cmp=True, frozen=True) func_eq_orig = item_cls.__eq__ def func_eq_new(self, other): """ Custom __eq__() method which also allows comparisons with tuples and dictionaries. This is mostly for convenience during testing. """ if isinstance(other, self.__class__): return func_eq_orig(self, other) else: if isinstance(other, tuple): return attr.astuple(self) == other elif isinstance(other, dict): return attr.asdict(self) == other else: raise TypeError( f"Tohu items have types that cannot be compared: " "{self.__class__.__name__}, {other.__class__.__name__}" ) item_cls.__eq__ = func_eq_new item_cls.field_names = field_names item_cls.as_dict = lambda self: attr.asdict(self) item_cls.as_tuple = lambda self: attr.astuple(self) + item_cls.is_unset = False return item_cls
Add attribute 'is_unset' so that the interface is consistent with MissingTohuItemsCls
## Code Before: import attr __all__ = ["make_tohu_items_class"] def make_tohu_items_class(clsname, field_names): """ Parameters ---------- clsname: string Name of the class to be created. field_names: list of strings Names of the field attributes of the class to be created. """ item_cls = attr.make_class(clsname, {name: attr.ib() for name in field_names}, repr=True, cmp=True, frozen=True) func_eq_orig = item_cls.__eq__ def func_eq_new(self, other): """ Custom __eq__() method which also allows comparisons with tuples and dictionaries. This is mostly for convenience during testing. """ if isinstance(other, self.__class__): return func_eq_orig(self, other) else: if isinstance(other, tuple): return attr.astuple(self) == other elif isinstance(other, dict): return attr.asdict(self) == other else: raise TypeError( f"Tohu items have types that cannot be compared: " "{self.__class__.__name__}, {other.__class__.__name__}" ) item_cls.__eq__ = func_eq_new item_cls.field_names = field_names item_cls.as_dict = lambda self: attr.asdict(self) item_cls.as_tuple = lambda self: attr.astuple(self) return item_cls ## Instruction: Add attribute 'is_unset' so that the interface is consistent with MissingTohuItemsCls ## Code After: import attr __all__ = ["make_tohu_items_class"] def make_tohu_items_class(clsname, field_names): """ Parameters ---------- clsname: string Name of the class to be created. field_names: list of strings Names of the field attributes of the class to be created. """ item_cls = attr.make_class(clsname, {name: attr.ib() for name in field_names}, repr=True, cmp=True, frozen=True) func_eq_orig = item_cls.__eq__ def func_eq_new(self, other): """ Custom __eq__() method which also allows comparisons with tuples and dictionaries. This is mostly for convenience during testing. 
""" if isinstance(other, self.__class__): return func_eq_orig(self, other) else: if isinstance(other, tuple): return attr.astuple(self) == other elif isinstance(other, dict): return attr.asdict(self) == other else: raise TypeError( f"Tohu items have types that cannot be compared: " "{self.__class__.__name__}, {other.__class__.__name__}" ) item_cls.__eq__ = func_eq_new item_cls.field_names = field_names item_cls.as_dict = lambda self: attr.asdict(self) item_cls.as_tuple = lambda self: attr.astuple(self) item_cls.is_unset = False return item_cls
a10407bf4d9dd404d734985717aa7bcebfa0981d
api/digital_ocean.py
api/digital_ocean.py
import urllib2 import json class Api(): group_name = "Digital Ocean" client_key = None api_key = None servers = {} def __init__(self, **kwargs): for key in kwargs: setattr(self, key, kwargs[key]) def grab_servers(self): DROPLETS_URL = 'https%s/droplets/?client_id=%s&api_key=%s' % \ ('://api.digitalocean.com', self.client_key, self.api_key) droplets = urllib2.urlopen(DROPLETS_URL) try: data = json.loads(droplets.read()) except: raise Exception("Fatal error: No droplets found") for droplet in data['droplets']: if droplet['status'] == 'active': name = droplet['name'] if name not in self.servers: self.servers[name] = [] self.servers[name].append(droplet['ip_address']) def get_servers(self, name): return self.servers[name] if name in self.servers else None
import urllib2 import json class Api(): group_name = "Digital Ocean" client_key = None api_key = None servers = {} def __init__(self, **kwargs): for key in kwargs: setattr(self, key, kwargs[key]) def grab_servers(self): DROPLETS_URL = 'https%s/droplets/?client_id=%s&api_key=%s' % \ ('://api.digitalocean.com', self.client_key, self.api_key) try: droplets = urllib2.urlopen(DROPLETS_URL) except urllib2.URLError: raise Exception("Fatal error: Unable to connect to API") try: data = json.loads(droplets.read()) except: raise Exception("Fatal error: No droplets found") for droplet in data['droplets']: if droplet['status'] == 'active': name = droplet['name'] if name not in self.servers: self.servers[name] = [] self.servers[name].append(droplet['ip_address']) def get_servers(self, name): return self.servers[name] if name in self.servers else None
Return a helpful exception if API is uncontactable
Return a helpful exception if API is uncontactable
Python
mit
dparlevliet/elastic-firewall,dparlevliet/elastic-firewall,dparlevliet/elastic-firewall
import urllib2 import json class Api(): group_name = "Digital Ocean" client_key = None api_key = None servers = {} def __init__(self, **kwargs): for key in kwargs: setattr(self, key, kwargs[key]) def grab_servers(self): DROPLETS_URL = 'https%s/droplets/?client_id=%s&api_key=%s' % \ ('://api.digitalocean.com', self.client_key, self.api_key) + try: - droplets = urllib2.urlopen(DROPLETS_URL) + droplets = urllib2.urlopen(DROPLETS_URL) + except urllib2.URLError: + raise Exception("Fatal error: Unable to connect to API") + try: data = json.loads(droplets.read()) except: raise Exception("Fatal error: No droplets found") for droplet in data['droplets']: if droplet['status'] == 'active': name = droplet['name'] if name not in self.servers: self.servers[name] = [] self.servers[name].append(droplet['ip_address']) def get_servers(self, name): return self.servers[name] if name in self.servers else None
Return a helpful exception if API is uncontactable
## Code Before: import urllib2 import json class Api(): group_name = "Digital Ocean" client_key = None api_key = None servers = {} def __init__(self, **kwargs): for key in kwargs: setattr(self, key, kwargs[key]) def grab_servers(self): DROPLETS_URL = 'https%s/droplets/?client_id=%s&api_key=%s' % \ ('://api.digitalocean.com', self.client_key, self.api_key) droplets = urllib2.urlopen(DROPLETS_URL) try: data = json.loads(droplets.read()) except: raise Exception("Fatal error: No droplets found") for droplet in data['droplets']: if droplet['status'] == 'active': name = droplet['name'] if name not in self.servers: self.servers[name] = [] self.servers[name].append(droplet['ip_address']) def get_servers(self, name): return self.servers[name] if name in self.servers else None ## Instruction: Return a helpful exception if API is uncontactable ## Code After: import urllib2 import json class Api(): group_name = "Digital Ocean" client_key = None api_key = None servers = {} def __init__(self, **kwargs): for key in kwargs: setattr(self, key, kwargs[key]) def grab_servers(self): DROPLETS_URL = 'https%s/droplets/?client_id=%s&api_key=%s' % \ ('://api.digitalocean.com', self.client_key, self.api_key) try: droplets = urllib2.urlopen(DROPLETS_URL) except urllib2.URLError: raise Exception("Fatal error: Unable to connect to API") try: data = json.loads(droplets.read()) except: raise Exception("Fatal error: No droplets found") for droplet in data['droplets']: if droplet['status'] == 'active': name = droplet['name'] if name not in self.servers: self.servers[name] = [] self.servers[name].append(droplet['ip_address']) def get_servers(self, name): return self.servers[name] if name in self.servers else None
7ff6c9d85eef03c225b511f39bbb07796b47659f
datapipe/history.py
datapipe/history.py
class History: def __init__(self): self.conn = sqlite3.connect('.history.db')
import sqlite3 class History: def __init__(self, path): self.conn = sqlite3.connect(path)
Make database filepath configurable on History
Make database filepath configurable on History
Python
mit
ibab/datapipe
+ import sqlite3 class History: - def __init__(self): + def __init__(self, path): - self.conn = sqlite3.connect('.history.db') + self.conn = sqlite3.connect(path)
Make database filepath configurable on History
## Code Before: class History: def __init__(self): self.conn = sqlite3.connect('.history.db') ## Instruction: Make database filepath configurable on History ## Code After: import sqlite3 class History: def __init__(self, path): self.conn = sqlite3.connect(path)
fd4539942dafe622d3f7a7d183db3d69f95a00c4
shop/urls/cart.py
shop/urls/cart.py
from django.conf.urls.defaults import url, patterns from shop.views.cart import CartDetails, CartItemDetail urlpatterns = patterns('', url(r'^delete/$', CartDetails.as_view(action='delete'), # DELETE name='cart_delete'), url('^item/$', CartDetails.as_view(action='post'), # POST name='cart_item_add'), url(r'^$', CartDetails.as_view(), name='cart'), # GET url(r'^update/$', CartDetails.as_view(action='put'), name='cart_update'), # CartItems url('^item/(?P<id>[0-9A-Za-z-_.//]+)$', CartItemDetail.as_view(), name='cart_item'), url('^item/(?P<id>[0-9A-Za-z-_.//]+)/delete$', CartItemDetail.as_view(action='delete'), name='cart_item_delete'), )
from django.conf.urls.defaults import url, patterns from shop.views.cart import CartDetails, CartItemDetail urlpatterns = patterns('', url(r'^delete/$', CartDetails.as_view(action='delete'), # DELETE name='cart_delete'), url('^item/$', CartDetails.as_view(action='post'), # POST name='cart_item_add'), url(r'^$', CartDetails.as_view(), name='cart'), # GET url(r'^update/$', CartDetails.as_view(action='put'), name='cart_update'), # CartItems url('^item/(?P<id>[0-9]+)$', CartItemDetail.as_view(), name='cart_item'), url('^item/(?P<id>[0-9]+)/delete$', CartItemDetail.as_view(action='delete'), name='cart_item_delete'), )
Make sure that ID will not match the first CartItems rule EVERY time ("//" was in regex).
Make sure that ID will not match the first CartItems rule EVERY time ("//" was in regex).
Python
bsd-3-clause
schacki/django-shop,khchine5/django-shop,khchine5/django-shop,dwx9/test,febsn/django-shop,DavideyLee/django-shop,awesto/django-shop,jrief/django-shop,dwx9/test,thenewguy/django-shop,thenewguy/django-shop,bmihelac/django-shop,pjdelport/django-shop,creimers/django-shop,creimers/django-shop,jrief/django-shop,bmihelac/django-shop,awesto/django-shop,awesto/django-shop,febsn/django-shop,febsn/django-shop,nimbis/django-shop,khchine5/django-shop,pjdelport/django-shop,rfleschenberg/django-shop,rfleschenberg/django-shop,dwx9/test,rfleschenberg/django-shop,fusionbox/django-shop,chriscauley/django-shop,jrief/django-shop,divio/django-shop,creimers/django-shop,DavideyLee/django-shop,pjdelport/django-shop,schacki/django-shop,schacki/django-shop,nimbis/django-shop,atheiste/django-shop,nimbis/django-shop,katomaso/django-shop,fusionbox/django-shop,chriscauley/django-shop,atheiste/django-shop,chriscauley/django-shop,jrutila/django-shop,jrutila/django-shop,khchine5/django-shop,schacki/django-shop,nimbis/django-shop,jrutila/django-shop,divio/django-shop,divio/django-shop,katomaso/django-shop,katomaso/django-shop,rfleschenberg/django-shop,atheiste/django-shop,jrief/django-shop
from django.conf.urls.defaults import url, patterns from shop.views.cart import CartDetails, CartItemDetail urlpatterns = patterns('', url(r'^delete/$', CartDetails.as_view(action='delete'), # DELETE name='cart_delete'), url('^item/$', CartDetails.as_view(action='post'), # POST name='cart_item_add'), url(r'^$', CartDetails.as_view(), name='cart'), # GET url(r'^update/$', CartDetails.as_view(action='put'), name='cart_update'), # CartItems - url('^item/(?P<id>[0-9A-Za-z-_.//]+)$', CartItemDetail.as_view(), + url('^item/(?P<id>[0-9]+)$', CartItemDetail.as_view(), name='cart_item'), - url('^item/(?P<id>[0-9A-Za-z-_.//]+)/delete$', + url('^item/(?P<id>[0-9]+)/delete$', CartItemDetail.as_view(action='delete'), name='cart_item_delete'), )
Make sure that ID will not match the first CartItems rule EVERY time ("//" was in regex).
## Code Before: from django.conf.urls.defaults import url, patterns from shop.views.cart import CartDetails, CartItemDetail urlpatterns = patterns('', url(r'^delete/$', CartDetails.as_view(action='delete'), # DELETE name='cart_delete'), url('^item/$', CartDetails.as_view(action='post'), # POST name='cart_item_add'), url(r'^$', CartDetails.as_view(), name='cart'), # GET url(r'^update/$', CartDetails.as_view(action='put'), name='cart_update'), # CartItems url('^item/(?P<id>[0-9A-Za-z-_.//]+)$', CartItemDetail.as_view(), name='cart_item'), url('^item/(?P<id>[0-9A-Za-z-_.//]+)/delete$', CartItemDetail.as_view(action='delete'), name='cart_item_delete'), ) ## Instruction: Make sure that ID will not match the first CartItems rule EVERY time ("//" was in regex). ## Code After: from django.conf.urls.defaults import url, patterns from shop.views.cart import CartDetails, CartItemDetail urlpatterns = patterns('', url(r'^delete/$', CartDetails.as_view(action='delete'), # DELETE name='cart_delete'), url('^item/$', CartDetails.as_view(action='post'), # POST name='cart_item_add'), url(r'^$', CartDetails.as_view(), name='cart'), # GET url(r'^update/$', CartDetails.as_view(action='put'), name='cart_update'), # CartItems url('^item/(?P<id>[0-9]+)$', CartItemDetail.as_view(), name='cart_item'), url('^item/(?P<id>[0-9]+)/delete$', CartItemDetail.as_view(action='delete'), name='cart_item_delete'), )
10948cd88d51383e13af0a116703984752092c6a
jenkinsapi_tests/systests/test_jenkins_matrix.py
jenkinsapi_tests/systests/test_jenkins_matrix.py
''' System tests for `jenkinsapi.jenkins` module. ''' import re import time import unittest from jenkinsapi_tests.systests.base import BaseSystemTest from jenkinsapi_tests.systests.job_configs import MATRIX_JOB from jenkinsapi_tests.test_utils.random_strings import random_string class TestMatrixJob(BaseSystemTest): def test_invoke_matrix_job(self): job_name = 'create_%s' % random_string() job = self.jenkins.create_job(job_name, MATRIX_JOB) job.invoke(block=True) b = job.get_last_build() while b.is_running(): time.sleep(1) s = set() for r in b.get_matrix_runs(): self.assertEquals(r.get_number(), b.get_number()) self.assertEquals(r.get_upstream_build(), b) m = re.search(u'\xbb (.*) #\\d+$', r.name) self.assertIsNotNone(m) s.add(m.group(1)) # This is a bad test, it simply verifies that this function does # not crash on a build from a matrix job. self.assertFalse(b.get_master_job_name()) self.assertEqual(s, set(['one', 'two', 'three'])) if __name__ == '__main__': unittest.main()
''' System tests for `jenkinsapi.jenkins` module. ''' import re import time import unittest from jenkinsapi_tests.systests.base import BaseSystemTest from jenkinsapi_tests.systests.job_configs import MATRIX_JOB from jenkinsapi_tests.test_utils.random_strings import random_string class TestMatrixJob(BaseSystemTest): def test_invoke_matrix_job(self): job_name = 'create_%s' % random_string() job = self.jenkins.create_job(job_name, MATRIX_JOB) job.invoke(block=True) build = job.get_last_build() while build.is_running(): time.sleep(1) set_of_groups = set() for run in build.get_matrix_runs(): self.assertEquals(run.get_number(), build.get_number()) self.assertEquals(run.get_upstream_build(), build) match_result = re.search(u'\xbb (.*) #\\d+$', run.name) self.assertIsNotNone(match_result) set_of_groups.add(match_result.group(1)) build.get_master_job_name() # This is a bad test, it simply verifies that this function does # not crash on a build from a matrix job. self.assertFalse(build.get_master_job_name()) self.assertEqual(set_of_groups, set(['one', 'two', 'three'])) if __name__ == '__main__': unittest.main()
Tidy up this test - still quite bad & useless.
Tidy up this test - still quite bad & useless.
Python
mit
imsardine/jenkinsapi,salimfadhley/jenkinsapi,JohnLZeller/jenkinsapi,JohnLZeller/jenkinsapi,aerickson/jenkinsapi,domenkozar/jenkinsapi,zaro0508/jenkinsapi,imsardine/jenkinsapi,zaro0508/jenkinsapi,jduan/jenkinsapi,mistermocha/jenkinsapi,domenkozar/jenkinsapi,salimfadhley/jenkinsapi,zaro0508/jenkinsapi,mistermocha/jenkinsapi,aerickson/jenkinsapi,jduan/jenkinsapi,JohnLZeller/jenkinsapi,mistermocha/jenkinsapi,imsardine/jenkinsapi
''' System tests for `jenkinsapi.jenkins` module. ''' import re import time import unittest from jenkinsapi_tests.systests.base import BaseSystemTest from jenkinsapi_tests.systests.job_configs import MATRIX_JOB from jenkinsapi_tests.test_utils.random_strings import random_string class TestMatrixJob(BaseSystemTest): def test_invoke_matrix_job(self): job_name = 'create_%s' % random_string() job = self.jenkins.create_job(job_name, MATRIX_JOB) job.invoke(block=True) - b = job.get_last_build() + build = job.get_last_build() - while b.is_running(): + while build.is_running(): time.sleep(1) - s = set() + set_of_groups = set() - for r in b.get_matrix_runs(): + for run in build.get_matrix_runs(): - self.assertEquals(r.get_number(), b.get_number()) + self.assertEquals(run.get_number(), build.get_number()) - self.assertEquals(r.get_upstream_build(), b) + self.assertEquals(run.get_upstream_build(), build) - m = re.search(u'\xbb (.*) #\\d+$', r.name) + match_result = re.search(u'\xbb (.*) #\\d+$', run.name) - self.assertIsNotNone(m) + self.assertIsNotNone(match_result) - s.add(m.group(1)) + set_of_groups.add(match_result.group(1)) + build.get_master_job_name() # This is a bad test, it simply verifies that this function does # not crash on a build from a matrix job. - self.assertFalse(b.get_master_job_name()) + self.assertFalse(build.get_master_job_name()) - self.assertEqual(s, set(['one', 'two', 'three'])) + self.assertEqual(set_of_groups, set(['one', 'two', 'three'])) if __name__ == '__main__': unittest.main()
Tidy up this test - still quite bad & useless.
## Code Before: ''' System tests for `jenkinsapi.jenkins` module. ''' import re import time import unittest from jenkinsapi_tests.systests.base import BaseSystemTest from jenkinsapi_tests.systests.job_configs import MATRIX_JOB from jenkinsapi_tests.test_utils.random_strings import random_string class TestMatrixJob(BaseSystemTest): def test_invoke_matrix_job(self): job_name = 'create_%s' % random_string() job = self.jenkins.create_job(job_name, MATRIX_JOB) job.invoke(block=True) b = job.get_last_build() while b.is_running(): time.sleep(1) s = set() for r in b.get_matrix_runs(): self.assertEquals(r.get_number(), b.get_number()) self.assertEquals(r.get_upstream_build(), b) m = re.search(u'\xbb (.*) #\\d+$', r.name) self.assertIsNotNone(m) s.add(m.group(1)) # This is a bad test, it simply verifies that this function does # not crash on a build from a matrix job. self.assertFalse(b.get_master_job_name()) self.assertEqual(s, set(['one', 'two', 'three'])) if __name__ == '__main__': unittest.main() ## Instruction: Tidy up this test - still quite bad & useless. ## Code After: ''' System tests for `jenkinsapi.jenkins` module. 
''' import re import time import unittest from jenkinsapi_tests.systests.base import BaseSystemTest from jenkinsapi_tests.systests.job_configs import MATRIX_JOB from jenkinsapi_tests.test_utils.random_strings import random_string class TestMatrixJob(BaseSystemTest): def test_invoke_matrix_job(self): job_name = 'create_%s' % random_string() job = self.jenkins.create_job(job_name, MATRIX_JOB) job.invoke(block=True) build = job.get_last_build() while build.is_running(): time.sleep(1) set_of_groups = set() for run in build.get_matrix_runs(): self.assertEquals(run.get_number(), build.get_number()) self.assertEquals(run.get_upstream_build(), build) match_result = re.search(u'\xbb (.*) #\\d+$', run.name) self.assertIsNotNone(match_result) set_of_groups.add(match_result.group(1)) build.get_master_job_name() # This is a bad test, it simply verifies that this function does # not crash on a build from a matrix job. self.assertFalse(build.get_master_job_name()) self.assertEqual(set_of_groups, set(['one', 'two', 'three'])) if __name__ == '__main__': unittest.main()
4651d3b5666fe3ddf3bd92b31ee6ffe4a72ce94e
core/api/__init__.py
core/api/__init__.py
import os from flask import Flask, jsonify from flask_pymongo import PyMongo, BSONObjectIdConverter from werkzeug.exceptions import HTTPException, default_exceptions from core.api import settings def create_app(environment=None): app = Flask('veritrans') app.url_map.converters['ObjectId'] = BSONObjectIdConverter # Config app for environment if not environment: environment = os.environ.get('BACKEND_ENVIRONMENT', 'Prod') app.config.from_object('core.api.settings.%s' % environment) # convert exceptions to JSON def make_json_error(ex): response = jsonify( message=str(ex) ) response.status_code = (ex.code if isinstance(ex, HTTPException) else 500) return response for code in default_exceptions.items(): app.error_handler_spec[None][code] = make_json_error from core.api.views.endpoints import api app.register_module(api) return app class API(object): app = None mongo_client = None @staticmethod def init(): env = os.environ.get('SITE_NAME', 'Dev') API.app = create_app(env) API.mongo_client = PyMongo(API.app)
import os from flask import Flask, jsonify from flask_pymongo import PyMongo, BSONObjectIdConverter from werkzeug.exceptions import HTTPException, default_exceptions from core.api import settings def create_app(environment=None): app = Flask('veritrans') app.url_map.converters['ObjectId'] = BSONObjectIdConverter # Config app for environment if not environment: environment = os.environ.get('BACKEND_ENVIRONMENT', 'Prod') app.config.from_object('core.api.settings.%s' % environment) # convert exceptions to JSON def make_json_error(ex): response = jsonify( message=str(ex) ) response.status_code = (ex.code if isinstance(ex, HTTPException) else 500) return response for code in default_exceptions.items(): app.error_handler_spec[None][code] = make_json_error from core.api.views.endpoints import api app.register_module(api) return app class API(object): app = None mongo_client = None @staticmethod def init(): API.app = create_app() API.mongo_client = PyMongo(API.app)
Use Production config unless specified
Use Production config unless specified
Python
mit
onyb/veritrans-payment-portals
import os from flask import Flask, jsonify from flask_pymongo import PyMongo, BSONObjectIdConverter from werkzeug.exceptions import HTTPException, default_exceptions from core.api import settings def create_app(environment=None): app = Flask('veritrans') app.url_map.converters['ObjectId'] = BSONObjectIdConverter # Config app for environment if not environment: environment = os.environ.get('BACKEND_ENVIRONMENT', 'Prod') app.config.from_object('core.api.settings.%s' % environment) # convert exceptions to JSON def make_json_error(ex): response = jsonify( message=str(ex) ) response.status_code = (ex.code if isinstance(ex, HTTPException) else 500) return response for code in default_exceptions.items(): app.error_handler_spec[None][code] = make_json_error from core.api.views.endpoints import api app.register_module(api) return app class API(object): app = None mongo_client = None @staticmethod def init(): - env = os.environ.get('SITE_NAME', 'Dev') - API.app = create_app(env) + API.app = create_app() API.mongo_client = PyMongo(API.app)
Use Production config unless specified
## Code Before: import os from flask import Flask, jsonify from flask_pymongo import PyMongo, BSONObjectIdConverter from werkzeug.exceptions import HTTPException, default_exceptions from core.api import settings def create_app(environment=None): app = Flask('veritrans') app.url_map.converters['ObjectId'] = BSONObjectIdConverter # Config app for environment if not environment: environment = os.environ.get('BACKEND_ENVIRONMENT', 'Prod') app.config.from_object('core.api.settings.%s' % environment) # convert exceptions to JSON def make_json_error(ex): response = jsonify( message=str(ex) ) response.status_code = (ex.code if isinstance(ex, HTTPException) else 500) return response for code in default_exceptions.items(): app.error_handler_spec[None][code] = make_json_error from core.api.views.endpoints import api app.register_module(api) return app class API(object): app = None mongo_client = None @staticmethod def init(): env = os.environ.get('SITE_NAME', 'Dev') API.app = create_app(env) API.mongo_client = PyMongo(API.app) ## Instruction: Use Production config unless specified ## Code After: import os from flask import Flask, jsonify from flask_pymongo import PyMongo, BSONObjectIdConverter from werkzeug.exceptions import HTTPException, default_exceptions from core.api import settings def create_app(environment=None): app = Flask('veritrans') app.url_map.converters['ObjectId'] = BSONObjectIdConverter # Config app for environment if not environment: environment = os.environ.get('BACKEND_ENVIRONMENT', 'Prod') app.config.from_object('core.api.settings.%s' % environment) # convert exceptions to JSON def make_json_error(ex): response = jsonify( message=str(ex) ) response.status_code = (ex.code if isinstance(ex, HTTPException) else 500) return response for code in default_exceptions.items(): app.error_handler_spec[None][code] = make_json_error from core.api.views.endpoints import api app.register_module(api) return app class API(object): app = None mongo_client = None 
@staticmethod def init(): API.app = create_app() API.mongo_client = PyMongo(API.app)
ced218643784838d68961a926cc0dd18c3a3f01f
skald/geometry.py
skald/geometry.py
from collections import namedtuple Size = namedtuple("Size", ["width", "height"]) Rectangle = namedtuple("Rectangle", ["x0", "y0", "x1", "y1"]) class Point(namedtuple("Point", ["x", "y"])): """Point in a two-dimensional space. Named tuple implementation that allows for addition and subtraction. """ __slots__ = () def __add__(self, other): x = self.x + other.x y = self.y + other.y return Point(x, y) def __sub__(self, other): x = self.x - other.x y = self.y - other.y return Point(x, y) class Box(namedtuple("Box", ["point", "size"])): __slots__ = () @property def rectangle(self): return Rectangle( x0=self.point.x, y0=self.point.y, x1=self.point.x+self.size.width, y1=self.point.y+self.size.height )
from collections import namedtuple Size = namedtuple("Size", ["width", "height"]) class Rectangle(namedtuple("Rectangle", ["x0", "y0", "x1", "y1"])): def __contains__(self, other): """Check if this rectangle and `other` overlaps eachother. Essentially this is a bit of a hack to be able to write `rect1 in rect2`. """ if self.x0 < other.x0 and self.x1 > other.x1 and \ self.y0 < other.y0 and self.y1 > other.y1: return True return False class Point(namedtuple("Point", ["x", "y"])): """Point in a two-dimensional space. Named tuple implementation that allows for addition and subtraction. """ __slots__ = () def __add__(self, other): x = self.x + other.x y = self.y + other.y return Point(x, y) def __sub__(self, other): x = self.x - other.x y = self.y - other.y return Point(x, y) class Box(namedtuple("Box", ["point", "size"])): __slots__ = () @property def rectangle(self): return Rectangle( x0=self.point.x, y0=self.point.y, x1=self.point.x+self.size.width, y1=self.point.y+self.size.height )
Add intersection test for rectangles
Add intersection test for rectangles
Python
mit
bjornarg/skald,bjornarg/skald
from collections import namedtuple Size = namedtuple("Size", ["width", "height"]) + - Rectangle = namedtuple("Rectangle", ["x0", "y0", "x1", "y1"]) + class Rectangle(namedtuple("Rectangle", ["x0", "y0", "x1", "y1"])): + def __contains__(self, other): + """Check if this rectangle and `other` overlaps eachother. + + Essentially this is a bit of a hack to be able to write + `rect1 in rect2`. + """ + if self.x0 < other.x0 and self.x1 > other.x1 and \ + self.y0 < other.y0 and self.y1 > other.y1: + return True + return False + class Point(namedtuple("Point", ["x", "y"])): """Point in a two-dimensional space. Named tuple implementation that allows for addition and subtraction. """ __slots__ = () def __add__(self, other): x = self.x + other.x y = self.y + other.y return Point(x, y) def __sub__(self, other): x = self.x - other.x y = self.y - other.y return Point(x, y) class Box(namedtuple("Box", ["point", "size"])): __slots__ = () @property def rectangle(self): return Rectangle( x0=self.point.x, y0=self.point.y, x1=self.point.x+self.size.width, y1=self.point.y+self.size.height )
Add intersection test for rectangles
## Code Before: from collections import namedtuple Size = namedtuple("Size", ["width", "height"]) Rectangle = namedtuple("Rectangle", ["x0", "y0", "x1", "y1"]) class Point(namedtuple("Point", ["x", "y"])): """Point in a two-dimensional space. Named tuple implementation that allows for addition and subtraction. """ __slots__ = () def __add__(self, other): x = self.x + other.x y = self.y + other.y return Point(x, y) def __sub__(self, other): x = self.x - other.x y = self.y - other.y return Point(x, y) class Box(namedtuple("Box", ["point", "size"])): __slots__ = () @property def rectangle(self): return Rectangle( x0=self.point.x, y0=self.point.y, x1=self.point.x+self.size.width, y1=self.point.y+self.size.height ) ## Instruction: Add intersection test for rectangles ## Code After: from collections import namedtuple Size = namedtuple("Size", ["width", "height"]) class Rectangle(namedtuple("Rectangle", ["x0", "y0", "x1", "y1"])): def __contains__(self, other): """Check if this rectangle and `other` overlaps eachother. Essentially this is a bit of a hack to be able to write `rect1 in rect2`. """ if self.x0 < other.x0 and self.x1 > other.x1 and \ self.y0 < other.y0 and self.y1 > other.y1: return True return False class Point(namedtuple("Point", ["x", "y"])): """Point in a two-dimensional space. Named tuple implementation that allows for addition and subtraction. """ __slots__ = () def __add__(self, other): x = self.x + other.x y = self.y + other.y return Point(x, y) def __sub__(self, other): x = self.x - other.x y = self.y - other.y return Point(x, y) class Box(namedtuple("Box", ["point", "size"])): __slots__ = () @property def rectangle(self): return Rectangle( x0=self.point.x, y0=self.point.y, x1=self.point.x+self.size.width, y1=self.point.y+self.size.height )
8f03f51c89aeea44943f9cb0b39330e676ae0089
utils.py
utils.py
import vx from contextlib import contextmanager from functools import partial import sys from io import StringIO def _expose(f=None, name=None): if f is None: return partial(_expose, name=name) if name is None: name = f.__name__.lstrip('_') if getattr(vx, name, None) is not None: raise AttributeError("Cannot expose duplicate name: '{}'".format(name)) setattr(vx, name, f) return f vx.expose = _expose @vx.expose def _repeat(c, times=4): for _ in range(times): c() @vx.expose @contextmanager def _cursor_wander(command=None, window=None): if window is None: window = vx.window.focused_window y, x = vx.get_linecol_window(window) if command is not None: command() yp, xp = vx.get_linecol_window(window) yield (yp, xp) vx.set_linecol_window(window, y, x) @contextmanager def stdoutIO(stdout=None): old = sys.stdout if stdout is None: stdout = StringIO() sys.stdout = stdout yield stdout sys.stdout = old
import vx from contextlib import contextmanager from functools import partial import sys from io import StringIO def _expose(f=None, name=None): if f is None: return partial(_expose, name=name) if name is None: name = f.__name__.lstrip('_') if getattr(vx, name, None) is not None: raise AttributeError("Cannot expose duplicate name: '{}'".format(name)) setattr(vx, name, f) return f vx.expose = _expose @vx.expose def _repeat(c, times=4): res = [] for _ in range(times): res.append(c()) return res @vx.expose @contextmanager def _cursor_wander(command=None, window=None): if window is None: window = vx.window.focused_window y, x = vx.get_linecol_window(window) if command is not None: command() yp, xp = vx.get_linecol_window(window) yield (yp, xp) vx.set_linecol_window(window, y, x) @contextmanager def stdoutIO(stdout=None): old = sys.stdout if stdout is None: stdout = StringIO() sys.stdout = stdout yield stdout sys.stdout = old
Change repeat command to return a list of the results of the repeated commands
Change repeat command to return a list of the results of the repeated commands
Python
mit
philipdexter/vx,philipdexter/vx
import vx from contextlib import contextmanager from functools import partial import sys from io import StringIO def _expose(f=None, name=None): if f is None: return partial(_expose, name=name) if name is None: name = f.__name__.lstrip('_') if getattr(vx, name, None) is not None: raise AttributeError("Cannot expose duplicate name: '{}'".format(name)) setattr(vx, name, f) return f vx.expose = _expose @vx.expose def _repeat(c, times=4): + res = [] for _ in range(times): - c() + res.append(c()) + return res @vx.expose @contextmanager def _cursor_wander(command=None, window=None): if window is None: window = vx.window.focused_window y, x = vx.get_linecol_window(window) if command is not None: command() yp, xp = vx.get_linecol_window(window) yield (yp, xp) vx.set_linecol_window(window, y, x) @contextmanager def stdoutIO(stdout=None): old = sys.stdout if stdout is None: stdout = StringIO() sys.stdout = stdout yield stdout sys.stdout = old
Change repeat command to return a list of the results of the repeated commands
## Code Before: import vx from contextlib import contextmanager from functools import partial import sys from io import StringIO def _expose(f=None, name=None): if f is None: return partial(_expose, name=name) if name is None: name = f.__name__.lstrip('_') if getattr(vx, name, None) is not None: raise AttributeError("Cannot expose duplicate name: '{}'".format(name)) setattr(vx, name, f) return f vx.expose = _expose @vx.expose def _repeat(c, times=4): for _ in range(times): c() @vx.expose @contextmanager def _cursor_wander(command=None, window=None): if window is None: window = vx.window.focused_window y, x = vx.get_linecol_window(window) if command is not None: command() yp, xp = vx.get_linecol_window(window) yield (yp, xp) vx.set_linecol_window(window, y, x) @contextmanager def stdoutIO(stdout=None): old = sys.stdout if stdout is None: stdout = StringIO() sys.stdout = stdout yield stdout sys.stdout = old ## Instruction: Change repeat command to return a list of the results of the repeated commands ## Code After: import vx from contextlib import contextmanager from functools import partial import sys from io import StringIO def _expose(f=None, name=None): if f is None: return partial(_expose, name=name) if name is None: name = f.__name__.lstrip('_') if getattr(vx, name, None) is not None: raise AttributeError("Cannot expose duplicate name: '{}'".format(name)) setattr(vx, name, f) return f vx.expose = _expose @vx.expose def _repeat(c, times=4): res = [] for _ in range(times): res.append(c()) return res @vx.expose @contextmanager def _cursor_wander(command=None, window=None): if window is None: window = vx.window.focused_window y, x = vx.get_linecol_window(window) if command is not None: command() yp, xp = vx.get_linecol_window(window) yield (yp, xp) vx.set_linecol_window(window, y, x) @contextmanager def stdoutIO(stdout=None): old = sys.stdout if stdout is None: stdout = StringIO() sys.stdout = stdout yield stdout sys.stdout = old
822e6123cc598b4f6a0eafedfb2f0d0cbfba5f37
currencies/migrations/0003_auto_20151216_1906.py
currencies/migrations/0003_auto_20151216_1906.py
from __future__ import unicode_literals from django.db import migrations from extra_countries.models import ExtraCountry def add_currencies_with_countries(apps, schema_editor): # We can't import the model directly as it may be a newer # version than this migration expects. We use the historical version. Currency = apps.get_model("currencies", "Currency") for extra_country in ExtraCountry.objects.all(): print("seeding currency for county: %s" % extra_country.country.name) # trying to find a currency with the same code first try: currency = Currency.objects.get(code=extra_country.country.currency) except Currency.DoesNotExist: # no such currency yet currency = Currency(code=extra_country.country.currency, name=extra_country.country.currency_name) currency.save() currency.countries.add(extra_country.pk) def reverse_data(apps, schema_editor): Currency = apps.get_model("currencies", "Currency") Currency.objects.all().delete() class Migration(migrations.Migration): dependencies = [ ('currencies', '0002_currency_countries'), ] operations = [ migrations.RunPython(add_currencies_with_countries, reverse_data) ]
from __future__ import unicode_literals from django.db import migrations from extra_countries.models import ExtraCountry def add_currencies_with_countries(apps, schema_editor): # We can't import the model directly as it may be a newer # version than this migration expects. We use the historical version. Currency = apps.get_model("currencies", "Currency") for extra_country in ExtraCountry.objects.all(): print("seeding currency for county: %s" % extra_country.country.name) # trying to find a currency with the same code first try: currency = Currency.objects.get(code=extra_country.country.currency) except Currency.DoesNotExist: # no such currency yet currency = Currency(code=extra_country.country.currency, name=extra_country.country.currency_name) if (str(extra_country.country.currency) == '') or (str(extra_country.country.currency_name) == ''): pass else: currency.save() currency.countries.add(extra_country.pk) def reverse_data(apps, schema_editor): Currency = apps.get_model("currencies", "Currency") Currency.objects.all().delete() class Migration(migrations.Migration): dependencies = [ ('currencies', '0002_currency_countries'), ] operations = [ migrations.RunPython(add_currencies_with_countries, reverse_data) ]
Fix currencies seeding, so it won't have empty currencies
Fix currencies seeding, so it won't have empty currencies
Python
mit
openspending/cosmopolitan,kiote/cosmopolitan
from __future__ import unicode_literals from django.db import migrations from extra_countries.models import ExtraCountry def add_currencies_with_countries(apps, schema_editor): # We can't import the model directly as it may be a newer # version than this migration expects. We use the historical version. Currency = apps.get_model("currencies", "Currency") for extra_country in ExtraCountry.objects.all(): print("seeding currency for county: %s" % extra_country.country.name) # trying to find a currency with the same code first try: currency = Currency.objects.get(code=extra_country.country.currency) except Currency.DoesNotExist: # no such currency yet currency = Currency(code=extra_country.country.currency, name=extra_country.country.currency_name) + if (str(extra_country.country.currency) == '') or (str(extra_country.country.currency_name) == ''): + pass + else: - currency.save() + currency.save() - currency.countries.add(extra_country.pk) + currency.countries.add(extra_country.pk) def reverse_data(apps, schema_editor): Currency = apps.get_model("currencies", "Currency") Currency.objects.all().delete() class Migration(migrations.Migration): dependencies = [ ('currencies', '0002_currency_countries'), ] operations = [ migrations.RunPython(add_currencies_with_countries, reverse_data) ]
Fix currencies seeding, so it won't have empty currencies
## Code Before: from __future__ import unicode_literals from django.db import migrations from extra_countries.models import ExtraCountry def add_currencies_with_countries(apps, schema_editor): # We can't import the model directly as it may be a newer # version than this migration expects. We use the historical version. Currency = apps.get_model("currencies", "Currency") for extra_country in ExtraCountry.objects.all(): print("seeding currency for county: %s" % extra_country.country.name) # trying to find a currency with the same code first try: currency = Currency.objects.get(code=extra_country.country.currency) except Currency.DoesNotExist: # no such currency yet currency = Currency(code=extra_country.country.currency, name=extra_country.country.currency_name) currency.save() currency.countries.add(extra_country.pk) def reverse_data(apps, schema_editor): Currency = apps.get_model("currencies", "Currency") Currency.objects.all().delete() class Migration(migrations.Migration): dependencies = [ ('currencies', '0002_currency_countries'), ] operations = [ migrations.RunPython(add_currencies_with_countries, reverse_data) ] ## Instruction: Fix currencies seeding, so it won't have empty currencies ## Code After: from __future__ import unicode_literals from django.db import migrations from extra_countries.models import ExtraCountry def add_currencies_with_countries(apps, schema_editor): # We can't import the model directly as it may be a newer # version than this migration expects. We use the historical version. 
Currency = apps.get_model("currencies", "Currency") for extra_country in ExtraCountry.objects.all(): print("seeding currency for county: %s" % extra_country.country.name) # trying to find a currency with the same code first try: currency = Currency.objects.get(code=extra_country.country.currency) except Currency.DoesNotExist: # no such currency yet currency = Currency(code=extra_country.country.currency, name=extra_country.country.currency_name) if (str(extra_country.country.currency) == '') or (str(extra_country.country.currency_name) == ''): pass else: currency.save() currency.countries.add(extra_country.pk) def reverse_data(apps, schema_editor): Currency = apps.get_model("currencies", "Currency") Currency.objects.all().delete() class Migration(migrations.Migration): dependencies = [ ('currencies', '0002_currency_countries'), ] operations = [ migrations.RunPython(add_currencies_with_countries, reverse_data) ]
041b271baa7ae0bbd20c30ac4f70b42fda267e93
mozillians/groups/__init__.py
mozillians/groups/__init__.py
from django.apps import AppConfig CIS_GROUPS = [ 'cis_whitelist', 'nda' ] default_app_config = 'mozillians.groups.GroupConfig' class GroupConfig(AppConfig): name = 'mozillians.groups'
from django.apps import AppConfig CIS_GROUPS = [ 'cis_whitelist', 'nda', 'open-innovation-reps-council' ] default_app_config = 'mozillians.groups.GroupConfig' class GroupConfig(AppConfig): name = 'mozillians.groups'
Add a group in the whitelist.
Add a group in the whitelist.
Python
bsd-3-clause
mozilla/mozillians,akatsoulas/mozillians,mozilla/mozillians,johngian/mozillians,mozilla/mozillians,mozilla/mozillians,akatsoulas/mozillians,akatsoulas/mozillians,johngian/mozillians,johngian/mozillians,johngian/mozillians,akatsoulas/mozillians
from django.apps import AppConfig CIS_GROUPS = [ 'cis_whitelist', - 'nda' + 'nda', + 'open-innovation-reps-council' ] default_app_config = 'mozillians.groups.GroupConfig' class GroupConfig(AppConfig): name = 'mozillians.groups'
Add a group in the whitelist.
## Code Before: from django.apps import AppConfig CIS_GROUPS = [ 'cis_whitelist', 'nda' ] default_app_config = 'mozillians.groups.GroupConfig' class GroupConfig(AppConfig): name = 'mozillians.groups' ## Instruction: Add a group in the whitelist. ## Code After: from django.apps import AppConfig CIS_GROUPS = [ 'cis_whitelist', 'nda', 'open-innovation-reps-council' ] default_app_config = 'mozillians.groups.GroupConfig' class GroupConfig(AppConfig): name = 'mozillians.groups'
199c9bae8e2ad42ee1c8699c678dd56d6074b2de
main/models.py
main/models.py
from django.db import models from django.contrib.auth.models import User import string, random from django import forms # Create your models here. def _generate_default_hashtag(): return "".join(random.choice(string.lowercase) for i in range(3)) class Wall(models.Model): hashtag = models.CharField(max_length=20, help_text='Twitter hashtag to tweet to', default=_generate_default_hashtag()) user = models.ForeignKey(User, editable=False) sms_keyword = models.CharField(max_length=20) def __unicode__(self): return self.name TRAFFIC_SOURCE = ( ('BG', 'Blog'), ('FR', 'Friend'), ('OT', 'Other',) )
from django.db import models from django.contrib.auth.models import User import string, random from django import forms # Create your models here. def _generate_default_hashtag(): return "".join(random.choice(string.lowercase) for i in range(3)) class Wall(models.Model): hashtag = models.CharField(max_length=20, help_text='Twitter hashtag to tweet to', default=_generate_default_hashtag()) user = models.ForeignKey(User, editable=False) sms_keyword = models.CharField(max_length=20) def __unicode__(self): return self.sms_keyword TRAFFIC_SOURCE = ( ('BG', 'Blog'), ('FR', 'Friend'), ('OT', 'Other',) )
Return sms_keyword as wall name
Return sms_keyword as wall name
Python
mit
Aaron1011/texting_wall
from django.db import models from django.contrib.auth.models import User import string, random from django import forms # Create your models here. def _generate_default_hashtag(): return "".join(random.choice(string.lowercase) for i in range(3)) class Wall(models.Model): hashtag = models.CharField(max_length=20, help_text='Twitter hashtag to tweet to', default=_generate_default_hashtag()) user = models.ForeignKey(User, editable=False) sms_keyword = models.CharField(max_length=20) def __unicode__(self): - return self.name + return self.sms_keyword - + TRAFFIC_SOURCE = ( ('BG', 'Blog'), ('FR', 'Friend'), ('OT', 'Other',) ) + +
Return sms_keyword as wall name
## Code Before: from django.db import models from django.contrib.auth.models import User import string, random from django import forms # Create your models here. def _generate_default_hashtag(): return "".join(random.choice(string.lowercase) for i in range(3)) class Wall(models.Model): hashtag = models.CharField(max_length=20, help_text='Twitter hashtag to tweet to', default=_generate_default_hashtag()) user = models.ForeignKey(User, editable=False) sms_keyword = models.CharField(max_length=20) def __unicode__(self): return self.name TRAFFIC_SOURCE = ( ('BG', 'Blog'), ('FR', 'Friend'), ('OT', 'Other',) ) ## Instruction: Return sms_keyword as wall name ## Code After: from django.db import models from django.contrib.auth.models import User import string, random from django import forms # Create your models here. def _generate_default_hashtag(): return "".join(random.choice(string.lowercase) for i in range(3)) class Wall(models.Model): hashtag = models.CharField(max_length=20, help_text='Twitter hashtag to tweet to', default=_generate_default_hashtag()) user = models.ForeignKey(User, editable=False) sms_keyword = models.CharField(max_length=20) def __unicode__(self): return self.sms_keyword TRAFFIC_SOURCE = ( ('BG', 'Blog'), ('FR', 'Friend'), ('OT', 'Other',) )
b6ec3ba9efae7b6b291391b0333e80f2e9fc6fa0
src/waldur_mastermind/invoices/migrations/0053_invoiceitem_uuid.py
src/waldur_mastermind/invoices/migrations/0053_invoiceitem_uuid.py
import uuid from django.db import migrations import waldur_core.core.fields def gen_uuid(apps, schema_editor): InvoiceItem = apps.get_model('invoices', 'InvoiceItem') for row in InvoiceItem.objects.all(): row.uuid = uuid.uuid4().hex row.save(update_fields=['uuid']) class Migration(migrations.Migration): dependencies = [ ('invoices', '0052_delete_servicedowntime'), ] operations = [ migrations.AddField( model_name='invoiceitem', name='uuid', field=waldur_core.core.fields.UUIDField(null=True), ), migrations.RunPython(gen_uuid, elidable=True), migrations.AlterField( model_name='invoiceitem', name='uuid', field=waldur_core.core.fields.UUIDField(), ), ]
import uuid from django.db import migrations, models import waldur_core.core.fields def gen_uuid(apps, schema_editor): InvoiceItem = apps.get_model('invoices', 'InvoiceItem') for row in InvoiceItem.objects.all(): row.uuid = uuid.uuid4().hex row.save(update_fields=['uuid']) class Migration(migrations.Migration): dependencies = [ ('invoices', '0052_delete_servicedowntime'), ] operations = [ migrations.AddField( model_name='invoiceitem', name='uuid', field=models.UUIDField(null=True), ), migrations.RunPython(gen_uuid, elidable=True), migrations.AlterField( model_name='invoiceitem', name='uuid', field=waldur_core.core.fields.UUIDField(), ), ]
Fix database migration script for UUID field in invoice item model.
Fix database migration script for UUID field in invoice item model.
Python
mit
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind
import uuid - from django.db import migrations + from django.db import migrations, models import waldur_core.core.fields def gen_uuid(apps, schema_editor): InvoiceItem = apps.get_model('invoices', 'InvoiceItem') for row in InvoiceItem.objects.all(): row.uuid = uuid.uuid4().hex row.save(update_fields=['uuid']) class Migration(migrations.Migration): dependencies = [ ('invoices', '0052_delete_servicedowntime'), ] operations = [ migrations.AddField( + model_name='invoiceitem', name='uuid', field=models.UUIDField(null=True), - model_name='invoiceitem', - name='uuid', - field=waldur_core.core.fields.UUIDField(null=True), ), migrations.RunPython(gen_uuid, elidable=True), migrations.AlterField( model_name='invoiceitem', name='uuid', field=waldur_core.core.fields.UUIDField(), ), ]
Fix database migration script for UUID field in invoice item model.
## Code Before: import uuid from django.db import migrations import waldur_core.core.fields def gen_uuid(apps, schema_editor): InvoiceItem = apps.get_model('invoices', 'InvoiceItem') for row in InvoiceItem.objects.all(): row.uuid = uuid.uuid4().hex row.save(update_fields=['uuid']) class Migration(migrations.Migration): dependencies = [ ('invoices', '0052_delete_servicedowntime'), ] operations = [ migrations.AddField( model_name='invoiceitem', name='uuid', field=waldur_core.core.fields.UUIDField(null=True), ), migrations.RunPython(gen_uuid, elidable=True), migrations.AlterField( model_name='invoiceitem', name='uuid', field=waldur_core.core.fields.UUIDField(), ), ] ## Instruction: Fix database migration script for UUID field in invoice item model. ## Code After: import uuid from django.db import migrations, models import waldur_core.core.fields def gen_uuid(apps, schema_editor): InvoiceItem = apps.get_model('invoices', 'InvoiceItem') for row in InvoiceItem.objects.all(): row.uuid = uuid.uuid4().hex row.save(update_fields=['uuid']) class Migration(migrations.Migration): dependencies = [ ('invoices', '0052_delete_servicedowntime'), ] operations = [ migrations.AddField( model_name='invoiceitem', name='uuid', field=models.UUIDField(null=True), ), migrations.RunPython(gen_uuid, elidable=True), migrations.AlterField( model_name='invoiceitem', name='uuid', field=waldur_core.core.fields.UUIDField(), ), ]
9b19d366c7e1cf41ffc6af4eaed789995ddc5cc2
byceps/blueprints/core_admin/views.py
byceps/blueprints/core_admin/views.py
from ...services.brand import service as brand_service from ...util.framework.blueprint import create_blueprint from ..authorization.registry import permission_registry from .authorization import AdminPermission blueprint = create_blueprint('core_admin', __name__) permission_registry.register_enum(AdminPermission) @blueprint.app_context_processor def inject_brands(): brands = brand_service.get_brands() return { 'all_brands': brands, }
from ...services.brand import service as brand_service from ...util.framework.blueprint import create_blueprint from ..authorization.registry import permission_registry from .authorization import AdminPermission blueprint = create_blueprint('core_admin', __name__) permission_registry.register_enum(AdminPermission) @blueprint.app_context_processor def inject_template_variables(): brands = brand_service.get_brands() return { 'all_brands': brands, }
Generalize name of function to inject admin template variables
Generalize name of function to inject admin template variables
Python
bsd-3-clause
homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps
from ...services.brand import service as brand_service from ...util.framework.blueprint import create_blueprint from ..authorization.registry import permission_registry from .authorization import AdminPermission blueprint = create_blueprint('core_admin', __name__) permission_registry.register_enum(AdminPermission) @blueprint.app_context_processor - def inject_brands(): + def inject_template_variables(): brands = brand_service.get_brands() return { 'all_brands': brands, }
Generalize name of function to inject admin template variables
## Code Before: from ...services.brand import service as brand_service from ...util.framework.blueprint import create_blueprint from ..authorization.registry import permission_registry from .authorization import AdminPermission blueprint = create_blueprint('core_admin', __name__) permission_registry.register_enum(AdminPermission) @blueprint.app_context_processor def inject_brands(): brands = brand_service.get_brands() return { 'all_brands': brands, } ## Instruction: Generalize name of function to inject admin template variables ## Code After: from ...services.brand import service as brand_service from ...util.framework.blueprint import create_blueprint from ..authorization.registry import permission_registry from .authorization import AdminPermission blueprint = create_blueprint('core_admin', __name__) permission_registry.register_enum(AdminPermission) @blueprint.app_context_processor def inject_template_variables(): brands = brand_service.get_brands() return { 'all_brands': brands, }
346ffdb3e3836e2931f838a6dd929a325da0d5e6
tests/test_arithmetic.py
tests/test_arithmetic.py
from intervals import Interval class TestArithmeticOperators(object): def test_add_operator(self): assert Interval(1, 2) + Interval(1, 2) == Interval(2, 4) def test_sub_operator(self): assert Interval(1, 3) - Interval(1, 2) == Interval(-1, 2) def test_isub_operator(self): range_ = Interval(1, 3) range_ -= Interval(1, 2) assert range_ == Interval(-1, 2) def test_iadd_operator(self): range_ = Interval(1, 2) range_ += Interval(1, 2) assert range_ == Interval(2, 4)
from pytest import mark from intervals import Interval class TestArithmeticOperators(object): def test_add_operator(self): assert Interval(1, 2) + Interval(1, 2) == Interval(2, 4) def test_sub_operator(self): assert Interval(1, 3) - Interval(1, 2) == Interval(-1, 2) def test_isub_operator(self): range_ = Interval(1, 3) range_ -= Interval(1, 2) assert range_ == Interval(-1, 2) def test_iadd_operator(self): range_ = Interval(1, 2) range_ += Interval(1, 2) assert range_ == Interval(2, 4) @mark.parametrize(('first', 'second', 'intersection'), ( ('[1, 5]', '[2, 9]', '[2, 5]'), ('[3, 4]', '[3, 9]', '[3, 4]'), ('(3, 6]', '[2, 6)', '(3, 6)') )) def test_intersection(self, first, second, intersection): Interval(first) & Interval(second) == Interval(intersection)
Add some tests for intersection
Add some tests for intersection
Python
bsd-3-clause
kvesteri/intervals
+ from pytest import mark from intervals import Interval class TestArithmeticOperators(object): def test_add_operator(self): assert Interval(1, 2) + Interval(1, 2) == Interval(2, 4) def test_sub_operator(self): assert Interval(1, 3) - Interval(1, 2) == Interval(-1, 2) def test_isub_operator(self): range_ = Interval(1, 3) range_ -= Interval(1, 2) assert range_ == Interval(-1, 2) def test_iadd_operator(self): range_ = Interval(1, 2) range_ += Interval(1, 2) assert range_ == Interval(2, 4) + @mark.parametrize(('first', 'second', 'intersection'), ( + ('[1, 5]', '[2, 9]', '[2, 5]'), + ('[3, 4]', '[3, 9]', '[3, 4]'), + ('(3, 6]', '[2, 6)', '(3, 6)') + )) + def test_intersection(self, first, second, intersection): + Interval(first) & Interval(second) == Interval(intersection) +
Add some tests for intersection
## Code Before: from intervals import Interval class TestArithmeticOperators(object): def test_add_operator(self): assert Interval(1, 2) + Interval(1, 2) == Interval(2, 4) def test_sub_operator(self): assert Interval(1, 3) - Interval(1, 2) == Interval(-1, 2) def test_isub_operator(self): range_ = Interval(1, 3) range_ -= Interval(1, 2) assert range_ == Interval(-1, 2) def test_iadd_operator(self): range_ = Interval(1, 2) range_ += Interval(1, 2) assert range_ == Interval(2, 4) ## Instruction: Add some tests for intersection ## Code After: from pytest import mark from intervals import Interval class TestArithmeticOperators(object): def test_add_operator(self): assert Interval(1, 2) + Interval(1, 2) == Interval(2, 4) def test_sub_operator(self): assert Interval(1, 3) - Interval(1, 2) == Interval(-1, 2) def test_isub_operator(self): range_ = Interval(1, 3) range_ -= Interval(1, 2) assert range_ == Interval(-1, 2) def test_iadd_operator(self): range_ = Interval(1, 2) range_ += Interval(1, 2) assert range_ == Interval(2, 4) @mark.parametrize(('first', 'second', 'intersection'), ( ('[1, 5]', '[2, 9]', '[2, 5]'), ('[3, 4]', '[3, 9]', '[3, 4]'), ('(3, 6]', '[2, 6)', '(3, 6)') )) def test_intersection(self, first, second, intersection): Interval(first) & Interval(second) == Interval(intersection)
4cfd8771b91c7c2b9f28ca4b9776e9770683093b
frigg/builds/admin.py
frigg/builds/admin.py
from django.contrib import admin from .models import Build, BuildResult, Project class BuildResultInline(admin.StackedInline): model = BuildResult readonly_fields = ('result_log', 'succeeded', 'return_code') extra = 0 max_num = 0 class BuildInline(admin.TabularInline): model = Build readonly_fields = ('build_number', 'branch', 'color', 'pull_request_id', 'sha') extra = 0 max_num = 0 @admin.register(Project) class ProjectAdmin(admin.ModelAdmin): list_display = ('__str__', 'git_repository', 'average_time', 'last_build_number') inlines = [BuildInline] list_filter = ['owner'] @admin.register(Build) class BuildAdmin(admin.ModelAdmin): list_display = ('build_number', 'project', 'branch', 'pull_request_id', 'sha', 'color') inlines = [BuildResultInline] list_filter = ['project'] @admin.register(BuildResult) class BuildResultAdmin(admin.ModelAdmin): list_display = ('__str__', 'succeeded', 'return_code', 'coverage')
from django.contrib import admin from django.template.defaultfilters import pluralize from .models import Build, BuildResult, Project class BuildResultInline(admin.StackedInline): model = BuildResult readonly_fields = ('result_log', 'succeeded', 'return_code') extra = 0 max_num = 0 class BuildInline(admin.TabularInline): model = Build readonly_fields = ('build_number', 'branch', 'color', 'pull_request_id', 'sha') extra = 0 max_num = 0 @admin.register(Project) class ProjectAdmin(admin.ModelAdmin): list_display = ('__str__', 'git_repository', 'average_time', 'last_build_number') inlines = [BuildInline] list_filter = ['owner'] @admin.register(Build) class BuildAdmin(admin.ModelAdmin): list_display = ('build_number', 'project', 'branch', 'pull_request_id', 'sha', 'color') inlines = [BuildResultInline] list_filter = ['project'] actions = ['restart_build'] def restart_build(self, request, queryset): for build in queryset: build.start() self.message_user( request, '{} build{} was restarted'.format(len(queryset), pluralize(len(queryset))) ) restart_build.short_description = 'Restart selected builds' @admin.register(BuildResult) class BuildResultAdmin(admin.ModelAdmin): list_display = ('__str__', 'succeeded', 'return_code', 'coverage')
Add restart_build action to BuildAdmin
Add restart_build action to BuildAdmin
Python
mit
frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq
from django.contrib import admin + from django.template.defaultfilters import pluralize from .models import Build, BuildResult, Project class BuildResultInline(admin.StackedInline): model = BuildResult readonly_fields = ('result_log', 'succeeded', 'return_code') extra = 0 max_num = 0 class BuildInline(admin.TabularInline): model = Build readonly_fields = ('build_number', 'branch', 'color', 'pull_request_id', 'sha') extra = 0 max_num = 0 @admin.register(Project) class ProjectAdmin(admin.ModelAdmin): list_display = ('__str__', 'git_repository', 'average_time', 'last_build_number') inlines = [BuildInline] list_filter = ['owner'] @admin.register(Build) class BuildAdmin(admin.ModelAdmin): list_display = ('build_number', 'project', 'branch', 'pull_request_id', 'sha', 'color') inlines = [BuildResultInline] list_filter = ['project'] + actions = ['restart_build'] + + def restart_build(self, request, queryset): + for build in queryset: + build.start() + + self.message_user( + request, + '{} build{} was restarted'.format(len(queryset), pluralize(len(queryset))) + ) + + restart_build.short_description = 'Restart selected builds' @admin.register(BuildResult) class BuildResultAdmin(admin.ModelAdmin): list_display = ('__str__', 'succeeded', 'return_code', 'coverage')
Add restart_build action to BuildAdmin
## Code Before: from django.contrib import admin from .models import Build, BuildResult, Project class BuildResultInline(admin.StackedInline): model = BuildResult readonly_fields = ('result_log', 'succeeded', 'return_code') extra = 0 max_num = 0 class BuildInline(admin.TabularInline): model = Build readonly_fields = ('build_number', 'branch', 'color', 'pull_request_id', 'sha') extra = 0 max_num = 0 @admin.register(Project) class ProjectAdmin(admin.ModelAdmin): list_display = ('__str__', 'git_repository', 'average_time', 'last_build_number') inlines = [BuildInline] list_filter = ['owner'] @admin.register(Build) class BuildAdmin(admin.ModelAdmin): list_display = ('build_number', 'project', 'branch', 'pull_request_id', 'sha', 'color') inlines = [BuildResultInline] list_filter = ['project'] @admin.register(BuildResult) class BuildResultAdmin(admin.ModelAdmin): list_display = ('__str__', 'succeeded', 'return_code', 'coverage') ## Instruction: Add restart_build action to BuildAdmin ## Code After: from django.contrib import admin from django.template.defaultfilters import pluralize from .models import Build, BuildResult, Project class BuildResultInline(admin.StackedInline): model = BuildResult readonly_fields = ('result_log', 'succeeded', 'return_code') extra = 0 max_num = 0 class BuildInline(admin.TabularInline): model = Build readonly_fields = ('build_number', 'branch', 'color', 'pull_request_id', 'sha') extra = 0 max_num = 0 @admin.register(Project) class ProjectAdmin(admin.ModelAdmin): list_display = ('__str__', 'git_repository', 'average_time', 'last_build_number') inlines = [BuildInline] list_filter = ['owner'] @admin.register(Build) class BuildAdmin(admin.ModelAdmin): list_display = ('build_number', 'project', 'branch', 'pull_request_id', 'sha', 'color') inlines = [BuildResultInline] list_filter = ['project'] actions = ['restart_build'] def restart_build(self, request, queryset): for build in queryset: build.start() self.message_user( request, '{} build{} was 
restarted'.format(len(queryset), pluralize(len(queryset))) ) restart_build.short_description = 'Restart selected builds' @admin.register(BuildResult) class BuildResultAdmin(admin.ModelAdmin): list_display = ('__str__', 'succeeded', 'return_code', 'coverage')
b1b1392d2f268a5c74fd21c826a3ea6387567cab
froide/bounce/apps.py
froide/bounce/apps.py
from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ class BounceConfig(AppConfig): name = 'froide.bounce' verbose_name = _('Bounce') def ready(self): from froide.account import account_canceled account_canceled.connect(cancel_user) def cancel_user(sender, user=None, **kwargs): from .models import Bounce if user is None: return Bounce.objects.filter(user=user).delete()
import json from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ class BounceConfig(AppConfig): name = 'froide.bounce' verbose_name = _('Bounce') def ready(self): from froide.account import account_canceled from froide.account.export import registry account_canceled.connect(cancel_user) registry.register(export_user_data) def cancel_user(sender, user=None, **kwargs): from .models import Bounce if user is None: return Bounce.objects.filter(user=user).delete() def export_user_data(user): from .models import Bounce bounces = Bounce.objects.filter(user=user) if not bounces: return yield ('bounces.json', json.dumps([ { 'last_update': ( b.last_update.isoformat() if b.last_update else None ), 'bounces': b.bounces, 'email': b.email, } for b in bounces]).encode('utf-8') )
Add user data export for bounce handling
Add user data export for bounce handling
Python
mit
fin/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide
+ import json + from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ class BounceConfig(AppConfig): name = 'froide.bounce' verbose_name = _('Bounce') def ready(self): from froide.account import account_canceled + from froide.account.export import registry account_canceled.connect(cancel_user) + registry.register(export_user_data) def cancel_user(sender, user=None, **kwargs): from .models import Bounce if user is None: return Bounce.objects.filter(user=user).delete() + + def export_user_data(user): + from .models import Bounce + + bounces = Bounce.objects.filter(user=user) + if not bounces: + return + yield ('bounces.json', json.dumps([ + { + 'last_update': ( + b.last_update.isoformat() if b.last_update else None + ), + 'bounces': b.bounces, + 'email': b.email, + } + for b in bounces]).encode('utf-8') + ) +
Add user data export for bounce handling
## Code Before: from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ class BounceConfig(AppConfig): name = 'froide.bounce' verbose_name = _('Bounce') def ready(self): from froide.account import account_canceled account_canceled.connect(cancel_user) def cancel_user(sender, user=None, **kwargs): from .models import Bounce if user is None: return Bounce.objects.filter(user=user).delete() ## Instruction: Add user data export for bounce handling ## Code After: import json from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ class BounceConfig(AppConfig): name = 'froide.bounce' verbose_name = _('Bounce') def ready(self): from froide.account import account_canceled from froide.account.export import registry account_canceled.connect(cancel_user) registry.register(export_user_data) def cancel_user(sender, user=None, **kwargs): from .models import Bounce if user is None: return Bounce.objects.filter(user=user).delete() def export_user_data(user): from .models import Bounce bounces = Bounce.objects.filter(user=user) if not bounces: return yield ('bounces.json', json.dumps([ { 'last_update': ( b.last_update.isoformat() if b.last_update else None ), 'bounces': b.bounces, 'email': b.email, } for b in bounces]).encode('utf-8') )
83ea38ee5616b1637cc2d983d4518d83793c7b72
lint/events.py
lint/events.py
from collections import defaultdict import traceback LINT_START = 'LINT_START' LINT_RESULT = 'LINT_RESULT' LINT_END = 'LINT_END' listeners = defaultdict(set) def subscribe(topic, fn): listeners[topic].add(fn) def unsubscribe(topic, fn): try: listeners[topic].remove(fn) except KeyError: pass def broadcast(topic, message=None): payload = message.copy() or {} for fn in listeners.get(topic, []): try: fn(**payload) except Exception: traceback.print_exc() map_fn_to_topic = {} def on(topic): def inner(fn): subscribe(topic, fn) map_fn_to_topic[fn] = topic return fn return inner def off(fn): topic = map_fn_to_topic.get(fn, None) if topic: unsubscribe(topic, fn)
from collections import defaultdict import traceback LINT_START = 'LINT_START' # (buffer_id) LINT_RESULT = 'LINT_RESULT' # (buffer_id, linter_name, errors) LINT_END = 'LINT_END' # (buffer_id) listeners = defaultdict(set) def subscribe(topic, fn): listeners[topic].add(fn) def unsubscribe(topic, fn): try: listeners[topic].remove(fn) except KeyError: pass def broadcast(topic, message=None): payload = message.copy() or {} for fn in listeners.get(topic, []): try: fn(**payload) except Exception: traceback.print_exc() map_fn_to_topic = {} def on(topic): def inner(fn): subscribe(topic, fn) map_fn_to_topic[fn] = topic return fn return inner def off(fn): topic = map_fn_to_topic.get(fn, None) if topic: unsubscribe(topic, fn)
Add very brief comments about the event types
Add very brief comments about the event types
Python
mit
SublimeLinter/SublimeLinter3,SublimeLinter/SublimeLinter3
from collections import defaultdict import traceback - LINT_START = 'LINT_START' - LINT_RESULT = 'LINT_RESULT' - LINT_END = 'LINT_END' + LINT_START = 'LINT_START' # (buffer_id) + LINT_RESULT = 'LINT_RESULT' # (buffer_id, linter_name, errors) + LINT_END = 'LINT_END' # (buffer_id) listeners = defaultdict(set) def subscribe(topic, fn): listeners[topic].add(fn) def unsubscribe(topic, fn): try: listeners[topic].remove(fn) except KeyError: pass def broadcast(topic, message=None): payload = message.copy() or {} for fn in listeners.get(topic, []): try: fn(**payload) except Exception: traceback.print_exc() map_fn_to_topic = {} def on(topic): def inner(fn): subscribe(topic, fn) map_fn_to_topic[fn] = topic return fn return inner def off(fn): topic = map_fn_to_topic.get(fn, None) if topic: unsubscribe(topic, fn)
Add very brief comments about the event types
## Code Before: from collections import defaultdict import traceback LINT_START = 'LINT_START' LINT_RESULT = 'LINT_RESULT' LINT_END = 'LINT_END' listeners = defaultdict(set) def subscribe(topic, fn): listeners[topic].add(fn) def unsubscribe(topic, fn): try: listeners[topic].remove(fn) except KeyError: pass def broadcast(topic, message=None): payload = message.copy() or {} for fn in listeners.get(topic, []): try: fn(**payload) except Exception: traceback.print_exc() map_fn_to_topic = {} def on(topic): def inner(fn): subscribe(topic, fn) map_fn_to_topic[fn] = topic return fn return inner def off(fn): topic = map_fn_to_topic.get(fn, None) if topic: unsubscribe(topic, fn) ## Instruction: Add very brief comments about the event types ## Code After: from collections import defaultdict import traceback LINT_START = 'LINT_START' # (buffer_id) LINT_RESULT = 'LINT_RESULT' # (buffer_id, linter_name, errors) LINT_END = 'LINT_END' # (buffer_id) listeners = defaultdict(set) def subscribe(topic, fn): listeners[topic].add(fn) def unsubscribe(topic, fn): try: listeners[topic].remove(fn) except KeyError: pass def broadcast(topic, message=None): payload = message.copy() or {} for fn in listeners.get(topic, []): try: fn(**payload) except Exception: traceback.print_exc() map_fn_to_topic = {} def on(topic): def inner(fn): subscribe(topic, fn) map_fn_to_topic[fn] = topic return fn return inner def off(fn): topic = map_fn_to_topic.get(fn, None) if topic: unsubscribe(topic, fn)
92d253fdce108162ab2ce05dd38da971ca42293d
keystone/contrib/kds/common/service.py
keystone/contrib/kds/common/service.py
from oslo.config import cfg from keystone.openstack.common import log CONF = cfg.CONF API_SERVICE_OPTS = [ cfg.StrOpt('bind_ip', default='0.0.0.0', help='IP for the server to bind to'), cfg.IntOpt('port', default=9109, help='The port for the server'), ] CONF.register_opts(API_SERVICE_OPTS) def parse_args(args, default_config_files=None): CONF(args=args[1:], project='kds', default_config_files=default_config_files) def prepare_service(argv=[]): cfg.set_defaults(log.log_opts, default_log_levels=['sqlalchemy=WARN', 'eventlet.wsgi.server=WARN' ]) parse_args(argv) log.setup('kds')
from oslo.config import cfg from keystone.openstack.common import log CONF = cfg.CONF FILE_OPTIONS = { None: [ cfg.StrOpt('bind_ip', default='0.0.0.0', help='IP for the server to bind to'), cfg.IntOpt('port', default=9109, help='The port for the server')]} def configure(conf=None): if conf is None: conf = CONF for group in FILE_OPTIONS: conf.register_opts(FILE_OPTIONS[group], group=group) def parse_args(args, default_config_files=None): CONF(args=args[1:], project='kds', default_config_files=default_config_files) def prepare_service(argv=[]): cfg.set_defaults(log.log_opts, default_log_levels=['sqlalchemy=WARN', 'eventlet.wsgi.server=WARN' ]) parse_args(argv) log.setup('kds') configure()
Restructure KDS options to be more like Keystone's options
Restructure KDS options to be more like Keystone's options Restructure the KDS options to be more closely aligned with the way Keystone options work and allowing movement towards not registering the options on import. This will also prevent KDS options from appearing in the Keystone auto-generated sample config. Change-Id: I073aa58ff3132e2714478f54c88c3a8200ff47da
Python
apache-2.0
rushiagr/keystone,jumpstarter-io/keystone,reeshupatel/demo,dstanek/keystone,idjaw/keystone,jonnary/keystone,vivekdhayaal/keystone,MaheshIBM/keystone,klmitch/keystone,rajalokan/keystone,rajalokan/keystone,nuxeh/keystone,ging/keystone,rushiagr/keystone,takeshineshiro/keystone,ilay09/keystone,nuxeh/keystone,roopali8/keystone,JioCloud/keystone,himanshu-setia/keystone,blueboxgroup/keystone,dims/keystone,vivekdhayaal/keystone,jamielennox/keystone,maestro-hybrid-cloud/keystone,idjaw/keystone,ilay09/keystone,maestro-hybrid-cloud/keystone,ajayaa/keystone,mahak/keystone,mahak/keystone,blueboxgroup/keystone,jamielennox/keystone,cernops/keystone,openstack/keystone,ging/keystone,rushiagr/keystone,rodrigods/keystone,JioCloud/keystone,openstack/keystone,jonnary/keystone,klmitch/keystone,reeshupatel/demo,takeshineshiro/keystone,dstanek/keystone,MaheshIBM/keystone,UTSA-ICS/keystone-kerberos,ajayaa/keystone,dstanek/keystone,rodrigods/keystone,nuxeh/keystone,vivekdhayaal/keystone,rajalokan/keystone,promptworks/keystone,cernops/keystone,jumpstarter-io/keystone,openstack/keystone,mahak/keystone,himanshu-setia/keystone,roopali8/keystone,ilay09/keystone,dims/keystone,reeshupatel/demo,promptworks/keystone,jumpstarter-io/keystone,UTSA-ICS/keystone-kerberos,promptworks/keystone
from oslo.config import cfg from keystone.openstack.common import log CONF = cfg.CONF - API_SERVICE_OPTS = [ + FILE_OPTIONS = { + None: [ - cfg.StrOpt('bind_ip', + cfg.StrOpt('bind_ip', - default='0.0.0.0', + default='0.0.0.0', - help='IP for the server to bind to'), + help='IP for the server to bind to'), - cfg.IntOpt('port', + cfg.IntOpt('port', - default=9109, + default=9109, - help='The port for the server'), + help='The port for the server')]} - ] - CONF.register_opts(API_SERVICE_OPTS) + + def configure(conf=None): + if conf is None: + conf = CONF + + for group in FILE_OPTIONS: + conf.register_opts(FILE_OPTIONS[group], group=group) def parse_args(args, default_config_files=None): CONF(args=args[1:], project='kds', default_config_files=default_config_files) def prepare_service(argv=[]): cfg.set_defaults(log.log_opts, default_log_levels=['sqlalchemy=WARN', 'eventlet.wsgi.server=WARN' ]) parse_args(argv) log.setup('kds') + + configure() +
Restructure KDS options to be more like Keystone's options
## Code Before: from oslo.config import cfg from keystone.openstack.common import log CONF = cfg.CONF API_SERVICE_OPTS = [ cfg.StrOpt('bind_ip', default='0.0.0.0', help='IP for the server to bind to'), cfg.IntOpt('port', default=9109, help='The port for the server'), ] CONF.register_opts(API_SERVICE_OPTS) def parse_args(args, default_config_files=None): CONF(args=args[1:], project='kds', default_config_files=default_config_files) def prepare_service(argv=[]): cfg.set_defaults(log.log_opts, default_log_levels=['sqlalchemy=WARN', 'eventlet.wsgi.server=WARN' ]) parse_args(argv) log.setup('kds') ## Instruction: Restructure KDS options to be more like Keystone's options ## Code After: from oslo.config import cfg from keystone.openstack.common import log CONF = cfg.CONF FILE_OPTIONS = { None: [ cfg.StrOpt('bind_ip', default='0.0.0.0', help='IP for the server to bind to'), cfg.IntOpt('port', default=9109, help='The port for the server')]} def configure(conf=None): if conf is None: conf = CONF for group in FILE_OPTIONS: conf.register_opts(FILE_OPTIONS[group], group=group) def parse_args(args, default_config_files=None): CONF(args=args[1:], project='kds', default_config_files=default_config_files) def prepare_service(argv=[]): cfg.set_defaults(log.log_opts, default_log_levels=['sqlalchemy=WARN', 'eventlet.wsgi.server=WARN' ]) parse_args(argv) log.setup('kds') configure()
eaa13f9005a8aaf8c748a98de697b03eee9e675b
salt/client/netapi.py
salt/client/netapi.py
''' The main entry point for salt-api ''' from __future__ import absolute_import # Import python libs import logging # Import salt-api libs import salt.loader import salt.utils.process logger = logging.getLogger(__name__) class NetapiClient(object): ''' Start each netapi module that is configured to run ''' def __init__(self, opts): self.opts = opts self.process_manager = salt.utils.process.ProcessManager() self.netapi = salt.loader.netapi(self.opts) def run(self): ''' Load and start all available api modules ''' for fun in self.netapi: if fun.endswith('.start'): logger.info('Starting {0} netapi module'.format(fun)) self.process_manager.add_process(self.netapi[fun]) self.process_manager.run()
''' The main entry point for salt-api ''' from __future__ import absolute_import # Import python libs import logging # Import salt-api libs import salt.loader import salt.utils.process logger = logging.getLogger(__name__) class NetapiClient(object): ''' Start each netapi module that is configured to run ''' def __init__(self, opts): self.opts = opts self.process_manager = salt.utils.process.ProcessManager() self.netapi = salt.loader.netapi(self.opts) def run(self): ''' Load and start all available api modules ''' if not len(self.netapi): logger.error("Did not find any netapi configurations, nothing to start") for fun in self.netapi: if fun.endswith('.start'): logger.info('Starting {0} netapi module'.format(fun)) self.process_manager.add_process(self.netapi[fun]) self.process_manager.run()
Add log error if we run salt-api w/ no config
Add log error if we run salt-api w/ no config Currently, the salt-api script will exit with no error or hint of why it failed if there is no netapi module configured. Added a short line if we find no api modules to start, warning the user that the config may be missing. Fixes #28240
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
''' The main entry point for salt-api ''' from __future__ import absolute_import # Import python libs import logging # Import salt-api libs import salt.loader import salt.utils.process logger = logging.getLogger(__name__) class NetapiClient(object): ''' Start each netapi module that is configured to run ''' def __init__(self, opts): self.opts = opts self.process_manager = salt.utils.process.ProcessManager() self.netapi = salt.loader.netapi(self.opts) def run(self): ''' Load and start all available api modules ''' + if not len(self.netapi): + logger.error("Did not find any netapi configurations, nothing to start") + for fun in self.netapi: if fun.endswith('.start'): logger.info('Starting {0} netapi module'.format(fun)) self.process_manager.add_process(self.netapi[fun]) self.process_manager.run()
Add log error if we run salt-api w/ no config
## Code Before: ''' The main entry point for salt-api ''' from __future__ import absolute_import # Import python libs import logging # Import salt-api libs import salt.loader import salt.utils.process logger = logging.getLogger(__name__) class NetapiClient(object): ''' Start each netapi module that is configured to run ''' def __init__(self, opts): self.opts = opts self.process_manager = salt.utils.process.ProcessManager() self.netapi = salt.loader.netapi(self.opts) def run(self): ''' Load and start all available api modules ''' for fun in self.netapi: if fun.endswith('.start'): logger.info('Starting {0} netapi module'.format(fun)) self.process_manager.add_process(self.netapi[fun]) self.process_manager.run() ## Instruction: Add log error if we run salt-api w/ no config ## Code After: ''' The main entry point for salt-api ''' from __future__ import absolute_import # Import python libs import logging # Import salt-api libs import salt.loader import salt.utils.process logger = logging.getLogger(__name__) class NetapiClient(object): ''' Start each netapi module that is configured to run ''' def __init__(self, opts): self.opts = opts self.process_manager = salt.utils.process.ProcessManager() self.netapi = salt.loader.netapi(self.opts) def run(self): ''' Load and start all available api modules ''' if not len(self.netapi): logger.error("Did not find any netapi configurations, nothing to start") for fun in self.netapi: if fun.endswith('.start'): logger.info('Starting {0} netapi module'.format(fun)) self.process_manager.add_process(self.netapi[fun]) self.process_manager.run()
0f1ed52e7525ea5f41d63642bca1eaeb9d5af8ba
emission/core/wrapper/labelprediction.py
emission/core/wrapper/labelprediction.py
import emission.core.wrapper.wrapperbase as ecwb class Labelprediction(ecwb.WrapperBase): props = {"trip_id": ecwb.WrapperBase.Access.WORM, # the trip that this is part of "prediction": ecwb.WrapperBase.Access.WORM, # What we predict "start_ts": ecwb.WrapperBase.Access.WORM, # start time for the prediction, so that it can be captured in time-based queries, e.g. to reset the pipeline "end_ts": ecwb.WrapperBase.Access.WORM, # end time for the prediction, so that it can be captured in time-based queries, e.g. to reset the pipeline } enums = {} geojson = {} local_dates = {} def _populateDependencies(self): pass
import emission.core.wrapper.wrapperbase as ecwb # The "prediction" data structure is a list of label possibilities, each one consisting of a set of labels and a probability: # [ # {"labels": {"labeltype1": "labelvalue1", "labeltype2": "labelvalue2"}, "p": 0.61}, # {"labels": {"labeltype1": "labelvalue3", "labeltype2": "labelvalue4"}, "p": 0.27}, # ... # ] class Labelprediction(ecwb.WrapperBase): props = {"trip_id": ecwb.WrapperBase.Access.WORM, # the trip that this is part of "prediction": ecwb.WrapperBase.Access.WORM, # What we predict -- see above "start_ts": ecwb.WrapperBase.Access.WORM, # start time for the prediction, so that it can be captured in time-based queries, e.g. to reset the pipeline "end_ts": ecwb.WrapperBase.Access.WORM, # end time for the prediction, so that it can be captured in time-based queries, e.g. to reset the pipeline } enums = {} geojson = {} local_dates = {} def _populateDependencies(self): pass
Add comments explaining prediction data structure
Add comments explaining prediction data structure
Python
bsd-3-clause
shankari/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server
import emission.core.wrapper.wrapperbase as ecwb + + # The "prediction" data structure is a list of label possibilities, each one consisting of a set of labels and a probability: + # [ + # {"labels": {"labeltype1": "labelvalue1", "labeltype2": "labelvalue2"}, "p": 0.61}, + # {"labels": {"labeltype1": "labelvalue3", "labeltype2": "labelvalue4"}, "p": 0.27}, + # ... + # ] class Labelprediction(ecwb.WrapperBase): props = {"trip_id": ecwb.WrapperBase.Access.WORM, # the trip that this is part of - "prediction": ecwb.WrapperBase.Access.WORM, # What we predict + "prediction": ecwb.WrapperBase.Access.WORM, # What we predict -- see above "start_ts": ecwb.WrapperBase.Access.WORM, # start time for the prediction, so that it can be captured in time-based queries, e.g. to reset the pipeline "end_ts": ecwb.WrapperBase.Access.WORM, # end time for the prediction, so that it can be captured in time-based queries, e.g. to reset the pipeline } enums = {} geojson = {} local_dates = {} def _populateDependencies(self): pass
Add comments explaining prediction data structure
## Code Before: import emission.core.wrapper.wrapperbase as ecwb class Labelprediction(ecwb.WrapperBase): props = {"trip_id": ecwb.WrapperBase.Access.WORM, # the trip that this is part of "prediction": ecwb.WrapperBase.Access.WORM, # What we predict "start_ts": ecwb.WrapperBase.Access.WORM, # start time for the prediction, so that it can be captured in time-based queries, e.g. to reset the pipeline "end_ts": ecwb.WrapperBase.Access.WORM, # end time for the prediction, so that it can be captured in time-based queries, e.g. to reset the pipeline } enums = {} geojson = {} local_dates = {} def _populateDependencies(self): pass ## Instruction: Add comments explaining prediction data structure ## Code After: import emission.core.wrapper.wrapperbase as ecwb # The "prediction" data structure is a list of label possibilities, each one consisting of a set of labels and a probability: # [ # {"labels": {"labeltype1": "labelvalue1", "labeltype2": "labelvalue2"}, "p": 0.61}, # {"labels": {"labeltype1": "labelvalue3", "labeltype2": "labelvalue4"}, "p": 0.27}, # ... # ] class Labelprediction(ecwb.WrapperBase): props = {"trip_id": ecwb.WrapperBase.Access.WORM, # the trip that this is part of "prediction": ecwb.WrapperBase.Access.WORM, # What we predict -- see above "start_ts": ecwb.WrapperBase.Access.WORM, # start time for the prediction, so that it can be captured in time-based queries, e.g. to reset the pipeline "end_ts": ecwb.WrapperBase.Access.WORM, # end time for the prediction, so that it can be captured in time-based queries, e.g. to reset the pipeline } enums = {} geojson = {} local_dates = {} def _populateDependencies(self): pass
5ca96beb26dd2ab5285a57f5cade6f01160df368
joequery/blog/posts/code/notes-on-dynamic-programming-part-1/meta.py
joequery/blog/posts/code/notes-on-dynamic-programming-part-1/meta.py
title="Notes on dynamic programming - part 1" description=""" Part 1 of extensive notes discussing the fundamentals of dynamic programming. Examples in these notes include the Fibonacci sequence and Warshall's algorithm. Pseudocode and Python implementations of the algorithms are provided. """ time="2012-12-10 Mon 02:28 AM" # related=[("Some article", "its/url")]
title="Notes on dynamic programming - part 1" description=""" Part 1 of extensive notes discussing the fundamentals of dynamic programming. Examples in these notes include the Fibonacci sequence, the Binomial Formula, and Warshall's algorithm. Python implementations of the algorithms are provided. """ time="2012-12-10 Mon 02:48 AM" # related=[("Some article", "its/url")]
Update description and timestamp for dynamic programming part 1
Update description and timestamp for dynamic programming part 1
Python
mit
joequery/joequery.me,joequery/joequery.me,joequery/joequery.me,joequery/joequery.me
title="Notes on dynamic programming - part 1" description=""" Part 1 of extensive notes discussing the fundamentals of dynamic programming. - Examples in these notes include the Fibonacci sequence and Warshall's + Examples in these notes include the Fibonacci sequence, the Binomial Formula, - algorithm. Pseudocode and Python implementations of the algorithms are + and Warshall's algorithm. Python implementations of the algorithms are provided. """ - time="2012-12-10 Mon 02:28 AM" + time="2012-12-10 Mon 02:48 AM" # related=[("Some article", "its/url")]
Update description and timestamp for dynamic programming part 1
## Code Before: title="Notes on dynamic programming - part 1" description=""" Part 1 of extensive notes discussing the fundamentals of dynamic programming. Examples in these notes include the Fibonacci sequence and Warshall's algorithm. Pseudocode and Python implementations of the algorithms are provided. """ time="2012-12-10 Mon 02:28 AM" # related=[("Some article", "its/url")] ## Instruction: Update description and timestamp for dynamic programming part 1 ## Code After: title="Notes on dynamic programming - part 1" description=""" Part 1 of extensive notes discussing the fundamentals of dynamic programming. Examples in these notes include the Fibonacci sequence, the Binomial Formula, and Warshall's algorithm. Python implementations of the algorithms are provided. """ time="2012-12-10 Mon 02:48 AM" # related=[("Some article", "its/url")]
c7660db45e0275a685a6cc450fd4341a69c52b92
threaded_multihost/fields.py
threaded_multihost/fields.py
from django.db.models import ForeignKey from django.contrib.auth.models import User import threadlocals class UserField(ForeignKey): """ UserField By defaults, foreign key to User; null=True, blank=True """ def __init__(self, **kwargs): kwargs.setdefault('null', True) kwargs.setdefault('blank', True) ForeignKey.__init__(self, User, **kwargs) class CreatorField(UserField): """ CreatorField By default, sets editable=False, default=threadlocals.get_current_user """ def __init__(self, **kwargs): kwargs.setdefault('editable', False) kwargs.setdefault('default', threadlocals.get_current_user) UserField.__init__(self, **kwargs) class EditorField(CreatorField): """ EditorField By default, sets editable=False, default=threadlocals.get_current_user Sets value to get_current_user() on each save of the model. """ def __init__(self, **kwargs): super(CreatorField, self).__init__(**kwargs) def pre_save(self, model_instance, add): value = threadlocals.get_current_user() setattr(model_instance, self.name, value) if value: value = value.pk setattr(model_instance, self.attname, value) return value
from django.db.models import ForeignKey from django.contrib.auth.models import User import threadlocals class UserField(ForeignKey): """ UserField By defaults, foreign key to User; null=True, blank=True """ def __init__(self, **kwargs): kwargs.setdefault('to', User) kwargs.setdefault('null', True) kwargs.setdefault('blank', True) ForeignKey.__init__(self, **kwargs) class CreatorField(UserField): """ CreatorField By default, sets editable=False, default=threadlocals.get_current_user """ def __init__(self, **kwargs): kwargs.setdefault('editable', False) kwargs.setdefault('default', threadlocals.get_current_user) UserField.__init__(self, **kwargs) class EditorField(CreatorField): """ EditorField By default, sets editable=False, default=threadlocals.get_current_user Sets value to get_current_user() on each save of the model. """ def __init__(self, **kwargs): super(CreatorField, self).__init__(**kwargs) def pre_save(self, model_instance, add): value = threadlocals.get_current_user() setattr(model_instance, self.name, value) if value: value = value.pk setattr(model_instance, self.attname, value) return value try: from south.modelsinspector import add_introspection_rules except ImportError: add_introspection_rules = False if add_introspection_rules: add_introspection_rules([], [r"^threaded_multihost\.fields\.(User|Creator|Editor)Field"])
Patch from chrischambers to enable south migrations.
Patch from chrischambers to enable south migrations.
Python
bsd-3-clause
diver-in-sky/django-threaded-multihost
from django.db.models import ForeignKey from django.contrib.auth.models import User import threadlocals class UserField(ForeignKey): """ UserField By defaults, foreign key to User; null=True, blank=True """ def __init__(self, **kwargs): + kwargs.setdefault('to', User) kwargs.setdefault('null', True) kwargs.setdefault('blank', True) - ForeignKey.__init__(self, User, **kwargs) + ForeignKey.__init__(self, **kwargs) - + class CreatorField(UserField): """ CreatorField By default, sets editable=False, default=threadlocals.get_current_user """ def __init__(self, **kwargs): kwargs.setdefault('editable', False) kwargs.setdefault('default', threadlocals.get_current_user) UserField.__init__(self, **kwargs) class EditorField(CreatorField): """ EditorField By default, sets editable=False, default=threadlocals.get_current_user Sets value to get_current_user() on each save of the model. """ def __init__(self, **kwargs): super(CreatorField, self).__init__(**kwargs) def pre_save(self, model_instance, add): value = threadlocals.get_current_user() setattr(model_instance, self.name, value) if value: value = value.pk setattr(model_instance, self.attname, value) return value + try: + from south.modelsinspector import add_introspection_rules + except ImportError: + add_introspection_rules = False + + if add_introspection_rules: + add_introspection_rules([], [r"^threaded_multihost\.fields\.(User|Creator|Editor)Field"]) +
Patch from chrischambers to enable south migrations.
## Code Before: from django.db.models import ForeignKey from django.contrib.auth.models import User import threadlocals class UserField(ForeignKey): """ UserField By defaults, foreign key to User; null=True, blank=True """ def __init__(self, **kwargs): kwargs.setdefault('null', True) kwargs.setdefault('blank', True) ForeignKey.__init__(self, User, **kwargs) class CreatorField(UserField): """ CreatorField By default, sets editable=False, default=threadlocals.get_current_user """ def __init__(self, **kwargs): kwargs.setdefault('editable', False) kwargs.setdefault('default', threadlocals.get_current_user) UserField.__init__(self, **kwargs) class EditorField(CreatorField): """ EditorField By default, sets editable=False, default=threadlocals.get_current_user Sets value to get_current_user() on each save of the model. """ def __init__(self, **kwargs): super(CreatorField, self).__init__(**kwargs) def pre_save(self, model_instance, add): value = threadlocals.get_current_user() setattr(model_instance, self.name, value) if value: value = value.pk setattr(model_instance, self.attname, value) return value ## Instruction: Patch from chrischambers to enable south migrations. 
## Code After: from django.db.models import ForeignKey from django.contrib.auth.models import User import threadlocals class UserField(ForeignKey): """ UserField By defaults, foreign key to User; null=True, blank=True """ def __init__(self, **kwargs): kwargs.setdefault('to', User) kwargs.setdefault('null', True) kwargs.setdefault('blank', True) ForeignKey.__init__(self, **kwargs) class CreatorField(UserField): """ CreatorField By default, sets editable=False, default=threadlocals.get_current_user """ def __init__(self, **kwargs): kwargs.setdefault('editable', False) kwargs.setdefault('default', threadlocals.get_current_user) UserField.__init__(self, **kwargs) class EditorField(CreatorField): """ EditorField By default, sets editable=False, default=threadlocals.get_current_user Sets value to get_current_user() on each save of the model. """ def __init__(self, **kwargs): super(CreatorField, self).__init__(**kwargs) def pre_save(self, model_instance, add): value = threadlocals.get_current_user() setattr(model_instance, self.name, value) if value: value = value.pk setattr(model_instance, self.attname, value) return value try: from south.modelsinspector import add_introspection_rules except ImportError: add_introspection_rules = False if add_introspection_rules: add_introspection_rules([], [r"^threaded_multihost\.fields\.(User|Creator|Editor)Field"])
fc472d043e81c2b5687a0f83dbbdd0dd02b73e35
flowtype/commands/exec_flow.py
flowtype/commands/exec_flow.py
import os import json import threading import subprocess import sublime class ExecFlowCommand(threading.Thread): """Threaded class used for running flow commands in a different thread. The subprocess must be threaded so we don't lockup the UI. """ def __init__(self, cmd, content): """Initialize with the command and the file content to send.""" self.cmd = cmd self.content = content self.stdout = None self.returncode = 0 self.stderr = None threading.Thread.__init__(self) def run(self): """Execute the command in a subprocess.""" read, write = os.pipe() os.write(write, str.encode(self.content)) os.close(write) try: output = subprocess.check_output( self.cmd, shell=sublime.platform() == 'windows', stdin=read, stderr=subprocess.STDOUT ) if type(output) is bytes: output = output.decode('utf-8') try: self.stdout = json.loads(output) except ValueError: self.stdout = output os.close(read) except subprocess.CalledProcessError as err: self.stderr = str(err) self.returncode = 1
import os import json import threading import subprocess import sublime class ExecFlowCommand(threading.Thread): """Threaded class used for running flow commands in a different thread. The subprocess must be threaded so we don't lockup the UI. """ def __init__(self, cmd, content): """Initialize with the command and the file content to send.""" self.cmd = cmd self.content = content self.stdout = None self.returncode = 0 self.stderr = None threading.Thread.__init__(self) def run(self): """Execute the command in a subprocess.""" read, write = os.pipe() os.write(write, str.encode(self.content)) os.close(write) try: output = subprocess.check_output( self.cmd, shell=sublime.platform() == 'windows', stdin=read, stderr=subprocess.STDOUT ) if type(output) is bytes: output = output.decode('utf-8') try: self.stdout = json.loads(output) except ValueError: self.stdout = output os.close(read) except subprocess.CalledProcessError as err: if type(err.output) is bytes: output = err.output.decode('utf-8') else: output = err.output self.stderr = str(err) + ': ' + str(output) self.returncode = 1
Add error output to exec error messages
Add error output to exec error messages e.g. for an error like "env: ‘node’: No such file or directory" the sublime console was only reporting "exited with code 127" which wasn't very helpful in determining the cause.
Python
mit
Pegase745/sublime-flowtype
import os import json import threading import subprocess import sublime class ExecFlowCommand(threading.Thread): """Threaded class used for running flow commands in a different thread. The subprocess must be threaded so we don't lockup the UI. """ def __init__(self, cmd, content): """Initialize with the command and the file content to send.""" self.cmd = cmd self.content = content self.stdout = None self.returncode = 0 self.stderr = None threading.Thread.__init__(self) def run(self): """Execute the command in a subprocess.""" read, write = os.pipe() os.write(write, str.encode(self.content)) os.close(write) try: output = subprocess.check_output( self.cmd, shell=sublime.platform() == 'windows', stdin=read, stderr=subprocess.STDOUT ) if type(output) is bytes: output = output.decode('utf-8') try: self.stdout = json.loads(output) except ValueError: self.stdout = output os.close(read) except subprocess.CalledProcessError as err: + if type(err.output) is bytes: + output = err.output.decode('utf-8') + else: + output = err.output - self.stderr = str(err) + self.stderr = str(err) + ': ' + str(output) self.returncode = 1
Add error output to exec error messages
## Code Before: import os import json import threading import subprocess import sublime class ExecFlowCommand(threading.Thread): """Threaded class used for running flow commands in a different thread. The subprocess must be threaded so we don't lockup the UI. """ def __init__(self, cmd, content): """Initialize with the command and the file content to send.""" self.cmd = cmd self.content = content self.stdout = None self.returncode = 0 self.stderr = None threading.Thread.__init__(self) def run(self): """Execute the command in a subprocess.""" read, write = os.pipe() os.write(write, str.encode(self.content)) os.close(write) try: output = subprocess.check_output( self.cmd, shell=sublime.platform() == 'windows', stdin=read, stderr=subprocess.STDOUT ) if type(output) is bytes: output = output.decode('utf-8') try: self.stdout = json.loads(output) except ValueError: self.stdout = output os.close(read) except subprocess.CalledProcessError as err: self.stderr = str(err) self.returncode = 1 ## Instruction: Add error output to exec error messages ## Code After: import os import json import threading import subprocess import sublime class ExecFlowCommand(threading.Thread): """Threaded class used for running flow commands in a different thread. The subprocess must be threaded so we don't lockup the UI. 
""" def __init__(self, cmd, content): """Initialize with the command and the file content to send.""" self.cmd = cmd self.content = content self.stdout = None self.returncode = 0 self.stderr = None threading.Thread.__init__(self) def run(self): """Execute the command in a subprocess.""" read, write = os.pipe() os.write(write, str.encode(self.content)) os.close(write) try: output = subprocess.check_output( self.cmd, shell=sublime.platform() == 'windows', stdin=read, stderr=subprocess.STDOUT ) if type(output) is bytes: output = output.decode('utf-8') try: self.stdout = json.loads(output) except ValueError: self.stdout = output os.close(read) except subprocess.CalledProcessError as err: if type(err.output) is bytes: output = err.output.decode('utf-8') else: output = err.output self.stderr = str(err) + ': ' + str(output) self.returncode = 1
c43820a2e26dd4f87c36b986a9a0af80b409f659
sentence_extractor.py
sentence_extractor.py
import textract import sys import os import re import random ################################### # Extracts text from a pdf file and # selects one sentence, which it # then prints. # # Created by Fredrik Omstedt. ################################### # Extracts texts from pdf files. If given a directory, the # program will return texts from all pdf files in that directory. def extractTexts(): file = sys.argv[1] texts = [] if os.path.isdir(file): for f in os.listdir(file): if re.match(r'^.*\.pdf$', f): texts.append(textract.process(file + "/" + f)) else: texts.append(textract.process(file)) return texts # Chooses one sentence randomly from each of the given texts. def selectSentences(texts): chosen_sentences = [] for text in texts: sentence_structure = re.compile(r'([A-Z][^\.!?]*[\.!?])', re.M) sentences = sentence_structure.findall(text) chosen_sentences.append( sentences[random.randint(0, len(sentences)-1)].replace("\n", " ") ) return chosen_sentences def main(): texts = extractTexts() sentences = selectSentences(texts) for sentence in sentences: print(sentence) print("\n") if __name__ == '__main__': main()
import textract import sys import os import re import random ################################### # Extracts text from a pdf file and # selects one sentence, which it # then prints. # # Created by Fredrik Omstedt. ################################### # Extracts texts from pdf files. If given a directory, the # program will return texts from all pdf files in that directory. def extractTexts(): file = sys.argv[1] texts = [] if os.path.isdir(file): for f in os.listdir(file): if re.match(r'^.*\.pdf$', f): texts.append(textract.process(file + "/" + f)) else: texts.append(textract.process(file)) return texts # Chooses one sentence randomly from each of the given texts. def selectSentences(texts): chosen_sentences = [] for text in texts: sentence_structure = re.compile(r'([A-Z\xc4\xc5\xd6][^\.!?]*[\.!?])', re.M) sentences = sentence_structure.findall(text) chosen_sentences.append( sentences[random.randint(0, len(sentences)-1)].replace("\n", " ") ) return chosen_sentences def main(): texts = extractTexts() sentences = selectSentences(texts) for sentence in sentences: print(sentence) print("\n") if __name__ == '__main__': main()
Update regex to match sentences starting with ÅÄÖ
Update regex to match sentences starting with ÅÄÖ
Python
mit
Xaril/sentence-extractor,Xaril/sentence-extractor
import textract import sys import os import re import random ################################### # Extracts text from a pdf file and # selects one sentence, which it # then prints. # # Created by Fredrik Omstedt. ################################### # Extracts texts from pdf files. If given a directory, the # program will return texts from all pdf files in that directory. def extractTexts(): file = sys.argv[1] texts = [] if os.path.isdir(file): for f in os.listdir(file): if re.match(r'^.*\.pdf$', f): texts.append(textract.process(file + "/" + f)) else: texts.append(textract.process(file)) return texts # Chooses one sentence randomly from each of the given texts. def selectSentences(texts): chosen_sentences = [] for text in texts: - sentence_structure = re.compile(r'([A-Z][^\.!?]*[\.!?])', re.M) + sentence_structure = re.compile(r'([A-Z\xc4\xc5\xd6][^\.!?]*[\.!?])', re.M) sentences = sentence_structure.findall(text) chosen_sentences.append( sentences[random.randint(0, len(sentences)-1)].replace("\n", " ") ) return chosen_sentences def main(): texts = extractTexts() sentences = selectSentences(texts) for sentence in sentences: print(sentence) print("\n") if __name__ == '__main__': main()
Update regex to match sentences starting with ÅÄÖ
## Code Before: import textract import sys import os import re import random ################################### # Extracts text from a pdf file and # selects one sentence, which it # then prints. # # Created by Fredrik Omstedt. ################################### # Extracts texts from pdf files. If given a directory, the # program will return texts from all pdf files in that directory. def extractTexts(): file = sys.argv[1] texts = [] if os.path.isdir(file): for f in os.listdir(file): if re.match(r'^.*\.pdf$', f): texts.append(textract.process(file + "/" + f)) else: texts.append(textract.process(file)) return texts # Chooses one sentence randomly from each of the given texts. def selectSentences(texts): chosen_sentences = [] for text in texts: sentence_structure = re.compile(r'([A-Z][^\.!?]*[\.!?])', re.M) sentences = sentence_structure.findall(text) chosen_sentences.append( sentences[random.randint(0, len(sentences)-1)].replace("\n", " ") ) return chosen_sentences def main(): texts = extractTexts() sentences = selectSentences(texts) for sentence in sentences: print(sentence) print("\n") if __name__ == '__main__': main() ## Instruction: Update regex to match sentences starting with ÅÄÖ ## Code After: import textract import sys import os import re import random ################################### # Extracts text from a pdf file and # selects one sentence, which it # then prints. # # Created by Fredrik Omstedt. ################################### # Extracts texts from pdf files. If given a directory, the # program will return texts from all pdf files in that directory. def extractTexts(): file = sys.argv[1] texts = [] if os.path.isdir(file): for f in os.listdir(file): if re.match(r'^.*\.pdf$', f): texts.append(textract.process(file + "/" + f)) else: texts.append(textract.process(file)) return texts # Chooses one sentence randomly from each of the given texts. 
def selectSentences(texts): chosen_sentences = [] for text in texts: sentence_structure = re.compile(r'([A-Z\xc4\xc5\xd6][^\.!?]*[\.!?])', re.M) sentences = sentence_structure.findall(text) chosen_sentences.append( sentences[random.randint(0, len(sentences)-1)].replace("\n", " ") ) return chosen_sentences def main(): texts = extractTexts() sentences = selectSentences(texts) for sentence in sentences: print(sentence) print("\n") if __name__ == '__main__': main()
c8cc1f8e0e9b6d7dfb29ff9aef04bf2b5867cceb
genomediff/records.py
genomediff/records.py
class Metadata(object): def __init__(self, name, value): self.name = name self.value = value def __repr__(self): return "Metadata({}, {})".format(repr(self.name), repr(self.value)) def __eq__(self, other): return self.__dict__ == other.__dict__ class Record(object): def __init__(self, type, id, document=None, parent_ids=None, **extra): self.document = document self.type = type self.id = id self.parent_ids = parent_ids self._extra = extra @property def parents(self): if not self.parent_ids is None: return [self.document[pid] for pid in self.parent_ids] else: return [] def __getattr__(self, item): return self._extra[item] def __repr__(self): return "Record('{}', {}, {}, {})".format(self.type, self.id, self.parent_ids, ', '.join('{}={}'.format(k, repr(v)) for k, v in self._extra.items())) def __eq__(self, other): return self.__dict__ == other.__dict__
class Metadata(object): def __init__(self, name, value): self.name = name self.value = value def __repr__(self): return "Metadata({}, {})".format(repr(self.name), repr(self.value)) def __eq__(self, other): return self.__dict__ == other.__dict__ class Record(object): def __init__(self, type, id, document=None, parent_ids=None, **attributes): self.document = document self.type = type self.id = id self.parent_ids = parent_ids self.attributes = attributes @property def parents(self): if not self.parent_ids is None: return [self.document[pid] for pid in self.parent_ids] else: return [] def __getattr__(self, item): try: return self.attributes[item] except KeyError: raise AttributeError def __repr__(self): return "Record('{}', {}, {}, {})".format(self.type, self.id, self.parent_ids, ', '.join('{}={}'.format(k, repr(v)) for k, v in self._extra.items())) def __eq__(self, other): return self.__dict__ == other.__dict__
Raise AttributeError if key does not exist when trying to get it from a Record
Raise AttributeError if key does not exist when trying to get it from a Record
Python
mit
biosustain/genomediff-python
class Metadata(object): def __init__(self, name, value): self.name = name self.value = value def __repr__(self): return "Metadata({}, {})".format(repr(self.name), repr(self.value)) def __eq__(self, other): return self.__dict__ == other.__dict__ class Record(object): - def __init__(self, type, id, document=None, parent_ids=None, **extra): + def __init__(self, type, id, document=None, parent_ids=None, **attributes): self.document = document self.type = type self.id = id self.parent_ids = parent_ids - self._extra = extra + self.attributes = attributes @property def parents(self): if not self.parent_ids is None: return [self.document[pid] for pid in self.parent_ids] else: return [] def __getattr__(self, item): + try: - return self._extra[item] + return self.attributes[item] + except KeyError: + raise AttributeError - def __repr__(self): - return "Record('{}', {}, {}, {})".format(self.type, - self.id, - self.parent_ids, - ', '.join('{}={}'.format(k, repr(v)) for k, v in self._extra.items())) - def __eq__(self, other): - return self.__dict__ == other.__dict__ + def __repr__(self): + return "Record('{}', {}, {}, {})".format(self.type, + self.id, + self.parent_ids, + ', '.join('{}={}'.format(k, repr(v)) for k, v in self._extra.items())) + + def __eq__(self, other): + return self.__dict__ == other.__dict__ +
Raise AttributeError if key does not exist when trying to get it from a Record
## Code Before: class Metadata(object): def __init__(self, name, value): self.name = name self.value = value def __repr__(self): return "Metadata({}, {})".format(repr(self.name), repr(self.value)) def __eq__(self, other): return self.__dict__ == other.__dict__ class Record(object): def __init__(self, type, id, document=None, parent_ids=None, **extra): self.document = document self.type = type self.id = id self.parent_ids = parent_ids self._extra = extra @property def parents(self): if not self.parent_ids is None: return [self.document[pid] for pid in self.parent_ids] else: return [] def __getattr__(self, item): return self._extra[item] def __repr__(self): return "Record('{}', {}, {}, {})".format(self.type, self.id, self.parent_ids, ', '.join('{}={}'.format(k, repr(v)) for k, v in self._extra.items())) def __eq__(self, other): return self.__dict__ == other.__dict__ ## Instruction: Raise AttributeError if key does not exist when trying to get it from a Record ## Code After: class Metadata(object): def __init__(self, name, value): self.name = name self.value = value def __repr__(self): return "Metadata({}, {})".format(repr(self.name), repr(self.value)) def __eq__(self, other): return self.__dict__ == other.__dict__ class Record(object): def __init__(self, type, id, document=None, parent_ids=None, **attributes): self.document = document self.type = type self.id = id self.parent_ids = parent_ids self.attributes = attributes @property def parents(self): if not self.parent_ids is None: return [self.document[pid] for pid in self.parent_ids] else: return [] def __getattr__(self, item): try: return self.attributes[item] except KeyError: raise AttributeError def __repr__(self): return "Record('{}', {}, {}, {})".format(self.type, self.id, self.parent_ids, ', '.join('{}={}'.format(k, repr(v)) for k, v in self._extra.items())) def __eq__(self, other): return self.__dict__ == other.__dict__
9aace6d89642e5025692b25e2c6253544ed580a6
social_auth/models.py
social_auth/models.py
"""Social auth models""" from django.db import models from django.contrib.auth.models import User class UserSocialAuth(models.Model): """Social Auth association model""" user = models.ForeignKey(User, related_name='social_auth') provider = models.CharField(max_length=32) uid = models.TextField() class Meta: """Meta data""" unique_together = ('provider', 'uid') class Nonce(models.Model): """One use numbers""" server_url = models.TextField() timestamp = models.IntegerField() salt = models.CharField(max_length=40) class Association(models.Model): """OpenId account association""" server_url = models.TextField(max_length=2047) handle = models.CharField(max_length=255) secret = models.TextField(max_length=255) # Stored base64 encoded issued = models.IntegerField() lifetime = models.IntegerField() assoc_type = models.TextField(max_length=64)
"""Social auth models""" from django.db import models from django.contrib.auth.models import User class UserSocialAuth(models.Model): """Social Auth association model""" user = models.ForeignKey(User, related_name='social_auth') provider = models.CharField(max_length=32) uid = models.TextField() class Meta: """Meta data""" unique_together = ('provider', 'uid') class Nonce(models.Model): """One use numbers""" server_url = models.TextField() timestamp = models.IntegerField() salt = models.CharField(max_length=40) class Association(models.Model): """OpenId account association""" server_url = models.TextField() handle = models.CharField(max_length=255) secret = models.CharField(max_length=255) # Stored base64 encoded issued = models.IntegerField() lifetime = models.IntegerField() assoc_type = models.CharField(max_length=64)
Remove max_length from TextFields and replace short text fields with CharFields
Remove max_length from TextFields and replace short text fields with CharFields
Python
bsd-3-clause
michael-borisov/django-social-auth,krvss/django-social-auth,thesealion/django-social-auth,lovehhf/django-social-auth,sk7/django-social-auth,dongguangming/django-social-auth,czpython/django-social-auth,beswarm/django-social-auth,adw0rd/django-social-auth,MjAbuz/django-social-auth,VishvajitP/django-social-auth,MjAbuz/django-social-auth,brianmckinneyrocks/django-social-auth,beswarm/django-social-auth,thesealion/django-social-auth,mayankcu/Django-social,vxvinh1511/django-social-auth,vuchau/django-social-auth,1st/django-social-auth,WW-Digital/django-social-auth,omab/django-social-auth,omab/django-social-auth,caktus/django-social-auth,vuchau/django-social-auth,limdauto/django-social-auth,antoviaque/django-social-auth-norel,vxvinh1511/django-social-auth,qas612820704/django-social-auth,gustavoam/django-social-auth,michael-borisov/django-social-auth,dongguangming/django-social-auth,limdauto/django-social-auth,duoduo369/django-social-auth,caktus/django-social-auth,gustavoam/django-social-auth,VishvajitP/django-social-auth,qas612820704/django-social-auth,getsentry/django-social-auth,lovehhf/django-social-auth,brianmckinneyrocks/django-social-auth
"""Social auth models""" from django.db import models from django.contrib.auth.models import User class UserSocialAuth(models.Model): """Social Auth association model""" user = models.ForeignKey(User, related_name='social_auth') provider = models.CharField(max_length=32) uid = models.TextField() class Meta: """Meta data""" unique_together = ('provider', 'uid') class Nonce(models.Model): """One use numbers""" server_url = models.TextField() timestamp = models.IntegerField() salt = models.CharField(max_length=40) class Association(models.Model): """OpenId account association""" - server_url = models.TextField(max_length=2047) + server_url = models.TextField() handle = models.CharField(max_length=255) - secret = models.TextField(max_length=255) # Stored base64 encoded + secret = models.CharField(max_length=255) # Stored base64 encoded issued = models.IntegerField() lifetime = models.IntegerField() - assoc_type = models.TextField(max_length=64) + assoc_type = models.CharField(max_length=64)
Remove max_length from TextFields and replace short text fields with CharFields
## Code Before: """Social auth models""" from django.db import models from django.contrib.auth.models import User class UserSocialAuth(models.Model): """Social Auth association model""" user = models.ForeignKey(User, related_name='social_auth') provider = models.CharField(max_length=32) uid = models.TextField() class Meta: """Meta data""" unique_together = ('provider', 'uid') class Nonce(models.Model): """One use numbers""" server_url = models.TextField() timestamp = models.IntegerField() salt = models.CharField(max_length=40) class Association(models.Model): """OpenId account association""" server_url = models.TextField(max_length=2047) handle = models.CharField(max_length=255) secret = models.TextField(max_length=255) # Stored base64 encoded issued = models.IntegerField() lifetime = models.IntegerField() assoc_type = models.TextField(max_length=64) ## Instruction: Remove max_length from TextFields and replace short text fields with CharFields ## Code After: """Social auth models""" from django.db import models from django.contrib.auth.models import User class UserSocialAuth(models.Model): """Social Auth association model""" user = models.ForeignKey(User, related_name='social_auth') provider = models.CharField(max_length=32) uid = models.TextField() class Meta: """Meta data""" unique_together = ('provider', 'uid') class Nonce(models.Model): """One use numbers""" server_url = models.TextField() timestamp = models.IntegerField() salt = models.CharField(max_length=40) class Association(models.Model): """OpenId account association""" server_url = models.TextField() handle = models.CharField(max_length=255) secret = models.CharField(max_length=255) # Stored base64 encoded issued = models.IntegerField() lifetime = models.IntegerField() assoc_type = models.CharField(max_length=64)
eca73e0c57042593f7e65446e26e63790c5cf2aa
notes/admin.py
notes/admin.py
from snowy.accounts.models import UserProfile from snowy.notes.models import Note, NoteTag from reversion.admin import VersionAdmin from django.contrib import admin class NoteAdmin(VersionAdmin): list_display = ('created', 'author', 'title') search_fields = ['content', 'title'] prepopulated_fields = {'slug': ('title',)} admin.site.register(Note, NoteAdmin) admin.site.register(NoteTag) admin.site.register(UserProfile)
from snowy.accounts.models import UserProfile from snowy.notes.models import Note, NoteTag #from reversion.admin import VersionAdmin from django.contrib import admin #class NoteAdmin(VersionAdmin): class NoteAdmin(admin.ModelAdmin): list_display = ('created', 'author', 'title') search_fields = ['content', 'title'] prepopulated_fields = {'slug': ('title',)} admin.site.register(Note, NoteAdmin) admin.site.register(NoteTag) admin.site.register(UserProfile)
Complete removal of reversion usage
Complete removal of reversion usage
Python
agpl-3.0
leonhandreke/snowy,NoUsername/PrivateNotesExperimental,jaredjennings/snowy,GNOME/snowy,sandyarmstrong/snowy,syskill/snowy,syskill/snowy,NoUsername/PrivateNotesExperimental,sandyarmstrong/snowy,jaredjennings/snowy,jaredjennings/snowy,widox/snowy,jaredjennings/snowy,nekohayo/snowy,nekohayo/snowy,widox/snowy,GNOME/snowy,leonhandreke/snowy
from snowy.accounts.models import UserProfile from snowy.notes.models import Note, NoteTag - from reversion.admin import VersionAdmin + #from reversion.admin import VersionAdmin from django.contrib import admin - class NoteAdmin(VersionAdmin): + #class NoteAdmin(VersionAdmin): + class NoteAdmin(admin.ModelAdmin): list_display = ('created', 'author', 'title') search_fields = ['content', 'title'] prepopulated_fields = {'slug': ('title',)} admin.site.register(Note, NoteAdmin) admin.site.register(NoteTag) admin.site.register(UserProfile)
Complete removal of reversion usage
## Code Before: from snowy.accounts.models import UserProfile from snowy.notes.models import Note, NoteTag from reversion.admin import VersionAdmin from django.contrib import admin class NoteAdmin(VersionAdmin): list_display = ('created', 'author', 'title') search_fields = ['content', 'title'] prepopulated_fields = {'slug': ('title',)} admin.site.register(Note, NoteAdmin) admin.site.register(NoteTag) admin.site.register(UserProfile) ## Instruction: Complete removal of reversion usage ## Code After: from snowy.accounts.models import UserProfile from snowy.notes.models import Note, NoteTag #from reversion.admin import VersionAdmin from django.contrib import admin #class NoteAdmin(VersionAdmin): class NoteAdmin(admin.ModelAdmin): list_display = ('created', 'author', 'title') search_fields = ['content', 'title'] prepopulated_fields = {'slug': ('title',)} admin.site.register(Note, NoteAdmin) admin.site.register(NoteTag) admin.site.register(UserProfile)
630ba21f3b08dcd2685297b057cbee4b6abee6f7
us_ignite/sections/models.py
us_ignite/sections/models.py
from django.db import models class Sponsor(models.Model): name = models.CharField(max_length=255) website = models.URLField(max_length=500) image = models.ImageField(upload_to="sponsor") order = models.IntegerField(default=0) class Meta: ordering = ('order', ) def __unicode__(self): return self.name
from django.db import models class Sponsor(models.Model): name = models.CharField(max_length=255) website = models.URLField(max_length=500) image = models.ImageField( upload_to="sponsor", help_text='This image is not post processed. ' 'Please make sure it has the right design specs.') order = models.IntegerField(default=0) class Meta: ordering = ('order', ) def __unicode__(self): return self.name
Add help text describing the image field functionality.
Add help text describing the image field functionality.
Python
bsd-3-clause
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
from django.db import models class Sponsor(models.Model): name = models.CharField(max_length=255) website = models.URLField(max_length=500) - image = models.ImageField(upload_to="sponsor") + image = models.ImageField( + upload_to="sponsor", help_text='This image is not post processed. ' + 'Please make sure it has the right design specs.') order = models.IntegerField(default=0) class Meta: ordering = ('order', ) def __unicode__(self): return self.name
Add help text describing the image field functionality.
## Code Before: from django.db import models class Sponsor(models.Model): name = models.CharField(max_length=255) website = models.URLField(max_length=500) image = models.ImageField(upload_to="sponsor") order = models.IntegerField(default=0) class Meta: ordering = ('order', ) def __unicode__(self): return self.name ## Instruction: Add help text describing the image field functionality. ## Code After: from django.db import models class Sponsor(models.Model): name = models.CharField(max_length=255) website = models.URLField(max_length=500) image = models.ImageField( upload_to="sponsor", help_text='This image is not post processed. ' 'Please make sure it has the right design specs.') order = models.IntegerField(default=0) class Meta: ordering = ('order', ) def __unicode__(self): return self.name
9cfd402c8f95c016953eda752e1bd91302d6c8c0
translations/lantmateriet.py
translations/lantmateriet.py
def filterTags(attrs): res = {} if 'NAMN' in attrs: res['name'] = attrs['NAMN'] if 'TATNR' in attrs: res['ref:se:scb'] = attrs['TATNR'] if attrs.get('BEF') is not None: bef = int(attrs.get('BEF')) # This is an approximation based on http://wiki.openstreetmap.org/wiki/Key:place # and the observed values of nodes in OpenStreetMap itself for cities and towns # around Sweden. # This seems to be around where OSM sets city status for Sweden if bef >= 30000: res['place'] = 'city' elif bef >= 6000: res['place'] = 'town' elif bef >= 200: res['place'] = 'village' return res
def filterTags(attrs): res = {} if 'NAMN' in attrs: res['name'] = attrs['NAMN'] if 'TATNR' in attrs: res['ref:se:scb'] = attrs['TATNR'] if attrs.get('BEF') is not None: bef = int(attrs.get('BEF')) # This is an approximation based on http://wiki.openstreetmap.org/wiki/Key:place # and the observed values of nodes in OpenStreetMap itself for cities and towns # around Sweden. # This seems to be around where OSM sets city status for Sweden if bef >= 30000: res['place'] = 'city' elif bef >= 6000: res['place'] = 'town' elif bef >= 200: res['place'] = 'village' res['population'] = str(bef) return res
Add population to the tags
LM: Add population to the tags
Python
bsd-3-clause
andpe/swegov-to-osm
def filterTags(attrs): res = {} if 'NAMN' in attrs: res['name'] = attrs['NAMN'] if 'TATNR' in attrs: res['ref:se:scb'] = attrs['TATNR'] if attrs.get('BEF') is not None: bef = int(attrs.get('BEF')) # This is an approximation based on http://wiki.openstreetmap.org/wiki/Key:place # and the observed values of nodes in OpenStreetMap itself for cities and towns # around Sweden. # This seems to be around where OSM sets city status for Sweden if bef >= 30000: res['place'] = 'city' elif bef >= 6000: res['place'] = 'town' elif bef >= 200: res['place'] = 'village' + res['population'] = str(bef) + return res
Add population to the tags
## Code Before: def filterTags(attrs): res = {} if 'NAMN' in attrs: res['name'] = attrs['NAMN'] if 'TATNR' in attrs: res['ref:se:scb'] = attrs['TATNR'] if attrs.get('BEF') is not None: bef = int(attrs.get('BEF')) # This is an approximation based on http://wiki.openstreetmap.org/wiki/Key:place # and the observed values of nodes in OpenStreetMap itself for cities and towns # around Sweden. # This seems to be around where OSM sets city status for Sweden if bef >= 30000: res['place'] = 'city' elif bef >= 6000: res['place'] = 'town' elif bef >= 200: res['place'] = 'village' return res ## Instruction: Add population to the tags ## Code After: def filterTags(attrs): res = {} if 'NAMN' in attrs: res['name'] = attrs['NAMN'] if 'TATNR' in attrs: res['ref:se:scb'] = attrs['TATNR'] if attrs.get('BEF') is not None: bef = int(attrs.get('BEF')) # This is an approximation based on http://wiki.openstreetmap.org/wiki/Key:place # and the observed values of nodes in OpenStreetMap itself for cities and towns # around Sweden. # This seems to be around where OSM sets city status for Sweden if bef >= 30000: res['place'] = 'city' elif bef >= 6000: res['place'] = 'town' elif bef >= 200: res['place'] = 'village' res['population'] = str(bef) return res
d13c674a7286f1af9cd13babe2cb5c429b5b3bfa
scripts/update_guide_stats.py
scripts/update_guide_stats.py
from mica.stats import update_guide_stats update_guide_stats.main() import os table_file = mica.stats.guide_stats.TABLE_FILE file_stat = os.stat(table_file) if file_stat.st_size > 200e6: print(""" Warning: {tfile} is larger than 200MB and may need Warning: to be manually repacked (i.e.): Warning: Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5 Warning: cp compressed.h5 {tfile} """.format(tfile=table_file))
import os import argparse from mica.stats import update_guide_stats import mica.stats.guide_stats # Cheat and pass options directly. Needs entrypoint scripts opt = argparse.Namespace(datafile=mica.stats.guide_stats.TABLE_FILE, obsid=None, check_missing=False, start=None, stop=None) update_guide_stats.update(opt) table_file = mica.stats.guide_stats.TABLE_FILE file_stat = os.stat(table_file) if file_stat.st_size > 200e6: print(""" Warning: {tfile} is larger than 200MB and may need Warning: to be manually repacked (i.e.): Warning: Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5 Warning: cp compressed.h5 {tfile} """.format(tfile=table_file))
Update guide stat script to pass datafile
Update guide stat script to pass datafile
Python
bsd-3-clause
sot/mica,sot/mica
+ import os + import argparse from mica.stats import update_guide_stats - update_guide_stats.main() + import mica.stats.guide_stats - import os + # Cheat and pass options directly. Needs entrypoint scripts + opt = argparse.Namespace(datafile=mica.stats.guide_stats.TABLE_FILE, + obsid=None, check_missing=False, start=None, stop=None) + update_guide_stats.update(opt) + + table_file = mica.stats.guide_stats.TABLE_FILE file_stat = os.stat(table_file) if file_stat.st_size > 200e6: print(""" Warning: {tfile} is larger than 200MB and may need Warning: to be manually repacked (i.e.): Warning: Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5 Warning: cp compressed.h5 {tfile} """.format(tfile=table_file))
Update guide stat script to pass datafile
## Code Before: from mica.stats import update_guide_stats update_guide_stats.main() import os table_file = mica.stats.guide_stats.TABLE_FILE file_stat = os.stat(table_file) if file_stat.st_size > 200e6: print(""" Warning: {tfile} is larger than 200MB and may need Warning: to be manually repacked (i.e.): Warning: Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5 Warning: cp compressed.h5 {tfile} """.format(tfile=table_file)) ## Instruction: Update guide stat script to pass datafile ## Code After: import os import argparse from mica.stats import update_guide_stats import mica.stats.guide_stats # Cheat and pass options directly. Needs entrypoint scripts opt = argparse.Namespace(datafile=mica.stats.guide_stats.TABLE_FILE, obsid=None, check_missing=False, start=None, stop=None) update_guide_stats.update(opt) table_file = mica.stats.guide_stats.TABLE_FILE file_stat = os.stat(table_file) if file_stat.st_size > 200e6: print(""" Warning: {tfile} is larger than 200MB and may need Warning: to be manually repacked (i.e.): Warning: Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5 Warning: cp compressed.h5 {tfile} """.format(tfile=table_file))
5914b9a4d1d086f1a92309c0895aa7dd11761776
conf_site/accounts/tests/test_registration.py
conf_site/accounts/tests/test_registration.py
from factory import Faker, fuzzy from django.contrib.auth import get_user_model from django.test import TestCase from django.urls import reverse class UserRegistrationTestCase(TestCase): def test_registration_view(self): """Verify that user registration view loads properly.""" response = self.client.get(reverse("account_signup")) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "account/signup.html") def test_user_registration(self): """Ensure that user registration works properly.""" EMAIL = Faker("email").generate() PASSWORD = fuzzy.FuzzyText(length=16) test_user_data = { "password1": PASSWORD, "password2": PASSWORD, "email": EMAIL, "email2": EMAIL, } # Verify that POSTing user data to the registration view # succeeds / returns the right HTTP status code. response = self.client.post( reverse("account_signup"), test_user_data) # Successful form submission will cause the HTTP status code # to be "302 Found", not "200 OK". self.assertEqual(response.status_code, 302) # Verify that a User has been successfully created. user_model = get_user_model() user_model.objects.get(email=EMAIL)
from django.contrib.auth import get_user_model from django.test import TestCase from django.urls import reverse from factory import fuzzy from faker import Faker class UserRegistrationTestCase(TestCase): def test_registration_view(self): """Verify that user registration view loads properly.""" response = self.client.get(reverse("account_signup")) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "account/signup.html") def test_user_registration(self): """Ensure that user registration works properly.""" EMAIL = Faker().email() PASSWORD = fuzzy.FuzzyText(length=16) test_user_data = { "password1": PASSWORD, "password2": PASSWORD, "email": EMAIL, "email2": EMAIL, } # Verify that POSTing user data to the registration view # succeeds / returns the right HTTP status code. response = self.client.post( reverse("account_signup"), test_user_data) # Successful form submission will cause the HTTP status code # to be "302 Found", not "200 OK". self.assertEqual(response.status_code, 302) # Verify that a User has been successfully created. user_model = get_user_model() user_model.objects.get(email=EMAIL)
Change imports in user registration test.
Change imports in user registration test.
Python
mit
pydata/conf_site,pydata/conf_site,pydata/conf_site
- from factory import Faker, fuzzy - from django.contrib.auth import get_user_model from django.test import TestCase from django.urls import reverse + + from factory import fuzzy + from faker import Faker class UserRegistrationTestCase(TestCase): def test_registration_view(self): """Verify that user registration view loads properly.""" response = self.client.get(reverse("account_signup")) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "account/signup.html") def test_user_registration(self): """Ensure that user registration works properly.""" - EMAIL = Faker("email").generate() + EMAIL = Faker().email() PASSWORD = fuzzy.FuzzyText(length=16) test_user_data = { "password1": PASSWORD, "password2": PASSWORD, "email": EMAIL, "email2": EMAIL, } # Verify that POSTing user data to the registration view # succeeds / returns the right HTTP status code. response = self.client.post( reverse("account_signup"), test_user_data) # Successful form submission will cause the HTTP status code # to be "302 Found", not "200 OK". self.assertEqual(response.status_code, 302) # Verify that a User has been successfully created. user_model = get_user_model() user_model.objects.get(email=EMAIL)
Change imports in user registration test.
## Code Before: from factory import Faker, fuzzy from django.contrib.auth import get_user_model from django.test import TestCase from django.urls import reverse class UserRegistrationTestCase(TestCase): def test_registration_view(self): """Verify that user registration view loads properly.""" response = self.client.get(reverse("account_signup")) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "account/signup.html") def test_user_registration(self): """Ensure that user registration works properly.""" EMAIL = Faker("email").generate() PASSWORD = fuzzy.FuzzyText(length=16) test_user_data = { "password1": PASSWORD, "password2": PASSWORD, "email": EMAIL, "email2": EMAIL, } # Verify that POSTing user data to the registration view # succeeds / returns the right HTTP status code. response = self.client.post( reverse("account_signup"), test_user_data) # Successful form submission will cause the HTTP status code # to be "302 Found", not "200 OK". self.assertEqual(response.status_code, 302) # Verify that a User has been successfully created. user_model = get_user_model() user_model.objects.get(email=EMAIL) ## Instruction: Change imports in user registration test. 
## Code After: from django.contrib.auth import get_user_model from django.test import TestCase from django.urls import reverse from factory import fuzzy from faker import Faker class UserRegistrationTestCase(TestCase): def test_registration_view(self): """Verify that user registration view loads properly.""" response = self.client.get(reverse("account_signup")) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "account/signup.html") def test_user_registration(self): """Ensure that user registration works properly.""" EMAIL = Faker().email() PASSWORD = fuzzy.FuzzyText(length=16) test_user_data = { "password1": PASSWORD, "password2": PASSWORD, "email": EMAIL, "email2": EMAIL, } # Verify that POSTing user data to the registration view # succeeds / returns the right HTTP status code. response = self.client.post( reverse("account_signup"), test_user_data) # Successful form submission will cause the HTTP status code # to be "302 Found", not "200 OK". self.assertEqual(response.status_code, 302) # Verify that a User has been successfully created. user_model = get_user_model() user_model.objects.get(email=EMAIL)
2479b4a51b733ce8ba989d8f01b48791492d9f21
cogs/utils/dataIO.py
cogs/utils/dataIO.py
import redis_collections import threading import time import __main__ class RedisDict(redis_collections.Dict): def __init__(self, **kwargs): super().__init__(**kwargs) self.die = False self.thread = threading.Thread(target=self.update_loop, daemon=True, name=kwargs['key']) self.thread.start() self.prev = None def update_loop(self): time.sleep(2) while not self.die: if self.prev != repr(self): self.prev = repr(self) self.sync() time.sleep(0.1) else: self.cache.clear() time.sleep(0.1) class dataIO: @staticmethod def save_json(filename, content): pass # "oops" @staticmethod def load_json(filename): return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
import redis_collections import threading import time # noinspection PyUnresolvedReferences import __main__ class RedisDict(redis_collections.Dict): def __init__(self, **kwargs): super().__init__(**kwargs) self.die = False self.thread = threading.Thread(target=self.update_loop, daemon=True, name=kwargs['key']) self.thread.start() self.rthread = threading.Thread(target=self.refresh_loop, daemon=True, name=kwargs['key']) self.rthread.start() self.prev = None db = str(self.redis.connection_pool.connection_kwargs['db']) self.pubsub_format = 'liara.{}.{}'.format(db, kwargs['key']) def update_loop(self): time.sleep(2) while not self.die: if self.prev != str(self.cache): self.prev = str(self.cache) self.sync() self.redis.publish(self.pubsub_format, 'update') time.sleep(0.01) else: time.sleep(0.01) def refresh_loop(self): time.sleep(2) pubsub = self.redis.pubsub() pubsub.subscribe([self.pubsub_format]) for message in pubsub.listen(): if message['type'] == 'message': self.cache.clear() self.cache = dict(self) self.prev = str(self.cache) class dataIO: @staticmethod def save_json(filename, content): pass # "oops" @staticmethod def load_json(filename): return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
Make config sync more efficient
Make config sync more efficient
Python
mit
Thessia/Liara
import redis_collections import threading import time + # noinspection PyUnresolvedReferences import __main__ class RedisDict(redis_collections.Dict): def __init__(self, **kwargs): super().__init__(**kwargs) self.die = False self.thread = threading.Thread(target=self.update_loop, daemon=True, name=kwargs['key']) self.thread.start() + self.rthread = threading.Thread(target=self.refresh_loop, daemon=True, name=kwargs['key']) + self.rthread.start() self.prev = None + db = str(self.redis.connection_pool.connection_kwargs['db']) + self.pubsub_format = 'liara.{}.{}'.format(db, kwargs['key']) def update_loop(self): time.sleep(2) while not self.die: - if self.prev != repr(self): + if self.prev != str(self.cache): - self.prev = repr(self) + self.prev = str(self.cache) self.sync() + self.redis.publish(self.pubsub_format, 'update') - time.sleep(0.1) + time.sleep(0.01) else: + time.sleep(0.01) + + def refresh_loop(self): + time.sleep(2) + pubsub = self.redis.pubsub() + pubsub.subscribe([self.pubsub_format]) + for message in pubsub.listen(): + if message['type'] == 'message': self.cache.clear() - time.sleep(0.1) + self.cache = dict(self) + self.prev = str(self.cache) class dataIO: @staticmethod def save_json(filename, content): pass # "oops" @staticmethod def load_json(filename): return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
Make config sync more efficient
## Code Before: import redis_collections import threading import time import __main__ class RedisDict(redis_collections.Dict): def __init__(self, **kwargs): super().__init__(**kwargs) self.die = False self.thread = threading.Thread(target=self.update_loop, daemon=True, name=kwargs['key']) self.thread.start() self.prev = None def update_loop(self): time.sleep(2) while not self.die: if self.prev != repr(self): self.prev = repr(self) self.sync() time.sleep(0.1) else: self.cache.clear() time.sleep(0.1) class dataIO: @staticmethod def save_json(filename, content): pass # "oops" @staticmethod def load_json(filename): return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True) ## Instruction: Make config sync more efficient ## Code After: import redis_collections import threading import time # noinspection PyUnresolvedReferences import __main__ class RedisDict(redis_collections.Dict): def __init__(self, **kwargs): super().__init__(**kwargs) self.die = False self.thread = threading.Thread(target=self.update_loop, daemon=True, name=kwargs['key']) self.thread.start() self.rthread = threading.Thread(target=self.refresh_loop, daemon=True, name=kwargs['key']) self.rthread.start() self.prev = None db = str(self.redis.connection_pool.connection_kwargs['db']) self.pubsub_format = 'liara.{}.{}'.format(db, kwargs['key']) def update_loop(self): time.sleep(2) while not self.die: if self.prev != str(self.cache): self.prev = str(self.cache) self.sync() self.redis.publish(self.pubsub_format, 'update') time.sleep(0.01) else: time.sleep(0.01) def refresh_loop(self): time.sleep(2) pubsub = self.redis.pubsub() pubsub.subscribe([self.pubsub_format]) for message in pubsub.listen(): if message['type'] == 'message': self.cache.clear() self.cache = dict(self) self.prev = str(self.cache) class dataIO: @staticmethod def save_json(filename, content): pass # "oops" @staticmethod def load_json(filename): return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
ed3c03ac4f213f3882e28f25ae0596a7021928cd
test/ParseableInterface/Inputs/make-unreadable.py
test/ParseableInterface/Inputs/make-unreadable.py
import platform import subprocess import sys if platform.system() == 'Windows': import ctypes AdvAPI32 = ctypes.windll.Advapi32 from ctypes.wintypes import POINTER UNLEN = 256 GetUserNameW = AdvAPI32.GetUserNameW GetUserNameW.argtypes = ( ctypes.c_wchar_p, # _In_Out_ lpBuffer POINTER(ctypes.c_uint) # _In_out_ pcBuffer ) GetUserNameW.restype = ctypes.c_uint buffer = ctypes.create_unicode_buffer(UNLEN + 1) size = ctypes.c_uint(len(buffer)) GetUserNameW(buffer, ctypes.byref(size)) for path in sys.argv[1:]: subprocess.call(['icacls', path, '/deny', '{}:(R)'.format(buffer.value)]) else: for path in sys.argv[1:]: subprocess.call(['chmod', 'a-r', path])
import platform import subprocess import sys if platform.system() == 'Windows': import ctypes AdvAPI32 = ctypes.windll.Advapi32 from ctypes.wintypes import POINTER UNLEN = 256 GetUserNameW = AdvAPI32.GetUserNameW GetUserNameW.argtypes = ( ctypes.c_wchar_p, # _In_Out_ lpBuffer POINTER(ctypes.c_uint) # _In_out_ pcBuffer ) GetUserNameW.restype = ctypes.c_uint buffer = ctypes.create_unicode_buffer(UNLEN + 1) size = ctypes.c_uint(len(buffer)) GetUserNameW(buffer, ctypes.byref(size)) # For NetworkService, Host$ is returned, so we choose have to turn it back # into something that icacls understands. if not buffer.value.endswith('$'): user_name = buffer.value else: user_name = 'NT AUTHORITY\\NetworkService' for path in sys.argv[1:]: subprocess.call(['icacls', path, '/deny', '{}:(R)'.format(user_name)]) else: for path in sys.argv[1:]: subprocess.call(['chmod', 'a-r', path])
Fix handling of Network Service username.
[windows] Fix handling of Network Service username. In Windows Server 2016 at least, the Network Service user (the one being used by the CI machine) is returned as Host$, which icacls doesn't understand. Turn the name into something that icacls understands if we get a name that ends with a dollar.
Python
apache-2.0
atrick/swift,hooman/swift,harlanhaskins/swift,shahmishal/swift,stephentyrone/swift,jmgc/swift,devincoughlin/swift,ahoppen/swift,tkremenek/swift,xedin/swift,shahmishal/swift,xwu/swift,xedin/swift,harlanhaskins/swift,harlanhaskins/swift,sschiau/swift,shajrawi/swift,karwa/swift,gribozavr/swift,apple/swift,CodaFi/swift,gregomni/swift,lorentey/swift,nathawes/swift,JGiola/swift,allevato/swift,airspeedswift/swift,harlanhaskins/swift,hooman/swift,karwa/swift,rudkx/swift,CodaFi/swift,gregomni/swift,karwa/swift,benlangmuir/swift,sschiau/swift,xedin/swift,stephentyrone/swift,aschwaighofer/swift,airspeedswift/swift,jmgc/swift,nathawes/swift,lorentey/swift,tkremenek/swift,allevato/swift,jmgc/swift,xwu/swift,JGiola/swift,ahoppen/swift,shahmishal/swift,tkremenek/swift,roambotics/swift,benlangmuir/swift,roambotics/swift,hooman/swift,atrick/swift,gribozavr/swift,gregomni/swift,glessard/swift,xedin/swift,apple/swift,jckarter/swift,gregomni/swift,karwa/swift,benlangmuir/swift,sschiau/swift,xedin/swift,stephentyrone/swift,aschwaighofer/swift,jckarter/swift,CodaFi/swift,lorentey/swift,CodaFi/swift,lorentey/swift,harlanhaskins/swift,tkremenek/swift,karwa/swift,gribozavr/swift,nathawes/swift,gregomni/swift,tkremenek/swift,JGiola/swift,nathawes/swift,JGiola/swift,parkera/swift,gregomni/swift,aschwaighofer/swift,airspeedswift/swift,CodaFi/swift,karwa/swift,apple/swift,shajrawi/swift,atrick/swift,stephentyrone/swift,hooman/swift,apple/swift,xwu/swift,parkera/swift,CodaFi/swift,glessard/swift,devincoughlin/swift,glessard/swift,hooman/swift,rudkx/swift,ahoppen/swift,harlanhaskins/swift,sschiau/swift,hooman/swift,allevato/swift,shajrawi/swift,ahoppen/swift,allevato/swift,devincoughlin/swift,shajrawi/swift,devincoughlin/swift,nathawes/swift,JGiola/swift,rudkx/swift,devincoughlin/swift,benlangmuir/swift,parkera/swift,roambotics/swift,rudkx/swift,jmgc/swift,xwu/swift,xedin/swift,roambotics/swift,aschwaighofer/swift,jmgc/swift,airspeedswift/swift,shahmishal/swift,stephentyrone/swift,gribozavr/swift,karwa/swift,devincoughlin/swift,gribozavr/swift,nathawes/swift,roambotics/swift,benlangmuir/swift,rudkx/swift,shahmishal/swift,xwu/swift,glessard/swift,karwa/swift,aschwaighofer/swift,allevato/swift,parkera/swift,glessard/swift,atrick/swift,tkremenek/swift,gribozavr/swift,sschiau/swift,jmgc/swift,benlangmuir/swift,jckarter/swift,jckarter/swift,shahmishal/swift,allevato/swift,gregomni/swift,airspeedswift/swift,parkera/swift,shahmishal/swift,gribozavr/swift,sschiau/swift,jckarter/swift,JGiola/swift,harlanhaskins/swift,xedin/swift,allevato/swift,jckarter/swift,sschiau/swift,nathawes/swift,airspeedswift/swift,xedin/swift,lorentey/swift,glessard/swift,devincoughlin/swift,atrick/swift,lorentey/swift,aschwaighofer/swift,CodaFi/swift,gribozavr/swift,roambotics/swift,shajrawi/swift,rudkx/swift,airspeedswift/swift,lorentey/swift,stephentyrone/swift,apple/swift,aschwaighofer/swift,xwu/swift,xwu/swift,benlangmuir/swift,ahoppen/swift,atrick/swift,jckarter/swift,parkera/swift,parkera/swift,shajrawi/swift,hooman/swift,shahmishal/swift,stephentyrone/swift,shajrawi/swift,jmgc/swift,stephentyrone/swift,apple/swift
import platform import subprocess import sys if platform.system() == 'Windows': import ctypes AdvAPI32 = ctypes.windll.Advapi32 from ctypes.wintypes import POINTER UNLEN = 256 GetUserNameW = AdvAPI32.GetUserNameW GetUserNameW.argtypes = ( ctypes.c_wchar_p, # _In_Out_ lpBuffer POINTER(ctypes.c_uint) # _In_out_ pcBuffer ) GetUserNameW.restype = ctypes.c_uint buffer = ctypes.create_unicode_buffer(UNLEN + 1) size = ctypes.c_uint(len(buffer)) GetUserNameW(buffer, ctypes.byref(size)) + # For NetworkService, Host$ is returned, so we choose have to turn it back + # into something that icacls understands. + if not buffer.value.endswith('$'): + user_name = buffer.value + else: + user_name = 'NT AUTHORITY\\NetworkService' for path in sys.argv[1:]: subprocess.call(['icacls', path, '/deny', - '{}:(R)'.format(buffer.value)]) + '{}:(R)'.format(user_name)]) else: for path in sys.argv[1:]: subprocess.call(['chmod', 'a-r', path])
Fix handling of Network Service username.
## Code Before: import platform import subprocess import sys if platform.system() == 'Windows': import ctypes AdvAPI32 = ctypes.windll.Advapi32 from ctypes.wintypes import POINTER UNLEN = 256 GetUserNameW = AdvAPI32.GetUserNameW GetUserNameW.argtypes = ( ctypes.c_wchar_p, # _In_Out_ lpBuffer POINTER(ctypes.c_uint) # _In_out_ pcBuffer ) GetUserNameW.restype = ctypes.c_uint buffer = ctypes.create_unicode_buffer(UNLEN + 1) size = ctypes.c_uint(len(buffer)) GetUserNameW(buffer, ctypes.byref(size)) for path in sys.argv[1:]: subprocess.call(['icacls', path, '/deny', '{}:(R)'.format(buffer.value)]) else: for path in sys.argv[1:]: subprocess.call(['chmod', 'a-r', path]) ## Instruction: Fix handling of Network Service username. ## Code After: import platform import subprocess import sys if platform.system() == 'Windows': import ctypes AdvAPI32 = ctypes.windll.Advapi32 from ctypes.wintypes import POINTER UNLEN = 256 GetUserNameW = AdvAPI32.GetUserNameW GetUserNameW.argtypes = ( ctypes.c_wchar_p, # _In_Out_ lpBuffer POINTER(ctypes.c_uint) # _In_out_ pcBuffer ) GetUserNameW.restype = ctypes.c_uint buffer = ctypes.create_unicode_buffer(UNLEN + 1) size = ctypes.c_uint(len(buffer)) GetUserNameW(buffer, ctypes.byref(size)) # For NetworkService, Host$ is returned, so we choose have to turn it back # into something that icacls understands. if not buffer.value.endswith('$'): user_name = buffer.value else: user_name = 'NT AUTHORITY\\NetworkService' for path in sys.argv[1:]: subprocess.call(['icacls', path, '/deny', '{}:(R)'.format(user_name)]) else: for path in sys.argv[1:]: subprocess.call(['chmod', 'a-r', path])
794a75ed410fe39ba2376ebcab75d21cc5e9fee0
common/safeprint.py
common/safeprint.py
import multiprocessing, sys, datetime print_lock = multiprocessing.Lock() def safeprint(content): with print_lock: sys.stdout.write(("[" + str(multiprocessing.current_process().pid) + "] " + datetime.datetime.now().strftime('%H%M%S') + ": " + str(content) + '\r\n'))
import multiprocessing, sys, datetime print_lock = multiprocessing.RLock() def safeprint(content): string = "[" + str(multiprocessing.current_process().pid) + "] " + datetime.datetime.now().strftime('%H%M%S') + ": " + str(content) + '\r\n' with print_lock: sys.stdout.write(string)
Reduce the amount of time locking
Reduce the amount of time locking
Python
mit
gappleto97/Senior-Project
import multiprocessing, sys, datetime - print_lock = multiprocessing.Lock() + print_lock = multiprocessing.RLock() def safeprint(content): + string = "[" + str(multiprocessing.current_process().pid) + "] " + datetime.datetime.now().strftime('%H%M%S') + ": " + str(content) + '\r\n' with print_lock: - sys.stdout.write(("[" + str(multiprocessing.current_process().pid) + "] " + datetime.datetime.now().strftime('%H%M%S') + ": " + str(content) + '\r\n')) + sys.stdout.write(string)
Reduce the amount of time locking
## Code Before: import multiprocessing, sys, datetime print_lock = multiprocessing.Lock() def safeprint(content): with print_lock: sys.stdout.write(("[" + str(multiprocessing.current_process().pid) + "] " + datetime.datetime.now().strftime('%H%M%S') + ": " + str(content) + '\r\n')) ## Instruction: Reduce the amount of time locking ## Code After: import multiprocessing, sys, datetime print_lock = multiprocessing.RLock() def safeprint(content): string = "[" + str(multiprocessing.current_process().pid) + "] " + datetime.datetime.now().strftime('%H%M%S') + ": " + str(content) + '\r\n' with print_lock: sys.stdout.write(string)
b79ed827f7211efbcdef95286bf2d4113d6e8b88
posts/views.py
posts/views.py
from django.shortcuts import get_object_or_404 from django.views.generic.dates import ArchiveIndexView from django.views.generic.edit import FormView from .models import Entry, Category from .forms import ContactForm class CategoryView(ArchiveIndexView): model = Entry date_field = 'date' paginate_by = 20 template_name = 'posts/entry_category.html' def get(self, request, slug, **kwargs): self.kwargs['category'] = get_object_or_404(Category, slug=slug) return super().get(request, kwargs) def get_queryset(self): return Entry.objects.filter(category=self.kwargs['category']) def get_context_data(self, **kwargs): result = super().get_context_data(**kwargs) result['category'] = self.kwargs['category'] return result class ContactView(FormView): template_name = 'contact.html' form_class = ContactForm success_url = '/kontakt/' def form_valid(self, form): # This method is called when valid form data has been POSTed. # It should return an HttpResponse. form.send_email() return super().form_valid(form)
from django.shortcuts import get_object_or_404 from django.views.generic.dates import ArchiveIndexView from django.views.generic.edit import FormView from .models import Entry, Category from .forms import ContactForm class CategoryView(ArchiveIndexView): model = Entry date_field = 'date' paginate_by = 20 template_name = 'posts/entry_category.html' def get(self, request, slug, **kwargs): self.kwargs['category'] = get_object_or_404(Category, slug=slug) return super().get(request, kwargs) def get_queryset(self): return super().get_queryset().filter(category=self.kwargs['category']) def get_context_data(self, **kwargs): result = super().get_context_data(**kwargs) result['category'] = self.kwargs['category'] return result class ContactView(FormView): template_name = 'contact.html' form_class = ContactForm success_url = '/kontakt/' def form_valid(self, form): # This method is called when valid form data has been POSTed. # It should return an HttpResponse. form.send_email() return super().form_valid(form)
Fix ordering of category view
Fix ordering of category view Signed-off-by: Michal Čihař <a2df1e659c9fd2578de0a26565357cb273292eeb@cihar.com>
Python
agpl-3.0
nijel/photoblog,nijel/photoblog
from django.shortcuts import get_object_or_404 from django.views.generic.dates import ArchiveIndexView from django.views.generic.edit import FormView from .models import Entry, Category from .forms import ContactForm class CategoryView(ArchiveIndexView): model = Entry date_field = 'date' paginate_by = 20 template_name = 'posts/entry_category.html' def get(self, request, slug, **kwargs): self.kwargs['category'] = get_object_or_404(Category, slug=slug) return super().get(request, kwargs) def get_queryset(self): - return Entry.objects.filter(category=self.kwargs['category']) + return super().get_queryset().filter(category=self.kwargs['category']) def get_context_data(self, **kwargs): result = super().get_context_data(**kwargs) result['category'] = self.kwargs['category'] return result class ContactView(FormView): template_name = 'contact.html' form_class = ContactForm success_url = '/kontakt/' def form_valid(self, form): # This method is called when valid form data has been POSTed. # It should return an HttpResponse. form.send_email() return super().form_valid(form)
Fix ordering of category view
## Code Before: from django.shortcuts import get_object_or_404 from django.views.generic.dates import ArchiveIndexView from django.views.generic.edit import FormView from .models import Entry, Category from .forms import ContactForm class CategoryView(ArchiveIndexView): model = Entry date_field = 'date' paginate_by = 20 template_name = 'posts/entry_category.html' def get(self, request, slug, **kwargs): self.kwargs['category'] = get_object_or_404(Category, slug=slug) return super().get(request, kwargs) def get_queryset(self): return Entry.objects.filter(category=self.kwargs['category']) def get_context_data(self, **kwargs): result = super().get_context_data(**kwargs) result['category'] = self.kwargs['category'] return result class ContactView(FormView): template_name = 'contact.html' form_class = ContactForm success_url = '/kontakt/' def form_valid(self, form): # This method is called when valid form data has been POSTed. # It should return an HttpResponse. form.send_email() return super().form_valid(form) ## Instruction: Fix ordering of category view ## Code After: from django.shortcuts import get_object_or_404 from django.views.generic.dates import ArchiveIndexView from django.views.generic.edit import FormView from .models import Entry, Category from .forms import ContactForm class CategoryView(ArchiveIndexView): model = Entry date_field = 'date' paginate_by = 20 template_name = 'posts/entry_category.html' def get(self, request, slug, **kwargs): self.kwargs['category'] = get_object_or_404(Category, slug=slug) return super().get(request, kwargs) def get_queryset(self): return super().get_queryset().filter(category=self.kwargs['category']) def get_context_data(self, **kwargs): result = super().get_context_data(**kwargs) result['category'] = self.kwargs['category'] return result class ContactView(FormView): template_name = 'contact.html' form_class = ContactForm success_url = '/kontakt/' def form_valid(self, form): # This method is called when valid form data has been POSTed. # It should return an HttpResponse. form.send_email() return super().form_valid(form)
3838e44a397fdb4b605ead875b7c6ebc5787644d
jal_stats/stats/serializers.py
jal_stats/stats/serializers.py
from rest_framework import serializers from .models import Activity, Datapoint class ActivitySerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Activity fields = ('user', 'full_description', 'units', 'url') class DatapointSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Datapoint fields = ('user', 'activity', 'reps', 'timestamp', 'url')
from rest_framework import serializers from .models import Activity, Datapoint class ActivitySerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Activity fields = ('id', 'user', 'full_description', 'units', 'url') class DatapointSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Datapoint fields = ('id', 'user', 'activity', 'reps', 'timestamp', 'url')
Add 'id' to both Serializers
Add 'id' to both Serializers
Python
mit
jal-stats/django
from rest_framework import serializers from .models import Activity, Datapoint class ActivitySerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Activity - fields = ('user', 'full_description', 'units', 'url') + fields = ('id', 'user', 'full_description', 'units', 'url') class DatapointSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Datapoint - fields = ('user', 'activity', 'reps', 'timestamp', 'url') + fields = ('id', 'user', 'activity', 'reps', 'timestamp', 'url')
Add 'id' to both Serializers
## Code Before: from rest_framework import serializers from .models import Activity, Datapoint class ActivitySerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Activity fields = ('user', 'full_description', 'units', 'url') class DatapointSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Datapoint fields = ('user', 'activity', 'reps', 'timestamp', 'url') ## Instruction: Add 'id' to both Serializers ## Code After: from rest_framework import serializers from .models import Activity, Datapoint class ActivitySerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Activity fields = ('id', 'user', 'full_description', 'units', 'url') class DatapointSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Datapoint fields = ('id', 'user', 'activity', 'reps', 'timestamp', 'url')
271b4cd3795cbe0e5e013ac53c3ea26ca08e7a1a
IPython/utils/importstring.py
IPython/utils/importstring.py
#----------------------------------------------------------------------------- # Copyright (C) 2008-2011 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Functions and classes #----------------------------------------------------------------------------- def import_item(name): """Import and return bar given the string foo.bar.""" package = '.'.join(name.split('.')[0:-1]) obj = name.split('.')[-1] # Note: the original code for this was the following. We've left it # visible for now in case the new implementation shows any problems down # the road, to make it easier on anyone looking for a problem. This code # should be removed once we're comfortable we didn't break anything. ## execString = 'from %s import %s' % (package, obj) ## try: ## exec execString ## except SyntaxError: ## raise ImportError("Invalid class specification: %s" % name) ## exec 'temp = %s' % obj ## return temp if package: module = __import__(package,fromlist=[obj]) return module.__dict__[obj] else: return __import__(obj)
#----------------------------------------------------------------------------- # Copyright (C) 2008-2011 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Functions and classes #----------------------------------------------------------------------------- def import_item(name): """Import and return bar given the string foo.bar.""" package = '.'.join(name.split('.')[0:-1]) obj = name.split('.')[-1] # Note: the original code for this was the following. We've left it # visible for now in case the new implementation shows any problems down # the road, to make it easier on anyone looking for a problem. This code # should be removed once we're comfortable we didn't break anything. ## execString = 'from %s import %s' % (package, obj) ## try: ## exec execString ## except SyntaxError: ## raise ImportError("Invalid class specification: %s" % name) ## exec 'temp = %s' % obj ## return temp if package: module = __import__(package,fromlist=[obj]) try: pak = module.__dict__[obj] except KeyError: raise ImportError('No module named %s' % obj) return pak else: return __import__(obj)
Fix error in test suite startup with dotted import names.
Fix error in test suite startup with dotted import names. Detected first on ubuntu 12.04, but the bug is generic, we just hadn't seen it before. Will push straight to master as this will begin causing problems as more people upgrade.
Python
bsd-3-clause
ipython/ipython,ipython/ipython
#----------------------------------------------------------------------------- # Copyright (C) 2008-2011 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Functions and classes #----------------------------------------------------------------------------- def import_item(name): """Import and return bar given the string foo.bar.""" package = '.'.join(name.split('.')[0:-1]) obj = name.split('.')[-1] # Note: the original code for this was the following. We've left it # visible for now in case the new implementation shows any problems down # the road, to make it easier on anyone looking for a problem. This code # should be removed once we're comfortable we didn't break anything. ## execString = 'from %s import %s' % (package, obj) ## try: ## exec execString ## except SyntaxError: ## raise ImportError("Invalid class specification: %s" % name) ## exec 'temp = %s' % obj ## return temp if package: module = __import__(package,fromlist=[obj]) + try: - return module.__dict__[obj] + pak = module.__dict__[obj] + except KeyError: + raise ImportError('No module named %s' % obj) + return pak else: return __import__(obj)
Fix error in test suite startup with dotted import names.
## Code Before: #----------------------------------------------------------------------------- # Copyright (C) 2008-2011 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Functions and classes #----------------------------------------------------------------------------- def import_item(name): """Import and return bar given the string foo.bar.""" package = '.'.join(name.split('.')[0:-1]) obj = name.split('.')[-1] # Note: the original code for this was the following. We've left it # visible for now in case the new implementation shows any problems down # the road, to make it easier on anyone looking for a problem. This code # should be removed once we're comfortable we didn't break anything. ## execString = 'from %s import %s' % (package, obj) ## try: ## exec execString ## except SyntaxError: ## raise ImportError("Invalid class specification: %s" % name) ## exec 'temp = %s' % obj ## return temp if package: module = __import__(package,fromlist=[obj]) return module.__dict__[obj] else: return __import__(obj) ## Instruction: Fix error in test suite startup with dotted import names. ## Code After: #----------------------------------------------------------------------------- # Copyright (C) 2008-2011 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Functions and classes #----------------------------------------------------------------------------- def import_item(name): """Import and return bar given the string foo.bar.""" package = '.'.join(name.split('.')[0:-1]) obj = name.split('.')[-1] # Note: the original code for this was the following. We've left it # visible for now in case the new implementation shows any problems down # the road, to make it easier on anyone looking for a problem. This code # should be removed once we're comfortable we didn't break anything. ## execString = 'from %s import %s' % (package, obj) ## try: ## exec execString ## except SyntaxError: ## raise ImportError("Invalid class specification: %s" % name) ## exec 'temp = %s' % obj ## return temp if package: module = __import__(package,fromlist=[obj]) try: pak = module.__dict__[obj] except KeyError: raise ImportError('No module named %s' % obj) return pak else: return __import__(obj)
1c3c092afae1946e72a87cca8792bd012bee23e4
ktbs_bench/utils/decorators.py
ktbs_bench/utils/decorators.py
from functools import wraps from inspect import getcallargs from timer import Timer def bench(f): """Times a function given specific arguments.""" # TODO mettre args (n_repeat, func) qui execute n_repeat fois et applique un reduce(res, func) @wraps(f) def wrapped(*args, **kwargs): timer = Timer(tick_now=False) timer.start() f(*args, **kwargs) timer.stop() res = [call_signature(f, *args, **kwargs), timer.get_times()['real']] # TODO penser a quel temps garder return res return wrapped def call_signature(f, *args, **kwargs): """Return a string representation of a function call.""" call_args = getcallargs(f, *args, **kwargs) return ';'.join(["%s=%s" % (k, v) for k, v in call_args.items()])
from functools import wraps from inspect import getcallargs from timer import Timer def bench(f): """Decorator to time a function. Parameters ---------- f : function The function to time. Returns ------- call_signature : str The signature of the function call, with parameter names and values. time : float The real time taken to execute the function, in second. Examples -------- >>> @bench ... def square_list(numbers): ... for ind_num in range(len(numbers)): ... numbers[ind_num] *= numbers[ind_num] ... return numbers >>> call_sig, time = square_list(range(10)) >>> call_sig 'numbers=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]' >>> 0 < time < 1 # benched function is not computationally intensive so time should be less than 1 s True """ @wraps(f) def wrapped(*args, **kwargs): """Actual benchmark takes place here.""" call_sig = call_signature(f, *args, **kwargs) timer = Timer(tick_now=False) timer.start() f(*args, **kwargs) timer.stop() res = [call_sig, timer.get_times()['real']] return res return wrapped def call_signature(f, *args, **kwargs): """Return a string representation of a function call. Parameters ---------- f : function The function to get the call signature from. args : list List of arguments. kwargs : dict Dictionary of argument names and values. Returns ------- out : str String representation of the function call Examples -------- >>> def square(num): ... return num*num >>> call_signature(square, 4) 'num=4' """ call_args = getcallargs(f, *args, **kwargs) return ';'.join(["%s=%s" % (k, v) for k, v in call_args.items()])
Add docstrings and fix call of call_signature.
Add docstrings and fix call of call_signature. For the fix: call_signature has been moved before executing the actual call, if the call is made before then it might change arguments if they are references.
Python
mit
ktbs/ktbs-bench,ktbs/ktbs-bench
from functools import wraps from inspect import getcallargs from timer import Timer def bench(f): - """Times a function given specific arguments.""" + """Decorator to time a function. - # TODO mettre args (n_repeat, func) qui execute n_repeat fois et applique un reduce(res, func) + Parameters + ---------- + f : function + The function to time. + + Returns + ------- + call_signature : str + The signature of the function call, with parameter names and values. + time : float + The real time taken to execute the function, in second. + + Examples + -------- + >>> @bench + ... def square_list(numbers): + ... for ind_num in range(len(numbers)): + ... numbers[ind_num] *= numbers[ind_num] + ... return numbers + >>> call_sig, time = square_list(range(10)) + >>> call_sig + 'numbers=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]' + >>> 0 < time < 1 # benched function is not computationally intensive so time should be less than 1 s + True + """ + @wraps(f) def wrapped(*args, **kwargs): + """Actual benchmark takes place here.""" + call_sig = call_signature(f, *args, **kwargs) + timer = Timer(tick_now=False) timer.start() f(*args, **kwargs) timer.stop() + res = [call_sig, timer.get_times()['real']] - res = [call_signature(f, *args, **kwargs), - timer.get_times()['real']] # TODO penser a quel temps garder return res return wrapped def call_signature(f, *args, **kwargs): - """Return a string representation of a function call.""" + """Return a string representation of a function call. + + Parameters + ---------- + f : function + The function to get the call signature from. + args : list + List of arguments. + kwargs : dict + Dictionary of argument names and values. + + Returns + ------- + out : str + String representation of the function call + + Examples + -------- + >>> def square(num): + ... return num*num + >>> call_signature(square, 4) + 'num=4' + """ call_args = getcallargs(f, *args, **kwargs) return ';'.join(["%s=%s" % (k, v) for k, v in call_args.items()])
Add docstrings and fix call of call_signature.
## Code Before: from functools import wraps from inspect import getcallargs from timer import Timer def bench(f): """Times a function given specific arguments.""" # TODO mettre args (n_repeat, func) qui execute n_repeat fois et applique un reduce(res, func) @wraps(f) def wrapped(*args, **kwargs): timer = Timer(tick_now=False) timer.start() f(*args, **kwargs) timer.stop() res = [call_signature(f, *args, **kwargs), timer.get_times()['real']] # TODO penser a quel temps garder return res return wrapped def call_signature(f, *args, **kwargs): """Return a string representation of a function call.""" call_args = getcallargs(f, *args, **kwargs) return ';'.join(["%s=%s" % (k, v) for k, v in call_args.items()]) ## Instruction: Add docstrings and fix call of call_signature. ## Code After: from functools import wraps from inspect import getcallargs from timer import Timer def bench(f): """Decorator to time a function. Parameters ---------- f : function The function to time. Returns ------- call_signature : str The signature of the function call, with parameter names and values. time : float The real time taken to execute the function, in second. Examples -------- >>> @bench ... def square_list(numbers): ... for ind_num in range(len(numbers)): ... numbers[ind_num] *= numbers[ind_num] ... return numbers >>> call_sig, time = square_list(range(10)) >>> call_sig 'numbers=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]' >>> 0 < time < 1 # benched function is not computationally intensive so time should be less than 1 s True """ @wraps(f) def wrapped(*args, **kwargs): """Actual benchmark takes place here.""" call_sig = call_signature(f, *args, **kwargs) timer = Timer(tick_now=False) timer.start() f(*args, **kwargs) timer.stop() res = [call_sig, timer.get_times()['real']] return res return wrapped def call_signature(f, *args, **kwargs): """Return a string representation of a function call. Parameters ---------- f : function The function to get the call signature from. args : list List of arguments. 
kwargs : dict Dictionary of argument names and values. Returns ------- out : str String representation of the function call Examples -------- >>> def square(num): ... return num*num >>> call_signature(square, 4) 'num=4' """ call_args = getcallargs(f, *args, **kwargs) return ';'.join(["%s=%s" % (k, v) for k, v in call_args.items()])
15be3bd492a0808713c6ae6981ecb99acacd5297
allauth/socialaccount/providers/trello/provider.py
allauth/socialaccount/providers/trello/provider.py
from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider]
from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name data['scope'] = self.get_scope(request) # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider]
Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
feat(TrelloProvider): Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
Python
mit
AltSchool/django-allauth,AltSchool/django-allauth,AltSchool/django-allauth
from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name + data['scope'] = self.get_scope(request) # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider]
Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
## Code Before: from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider] ## Instruction: Use 'scope' in TrelloProvider auth params. Allows overriding from django settings. ## Code After: from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name data['scope'] = self.get_scope(request) # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider]
a35b6e46bd9d443f07391f37f5e0e384e37608bb
nbgrader/tests/test_nbgrader_feedback.py
nbgrader/tests/test_nbgrader_feedback.py
from .base import TestBase from nbgrader.api import Gradebook import os class TestNbgraderFeedback(TestBase): def _setup_db(self): dbpath = self._init_db() gb = Gradebook(dbpath) gb.add_assignment("Problem Set 1") gb.add_student("foo") gb.add_student("bar") return dbpath def test_help(self): """Does the help display without error?""" with self._temp_cwd(): self._run_command("nbgrader feedback --help-all") def test_single_file(self): """Can feedback be generated for an unchanged assignment?""" with self._temp_cwd(["files/submitted-unchanged.ipynb"]): dbpath = self._setup_db() self._run_command( 'nbgrader autograde submitted-unchanged.ipynb ' '--db="{}" ' '--assignment="Problem Set 1" ' '--AssignmentExporter.notebook_id=teacher ' '--student=foo'.format(dbpath)) self._run_command( 'nbgrader feedback submitted-unchanged.nbconvert.ipynb ' '--db="{}" ' '--assignment="Problem Set 1" ' '--AssignmentExporter.notebook_id=teacher ' '--student=foo'.format(dbpath)) assert os.path.exists('submitted-unchanged.nbconvert.nbconvert.html')
from .base import TestBase from nbgrader.api import Gradebook import os import shutil class TestNbgraderFeedback(TestBase): def _setup_db(self): dbpath = self._init_db() gb = Gradebook(dbpath) gb.add_assignment("ps1") gb.add_student("foo") return dbpath def test_help(self): """Does the help display without error?""" with self._temp_cwd(): self._run_command("nbgrader feedback --help-all") def test_single_file(self): """Can feedback be generated for an unchanged assignment?""" with self._temp_cwd(["files/submitted-unchanged.ipynb"]): dbpath = self._setup_db() os.makedirs('source/ps1') shutil.copy('submitted-unchanged.ipynb', 'source/ps1/p1.ipynb') self._run_command('nbgrader assign ps1 --db="{}" '.format(dbpath)) os.makedirs('submitted/foo/ps1') shutil.move('submitted-unchanged.ipynb', 'submitted/foo/ps1/p1.ipynb') self._run_command('nbgrader autograde ps1 --db="{}" '.format(dbpath)) self._run_command('nbgrader feedback ps1 --db="{}" '.format(dbpath)) assert os.path.exists('feedback/foo/ps1/p1.html')
Update tests for nbgrader feedback
Update tests for nbgrader feedback
Python
bsd-3-clause
jhamrick/nbgrader,alope107/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,modulexcite/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jdfreder/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,dementrock/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,jupyter/nbgrader,modulexcite/nbgrader,jhamrick/nbgrader,alope107/nbgrader,jdfreder/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,dementrock/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader
from .base import TestBase from nbgrader.api import Gradebook import os + import shutil class TestNbgraderFeedback(TestBase): def _setup_db(self): dbpath = self._init_db() gb = Gradebook(dbpath) - gb.add_assignment("Problem Set 1") + gb.add_assignment("ps1") gb.add_student("foo") - gb.add_student("bar") return dbpath def test_help(self): """Does the help display without error?""" with self._temp_cwd(): self._run_command("nbgrader feedback --help-all") def test_single_file(self): """Can feedback be generated for an unchanged assignment?""" with self._temp_cwd(["files/submitted-unchanged.ipynb"]): dbpath = self._setup_db() - self._run_command( - 'nbgrader autograde submitted-unchanged.ipynb ' - '--db="{}" ' - '--assignment="Problem Set 1" ' - '--AssignmentExporter.notebook_id=teacher ' - '--student=foo'.format(dbpath)) + os.makedirs('source/ps1') + shutil.copy('submitted-unchanged.ipynb', 'source/ps1/p1.ipynb') + self._run_command('nbgrader assign ps1 --db="{}" '.format(dbpath)) - self._run_command( - 'nbgrader feedback submitted-unchanged.nbconvert.ipynb ' - '--db="{}" ' - '--assignment="Problem Set 1" ' - '--AssignmentExporter.notebook_id=teacher ' - '--student=foo'.format(dbpath)) - assert os.path.exists('submitted-unchanged.nbconvert.nbconvert.html') + os.makedirs('submitted/foo/ps1') + shutil.move('submitted-unchanged.ipynb', 'submitted/foo/ps1/p1.ipynb') + self._run_command('nbgrader autograde ps1 --db="{}" '.format(dbpath)) + self._run_command('nbgrader feedback ps1 --db="{}" '.format(dbpath)) + assert os.path.exists('feedback/foo/ps1/p1.html') +
Update tests for nbgrader feedback
## Code Before: from .base import TestBase from nbgrader.api import Gradebook import os class TestNbgraderFeedback(TestBase): def _setup_db(self): dbpath = self._init_db() gb = Gradebook(dbpath) gb.add_assignment("Problem Set 1") gb.add_student("foo") gb.add_student("bar") return dbpath def test_help(self): """Does the help display without error?""" with self._temp_cwd(): self._run_command("nbgrader feedback --help-all") def test_single_file(self): """Can feedback be generated for an unchanged assignment?""" with self._temp_cwd(["files/submitted-unchanged.ipynb"]): dbpath = self._setup_db() self._run_command( 'nbgrader autograde submitted-unchanged.ipynb ' '--db="{}" ' '--assignment="Problem Set 1" ' '--AssignmentExporter.notebook_id=teacher ' '--student=foo'.format(dbpath)) self._run_command( 'nbgrader feedback submitted-unchanged.nbconvert.ipynb ' '--db="{}" ' '--assignment="Problem Set 1" ' '--AssignmentExporter.notebook_id=teacher ' '--student=foo'.format(dbpath)) assert os.path.exists('submitted-unchanged.nbconvert.nbconvert.html') ## Instruction: Update tests for nbgrader feedback ## Code After: from .base import TestBase from nbgrader.api import Gradebook import os import shutil class TestNbgraderFeedback(TestBase): def _setup_db(self): dbpath = self._init_db() gb = Gradebook(dbpath) gb.add_assignment("ps1") gb.add_student("foo") return dbpath def test_help(self): """Does the help display without error?""" with self._temp_cwd(): self._run_command("nbgrader feedback --help-all") def test_single_file(self): """Can feedback be generated for an unchanged assignment?""" with self._temp_cwd(["files/submitted-unchanged.ipynb"]): dbpath = self._setup_db() os.makedirs('source/ps1') shutil.copy('submitted-unchanged.ipynb', 'source/ps1/p1.ipynb') self._run_command('nbgrader assign ps1 --db="{}" '.format(dbpath)) os.makedirs('submitted/foo/ps1') shutil.move('submitted-unchanged.ipynb', 'submitted/foo/ps1/p1.ipynb') self._run_command('nbgrader autograde ps1 --db="{}" 
'.format(dbpath)) self._run_command('nbgrader feedback ps1 --db="{}" '.format(dbpath)) assert os.path.exists('feedback/foo/ps1/p1.html')
0749c47bb280230ae5b1e2cda23773d3b10b2491
redis_check.py
redis_check.py
import sys import redis import redis.exceptions host = sys.argv[1] host = host.strip('\r\n') port = 6379 timeout = 5 try: db = redis.StrictRedis(host=host, port=port, socket_timeout=timeout) i = db.info() ver = i.get('redis_version') siz = db.dbsize() print('[+] {0}:{1} - {2}({3})'.format(host, port, ver, siz)) except redis.exceptions.ResponseError as e: print('[+] {0}:{1} - {2}'.format(host, port, e)) except redis.exceptions.ConnectionError: print('[-] {0}:{1} - Connection Error'.format(host, port)) except redis.exceptions.TimeoutError: print('[-] {0}:{1} - Timeout'.format(host, port)) except redis.exceptions.InvalidResponse: print('[-] {0}:{1} - Invalid Response'.format(host, port))
import sys import redis import redis.exceptions host = sys.argv[1] host = host.strip('\r\n') port = 6379 timeout = 5 try: db = redis.StrictRedis(host=host, port=port, socket_timeout=timeout) i = db.info() ver = i.get('redis_version') siz = db.dbsize() print('[+] {0}:{1}:{2}'.format(host, ver, siz)) except redis.exceptions.ResponseError as e: print('[+] {0}::{1}'.format(host, e)) except redis.exceptions.ConnectionError: print('[-] {0}::Connection Error'.format(host)) except redis.exceptions.TimeoutError: print('[-] {0}::Timeout'.format(host)) except redis.exceptions.InvalidResponse: print('[-] {0}::Invalid Response'.format(host))
Make output easier to parse with cli tools.
Make output easier to parse with cli tools.
Python
bsd-3-clause
averagesecurityguy/research
import sys import redis import redis.exceptions host = sys.argv[1] host = host.strip('\r\n') port = 6379 timeout = 5 try: db = redis.StrictRedis(host=host, port=port, socket_timeout=timeout) i = db.info() ver = i.get('redis_version') siz = db.dbsize() - print('[+] {0}:{1} - {2}({3})'.format(host, port, ver, siz)) + print('[+] {0}:{1}:{2}'.format(host, ver, siz)) except redis.exceptions.ResponseError as e: - print('[+] {0}:{1} - {2}'.format(host, port, e)) + print('[+] {0}::{1}'.format(host, e)) except redis.exceptions.ConnectionError: - print('[-] {0}:{1} - Connection Error'.format(host, port)) + print('[-] {0}::Connection Error'.format(host)) except redis.exceptions.TimeoutError: - print('[-] {0}:{1} - Timeout'.format(host, port)) + print('[-] {0}::Timeout'.format(host)) except redis.exceptions.InvalidResponse: - print('[-] {0}:{1} - Invalid Response'.format(host, port)) + print('[-] {0}::Invalid Response'.format(host))
Make output easier to parse with cli tools.
## Code Before: import sys import redis import redis.exceptions host = sys.argv[1] host = host.strip('\r\n') port = 6379 timeout = 5 try: db = redis.StrictRedis(host=host, port=port, socket_timeout=timeout) i = db.info() ver = i.get('redis_version') siz = db.dbsize() print('[+] {0}:{1} - {2}({3})'.format(host, port, ver, siz)) except redis.exceptions.ResponseError as e: print('[+] {0}:{1} - {2}'.format(host, port, e)) except redis.exceptions.ConnectionError: print('[-] {0}:{1} - Connection Error'.format(host, port)) except redis.exceptions.TimeoutError: print('[-] {0}:{1} - Timeout'.format(host, port)) except redis.exceptions.InvalidResponse: print('[-] {0}:{1} - Invalid Response'.format(host, port)) ## Instruction: Make output easier to parse with cli tools. ## Code After: import sys import redis import redis.exceptions host = sys.argv[1] host = host.strip('\r\n') port = 6379 timeout = 5 try: db = redis.StrictRedis(host=host, port=port, socket_timeout=timeout) i = db.info() ver = i.get('redis_version') siz = db.dbsize() print('[+] {0}:{1}:{2}'.format(host, ver, siz)) except redis.exceptions.ResponseError as e: print('[+] {0}::{1}'.format(host, e)) except redis.exceptions.ConnectionError: print('[-] {0}::Connection Error'.format(host)) except redis.exceptions.TimeoutError: print('[-] {0}::Timeout'.format(host)) except redis.exceptions.InvalidResponse: print('[-] {0}::Invalid Response'.format(host))
8e53b65b5f28a02f8ee980b9f53a57e7cdd077bd
main.py
main.py
import places from character import Character import actions import options from multiple_choice import MultipleChoice def combat(character): """ takes in a character, returns outcome of fight """ return actions.Attack(character.person).get_outcome(character) def main(): """ The goal is to have the main function operate as follows: Set up the initial state Display the initial message Display the initial options Choose an action Get an outcome Display results of the outcomes Outcome changes game state """ character = Character() character.place = places.tavern choices = MultipleChoice() options.set_initial_actions(choices) print("\n---The St. George Game---\n") print("You are in a tavern. The local assassins hate you.") while character.alive and character.alone and not character.lose: action = choices.choose_action() if not character.threatened or action.combat_action: outcome = action.get_outcome(character) else: outcome = combat(character) if not character.alive: break outcome.execute() options.add_actions(choices, character, outcome) choices.generate_actions(character) if __name__ == "__main__": main()
import places from character import Character import actions import options from multiple_choice import MultipleChoice def main(): """ The goal is to have the main function operate as follows: Set up the initial state Display the initial message Display the initial options Choose an action Get an outcome Display results of the outcomes Outcome changes game state """ character = Character(place=places.tavern) choices = MultipleChoice() options.set_initial_actions(choices) print("\n---The St. George Game---\n") print("You are in a tavern. The local assassins hate you.") while character.alive and character.alone and not character.lose: action = choices.choose_action() if not character.threatened or action.combat_action: outcome = action.get_outcome(character) else: outcome = actions.Attack(character.person).get_outcome(character) outcome.execute() options.add_actions(choices, character, outcome) choices.generate_actions(character) if __name__ == "__main__": main()
Refactor combat code to be more concise
Refactor combat code to be more concise
Python
apache-2.0
SageBerg/St.GeorgeGame,SageBerg/St.GeorgeGame,SageBerg/St.GeorgeGame,SageBerg/St.GeorgeGame
import places from character import Character import actions import options from multiple_choice import MultipleChoice - - - def combat(character): - """ - takes in a character, returns outcome of fight - """ - return actions.Attack(character.person).get_outcome(character) def main(): """ The goal is to have the main function operate as follows: Set up the initial state Display the initial message Display the initial options Choose an action Get an outcome Display results of the outcomes Outcome changes game state """ - character = Character() - character.place = places.tavern + character = Character(place=places.tavern) choices = MultipleChoice() options.set_initial_actions(choices) print("\n---The St. George Game---\n") print("You are in a tavern. The local assassins hate you.") while character.alive and character.alone and not character.lose: action = choices.choose_action() if not character.threatened or action.combat_action: outcome = action.get_outcome(character) else: + outcome = actions.Attack(character.person).get_outcome(character) - outcome = combat(character) - if not character.alive: - break outcome.execute() options.add_actions(choices, character, outcome) choices.generate_actions(character) if __name__ == "__main__": main()
Refactor combat code to be more concise
## Code Before: import places from character import Character import actions import options from multiple_choice import MultipleChoice def combat(character): """ takes in a character, returns outcome of fight """ return actions.Attack(character.person).get_outcome(character) def main(): """ The goal is to have the main function operate as follows: Set up the initial state Display the initial message Display the initial options Choose an action Get an outcome Display results of the outcomes Outcome changes game state """ character = Character() character.place = places.tavern choices = MultipleChoice() options.set_initial_actions(choices) print("\n---The St. George Game---\n") print("You are in a tavern. The local assassins hate you.") while character.alive and character.alone and not character.lose: action = choices.choose_action() if not character.threatened or action.combat_action: outcome = action.get_outcome(character) else: outcome = combat(character) if not character.alive: break outcome.execute() options.add_actions(choices, character, outcome) choices.generate_actions(character) if __name__ == "__main__": main() ## Instruction: Refactor combat code to be more concise ## Code After: import places from character import Character import actions import options from multiple_choice import MultipleChoice def main(): """ The goal is to have the main function operate as follows: Set up the initial state Display the initial message Display the initial options Choose an action Get an outcome Display results of the outcomes Outcome changes game state """ character = Character(place=places.tavern) choices = MultipleChoice() options.set_initial_actions(choices) print("\n---The St. George Game---\n") print("You are in a tavern. 
The local assassins hate you.") while character.alive and character.alone and not character.lose: action = choices.choose_action() if not character.threatened or action.combat_action: outcome = action.get_outcome(character) else: outcome = actions.Attack(character.person).get_outcome(character) outcome.execute() options.add_actions(choices, character, outcome) choices.generate_actions(character) if __name__ == "__main__": main()
75d6e88de0ed8f8cb081de15ce0d3949a78c9ded
efselab/build.py
efselab/build.py
from distutils.core import setup, Extension MODULES_TO_BUILD = ["fasthash", "suc", "lemmatize"] for module in MODULES_TO_BUILD: setup( name=module, ext_modules=[ Extension( name=module, sources=['%s.c' % module], libraries=[], extra_compile_args=['-Wall', '-Wno-unused-function'], extra_link_args=[] ) ], script_args=['build_ext', '--inplace'] )
from distutils.core import setup, Extension MODULES_TO_BUILD = ["fasthash", "suc", "lemmatize"] def main(): for module in MODULES_TO_BUILD: setup( name=module, ext_modules=[ Extension( name=module, sources=['%s.c' % module], libraries=[], extra_compile_args=['-Wall', '-Wno-unused-function'], extra_link_args=[] ) ], script_args=['build_ext', '--inplace'] ) if __name__ == '__main__': main()
Put module in method to enable calls from libraries.
Put module in method to enable calls from libraries. Former-commit-id: e614cec07ee71723be5b114163fe835961f6811c
Python
mit
EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger
from distutils.core import setup, Extension MODULES_TO_BUILD = ["fasthash", "suc", "lemmatize"] + def main(): - for module in MODULES_TO_BUILD: + for module in MODULES_TO_BUILD: - setup( + setup( - name=module, + name=module, - ext_modules=[ + ext_modules=[ - Extension( + Extension( - name=module, + name=module, - sources=['%s.c' % module], + sources=['%s.c' % module], - libraries=[], + libraries=[], - extra_compile_args=['-Wall', '-Wno-unused-function'], + extra_compile_args=['-Wall', '-Wno-unused-function'], - extra_link_args=[] + extra_link_args=[] + ) - ) + ], - ], - script_args=['build_ext', '--inplace'] + script_args=['build_ext', '--inplace'] - ) + ) + if __name__ == '__main__': + main() +
Put module in method to enable calls from libraries.
## Code Before: from distutils.core import setup, Extension MODULES_TO_BUILD = ["fasthash", "suc", "lemmatize"] for module in MODULES_TO_BUILD: setup( name=module, ext_modules=[ Extension( name=module, sources=['%s.c' % module], libraries=[], extra_compile_args=['-Wall', '-Wno-unused-function'], extra_link_args=[] ) ], script_args=['build_ext', '--inplace'] ) ## Instruction: Put module in method to enable calls from libraries. ## Code After: from distutils.core import setup, Extension MODULES_TO_BUILD = ["fasthash", "suc", "lemmatize"] def main(): for module in MODULES_TO_BUILD: setup( name=module, ext_modules=[ Extension( name=module, sources=['%s.c' % module], libraries=[], extra_compile_args=['-Wall', '-Wno-unused-function'], extra_link_args=[] ) ], script_args=['build_ext', '--inplace'] ) if __name__ == '__main__': main()
967ec17d15f07191e6d42fc122eb5e731605ad67
git_code_debt/repo_parser.py
git_code_debt/repo_parser.py
import collections import contextlib import shutil import subprocess import tempfile from util.iter import chunk_iter Commit = collections.namedtuple('Commit', ['sha', 'date', 'name']) class RepoParser(object): def __init__(self, git_repo, ref): self.git_repo = git_repo self.ref = ref self.tempdir = None @contextlib.contextmanager def repo_checked_out(self): assert not self.tempdir self.tempdir = tempfile.mkdtemp(suffix='temp-repo') try: subprocess.call( ['git', 'clone', self.git_repo, self.tempdir], stdout=None, ) subprocess.call( ['git', 'checkout', self.ref], cwd=self.tempdir, stdout=None, ) yield finally: shutil.rmtree(self.tempdir) self.tempdir = None def get_commit_shas(self, since=None): """Returns a list of Commit objects. Args: since - (optional) A timestamp to look from. """ assert self.tempdir cmd = ['git', 'log', self.ref, '--topo-order', '--format=%H%n%at%n%cN'] if since: cmd += ['--after={0}'.format(since)] output = subprocess.check_output( cmd, cwd=self.tempdir, ) commits = [] for sha, date, name in chunk_iter(output.splitlines(), 3): commits.append(Commit(sha, int(date), name)) return commits
import collections import contextlib import shutil import subprocess import tempfile from util.iter import chunk_iter Commit = collections.namedtuple('Commit', ['sha', 'date', 'name']) class RepoParser(object): def __init__(self, git_repo): self.git_repo = git_repo self.tempdir = None @contextlib.contextmanager def repo_checked_out(self): assert not self.tempdir self.tempdir = tempfile.mkdtemp(suffix='temp-repo') try: subprocess.check_call( ['git', 'clone', self.git_repo, self.tempdir], stdout=None, ) yield finally: shutil.rmtree(self.tempdir) self.tempdir = None def get_commit_shas(self, since=None): """Returns a list of Commit objects. Args: since - (optional) A timestamp to look from. """ assert self.tempdir cmd = ['git', 'log', 'master', '--first-parent', '--format=%H%n%at%n%cN'] if since: cmd += ['--after={0}'.format(since)] output = subprocess.check_output( cmd, cwd=self.tempdir, ) commits = [] for sha, date, name in chunk_iter(output.splitlines(), 3): commits.append(Commit(sha, int(date), name)) return commits
Change sha fetching to use --parent-only and removed ref parameter
Change sha fetching to use --parent-only and removed ref parameter
Python
mit
ucarion/git-code-debt,Yelp/git-code-debt,ucarion/git-code-debt,ucarion/git-code-debt,Yelp/git-code-debt,Yelp/git-code-debt,Yelp/git-code-debt
import collections import contextlib import shutil import subprocess import tempfile from util.iter import chunk_iter Commit = collections.namedtuple('Commit', ['sha', 'date', 'name']) class RepoParser(object): - def __init__(self, git_repo, ref): + def __init__(self, git_repo): self.git_repo = git_repo - self.ref = ref self.tempdir = None @contextlib.contextmanager def repo_checked_out(self): assert not self.tempdir self.tempdir = tempfile.mkdtemp(suffix='temp-repo') try: - subprocess.call( + subprocess.check_call( ['git', 'clone', self.git_repo, self.tempdir], - stdout=None, - ) - subprocess.call( - ['git', 'checkout', self.ref], - cwd=self.tempdir, stdout=None, ) yield finally: shutil.rmtree(self.tempdir) self.tempdir = None def get_commit_shas(self, since=None): """Returns a list of Commit objects. Args: since - (optional) A timestamp to look from. """ assert self.tempdir - cmd = ['git', 'log', self.ref, '--topo-order', '--format=%H%n%at%n%cN'] + cmd = ['git', 'log', 'master', '--first-parent', '--format=%H%n%at%n%cN'] if since: cmd += ['--after={0}'.format(since)] output = subprocess.check_output( cmd, cwd=self.tempdir, ) commits = [] for sha, date, name in chunk_iter(output.splitlines(), 3): commits.append(Commit(sha, int(date), name)) return commits
Change sha fetching to use --parent-only and removed ref parameter
## Code Before: import collections import contextlib import shutil import subprocess import tempfile from util.iter import chunk_iter Commit = collections.namedtuple('Commit', ['sha', 'date', 'name']) class RepoParser(object): def __init__(self, git_repo, ref): self.git_repo = git_repo self.ref = ref self.tempdir = None @contextlib.contextmanager def repo_checked_out(self): assert not self.tempdir self.tempdir = tempfile.mkdtemp(suffix='temp-repo') try: subprocess.call( ['git', 'clone', self.git_repo, self.tempdir], stdout=None, ) subprocess.call( ['git', 'checkout', self.ref], cwd=self.tempdir, stdout=None, ) yield finally: shutil.rmtree(self.tempdir) self.tempdir = None def get_commit_shas(self, since=None): """Returns a list of Commit objects. Args: since - (optional) A timestamp to look from. """ assert self.tempdir cmd = ['git', 'log', self.ref, '--topo-order', '--format=%H%n%at%n%cN'] if since: cmd += ['--after={0}'.format(since)] output = subprocess.check_output( cmd, cwd=self.tempdir, ) commits = [] for sha, date, name in chunk_iter(output.splitlines(), 3): commits.append(Commit(sha, int(date), name)) return commits ## Instruction: Change sha fetching to use --parent-only and removed ref parameter ## Code After: import collections import contextlib import shutil import subprocess import tempfile from util.iter import chunk_iter Commit = collections.namedtuple('Commit', ['sha', 'date', 'name']) class RepoParser(object): def __init__(self, git_repo): self.git_repo = git_repo self.tempdir = None @contextlib.contextmanager def repo_checked_out(self): assert not self.tempdir self.tempdir = tempfile.mkdtemp(suffix='temp-repo') try: subprocess.check_call( ['git', 'clone', self.git_repo, self.tempdir], stdout=None, ) yield finally: shutil.rmtree(self.tempdir) self.tempdir = None def get_commit_shas(self, since=None): """Returns a list of Commit objects. Args: since - (optional) A timestamp to look from. 
""" assert self.tempdir cmd = ['git', 'log', 'master', '--first-parent', '--format=%H%n%at%n%cN'] if since: cmd += ['--after={0}'.format(since)] output = subprocess.check_output( cmd, cwd=self.tempdir, ) commits = [] for sha, date, name in chunk_iter(output.splitlines(), 3): commits.append(Commit(sha, int(date), name)) return commits
2c0ff93e3ef5e6914a85e4fc3443f0432337854e
text_processor.py
text_processor.py
from urllib.request import urlopen def fetch_words(): with urlopen('http://sixty-north.com/c/t.txt') as story: story_words = [] word_list = '' for line in story: line_words = line.decode('utf-8').split() for word in line_words: story_words.append(word) wordCursor = 0 print("Word Count", len(story_words)) while wordCursor < len(story_words): paragraphCursor = 0 while paragraphCursor < 6: if (wordCursor + paragraphCursor) == len(story_words): break word_list += story_words[wordCursor + paragraphCursor] + ' ' paragraphCursor += 1 wordCursor += paragraphCursor word_list += '\n' return word_list def print_words(word_list): print(word_list) if __name__ == '__main__': print(fetch_words())
from urllib.request import urlopen def fetch_words(): with urlopen('http://sixty-north.com/c/t.txt') as story: story_words = [] word_list = '' for line in story: line_words = line.decode('utf-8').split() for word in line_words: story_words.append(word) wordCursor = 0 print("Word Count", len(story_words)) while wordCursor < len(story_words): paragraphCursor = 0 while paragraphCursor < 6: if (wordCursor + paragraphCursor) == len(story_words): break word_list += story_words[wordCursor + paragraphCursor] + ' ' paragraphCursor += 1 wordCursor += paragraphCursor word_list += '\n' return word_list def print_words(word_list): print(word_list) def main(): print(fetch_words()) if __name__ == '__main__': main()
Move main execution to function
Move main execution to function
Python
mit
kentoj/python-fundamentals
from urllib.request import urlopen def fetch_words(): with urlopen('http://sixty-north.com/c/t.txt') as story: story_words = [] word_list = '' for line in story: line_words = line.decode('utf-8').split() for word in line_words: story_words.append(word) wordCursor = 0 print("Word Count", len(story_words)) while wordCursor < len(story_words): paragraphCursor = 0 while paragraphCursor < 6: if (wordCursor + paragraphCursor) == len(story_words): break word_list += story_words[wordCursor + paragraphCursor] + ' ' paragraphCursor += 1 wordCursor += paragraphCursor word_list += '\n' return word_list def print_words(word_list): print(word_list) - if __name__ == '__main__': + + def main(): print(fetch_words()) + if __name__ == '__main__': + main() +
Move main execution to function
## Code Before: from urllib.request import urlopen def fetch_words(): with urlopen('http://sixty-north.com/c/t.txt') as story: story_words = [] word_list = '' for line in story: line_words = line.decode('utf-8').split() for word in line_words: story_words.append(word) wordCursor = 0 print("Word Count", len(story_words)) while wordCursor < len(story_words): paragraphCursor = 0 while paragraphCursor < 6: if (wordCursor + paragraphCursor) == len(story_words): break word_list += story_words[wordCursor + paragraphCursor] + ' ' paragraphCursor += 1 wordCursor += paragraphCursor word_list += '\n' return word_list def print_words(word_list): print(word_list) if __name__ == '__main__': print(fetch_words()) ## Instruction: Move main execution to function ## Code After: from urllib.request import urlopen def fetch_words(): with urlopen('http://sixty-north.com/c/t.txt') as story: story_words = [] word_list = '' for line in story: line_words = line.decode('utf-8').split() for word in line_words: story_words.append(word) wordCursor = 0 print("Word Count", len(story_words)) while wordCursor < len(story_words): paragraphCursor = 0 while paragraphCursor < 6: if (wordCursor + paragraphCursor) == len(story_words): break word_list += story_words[wordCursor + paragraphCursor] + ' ' paragraphCursor += 1 wordCursor += paragraphCursor word_list += '\n' return word_list def print_words(word_list): print(word_list) def main(): print(fetch_words()) if __name__ == '__main__': main()
83ceca04758c6546c41d5bc7f96583d838f25e11
src/mmw/apps/user/backends.py
src/mmw/apps/user/backends.py
from django.core.exceptions import ObjectDoesNotExist from django.contrib.auth.backends import BaseBackend from django.contrib.auth.models import User from apps.user.models import ItsiUser, ConcordUser class SSOAuthenticationBackend(BaseBackend): """ A custom authentication back-end for Single Sign On providers. Before we can call django.contrib.auth.login on an SSO user, we must first authenticate them. This must be done using a custom authentication back- end, which sets the backend attribute on the user model. This class should be instantiated with an SSO provider user model, such as ItsiUser or ConcordUser, before it can be used. """ def __init__(self, model, field): self.SSOUserModel = model self.SSOField = field def authenticate(self, sso_id=None): if sso_id is not None: try: query = {self.SSOField: sso_id} user = self.SSOUserModel.objects.get(**query).user return user except ObjectDoesNotExist: return None return None def get_user(self, user_id): try: return User.objects.get(pk=user_id) except User.DoesNotExist: return None class ItsiAuthenticationBackend(SSOAuthenticationBackend): def __init__(self): super(ItsiAuthenticationBackend, self).__init__( ItsiUser, 'itsi_id') class ConcordAuthenticationBackend(SSOAuthenticationBackend): def __init__(self): super(ConcordAuthenticationBackend, self).__init__( ConcordUser, 'concord_id')
from django.core.exceptions import ObjectDoesNotExist from django.contrib.auth.backends import BaseBackend from django.contrib.auth.models import User from apps.user.models import ItsiUser, ConcordUser class SSOAuthenticationBackend(BaseBackend): """ A custom authentication back-end for Single Sign On providers. Before we can call django.contrib.auth.login on an SSO user, we must first authenticate them. This must be done using a custom authentication back- end, which sets the backend attribute on the user model. This class should be instantiated with an SSO provider user model, such as ItsiUser or ConcordUser, before it can be used. """ def __init__(self, model, field): self.SSOUserModel = model self.SSOField = field def authenticate(self, request=None, sso_id=None): if sso_id is not None: try: query = {self.SSOField: sso_id} user = self.SSOUserModel.objects.get(**query).user return user except ObjectDoesNotExist: return None return None def get_user(self, user_id): try: return User.objects.get(pk=user_id) except User.DoesNotExist: return None class ItsiAuthenticationBackend(SSOAuthenticationBackend): def __init__(self): super(ItsiAuthenticationBackend, self).__init__( ItsiUser, 'itsi_id') class ConcordAuthenticationBackend(SSOAuthenticationBackend): def __init__(self): super(ConcordAuthenticationBackend, self).__init__( ConcordUser, 'concord_id')
Add request parameter to backend.authenticate
Add request parameter to backend.authenticate Without this, the signature of our custom backend does not match that of the function call. This signature is tested in django.contrib.auth.authenticate here: https://github.com/django/django/blob/fdf209eab8949ddc345aa0212b349c79fc6fdebb/django/contrib/auth/__init__.py#L69 and `request` was added to that signature in Django 1.11 in https://github.com/django/django/commit/4b9330ccc04575f9e5126529ec355a450d12e77c. With this, the Concord users are authenticated correctly.
Python
apache-2.0
WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed
from django.core.exceptions import ObjectDoesNotExist from django.contrib.auth.backends import BaseBackend from django.contrib.auth.models import User from apps.user.models import ItsiUser, ConcordUser class SSOAuthenticationBackend(BaseBackend): """ A custom authentication back-end for Single Sign On providers. Before we can call django.contrib.auth.login on an SSO user, we must first authenticate them. This must be done using a custom authentication back- end, which sets the backend attribute on the user model. This class should be instantiated with an SSO provider user model, such as ItsiUser or ConcordUser, before it can be used. """ def __init__(self, model, field): self.SSOUserModel = model self.SSOField = field - def authenticate(self, sso_id=None): + def authenticate(self, request=None, sso_id=None): if sso_id is not None: try: query = {self.SSOField: sso_id} user = self.SSOUserModel.objects.get(**query).user return user except ObjectDoesNotExist: return None return None def get_user(self, user_id): try: return User.objects.get(pk=user_id) except User.DoesNotExist: return None class ItsiAuthenticationBackend(SSOAuthenticationBackend): def __init__(self): super(ItsiAuthenticationBackend, self).__init__( ItsiUser, 'itsi_id') class ConcordAuthenticationBackend(SSOAuthenticationBackend): def __init__(self): super(ConcordAuthenticationBackend, self).__init__( ConcordUser, 'concord_id')
Add request parameter to backend.authenticate
## Code Before: from django.core.exceptions import ObjectDoesNotExist from django.contrib.auth.backends import BaseBackend from django.contrib.auth.models import User from apps.user.models import ItsiUser, ConcordUser class SSOAuthenticationBackend(BaseBackend): """ A custom authentication back-end for Single Sign On providers. Before we can call django.contrib.auth.login on an SSO user, we must first authenticate them. This must be done using a custom authentication back- end, which sets the backend attribute on the user model. This class should be instantiated with an SSO provider user model, such as ItsiUser or ConcordUser, before it can be used. """ def __init__(self, model, field): self.SSOUserModel = model self.SSOField = field def authenticate(self, sso_id=None): if sso_id is not None: try: query = {self.SSOField: sso_id} user = self.SSOUserModel.objects.get(**query).user return user except ObjectDoesNotExist: return None return None def get_user(self, user_id): try: return User.objects.get(pk=user_id) except User.DoesNotExist: return None class ItsiAuthenticationBackend(SSOAuthenticationBackend): def __init__(self): super(ItsiAuthenticationBackend, self).__init__( ItsiUser, 'itsi_id') class ConcordAuthenticationBackend(SSOAuthenticationBackend): def __init__(self): super(ConcordAuthenticationBackend, self).__init__( ConcordUser, 'concord_id') ## Instruction: Add request parameter to backend.authenticate ## Code After: from django.core.exceptions import ObjectDoesNotExist from django.contrib.auth.backends import BaseBackend from django.contrib.auth.models import User from apps.user.models import ItsiUser, ConcordUser class SSOAuthenticationBackend(BaseBackend): """ A custom authentication back-end for Single Sign On providers. Before we can call django.contrib.auth.login on an SSO user, we must first authenticate them. This must be done using a custom authentication back- end, which sets the backend attribute on the user model. 
This class should be instantiated with an SSO provider user model, such as ItsiUser or ConcordUser, before it can be used. """ def __init__(self, model, field): self.SSOUserModel = model self.SSOField = field def authenticate(self, request=None, sso_id=None): if sso_id is not None: try: query = {self.SSOField: sso_id} user = self.SSOUserModel.objects.get(**query).user return user except ObjectDoesNotExist: return None return None def get_user(self, user_id): try: return User.objects.get(pk=user_id) except User.DoesNotExist: return None class ItsiAuthenticationBackend(SSOAuthenticationBackend): def __init__(self): super(ItsiAuthenticationBackend, self).__init__( ItsiUser, 'itsi_id') class ConcordAuthenticationBackend(SSOAuthenticationBackend): def __init__(self): super(ConcordAuthenticationBackend, self).__init__( ConcordUser, 'concord_id')
badba5070ac40a70de2be47b6d58afd0364ed7fe
staticassets/views.py
staticassets/views.py
import mimetypes from django.http import HttpResponse, HttpResponseNotModified, Http404 from django.contrib.staticfiles.views import serve as staticfiles_serve from django.views.static import was_modified_since, http_date from staticassets import finder, settings def serve(request, path, **kwargs): mimetype, encoding = mimetypes.guess_type(path) if not mimetype in settings.MIMETYPES.values(): return staticfiles_serve(request, path, **kwargs) bundle = request.GET.get('bundle') in ('1', 't', 'true') asset = finder.find(path, bundle=bundle) if not asset: raise Http404("Static asset not found") # Respect the If-Modified-Since header. modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE') if not was_modified_since(modified_since, asset.mtime, asset.size): return HttpResponseNotModified(mimetype=asset.attributes.content_type) response = HttpResponse(asset.content, mimetype=asset.attributes.content_type) response['Last-Modified'] = http_date(asset.mtime) response['Content-Length'] = asset.size return response
import mimetypes from django.http import HttpResponse, HttpResponseNotModified, Http404 from django.contrib.staticfiles.views import serve as staticfiles_serve from django.views.static import was_modified_since, http_date from staticassets import finder, settings def serve(request, path, **kwargs): mimetype, encoding = mimetypes.guess_type(path) if not mimetype in settings.MIMETYPES.values(): return staticfiles_serve(request, path, **kwargs) bundle = request.GET.get('bundle') in ('1', 't', 'true') asset = finder.find(path, bundle=bundle) if not asset: raise Http404("Static asset not found") # Respect the If-Modified-Since header. modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE') if not was_modified_since(modified_since, asset.mtime, asset.size): return HttpResponseNotModified(content_type=asset.attributes.content_type) response = HttpResponse(asset.content, content_type=asset.attributes.content_type) response['Last-Modified'] = http_date(asset.mtime) response['Content-Length'] = asset.size return response
Use correct argument for content type in serve view
Use correct argument for content type in serve view
Python
mit
davidelias/django-staticassets,davidelias/django-staticassets,davidelias/django-staticassets
import mimetypes from django.http import HttpResponse, HttpResponseNotModified, Http404 from django.contrib.staticfiles.views import serve as staticfiles_serve from django.views.static import was_modified_since, http_date from staticassets import finder, settings def serve(request, path, **kwargs): mimetype, encoding = mimetypes.guess_type(path) if not mimetype in settings.MIMETYPES.values(): return staticfiles_serve(request, path, **kwargs) bundle = request.GET.get('bundle') in ('1', 't', 'true') asset = finder.find(path, bundle=bundle) if not asset: raise Http404("Static asset not found") # Respect the If-Modified-Since header. modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE') if not was_modified_since(modified_since, asset.mtime, asset.size): - return HttpResponseNotModified(mimetype=asset.attributes.content_type) + return HttpResponseNotModified(content_type=asset.attributes.content_type) - response = HttpResponse(asset.content, mimetype=asset.attributes.content_type) + response = HttpResponse(asset.content, content_type=asset.attributes.content_type) response['Last-Modified'] = http_date(asset.mtime) response['Content-Length'] = asset.size return response
Use correct argument for content type in serve view
## Code Before: import mimetypes from django.http import HttpResponse, HttpResponseNotModified, Http404 from django.contrib.staticfiles.views import serve as staticfiles_serve from django.views.static import was_modified_since, http_date from staticassets import finder, settings def serve(request, path, **kwargs): mimetype, encoding = mimetypes.guess_type(path) if not mimetype in settings.MIMETYPES.values(): return staticfiles_serve(request, path, **kwargs) bundle = request.GET.get('bundle') in ('1', 't', 'true') asset = finder.find(path, bundle=bundle) if not asset: raise Http404("Static asset not found") # Respect the If-Modified-Since header. modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE') if not was_modified_since(modified_since, asset.mtime, asset.size): return HttpResponseNotModified(mimetype=asset.attributes.content_type) response = HttpResponse(asset.content, mimetype=asset.attributes.content_type) response['Last-Modified'] = http_date(asset.mtime) response['Content-Length'] = asset.size return response ## Instruction: Use correct argument for content type in serve view ## Code After: import mimetypes from django.http import HttpResponse, HttpResponseNotModified, Http404 from django.contrib.staticfiles.views import serve as staticfiles_serve from django.views.static import was_modified_since, http_date from staticassets import finder, settings def serve(request, path, **kwargs): mimetype, encoding = mimetypes.guess_type(path) if not mimetype in settings.MIMETYPES.values(): return staticfiles_serve(request, path, **kwargs) bundle = request.GET.get('bundle') in ('1', 't', 'true') asset = finder.find(path, bundle=bundle) if not asset: raise Http404("Static asset not found") # Respect the If-Modified-Since header. 
modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE') if not was_modified_since(modified_since, asset.mtime, asset.size): return HttpResponseNotModified(content_type=asset.attributes.content_type) response = HttpResponse(asset.content, content_type=asset.attributes.content_type) response['Last-Modified'] = http_date(asset.mtime) response['Content-Length'] = asset.size return response
297f42a2013428c2f6caefdf83735cc4a528e225
caching.py
caching.py
import os import cPickle as pickle try: DATA_DIR = os.path.dirname(os.path.realpath(__file__)) except: DATA_DIR = os.getcwd() cache_path = lambda name: os.path.join(DATA_DIR, '%s.cache' % name) def get_cache(name): return pickle.load(open(cache_path(name), 'r')) def save_cache(obj, name): pickle.dump(obj, open(cache_path(name), 'w'), protocol=-1)
import os import cPickle as pickle home_dir = os.path.expanduser('~') DATA_DIR = os.path.join(home_dir, '.tax_resolve') if not os.path.exists(DATA_DIR): try: os.mkdir(DATA_DIR) except: DATA_DIR = os.getcwd() cache_path = lambda name: os.path.join(DATA_DIR, '%s.cache' % name) def get_cache(name): return pickle.load(open(cache_path(name), 'r')) def save_cache(obj, name): pickle.dump(obj, open(cache_path(name), 'w'), protocol=-1)
Use user's local application data directory instead of the module path.
Use user's local application data directory instead of the module path.
Python
mit
bendmorris/tax_resolve
import os import cPickle as pickle - try: DATA_DIR = os.path.dirname(os.path.realpath(__file__)) - except: DATA_DIR = os.getcwd() + home_dir = os.path.expanduser('~') + DATA_DIR = os.path.join(home_dir, '.tax_resolve') + if not os.path.exists(DATA_DIR): + try: + os.mkdir(DATA_DIR) + except: DATA_DIR = os.getcwd() cache_path = lambda name: os.path.join(DATA_DIR, '%s.cache' % name) def get_cache(name): return pickle.load(open(cache_path(name), 'r')) def save_cache(obj, name): pickle.dump(obj, open(cache_path(name), 'w'), protocol=-1)
Use user's local application data directory instead of the module path.
## Code Before: import os import cPickle as pickle try: DATA_DIR = os.path.dirname(os.path.realpath(__file__)) except: DATA_DIR = os.getcwd() cache_path = lambda name: os.path.join(DATA_DIR, '%s.cache' % name) def get_cache(name): return pickle.load(open(cache_path(name), 'r')) def save_cache(obj, name): pickle.dump(obj, open(cache_path(name), 'w'), protocol=-1) ## Instruction: Use user's local application data directory instead of the module path. ## Code After: import os import cPickle as pickle home_dir = os.path.expanduser('~') DATA_DIR = os.path.join(home_dir, '.tax_resolve') if not os.path.exists(DATA_DIR): try: os.mkdir(DATA_DIR) except: DATA_DIR = os.getcwd() cache_path = lambda name: os.path.join(DATA_DIR, '%s.cache' % name) def get_cache(name): return pickle.load(open(cache_path(name), 'r')) def save_cache(obj, name): pickle.dump(obj, open(cache_path(name), 'w'), protocol=-1)
310cebbe1f4a4d92c8f181d7e4de9cc4f75a14dc
indra/assemblers/__init__.py
indra/assemblers/__init__.py
try: from pysb_assembler import PysbAssembler except ImportError: pass try: from graph_assembler import GraphAssembler except ImportError: pass try: from sif_assembler import SifAssembler except ImportError: pass try: from cx_assembler import CxAssembler except ImportError: pass try: from english_assembler import EnglishAssembler except ImportError: pass try: from sbgn_assembler import SBGNAssembler except ImportError: pass try: from index_card_assembler import IndexCardAssembler except ImportError: pass
try: from indra.assemblers.pysb_assembler import PysbAssembler except ImportError: pass try: from indra.assemblers.graph_assembler import GraphAssembler except ImportError: pass try: from indra.assemblers.sif_assembler import SifAssembler except ImportError: pass try: from indra.assemblers.cx_assembler import CxAssembler except ImportError: pass try: from indra.assemblers.english_assembler import EnglishAssembler except ImportError: pass try: from indra.assemblers.sbgn_assembler import SBGNAssembler except ImportError: pass try: from indra.assemblers.index_card_assembler import IndexCardAssembler except ImportError: pass
Update to absolute imports in assemblers
Update to absolute imports in assemblers
Python
bsd-2-clause
johnbachman/indra,bgyori/indra,sorgerlab/belpy,sorgerlab/indra,pvtodorov/indra,pvtodorov/indra,pvtodorov/indra,johnbachman/indra,bgyori/indra,jmuhlich/indra,pvtodorov/indra,sorgerlab/indra,jmuhlich/indra,johnbachman/belpy,johnbachman/belpy,sorgerlab/belpy,johnbachman/indra,sorgerlab/belpy,johnbachman/belpy,jmuhlich/indra,bgyori/indra,sorgerlab/indra
try: - from pysb_assembler import PysbAssembler + from indra.assemblers.pysb_assembler import PysbAssembler except ImportError: pass try: - from graph_assembler import GraphAssembler + from indra.assemblers.graph_assembler import GraphAssembler except ImportError: pass try: - from sif_assembler import SifAssembler + from indra.assemblers.sif_assembler import SifAssembler except ImportError: pass try: - from cx_assembler import CxAssembler + from indra.assemblers.cx_assembler import CxAssembler except ImportError: pass try: - from english_assembler import EnglishAssembler + from indra.assemblers.english_assembler import EnglishAssembler except ImportError: pass try: - from sbgn_assembler import SBGNAssembler + from indra.assemblers.sbgn_assembler import SBGNAssembler except ImportError: pass try: - from index_card_assembler import IndexCardAssembler + from indra.assemblers.index_card_assembler import IndexCardAssembler except ImportError: pass
Update to absolute imports in assemblers
## Code Before: try: from pysb_assembler import PysbAssembler except ImportError: pass try: from graph_assembler import GraphAssembler except ImportError: pass try: from sif_assembler import SifAssembler except ImportError: pass try: from cx_assembler import CxAssembler except ImportError: pass try: from english_assembler import EnglishAssembler except ImportError: pass try: from sbgn_assembler import SBGNAssembler except ImportError: pass try: from index_card_assembler import IndexCardAssembler except ImportError: pass ## Instruction: Update to absolute imports in assemblers ## Code After: try: from indra.assemblers.pysb_assembler import PysbAssembler except ImportError: pass try: from indra.assemblers.graph_assembler import GraphAssembler except ImportError: pass try: from indra.assemblers.sif_assembler import SifAssembler except ImportError: pass try: from indra.assemblers.cx_assembler import CxAssembler except ImportError: pass try: from indra.assemblers.english_assembler import EnglishAssembler except ImportError: pass try: from indra.assemblers.sbgn_assembler import SBGNAssembler except ImportError: pass try: from indra.assemblers.index_card_assembler import IndexCardAssembler except ImportError: pass
d150db290a72590e0f7cf9dae485bf98901bb2c2
web_ui/helpers.py
web_ui/helpers.py
from web_ui import app from flask import session from datetime import datetime # For calculating scores epoch = datetime.utcfromtimestamp(0) epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000 def score(star_object): import random return random.random() * 100 - random.random() * 10 def get_active_persona(): from nucleus.models import Persona """ Return the currently active persona or 0 if there is no controlled persona. """ if 'active_persona' not in session or session['active_persona'] is None: """Activate first Persona with a private key""" controlled_persona = Persona.query.filter('sign_private != ""').first() if controlled_persona is None: return "" else: session['active_persona'] = controlled_persona.id return session['active_persona'] def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS']
from web_ui import app from flask import session from datetime import datetime # For calculating scores epoch = datetime.utcfromtimestamp(0) epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000 def score(star_object): import random return random.random() * 100 - random.random() * 10 def get_active_persona(): from nucleus.models import Persona """ Return the currently active persona or 0 if there is no controlled persona. """ if 'active_persona' not in session or session['active_persona'] is None: """Activate first Persona with a private key""" controlled_persona = Persona.query.filter('sign_private != ""').first() if controlled_persona is None: return "" else: session['active_persona'] = controlled_persona.id return session['active_persona'] def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS'] def reset_userdata(): """Reset all userdata files""" import os for fileid in ["DATABASE", "SECRET_KEY_FILE", "PASSWORD_HASH_FILE"]: try: os.remove(app.config[fileid]) except OSError: app.logger.warning("RESET: {} not found".format(fileid)) else: app.logger.warning("RESET: {} deleted")
Add helper method for resetting user data
Add helper method for resetting user data
Python
apache-2.0
ciex/souma,ciex/souma,ciex/souma
from web_ui import app from flask import session from datetime import datetime # For calculating scores epoch = datetime.utcfromtimestamp(0) epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000 def score(star_object): import random return random.random() * 100 - random.random() * 10 def get_active_persona(): from nucleus.models import Persona """ Return the currently active persona or 0 if there is no controlled persona. """ if 'active_persona' not in session or session['active_persona'] is None: """Activate first Persona with a private key""" controlled_persona = Persona.query.filter('sign_private != ""').first() if controlled_persona is None: return "" else: session['active_persona'] = controlled_persona.id return session['active_persona'] def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS'] + + def reset_userdata(): + """Reset all userdata files""" + import os + + for fileid in ["DATABASE", "SECRET_KEY_FILE", "PASSWORD_HASH_FILE"]: + try: + os.remove(app.config[fileid]) + except OSError: + app.logger.warning("RESET: {} not found".format(fileid)) + else: + app.logger.warning("RESET: {} deleted") +
Add helper method for resetting user data
## Code Before: from web_ui import app from flask import session from datetime import datetime # For calculating scores epoch = datetime.utcfromtimestamp(0) epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000 def score(star_object): import random return random.random() * 100 - random.random() * 10 def get_active_persona(): from nucleus.models import Persona """ Return the currently active persona or 0 if there is no controlled persona. """ if 'active_persona' not in session or session['active_persona'] is None: """Activate first Persona with a private key""" controlled_persona = Persona.query.filter('sign_private != ""').first() if controlled_persona is None: return "" else: session['active_persona'] = controlled_persona.id return session['active_persona'] def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS'] ## Instruction: Add helper method for resetting user data ## Code After: from web_ui import app from flask import session from datetime import datetime # For calculating scores epoch = datetime.utcfromtimestamp(0) epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000 def score(star_object): import random return random.random() * 100 - random.random() * 10 def get_active_persona(): from nucleus.models import Persona """ Return the currently active persona or 0 if there is no controlled persona. """ if 'active_persona' not in session or session['active_persona'] is None: """Activate first Persona with a private key""" controlled_persona = Persona.query.filter('sign_private != ""').first() if controlled_persona is None: return "" else: session['active_persona'] = controlled_persona.id return session['active_persona'] def allowed_file(filename): return '.' 
in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS'] def reset_userdata(): """Reset all userdata files""" import os for fileid in ["DATABASE", "SECRET_KEY_FILE", "PASSWORD_HASH_FILE"]: try: os.remove(app.config[fileid]) except OSError: app.logger.warning("RESET: {} not found".format(fileid)) else: app.logger.warning("RESET: {} deleted")
186a72b91798b11d13ea7f2538141f620b0787a8
tests/test_metrics.py
tests/test_metrics.py
import json from . import TestCase class MetricsTests(TestCase): def test_find(self): url = '/metrics/find' response = self.app.get(url) self.assertEqual(response.status_code, 400) response = self.app.get(url, query_string={'query': 'test'}) self.assertJSON(response, []) def test_expand(self): url = '/metrics/expand' response = self.app.get(url) self.assertJSON(response, {'errors': {'query': 'this parameter is required.'}}, status_code=400) response = self.app.get(url, query_string={'query': 'test'}) self.assertEqual(response.status_code, 200) self.assertEqual(json.loads(response.data.decode('utf-8')), {'results': []})
from . import TestCase class MetricsTests(TestCase): def test_find(self): url = '/metrics/find' response = self.app.get(url) self.assertEqual(response.status_code, 400) response = self.app.get(url, query_string={'query': 'test'}) self.assertJSON(response, []) def test_expand(self): url = '/metrics/expand' response = self.app.get(url) self.assertJSON(response, {'errors': {'query': 'this parameter is required.'}}, status_code=400) response = self.app.get(url, query_string={'query': 'test'}) self.assertJSON(response, {'results': []}) def test_noop(self): url = '/dashboard/find' response = self.app.get(url) self.assertJSON(response, {'dashboards': []}) url = '/dashboard/load/foo' response = self.app.get(url) self.assertJSON(response, {'error': "Dashboard 'foo' does not exist."}, status_code=404) url = '/events/get_data' response = self.app.get(url) self.assertJSON(response, [])
Add test for noop routes
Add test for noop routes
Python
apache-2.0
vladimir-smirnov-sociomantic/graphite-api,michaelrice/graphite-api,GeorgeJahad/graphite-api,vladimir-smirnov-sociomantic/graphite-api,michaelrice/graphite-api,alphapigger/graphite-api,raintank/graphite-api,hubrick/graphite-api,rackerlabs/graphite-api,Knewton/graphite-api,raintank/graphite-api,Knewton/graphite-api,bogus-py/graphite-api,cybem/graphite-api-iow,DaveBlooman/graphite-api,rackerlabs/graphite-api,brutasse/graphite-api,DaveBlooman/graphite-api,hubrick/graphite-api,raintank/graphite-api,tpeng/graphite-api,winguru/graphite-api,winguru/graphite-api,bogus-py/graphite-api,tpeng/graphite-api,cybem/graphite-api-iow,absalon-james/graphite-api,alphapigger/graphite-api,absalon-james/graphite-api,brutasse/graphite-api,GeorgeJahad/graphite-api
- import json - from . import TestCase class MetricsTests(TestCase): def test_find(self): url = '/metrics/find' response = self.app.get(url) self.assertEqual(response.status_code, 400) response = self.app.get(url, query_string={'query': 'test'}) self.assertJSON(response, []) def test_expand(self): url = '/metrics/expand' response = self.app.get(url) self.assertJSON(response, {'errors': {'query': 'this parameter is required.'}}, status_code=400) response = self.app.get(url, query_string={'query': 'test'}) + self.assertJSON(response, {'results': []}) - self.assertEqual(response.status_code, 200) - self.assertEqual(json.loads(response.data.decode('utf-8')), - {'results': []}) + def test_noop(self): + url = '/dashboard/find' + response = self.app.get(url) + self.assertJSON(response, {'dashboards': []}) + + url = '/dashboard/load/foo' + response = self.app.get(url) + self.assertJSON(response, {'error': "Dashboard 'foo' does not exist."}, + status_code=404) + + url = '/events/get_data' + response = self.app.get(url) + self.assertJSON(response, []) +
Add test for noop routes
## Code Before: import json from . import TestCase class MetricsTests(TestCase): def test_find(self): url = '/metrics/find' response = self.app.get(url) self.assertEqual(response.status_code, 400) response = self.app.get(url, query_string={'query': 'test'}) self.assertJSON(response, []) def test_expand(self): url = '/metrics/expand' response = self.app.get(url) self.assertJSON(response, {'errors': {'query': 'this parameter is required.'}}, status_code=400) response = self.app.get(url, query_string={'query': 'test'}) self.assertEqual(response.status_code, 200) self.assertEqual(json.loads(response.data.decode('utf-8')), {'results': []}) ## Instruction: Add test for noop routes ## Code After: from . import TestCase class MetricsTests(TestCase): def test_find(self): url = '/metrics/find' response = self.app.get(url) self.assertEqual(response.status_code, 400) response = self.app.get(url, query_string={'query': 'test'}) self.assertJSON(response, []) def test_expand(self): url = '/metrics/expand' response = self.app.get(url) self.assertJSON(response, {'errors': {'query': 'this parameter is required.'}}, status_code=400) response = self.app.get(url, query_string={'query': 'test'}) self.assertJSON(response, {'results': []}) def test_noop(self): url = '/dashboard/find' response = self.app.get(url) self.assertJSON(response, {'dashboards': []}) url = '/dashboard/load/foo' response = self.app.get(url) self.assertJSON(response, {'error': "Dashboard 'foo' does not exist."}, status_code=404) url = '/events/get_data' response = self.app.get(url) self.assertJSON(response, [])
2410255e846c5fbd756ed97868299e1674c89467
flash_example.py
flash_example.py
from BlinkyTape import BlinkyTape bb = BlinkyTape('/dev/tty.usbmodemfa131') while True: for x in range(60): bb.sendPixel(10, 10, 10) bb.show() for x in range(60): bb.sendPixel(0, 0, 0) bb.show()
from BlinkyTape import BlinkyTape import time #bb = BlinkyTape('/dev/tty.usbmodemfa131') bb = BlinkyTape('COM8') while True: for x in range(60): bb.sendPixel(100, 100, 100) bb.show() time.sleep(.5) for x in range(60): bb.sendPixel(0, 0, 0) bb.show() time.sleep(.5)
Set it to flash black and white every second
Set it to flash black and white every second
Python
mit
Blinkinlabs/BlinkyTape_Python,jpsingleton/BlinkyTape_Python,railsagainstignorance/blinkytape
from BlinkyTape import BlinkyTape + import time - bb = BlinkyTape('/dev/tty.usbmodemfa131') + #bb = BlinkyTape('/dev/tty.usbmodemfa131') + bb = BlinkyTape('COM8') while True: for x in range(60): - bb.sendPixel(10, 10, 10) + bb.sendPixel(100, 100, 100) bb.show() + + time.sleep(.5) for x in range(60): bb.sendPixel(0, 0, 0) bb.show() + time.sleep(.5) +
Set it to flash black and white every second
## Code Before: from BlinkyTape import BlinkyTape bb = BlinkyTape('/dev/tty.usbmodemfa131') while True: for x in range(60): bb.sendPixel(10, 10, 10) bb.show() for x in range(60): bb.sendPixel(0, 0, 0) bb.show() ## Instruction: Set it to flash black and white every second ## Code After: from BlinkyTape import BlinkyTape import time #bb = BlinkyTape('/dev/tty.usbmodemfa131') bb = BlinkyTape('COM8') while True: for x in range(60): bb.sendPixel(100, 100, 100) bb.show() time.sleep(.5) for x in range(60): bb.sendPixel(0, 0, 0) bb.show() time.sleep(.5)
251a0d1b1df0fd857a86878ecb7e4c6bc26a93ef
paci/helpers/display_helper.py
paci/helpers/display_helper.py
"""Helper to output stuff""" from tabulate import tabulate def print_list(header, entries): """Prints out a list""" print(tabulate(entries, header, tablefmt="grid")) def std_input(text, default): """Get input or return default if none is given.""" return input(text.format(default)) or default
"""Helper to output stuff""" from tabulate import tabulate def print_list(header, entries): """Prints out a list""" print(tabulate(entries, header, tablefmt="grid")) def print_table(entries): """Prints out a table""" print(tabulate(entries, tablefmt="plain")) def std_input(text, default): """Get input or return default if none is given.""" return input(text.format(default)) or default
Add function to just print a simple table
Add function to just print a simple table
Python
mit
tradebyte/paci,tradebyte/paci
"""Helper to output stuff""" from tabulate import tabulate def print_list(header, entries): """Prints out a list""" print(tabulate(entries, header, tablefmt="grid")) + def print_table(entries): + """Prints out a table""" + print(tabulate(entries, tablefmt="plain")) + + def std_input(text, default): """Get input or return default if none is given.""" return input(text.format(default)) or default
Add function to just print a simple table
## Code Before: """Helper to output stuff""" from tabulate import tabulate def print_list(header, entries): """Prints out a list""" print(tabulate(entries, header, tablefmt="grid")) def std_input(text, default): """Get input or return default if none is given.""" return input(text.format(default)) or default ## Instruction: Add function to just print a simple table ## Code After: """Helper to output stuff""" from tabulate import tabulate def print_list(header, entries): """Prints out a list""" print(tabulate(entries, header, tablefmt="grid")) def print_table(entries): """Prints out a table""" print(tabulate(entries, tablefmt="plain")) def std_input(text, default): """Get input or return default if none is given.""" return input(text.format(default)) or default
bb768ef543469395ccbd0b2761442d9dcfa8e0c5
testanalyzer/analyze_repos.py
testanalyzer/analyze_repos.py
import pandas as pd import shutil import utils as u import validators from analyzer import Analyzer from git import Repo if __name__ == "__main__": repos = pd.read_pickle("data/test.pkl") for _, repo in repos.iterrows(): if not validators.url(repo["url"]): print("Error: Invalid URL.") exit(1) project_name = u.get_name_from_url(repo["url"]) print("Cloning {}...".format(project_name)) Repo.clone_from(repo["url"], project_name) print("Analyzing...") analyzer = Analyzer(project_name) code_counts, test_counts = analyzer.run() print(code_counts) print(test_counts) shutil.rmtree(project_name)
import pandas as pd import shutil import utils as u import validators from analyzer import Analyzer from git import Repo if __name__ == "__main__": repos = pd.read_pickle("data/repos.pkl") repos["code_lines"] = 0 repos["code_classes"] = 0 repos["code_functions"] = 0 repos["test_lines"] = 0 repos["test_classes"] = 0 repos["test_functions"] = 0 for i, repo in repos.iterrows(): if not validators.url(repo["url"]): print("Error: Invalid URL.") exit(1) project_name = u.get_name_from_url(repo["url"]) print("Cloning {}...".format(project_name)) Repo.clone_from(repo["url"], project_name) print("Analyzing...") analyzer = Analyzer(project_name) code_counts, test_counts = analyzer.run() repos.set_value(i, "code_lines", code_counts["line_count"]) repos.set_value(i, "code_classes", code_counts["class_count"]) repos.set_value(i, "code_functions", code_counts["function_count"]) repos.set_value(i, "test_lines", test_counts["line_count"]) repos.set_value(i, "test_classes", test_counts["class_count"]) repos.set_value(i, "test_functions", test_counts["function_count"]) shutil.rmtree(project_name) repos.to_pickle("data/dataset.pkl")
Update dataframe with counts and serialize
Update dataframe with counts and serialize
Python
mpl-2.0
CheriPai/TestAnalyzer,CheriPai/TestAnalyzer,CheriPai/TestAnalyzer
import pandas as pd import shutil import utils as u import validators from analyzer import Analyzer from git import Repo if __name__ == "__main__": - repos = pd.read_pickle("data/test.pkl") + repos = pd.read_pickle("data/repos.pkl") + repos["code_lines"] = 0 + repos["code_classes"] = 0 + repos["code_functions"] = 0 + repos["test_lines"] = 0 + repos["test_classes"] = 0 + repos["test_functions"] = 0 - for _, repo in repos.iterrows(): + for i, repo in repos.iterrows(): if not validators.url(repo["url"]): print("Error: Invalid URL.") exit(1) project_name = u.get_name_from_url(repo["url"]) print("Cloning {}...".format(project_name)) Repo.clone_from(repo["url"], project_name) print("Analyzing...") analyzer = Analyzer(project_name) code_counts, test_counts = analyzer.run() - print(code_counts) - print(test_counts) + repos.set_value(i, "code_lines", code_counts["line_count"]) + repos.set_value(i, "code_classes", code_counts["class_count"]) + repos.set_value(i, "code_functions", code_counts["function_count"]) + repos.set_value(i, "test_lines", test_counts["line_count"]) + repos.set_value(i, "test_classes", test_counts["class_count"]) + repos.set_value(i, "test_functions", test_counts["function_count"]) shutil.rmtree(project_name) + repos.to_pickle("data/dataset.pkl") +
Update dataframe with counts and serialize
## Code Before: import pandas as pd import shutil import utils as u import validators from analyzer import Analyzer from git import Repo if __name__ == "__main__": repos = pd.read_pickle("data/test.pkl") for _, repo in repos.iterrows(): if not validators.url(repo["url"]): print("Error: Invalid URL.") exit(1) project_name = u.get_name_from_url(repo["url"]) print("Cloning {}...".format(project_name)) Repo.clone_from(repo["url"], project_name) print("Analyzing...") analyzer = Analyzer(project_name) code_counts, test_counts = analyzer.run() print(code_counts) print(test_counts) shutil.rmtree(project_name) ## Instruction: Update dataframe with counts and serialize ## Code After: import pandas as pd import shutil import utils as u import validators from analyzer import Analyzer from git import Repo if __name__ == "__main__": repos = pd.read_pickle("data/repos.pkl") repos["code_lines"] = 0 repos["code_classes"] = 0 repos["code_functions"] = 0 repos["test_lines"] = 0 repos["test_classes"] = 0 repos["test_functions"] = 0 for i, repo in repos.iterrows(): if not validators.url(repo["url"]): print("Error: Invalid URL.") exit(1) project_name = u.get_name_from_url(repo["url"]) print("Cloning {}...".format(project_name)) Repo.clone_from(repo["url"], project_name) print("Analyzing...") analyzer = Analyzer(project_name) code_counts, test_counts = analyzer.run() repos.set_value(i, "code_lines", code_counts["line_count"]) repos.set_value(i, "code_classes", code_counts["class_count"]) repos.set_value(i, "code_functions", code_counts["function_count"]) repos.set_value(i, "test_lines", test_counts["line_count"]) repos.set_value(i, "test_classes", test_counts["class_count"]) repos.set_value(i, "test_functions", test_counts["function_count"]) shutil.rmtree(project_name) repos.to_pickle("data/dataset.pkl")
fc904d8fd02cecfb2c3d69d6101caaab7b224e93
_bin/person_list_generator.py
_bin/person_list_generator.py
import os import csv with open('tmp/person_list_input.csv') as csvfile: csvreader = csv.reader(csvfile) for row in csvreader: print """ - role: {} name: {}""".format(row[0], row[1])
import os import csv with open('tmp/person_list_input.csv') as csvfile: csvreader = csv.reader(csvfile) for row in csvreader: stream = open('tmp/person_list_output.yml', 'a') stream.write( """ - role: {}\n name: {}\n""".format(row[0], row[1]) ) stream.close()
Make person list generator output to file
Make person list generator output to file The console was going beyond the terminal history limit for 14-15
Python
mit
johnathan99j/history-project,johnathan99j/history-project,newtheatre/history-project,newtheatre/history-project,johnathan99j/history-project,newtheatre/history-project,johnathan99j/history-project,newtheatre/history-project,johnathan99j/history-project,newtheatre/history-project
import os import csv with open('tmp/person_list_input.csv') as csvfile: csvreader = csv.reader(csvfile) for row in csvreader: - print """ - role: {} - name: {}""".format(row[0], row[1]) + stream = open('tmp/person_list_output.yml', 'a') + stream.write( """ - role: {}\n name: {}\n""".format(row[0], row[1]) + ) + stream.close()
Make person list generator output to file
## Code Before: import os import csv with open('tmp/person_list_input.csv') as csvfile: csvreader = csv.reader(csvfile) for row in csvreader: print """ - role: {} name: {}""".format(row[0], row[1]) ## Instruction: Make person list generator output to file ## Code After: import os import csv with open('tmp/person_list_input.csv') as csvfile: csvreader = csv.reader(csvfile) for row in csvreader: stream = open('tmp/person_list_output.yml', 'a') stream.write( """ - role: {}\n name: {}\n""".format(row[0], row[1]) ) stream.close()
10b9d412c26b90bb86fe1abd04c3fe0f86826104
pelicanconf_with_pagination.py
pelicanconf_with_pagination.py
from pelicanconf import * # Over-ride so there is paging. DEFAULT_PAGINATION = 5
import sys # Hack for Travis, where local imports don't work. if '' not in sys.path: sys.path.insert(0, '') from pelicanconf import * # Over-ride so there is paging. DEFAULT_PAGINATION = 5
Fix Python import path on Travis.
Fix Python import path on Travis.
Python
apache-2.0
dhermes/bossylobster-blog,dhermes/bossylobster-blog,dhermes/bossylobster-blog,dhermes/bossylobster-blog,dhermes/bossylobster-blog,dhermes/bossylobster-blog,dhermes/bossylobster-blog
+ import sys + # Hack for Travis, where local imports don't work. + if '' not in sys.path: + sys.path.insert(0, '') + from pelicanconf import * # Over-ride so there is paging. DEFAULT_PAGINATION = 5
Fix Python import path on Travis.
## Code Before: from pelicanconf import * # Over-ride so there is paging. DEFAULT_PAGINATION = 5 ## Instruction: Fix Python import path on Travis. ## Code After: import sys # Hack for Travis, where local imports don't work. if '' not in sys.path: sys.path.insert(0, '') from pelicanconf import * # Over-ride so there is paging. DEFAULT_PAGINATION = 5
c6ecf6160664bc61cf6dc213af1f2fe3fd6a3617
editorsnotes/djotero/models.py
editorsnotes/djotero/models.py
from django.db import models from editorsnotes.main.models import Document import utils import json class ZoteroLink(models.Model): doc = models.OneToOneField(Document, related_name='_zotero_link') zotero_url = models.URLField() zotero_data = models.TextField(blank=True) date_information = models.TextField(blank=True) def __str__(self): return 'Zotero data: %s' % self.doc.__str__() def get_zotero_fields(self): z = json.loads(self.zotero_data) z['itemType'] = utils.type_map['readable'][z['itemType']] if self.date_information: date_parts = json.loads(self.date_information) for part in date_parts: z[part] = date_parts[part] if z['creators']: names = utils.resolve_names(z, 'facets') z.pop('creators') output = z.items() for name in names: for creator_type, creator_value in name.items(): output.append((creator_type, creator_value)) else: output = z.items() return output
from django.db import models from editorsnotes.main.models import Document import utils import json class ZoteroLink(models.Model): doc = models.OneToOneField(Document, related_name='_zotero_link') zotero_url = models.URLField(blank=True) zotero_data = models.TextField() date_information = models.TextField(blank=True) def __str__(self): return 'Zotero data: %s' % self.doc.__str__() def get_zotero_fields(self): z = json.loads(self.zotero_data) z['itemType'] = utils.type_map['readable'][z['itemType']] if self.date_information: date_parts = json.loads(self.date_information) for part in date_parts: z[part] = date_parts[part] if z['creators']: names = utils.resolve_names(z, 'facets') z.pop('creators') output = z.items() for name in names: for creator_type, creator_value in name.items(): output.append((creator_type, creator_value)) else: output = z.items() return output
Allow blank zotero url reference, but require zotero json data
Allow blank zotero url reference, but require zotero json data
Python
agpl-3.0
editorsnotes/editorsnotes,editorsnotes/editorsnotes
from django.db import models from editorsnotes.main.models import Document import utils import json class ZoteroLink(models.Model): doc = models.OneToOneField(Document, related_name='_zotero_link') - zotero_url = models.URLField() + zotero_url = models.URLField(blank=True) - zotero_data = models.TextField(blank=True) + zotero_data = models.TextField() date_information = models.TextField(blank=True) def __str__(self): return 'Zotero data: %s' % self.doc.__str__() def get_zotero_fields(self): z = json.loads(self.zotero_data) z['itemType'] = utils.type_map['readable'][z['itemType']] if self.date_information: date_parts = json.loads(self.date_information) for part in date_parts: z[part] = date_parts[part] if z['creators']: names = utils.resolve_names(z, 'facets') z.pop('creators') output = z.items() for name in names: for creator_type, creator_value in name.items(): output.append((creator_type, creator_value)) else: output = z.items() return output
Allow blank zotero url reference, but require zotero json data
## Code Before: from django.db import models from editorsnotes.main.models import Document import utils import json class ZoteroLink(models.Model): doc = models.OneToOneField(Document, related_name='_zotero_link') zotero_url = models.URLField() zotero_data = models.TextField(blank=True) date_information = models.TextField(blank=True) def __str__(self): return 'Zotero data: %s' % self.doc.__str__() def get_zotero_fields(self): z = json.loads(self.zotero_data) z['itemType'] = utils.type_map['readable'][z['itemType']] if self.date_information: date_parts = json.loads(self.date_information) for part in date_parts: z[part] = date_parts[part] if z['creators']: names = utils.resolve_names(z, 'facets') z.pop('creators') output = z.items() for name in names: for creator_type, creator_value in name.items(): output.append((creator_type, creator_value)) else: output = z.items() return output ## Instruction: Allow blank zotero url reference, but require zotero json data ## Code After: from django.db import models from editorsnotes.main.models import Document import utils import json class ZoteroLink(models.Model): doc = models.OneToOneField(Document, related_name='_zotero_link') zotero_url = models.URLField(blank=True) zotero_data = models.TextField() date_information = models.TextField(blank=True) def __str__(self): return 'Zotero data: %s' % self.doc.__str__() def get_zotero_fields(self): z = json.loads(self.zotero_data) z['itemType'] = utils.type_map['readable'][z['itemType']] if self.date_information: date_parts = json.loads(self.date_information) for part in date_parts: z[part] = date_parts[part] if z['creators']: names = utils.resolve_names(z, 'facets') z.pop('creators') output = z.items() for name in names: for creator_type, creator_value in name.items(): output.append((creator_type, creator_value)) else: output = z.items() return output
36b8ec51dc6e1caca90db41d83d4dc21d70005a5
app/task.py
app/task.py
from mongoengine import Document, DateTimeField, EmailField, IntField, \ ReferenceField, StringField import datetime, enum class Priority(enum.IntEnum): LOW = 0, MIDDLE = 1, HIGH = 2 """ This defines the basic model for a Task as we want it to be stored in the MongoDB. """ class Task(Document): title = StringField(max_length=150, required=True) description = StringField(max_length=800, required=True) creator = EmailField(max_length=120, required=True) assigne = EmailField(max_length=120, required=True) created_at = DateTimeField(default=datetime.datetime.now, required=True) status = IntField(default=0, required=True) priority = IntField(default=Priority.LOW, required=True)
from mongoengine import Document, DateTimeField, EmailField, IntField, \ ReferenceField, StringField, ValidationError import datetime, enum, Exception from app import logger class Priority(enum.IntEnum): """ This defines the priority levels a Task can have. """ LOW = 0, MIDDLE = 1, HIGH = 2 class Status(enum.IntEnum): """ This defines statuses a Task can have. """ OPEN = 0 IN_PROGRESS = 1 CLOSED = 2 class Task(Document): """ This defines the basic model for a Task as we want it to be stored in the MongoDB. title (str): The title of the Task. description (str): A description of the Task. creator (str): The task creators email address. assigne (str): The email address of the person the Task is assigned to. created_at (datetime): The point in the time when the Task was created. status (Status): The current status of the Task. priority(Priority): The priority level of the Task. """ title = StringField(max_length=150, required=True) description = StringField(max_length=800, required=True) creator = EmailField(max_length=120, required=True) assigne = EmailField(max_length=120, required=True) created_at = DateTimeField(default=datetime.datetime.now, required=True) status = IntField(default=Status.OPEN, required=True) priority = IntField(default=Priority.LOW, required=True)
Add a Status enum and documentation
Add a Status enum and documentation
Python
mit
Zillolo/lazy-todo
from mongoengine import Document, DateTimeField, EmailField, IntField, \ - ReferenceField, StringField + ReferenceField, StringField, ValidationError - import datetime, enum + import datetime, enum, Exception + + from app import logger class Priority(enum.IntEnum): + """ + This defines the priority levels a Task can have. + """ LOW = 0, MIDDLE = 1, HIGH = 2 - """ - This defines the basic model for a Task as we want it to be stored in the - MongoDB. - """ + class Status(enum.IntEnum): + """ + This defines statuses a Task can have. + """ + OPEN = 0 + IN_PROGRESS = 1 + CLOSED = 2 + class Task(Document): + """ + This defines the basic model for a Task as we want it to be stored in the + MongoDB. + + title (str): The title of the Task. + description (str): A description of the Task. + creator (str): The task creators email address. + assigne (str): The email address of the person the Task is assigned to. + created_at (datetime): The point in the time when the Task was created. + status (Status): The current status of the Task. + priority(Priority): The priority level of the Task. + """ title = StringField(max_length=150, required=True) description = StringField(max_length=800, required=True) creator = EmailField(max_length=120, required=True) assigne = EmailField(max_length=120, required=True) created_at = DateTimeField(default=datetime.datetime.now, required=True) - status = IntField(default=0, required=True) + status = IntField(default=Status.OPEN, required=True) priority = IntField(default=Priority.LOW, required=True)
Add a Status enum and documentation
## Code Before: from mongoengine import Document, DateTimeField, EmailField, IntField, \ ReferenceField, StringField import datetime, enum class Priority(enum.IntEnum): LOW = 0, MIDDLE = 1, HIGH = 2 """ This defines the basic model for a Task as we want it to be stored in the MongoDB. """ class Task(Document): title = StringField(max_length=150, required=True) description = StringField(max_length=800, required=True) creator = EmailField(max_length=120, required=True) assigne = EmailField(max_length=120, required=True) created_at = DateTimeField(default=datetime.datetime.now, required=True) status = IntField(default=0, required=True) priority = IntField(default=Priority.LOW, required=True) ## Instruction: Add a Status enum and documentation ## Code After: from mongoengine import Document, DateTimeField, EmailField, IntField, \ ReferenceField, StringField, ValidationError import datetime, enum, Exception from app import logger class Priority(enum.IntEnum): """ This defines the priority levels a Task can have. """ LOW = 0, MIDDLE = 1, HIGH = 2 class Status(enum.IntEnum): """ This defines statuses a Task can have. """ OPEN = 0 IN_PROGRESS = 1 CLOSED = 2 class Task(Document): """ This defines the basic model for a Task as we want it to be stored in the MongoDB. title (str): The title of the Task. description (str): A description of the Task. creator (str): The task creators email address. assigne (str): The email address of the person the Task is assigned to. created_at (datetime): The point in the time when the Task was created. status (Status): The current status of the Task. priority(Priority): The priority level of the Task. 
""" title = StringField(max_length=150, required=True) description = StringField(max_length=800, required=True) creator = EmailField(max_length=120, required=True) assigne = EmailField(max_length=120, required=True) created_at = DateTimeField(default=datetime.datetime.now, required=True) status = IntField(default=Status.OPEN, required=True) priority = IntField(default=Priority.LOW, required=True)
acf3819d433f3ebc3d3eed17c61f2542f7429f8e
trimesh/resources/__init__.py
trimesh/resources/__init__.py
import os import inspect # find the current absolute path using inspect _pwd = os.path.dirname( os.path.abspath( inspect.getfile( inspect.currentframe()))) def get_resource(name, decode=True): """ Get a resource from the trimesh/resources folder. Parameters ------------- name : str File path relative to `trimesh/resources` decode : bool Whether or not to decode result as UTF-8 Returns ------------- resource : str or bytes File data """ # get the resource using relative names with open(os.path.join(_pwd, name), 'rb') as f: resource = f.read() # make sure we return it as a string if asked if decode and hasattr(resource, 'decode'): return resource.decode('utf-8') return resource
import os # find the current absolute path to this directory _pwd = os.path.dirname(__file__) def get_resource(name, decode=True): """ Get a resource from the trimesh/resources folder. Parameters ------------- name : str File path relative to `trimesh/resources` decode : bool Whether or not to decode result as UTF-8 Returns ------------- resource : str or bytes File data """ # get the resource using relative names with open(os.path.join(_pwd, name), 'rb') as f: resource = f.read() # make sure we return it as a string if asked if decode and hasattr(resource, 'decode'): return resource.decode('utf-8') return resource
Use __file__ instead of inspect, for compatibility with frozen environments
RF: Use __file__ instead of inspect, for compatibility with frozen environments
Python
mit
mikedh/trimesh,mikedh/trimesh,dajusc/trimesh,mikedh/trimesh,mikedh/trimesh,dajusc/trimesh
import os - import inspect + - # find the current absolute path using inspect + # find the current absolute path to this directory - _pwd = os.path.dirname( + _pwd = os.path.dirname(__file__) - os.path.abspath( - inspect.getfile( - inspect.currentframe()))) def get_resource(name, decode=True): """ Get a resource from the trimesh/resources folder. Parameters ------------- name : str File path relative to `trimesh/resources` decode : bool Whether or not to decode result as UTF-8 Returns ------------- resource : str or bytes File data """ # get the resource using relative names with open(os.path.join(_pwd, name), 'rb') as f: resource = f.read() # make sure we return it as a string if asked if decode and hasattr(resource, 'decode'): return resource.decode('utf-8') return resource
Use __file__ instead of inspect, for compatibility with frozen environments
## Code Before: import os import inspect # find the current absolute path using inspect _pwd = os.path.dirname( os.path.abspath( inspect.getfile( inspect.currentframe()))) def get_resource(name, decode=True): """ Get a resource from the trimesh/resources folder. Parameters ------------- name : str File path relative to `trimesh/resources` decode : bool Whether or not to decode result as UTF-8 Returns ------------- resource : str or bytes File data """ # get the resource using relative names with open(os.path.join(_pwd, name), 'rb') as f: resource = f.read() # make sure we return it as a string if asked if decode and hasattr(resource, 'decode'): return resource.decode('utf-8') return resource ## Instruction: Use __file__ instead of inspect, for compatibility with frozen environments ## Code After: import os # find the current absolute path to this directory _pwd = os.path.dirname(__file__) def get_resource(name, decode=True): """ Get a resource from the trimesh/resources folder. Parameters ------------- name : str File path relative to `trimesh/resources` decode : bool Whether or not to decode result as UTF-8 Returns ------------- resource : str or bytes File data """ # get the resource using relative names with open(os.path.join(_pwd, name), 'rb') as f: resource = f.read() # make sure we return it as a string if asked if decode and hasattr(resource, 'decode'): return resource.decode('utf-8') return resource
83dabc9fc1142e1575843d3a68c6241185543936
fabtastic/db/__init__.py
fabtastic/db/__init__.py
from django.conf import settings from fabtastic.db import util db_engine = util.get_db_setting('ENGINE') if 'postgresql_psycopg2' in db_engine: from fabtastic.db.postgres import * else: raise NotImplementedError("Fabtastic: DB engine '%s' is not supported" % db_engine)
from django.conf import settings from fabtastic.db import util db_engine = util.get_db_setting('ENGINE') if 'postgresql_psycopg2' in db_engine: from fabtastic.db.postgres import * else: print("Fabtastic WARNING: DB engine '%s' is not supported" % db_engine)
Make the warning for SQLite not being supported a print instead of an exception.
Make the warning for SQLite not being supported a print instead of an exception.
Python
bsd-3-clause
duointeractive/django-fabtastic
from django.conf import settings from fabtastic.db import util db_engine = util.get_db_setting('ENGINE') if 'postgresql_psycopg2' in db_engine: from fabtastic.db.postgres import * else: - raise NotImplementedError("Fabtastic: DB engine '%s' is not supported" % db_engine) + print("Fabtastic WARNING: DB engine '%s' is not supported" % db_engine) +
Make the warning for SQLite not being supported a print instead of an exception.
## Code Before: from django.conf import settings from fabtastic.db import util db_engine = util.get_db_setting('ENGINE') if 'postgresql_psycopg2' in db_engine: from fabtastic.db.postgres import * else: raise NotImplementedError("Fabtastic: DB engine '%s' is not supported" % db_engine) ## Instruction: Make the warning for SQLite not being supported a print instead of an exception. ## Code After: from django.conf import settings from fabtastic.db import util db_engine = util.get_db_setting('ENGINE') if 'postgresql_psycopg2' in db_engine: from fabtastic.db.postgres import * else: print("Fabtastic WARNING: DB engine '%s' is not supported" % db_engine)
881e693d16d12109c3ececffda61336b020c172a
portable_mds/tests/conftest.py
portable_mds/tests/conftest.py
import os import tempfile import shutil import tzlocal import pytest from ..mongoquery.mds import MDS @pytest.fixture(params=[1], scope='function') def mds_all(request): '''Provide a function level scoped FileStore instance talking to temporary database on localhost:27017 with both v0 and v1. ''' ver = request.param tempdirname = tempfile.mkdtemp() mds = MDS({'directory': tempdirname, 'timezone': tzlocal.get_localzone().zone}, version=ver) filenames = ['run_starts.json', 'run_stops.json', 'event_descriptors.json', 'events.json'] for fn in filenames: with open(os.path.join(tempdirname, fn), 'w') as f: f.write('[]') def delete_dm(): shutil.rmtree(tempdirname) request.addfinalizer(delete_dm) return mds
import os import tempfile import shutil import tzlocal import pytest import portable_mds.mongoquery.mds import portable_mds.sqlite.mds variations = [portable_mds.mongoquery.mds, portable_mds.sqlite.mds] @pytest.fixture(params=variations, scope='function') def mds_all(request): '''Provide a function level scoped FileStore instance talking to temporary database on localhost:27017 with both v0 and v1. ''' tempdirname = tempfile.mkdtemp() mds = request.param.MDS({'directory': tempdirname, 'timezone': tzlocal.get_localzone().zone}, version=1) filenames = ['run_starts.json', 'run_stops.json', 'event_descriptors.json', 'events.json'] for fn in filenames: with open(os.path.join(tempdirname, fn), 'w') as f: f.write('[]') def delete_dm(): shutil.rmtree(tempdirname) request.addfinalizer(delete_dm) return mds
Test sqlite and mongoquery variations.
TST: Test sqlite and mongoquery variations.
Python
bsd-3-clause
ericdill/databroker,ericdill/databroker
import os import tempfile import shutil import tzlocal import pytest - from ..mongoquery.mds import MDS + import portable_mds.mongoquery.mds + import portable_mds.sqlite.mds + variations = [portable_mds.mongoquery.mds, + portable_mds.sqlite.mds] - @pytest.fixture(params=[1], scope='function') + @pytest.fixture(params=variations, scope='function') def mds_all(request): '''Provide a function level scoped FileStore instance talking to temporary database on localhost:27017 with both v0 and v1. ''' - ver = request.param tempdirname = tempfile.mkdtemp() - mds = MDS({'directory': tempdirname, + mds = request.param.MDS({'directory': tempdirname, - 'timezone': tzlocal.get_localzone().zone}, version=ver) + 'timezone': tzlocal.get_localzone().zone}, version=1) filenames = ['run_starts.json', 'run_stops.json', 'event_descriptors.json', 'events.json'] for fn in filenames: with open(os.path.join(tempdirname, fn), 'w') as f: f.write('[]') def delete_dm(): shutil.rmtree(tempdirname) request.addfinalizer(delete_dm) return mds
Test sqlite and mongoquery variations.
## Code Before: import os import tempfile import shutil import tzlocal import pytest from ..mongoquery.mds import MDS @pytest.fixture(params=[1], scope='function') def mds_all(request): '''Provide a function level scoped FileStore instance talking to temporary database on localhost:27017 with both v0 and v1. ''' ver = request.param tempdirname = tempfile.mkdtemp() mds = MDS({'directory': tempdirname, 'timezone': tzlocal.get_localzone().zone}, version=ver) filenames = ['run_starts.json', 'run_stops.json', 'event_descriptors.json', 'events.json'] for fn in filenames: with open(os.path.join(tempdirname, fn), 'w') as f: f.write('[]') def delete_dm(): shutil.rmtree(tempdirname) request.addfinalizer(delete_dm) return mds ## Instruction: Test sqlite and mongoquery variations. ## Code After: import os import tempfile import shutil import tzlocal import pytest import portable_mds.mongoquery.mds import portable_mds.sqlite.mds variations = [portable_mds.mongoquery.mds, portable_mds.sqlite.mds] @pytest.fixture(params=variations, scope='function') def mds_all(request): '''Provide a function level scoped FileStore instance talking to temporary database on localhost:27017 with both v0 and v1. ''' tempdirname = tempfile.mkdtemp() mds = request.param.MDS({'directory': tempdirname, 'timezone': tzlocal.get_localzone().zone}, version=1) filenames = ['run_starts.json', 'run_stops.json', 'event_descriptors.json', 'events.json'] for fn in filenames: with open(os.path.join(tempdirname, fn), 'w') as f: f.write('[]') def delete_dm(): shutil.rmtree(tempdirname) request.addfinalizer(delete_dm) return mds
bd5c215c1c481f3811753412bca6b509bb00591a
me_api/app.py
me_api/app.py
from __future__ import absolute_import, unicode_literals from flask import Flask from .middleware.me import me from .cache import cache def _register_module(app, module): if module == 'douban': from .middleware import douban app.register_blueprint(douban.douban_api) elif module == 'github': from .middleware import github app.register_blueprint(github.github_api) elif module == 'instagram': from .middleware import instagram app.register_blueprint(instagram.instagram_api) elif module == 'keybase': from .middleware import keybase app.register_blueprint(keybase.keybase_api) elif module == 'medium': from .middleware import medium app.register_blueprint(medium.medium_api) elif module == 'stackoverflow': from .middleware import stackoverflow app.register_blueprint(stackoverflow.stackoverflow_api) def create_app(config): app = Flask(__name__) app.config.from_object(config) cache.init_app(app) modules = config.modules['modules'] app.register_blueprint(me) for module in modules.keys(): _register_module(app, module) return app
from __future__ import absolute_import, unicode_literals from flask import Flask from werkzeug.utils import import_string from me_api.middleware.me import me from me_api.cache import cache middlewares = { 'douban': 'me_api.middleware.douban:douban_api', 'github': 'me_api.middleware.github:github_api', 'instagram': 'me_api.middleware.instagram:instagram_api', 'keybase': 'me_api.middleware.keybase:keybase_api', 'medium': 'me_api.middleware.medium:medium_api', 'stackoverflow': 'me_api.middleware.stackoverflow:stackoverflow_api', } def create_app(config): app = Flask(__name__) app.config.from_object(config) cache.init_app(app) modules = config.modules['modules'] app.register_blueprint(me) for module in modules.keys(): blueprint = import_string(middlewares[module]) app.register_blueprint(blueprint) return app
Improve the way that import middlewares
Improve the way that import middlewares
Python
mit
lord63/me-api
from __future__ import absolute_import, unicode_literals from flask import Flask + from werkzeug.utils import import_string - from .middleware.me import me + from me_api.middleware.me import me - from .cache import cache + from me_api.cache import cache + middlewares = { + 'douban': 'me_api.middleware.douban:douban_api', + 'github': 'me_api.middleware.github:github_api', + 'instagram': 'me_api.middleware.instagram:instagram_api', + 'keybase': 'me_api.middleware.keybase:keybase_api', + 'medium': 'me_api.middleware.medium:medium_api', + 'stackoverflow': 'me_api.middleware.stackoverflow:stackoverflow_api', + } - def _register_module(app, module): - if module == 'douban': - from .middleware import douban - app.register_blueprint(douban.douban_api) - elif module == 'github': - from .middleware import github - app.register_blueprint(github.github_api) - elif module == 'instagram': - from .middleware import instagram - app.register_blueprint(instagram.instagram_api) - elif module == 'keybase': - from .middleware import keybase - app.register_blueprint(keybase.keybase_api) - elif module == 'medium': - from .middleware import medium - app.register_blueprint(medium.medium_api) - elif module == 'stackoverflow': - from .middleware import stackoverflow - app.register_blueprint(stackoverflow.stackoverflow_api) def create_app(config): app = Flask(__name__) app.config.from_object(config) cache.init_app(app) modules = config.modules['modules'] app.register_blueprint(me) for module in modules.keys(): - _register_module(app, module) + blueprint = import_string(middlewares[module]) + app.register_blueprint(blueprint) return app
Improve the way that import middlewares
## Code Before: from __future__ import absolute_import, unicode_literals from flask import Flask from .middleware.me import me from .cache import cache def _register_module(app, module): if module == 'douban': from .middleware import douban app.register_blueprint(douban.douban_api) elif module == 'github': from .middleware import github app.register_blueprint(github.github_api) elif module == 'instagram': from .middleware import instagram app.register_blueprint(instagram.instagram_api) elif module == 'keybase': from .middleware import keybase app.register_blueprint(keybase.keybase_api) elif module == 'medium': from .middleware import medium app.register_blueprint(medium.medium_api) elif module == 'stackoverflow': from .middleware import stackoverflow app.register_blueprint(stackoverflow.stackoverflow_api) def create_app(config): app = Flask(__name__) app.config.from_object(config) cache.init_app(app) modules = config.modules['modules'] app.register_blueprint(me) for module in modules.keys(): _register_module(app, module) return app ## Instruction: Improve the way that import middlewares ## Code After: from __future__ import absolute_import, unicode_literals from flask import Flask from werkzeug.utils import import_string from me_api.middleware.me import me from me_api.cache import cache middlewares = { 'douban': 'me_api.middleware.douban:douban_api', 'github': 'me_api.middleware.github:github_api', 'instagram': 'me_api.middleware.instagram:instagram_api', 'keybase': 'me_api.middleware.keybase:keybase_api', 'medium': 'me_api.middleware.medium:medium_api', 'stackoverflow': 'me_api.middleware.stackoverflow:stackoverflow_api', } def create_app(config): app = Flask(__name__) app.config.from_object(config) cache.init_app(app) modules = config.modules['modules'] app.register_blueprint(me) for module in modules.keys(): blueprint = import_string(middlewares[module]) app.register_blueprint(blueprint) return app
af6f4868f4329fec75e43fe0cdcd1a7665c5238a
contentcuration/manage.py
contentcuration/manage.py
import os import sys # Attach Python Cloud Debugger if __name__ == "__main__": #import warnings #warnings.filterwarnings('ignore', message=r'Module .*? is being added to sys\.path', append=True) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contentcuration.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
import os import sys if __name__ == "__main__": #import warnings #warnings.filterwarnings('ignore', message=r'Module .*? is being added to sys\.path', append=True) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contentcuration.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
Remove comment on attaching cloud debugger
Remove comment on attaching cloud debugger
Python
mit
DXCanas/content-curation,DXCanas/content-curation,DXCanas/content-curation,DXCanas/content-curation
import os import sys - # Attach Python Cloud Debugger if __name__ == "__main__": #import warnings #warnings.filterwarnings('ignore', message=r'Module .*? is being added to sys\.path', append=True) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contentcuration.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
Remove comment on attaching cloud debugger
## Code Before: import os import sys # Attach Python Cloud Debugger if __name__ == "__main__": #import warnings #warnings.filterwarnings('ignore', message=r'Module .*? is being added to sys\.path', append=True) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contentcuration.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) ## Instruction: Remove comment on attaching cloud debugger ## Code After: import os import sys if __name__ == "__main__": #import warnings #warnings.filterwarnings('ignore', message=r'Module .*? is being added to sys\.path', append=True) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contentcuration.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
948b9987afa95d7a69bd61f3d8f9fea822323b01
wagtaildraftail/draft_text.py
wagtaildraftail/draft_text.py
from __future__ import absolute_import, unicode_literals import json from draftjs_exporter.html import HTML from wagtail.wagtailcore.rich_text import RichText from wagtaildraftail.settings import get_exporter_config class DraftText(RichText): def __init__(self, value, **kwargs): super(DraftText, self).__init__(value or '{}', **kwargs) self.exporter = HTML(get_exporter_config()) def get_json(self): return self.source def __html__(self): return self.exporter.render(json.loads(self.source))
from __future__ import absolute_import, unicode_literals import json from django.utils.functional import cached_property from draftjs_exporter.html import HTML from wagtail.wagtailcore.rich_text import RichText from wagtaildraftail.settings import get_exporter_config class DraftText(RichText): def __init__(self, value, **kwargs): super(DraftText, self).__init__(value or '{}', **kwargs) self.exporter = HTML(get_exporter_config()) def get_json(self): return self.source @cached_property def _html(self): return self.exporter.render(json.loads(self.source)) def __html__(self): return self._html def __eq__(self, other): return self.__html__() == other.__html__()
Implement equality check for DraftText nodes
Implement equality check for DraftText nodes Compare the (cached) rendered html of a node
Python
mit
gasman/wagtaildraftail,gasman/wagtaildraftail,gasman/wagtaildraftail,springload/wagtaildraftail,gasman/wagtaildraftail,springload/wagtaildraftail,springload/wagtaildraftail,springload/wagtaildraftail
from __future__ import absolute_import, unicode_literals import json + + from django.utils.functional import cached_property from draftjs_exporter.html import HTML from wagtail.wagtailcore.rich_text import RichText from wagtaildraftail.settings import get_exporter_config class DraftText(RichText): def __init__(self, value, **kwargs): super(DraftText, self).__init__(value or '{}', **kwargs) self.exporter = HTML(get_exporter_config()) def get_json(self): return self.source + @cached_property - def __html__(self): + def _html(self): return self.exporter.render(json.loads(self.source)) + def __html__(self): + return self._html + + def __eq__(self, other): + return self.__html__() == other.__html__() +
Implement equality check for DraftText nodes
## Code Before: from __future__ import absolute_import, unicode_literals import json from draftjs_exporter.html import HTML from wagtail.wagtailcore.rich_text import RichText from wagtaildraftail.settings import get_exporter_config class DraftText(RichText): def __init__(self, value, **kwargs): super(DraftText, self).__init__(value or '{}', **kwargs) self.exporter = HTML(get_exporter_config()) def get_json(self): return self.source def __html__(self): return self.exporter.render(json.loads(self.source)) ## Instruction: Implement equality check for DraftText nodes ## Code After: from __future__ import absolute_import, unicode_literals import json from django.utils.functional import cached_property from draftjs_exporter.html import HTML from wagtail.wagtailcore.rich_text import RichText from wagtaildraftail.settings import get_exporter_config class DraftText(RichText): def __init__(self, value, **kwargs): super(DraftText, self).__init__(value or '{}', **kwargs) self.exporter = HTML(get_exporter_config()) def get_json(self): return self.source @cached_property def _html(self): return self.exporter.render(json.loads(self.source)) def __html__(self): return self._html def __eq__(self, other): return self.__html__() == other.__html__()
5c851ee3d333518829ce26bfc06fd1038e70651c
corehq/util/decorators.py
corehq/util/decorators.py
from functools import wraps import logging from corehq.util.global_request import get_request from dimagi.utils.logging import notify_exception def handle_uncaught_exceptions(mail_admins=True): """Decorator to log uncaught exceptions and prevent them from bubbling up the call chain. """ def _outer(fn): @wraps(fn) def _handle_exceptions(*args, **kwargs): try: return fn(*args, **kwargs) except Exception as e: msg = "Uncaught exception from {}.{}".format(fn.__module__, fn.__name__) if mail_admins: notify_exception(get_request(), msg) else: logging.exception(msg) return _handle_exceptions return _outer
from functools import wraps import logging from corehq.util.global_request import get_request from dimagi.utils.logging import notify_exception class ContextDecorator(object): """ A base class that enables a context manager to also be used as a decorator. https://docs.python.org/3/library/contextlib.html#contextlib.ContextDecorator """ def __call__(self, fn): @wraps(fn) def decorated(*args, **kwds): with self: return fn(*args, **kwds) return decorated def handle_uncaught_exceptions(mail_admins=True): """Decorator to log uncaught exceptions and prevent them from bubbling up the call chain. """ def _outer(fn): @wraps(fn) def _handle_exceptions(*args, **kwargs): try: return fn(*args, **kwargs) except Exception as e: msg = "Uncaught exception from {}.{}".format(fn.__module__, fn.__name__) if mail_admins: notify_exception(get_request(), msg) else: logging.exception(msg) return _handle_exceptions return _outer class change_log_level(ContextDecorator): """ Temporarily change the log level of a specific logger. Can be used as either a context manager or decorator. """ def __init__(self, logger, level): self.logger = logging.getLogger(logger) self.new_level = level self.original_level = self.logger.level def __enter__(self): self.logger.setLevel(self.new_level) def __exit__(self, exc_type, exc_val, exc_tb): self.logger.setLevel(self.original_level)
Add util to temporarily alter log levels
Add util to temporarily alter log levels Also backport ContextDecorator from python 3. I saw this just the other day and it looks like an awesome pattern, and a much clearer way to write decorators.
Python
bsd-3-clause
qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq
from functools import wraps import logging from corehq.util.global_request import get_request from dimagi.utils.logging import notify_exception + + + class ContextDecorator(object): + """ + A base class that enables a context manager to also be used as a decorator. + https://docs.python.org/3/library/contextlib.html#contextlib.ContextDecorator + """ + def __call__(self, fn): + @wraps(fn) + def decorated(*args, **kwds): + with self: + return fn(*args, **kwds) + return decorated def handle_uncaught_exceptions(mail_admins=True): """Decorator to log uncaught exceptions and prevent them from bubbling up the call chain. """ def _outer(fn): @wraps(fn) def _handle_exceptions(*args, **kwargs): try: return fn(*args, **kwargs) except Exception as e: msg = "Uncaught exception from {}.{}".format(fn.__module__, fn.__name__) if mail_admins: notify_exception(get_request(), msg) else: logging.exception(msg) return _handle_exceptions return _outer + + class change_log_level(ContextDecorator): + """ + Temporarily change the log level of a specific logger. + Can be used as either a context manager or decorator. + """ + def __init__(self, logger, level): + self.logger = logging.getLogger(logger) + self.new_level = level + self.original_level = self.logger.level + + def __enter__(self): + self.logger.setLevel(self.new_level) + + def __exit__(self, exc_type, exc_val, exc_tb): + self.logger.setLevel(self.original_level) +
Add util to temporarily alter log levels
## Code Before: from functools import wraps import logging from corehq.util.global_request import get_request from dimagi.utils.logging import notify_exception def handle_uncaught_exceptions(mail_admins=True): """Decorator to log uncaught exceptions and prevent them from bubbling up the call chain. """ def _outer(fn): @wraps(fn) def _handle_exceptions(*args, **kwargs): try: return fn(*args, **kwargs) except Exception as e: msg = "Uncaught exception from {}.{}".format(fn.__module__, fn.__name__) if mail_admins: notify_exception(get_request(), msg) else: logging.exception(msg) return _handle_exceptions return _outer ## Instruction: Add util to temporarily alter log levels ## Code After: from functools import wraps import logging from corehq.util.global_request import get_request from dimagi.utils.logging import notify_exception class ContextDecorator(object): """ A base class that enables a context manager to also be used as a decorator. https://docs.python.org/3/library/contextlib.html#contextlib.ContextDecorator """ def __call__(self, fn): @wraps(fn) def decorated(*args, **kwds): with self: return fn(*args, **kwds) return decorated def handle_uncaught_exceptions(mail_admins=True): """Decorator to log uncaught exceptions and prevent them from bubbling up the call chain. """ def _outer(fn): @wraps(fn) def _handle_exceptions(*args, **kwargs): try: return fn(*args, **kwargs) except Exception as e: msg = "Uncaught exception from {}.{}".format(fn.__module__, fn.__name__) if mail_admins: notify_exception(get_request(), msg) else: logging.exception(msg) return _handle_exceptions return _outer class change_log_level(ContextDecorator): """ Temporarily change the log level of a specific logger. Can be used as either a context manager or decorator. 
""" def __init__(self, logger, level): self.logger = logging.getLogger(logger) self.new_level = level self.original_level = self.logger.level def __enter__(self): self.logger.setLevel(self.new_level) def __exit__(self, exc_type, exc_val, exc_tb): self.logger.setLevel(self.original_level)
a35d6f59d214741f554dde1363d2eac7addb04cb
crypto_enigma/__init__.py
crypto_enigma/__init__.py
"""An Enigma machine simulator with rich textual display functionality.""" from ._version import __version__, __author__ #__all__ = ['machine', 'components'] from .components import * from .machine import *
from ._version import __version__, __author__ #__all__ = ['machine', 'components'] from .components import * from .machine import *
Add limitations to package documentation
Add limitations to package documentation
Python
bsd-3-clause
orome/crypto-enigma-py
- - """An Enigma machine simulator with rich textual display functionality.""" from ._version import __version__, __author__ #__all__ = ['machine', 'components'] from .components import * from .machine import *
Add limitations to package documentation
## Code Before: """An Enigma machine simulator with rich textual display functionality.""" from ._version import __version__, __author__ #__all__ = ['machine', 'components'] from .components import * from .machine import * ## Instruction: Add limitations to package documentation ## Code After: from ._version import __version__, __author__ #__all__ = ['machine', 'components'] from .components import * from .machine import *
08291f3948108da15b9832c495fade04cf2e22c4
tests/tests.py
tests/tests.py
from selenium import webdriver import unittest class AdminPageTest(unittest.TestCase): def setUp(self): self.browser = webdriver.Firefox() self.browser.implicitly_wait(3) def tearDown(self): self.browser.quit() def test_visit_admin_page(self): # Visit admin page self.browser.get('http://localhost:8000/admin') # Check page title self.assertIn('Django site admin', self.browser.title) class API_fetch_tests(unittest.TestCase): def setUp(self): self.browser = webdriver.Firefox() def tearDown(self): self.browser.quit() def test_fetch_Ingredient_JSON(self): pass def test_fetch_Drink_JSON(self): pass if __name__ == '__main__': print('test') unittest.main()
from selenium import webdriver import unittest class AdminPageTest(unittest.TestCase): def setUp(self): self.browser = webdriver.Firefox() self.browser.implicitly_wait(3) def tearDown(self): self.browser.quit() def test_visit_admin_page(self): # Visit admin page self.browser.get('http://localhost:8000/admin') # Check page title self.assertIn('Django site admin', self.browser.title) class API_fetch_tests(unittest.TestCase): def setUp(self): self.browser = webdriver.Firefox() def tearDown(self): self.browser.quit() def test_fetch_Ingredient_JSON(self): pass def test_fetch_Drink_JSON(self): pass class ReactAppTests(unittest.TestCase): def setUp(self): self.browser = webdriver.Firefox() def tearDown(self): self.browser.quit() def test_fetch_index(self): self.browser.get('http://localhost:8000/index') self.assertIn('Cocktails', self.browser.title) if __name__ == '__main__': print('test') unittest.main()
Add test to check title of index
Add test to check title of index
Python
mit
jake-jake-jake/cocktails,jake-jake-jake/cocktails,jake-jake-jake/cocktails,jake-jake-jake/cocktails
from selenium import webdriver import unittest class AdminPageTest(unittest.TestCase): def setUp(self): self.browser = webdriver.Firefox() self.browser.implicitly_wait(3) def tearDown(self): self.browser.quit() def test_visit_admin_page(self): # Visit admin page self.browser.get('http://localhost:8000/admin') # Check page title self.assertIn('Django site admin', self.browser.title) + class API_fetch_tests(unittest.TestCase): def setUp(self): self.browser = webdriver.Firefox() def tearDown(self): self.browser.quit() def test_fetch_Ingredient_JSON(self): pass def test_fetch_Drink_JSON(self): pass + class ReactAppTests(unittest.TestCase): + + def setUp(self): + self.browser = webdriver.Firefox() + + def tearDown(self): + self.browser.quit() + + def test_fetch_index(self): + self.browser.get('http://localhost:8000/index') + self.assertIn('Cocktails', self.browser.title) + if __name__ == '__main__': print('test') unittest.main()
Add test to check title of index
## Code Before: from selenium import webdriver import unittest class AdminPageTest(unittest.TestCase): def setUp(self): self.browser = webdriver.Firefox() self.browser.implicitly_wait(3) def tearDown(self): self.browser.quit() def test_visit_admin_page(self): # Visit admin page self.browser.get('http://localhost:8000/admin') # Check page title self.assertIn('Django site admin', self.browser.title) class API_fetch_tests(unittest.TestCase): def setUp(self): self.browser = webdriver.Firefox() def tearDown(self): self.browser.quit() def test_fetch_Ingredient_JSON(self): pass def test_fetch_Drink_JSON(self): pass if __name__ == '__main__': print('test') unittest.main() ## Instruction: Add test to check title of index ## Code After: from selenium import webdriver import unittest class AdminPageTest(unittest.TestCase): def setUp(self): self.browser = webdriver.Firefox() self.browser.implicitly_wait(3) def tearDown(self): self.browser.quit() def test_visit_admin_page(self): # Visit admin page self.browser.get('http://localhost:8000/admin') # Check page title self.assertIn('Django site admin', self.browser.title) class API_fetch_tests(unittest.TestCase): def setUp(self): self.browser = webdriver.Firefox() def tearDown(self): self.browser.quit() def test_fetch_Ingredient_JSON(self): pass def test_fetch_Drink_JSON(self): pass class ReactAppTests(unittest.TestCase): def setUp(self): self.browser = webdriver.Firefox() def tearDown(self): self.browser.quit() def test_fetch_index(self): self.browser.get('http://localhost:8000/index') self.assertIn('Cocktails', self.browser.title) if __name__ == '__main__': print('test') unittest.main()
44520918dc0fad40f3afcfc2cdfde6f3208543cd
garden_lighting/MCP23017/raspberry.py
garden_lighting/MCP23017/raspberry.py
import time import os import wiringpi from garden_lighting.MCP23017.MCP23017 import MCP23017 class RaspberryMCP23017(MCP23017): def __init__(self, dev_addr, rst_pin=0xFF, i2cport=1): super().__init__(dev_addr, rst_pin, i2cport) def initDevice(self): ''' Does a reset to put all registers in initial state ''' os.system("gpio export " + str(self.RstPin) + " out") # Set pin numbering mode # We don't need performance, don't want root and don't want to interfere with # other wiringpi instances -> sysfspy wiringpi.wiringPiSetupSys() # Define the reset pin as output wiringpi.pinMode(self.RstPin, wiringpi.GPIO.OUTPUT) # Create a reset impulse wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.LOW) # wait for 50 ms time.sleep(.050) wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.HIGH)
import time import wiringpi from garden_lighting.MCP23017.MCP23017 import MCP23017 class RaspberryMCP23017(MCP23017): def __init__(self, dev_addr, rst_pin=0xFF, i2cport=1): super().__init__(dev_addr, rst_pin, i2cport) def initDevice(self): ''' Does a reset to put all registers in initial state ''' # Set pin numbering mode # wiringPiSetupSys() did not work because pins were low after booting and running the write commands # This requires root! wiringpi.wiringPiSetupGpio() # Define the reset pin as output wiringpi.pinMode(self.RstPin, wiringpi.GPIO.OUTPUT) # Create a reset impulse wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.LOW) # wait for 50 ms time.sleep(.050) wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.HIGH)
Use wiringPiSetupGpio, which required root. With wiringPiSetupSys some gpios stayed on low after boot.
Use wiringPiSetupGpio, which required root. With wiringPiSetupSys some gpios stayed on low after boot.
Python
mit
ammannbros/garden-lighting,ammannbros/garden-lighting,ammannbros/garden-lighting,ammannbros/garden-lighting
import time - import os import wiringpi from garden_lighting.MCP23017.MCP23017 import MCP23017 class RaspberryMCP23017(MCP23017): def __init__(self, dev_addr, rst_pin=0xFF, i2cport=1): super().__init__(dev_addr, rst_pin, i2cport) def initDevice(self): ''' Does a reset to put all registers in initial state ''' - os.system("gpio export " + str(self.RstPin) + " out") # Set pin numbering mode - # We don't need performance, don't want root and don't want to interfere with - # other wiringpi instances -> sysfspy + # wiringPiSetupSys() did not work because pins were low after booting and running the write commands + # This requires root! - wiringpi.wiringPiSetupSys() + wiringpi.wiringPiSetupGpio() # Define the reset pin as output wiringpi.pinMode(self.RstPin, wiringpi.GPIO.OUTPUT) # Create a reset impulse wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.LOW) # wait for 50 ms time.sleep(.050) wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.HIGH)
Use wiringPiSetupGpio, which required root. With wiringPiSetupSys some gpios stayed on low after boot.
## Code Before: import time import os import wiringpi from garden_lighting.MCP23017.MCP23017 import MCP23017 class RaspberryMCP23017(MCP23017): def __init__(self, dev_addr, rst_pin=0xFF, i2cport=1): super().__init__(dev_addr, rst_pin, i2cport) def initDevice(self): ''' Does a reset to put all registers in initial state ''' os.system("gpio export " + str(self.RstPin) + " out") # Set pin numbering mode # We don't need performance, don't want root and don't want to interfere with # other wiringpi instances -> sysfspy wiringpi.wiringPiSetupSys() # Define the reset pin as output wiringpi.pinMode(self.RstPin, wiringpi.GPIO.OUTPUT) # Create a reset impulse wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.LOW) # wait for 50 ms time.sleep(.050) wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.HIGH) ## Instruction: Use wiringPiSetupGpio, which required root. With wiringPiSetupSys some gpios stayed on low after boot. ## Code After: import time import wiringpi from garden_lighting.MCP23017.MCP23017 import MCP23017 class RaspberryMCP23017(MCP23017): def __init__(self, dev_addr, rst_pin=0xFF, i2cport=1): super().__init__(dev_addr, rst_pin, i2cport) def initDevice(self): ''' Does a reset to put all registers in initial state ''' # Set pin numbering mode # wiringPiSetupSys() did not work because pins were low after booting and running the write commands # This requires root! wiringpi.wiringPiSetupGpio() # Define the reset pin as output wiringpi.pinMode(self.RstPin, wiringpi.GPIO.OUTPUT) # Create a reset impulse wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.LOW) # wait for 50 ms time.sleep(.050) wiringpi.digitalWrite(self.RstPin, wiringpi.GPIO.HIGH)
e30e5e9780cfe674a70856609ad6010056936263
picdump/webadapter.py
picdump/webadapter.py
import urllib.request class WebAdapter: def get(self, urllike): url = self.mk_url(urllike) try: res = urllib.request.urlopen(url) return res.read() except Exception as e: raise e def open(self, urllike): url = self.mk_url(urllike) try: return urllib.request.urlopen(url) except Exception as e: raise e def mk_url(self, urllike): return str(urllike)
import requests class WebAdapter: def __init__(self): self.cookies = {} def get(self, urllike): res = requests.get(str(urllike), cookies=self.cookies) self.cookies = res.cookies return res.text
Use requests instead of urllib.request
Use requests instead of urllib.request
Python
mit
kanosaki/PicDump,kanosaki/PicDump
- import urllib.request + import requests class WebAdapter: + def __init__(self): + self.cookies = {} + def get(self, urllike): + res = requests.get(str(urllike), cookies=self.cookies) + self.cookies = res.cookies - url = self.mk_url(urllike) - try: - res = urllib.request.urlopen(url) - return res.read() + return res.text - except Exception as e: - raise e - def open(self, urllike): - url = self.mk_url(urllike) - try: - return urllib.request.urlopen(url) - except Exception as e: - raise e - - def mk_url(self, urllike): - return str(urllike) -
Use requests instead of urllib.request
## Code Before: import urllib.request class WebAdapter: def get(self, urllike): url = self.mk_url(urllike) try: res = urllib.request.urlopen(url) return res.read() except Exception as e: raise e def open(self, urllike): url = self.mk_url(urllike) try: return urllib.request.urlopen(url) except Exception as e: raise e def mk_url(self, urllike): return str(urllike) ## Instruction: Use requests instead of urllib.request ## Code After: import requests class WebAdapter: def __init__(self): self.cookies = {} def get(self, urllike): res = requests.get(str(urllike), cookies=self.cookies) self.cookies = res.cookies return res.text
b38f465e512f9b7e79935c156c60ef56d6122387
aiohttp_middlewares/constants.py
aiohttp_middlewares/constants.py
#: Set of idempotent HTTP methods IDEMPOTENT_METHODS = frozenset({'GET', 'HEAD', 'OPTIONS', 'TRACE'}) #: Set of non-idempotent HTTP methods NON_IDEMPOTENT_METHODS = frozenset({'POST', 'PUT', 'PATCH', 'DELETE'})
#: Set of idempotent HTTP methods IDEMPOTENT_METHODS = frozenset({'GET', 'HEAD', 'OPTIONS', 'TRACE'}) #: Set of non-idempotent HTTP methods NON_IDEMPOTENT_METHODS = frozenset({'DELETE', 'PATCH', 'POST', 'PUT'})
Order HTTP methods in constant.
chore: Order HTTP methods in constant.
Python
bsd-3-clause
playpauseandstop/aiohttp-middlewares,playpauseandstop/aiohttp-middlewares
#: Set of idempotent HTTP methods IDEMPOTENT_METHODS = frozenset({'GET', 'HEAD', 'OPTIONS', 'TRACE'}) #: Set of non-idempotent HTTP methods - NON_IDEMPOTENT_METHODS = frozenset({'POST', 'PUT', 'PATCH', 'DELETE'}) + NON_IDEMPOTENT_METHODS = frozenset({'DELETE', 'PATCH', 'POST', 'PUT'})
Order HTTP methods in constant.
## Code Before: #: Set of idempotent HTTP methods IDEMPOTENT_METHODS = frozenset({'GET', 'HEAD', 'OPTIONS', 'TRACE'}) #: Set of non-idempotent HTTP methods NON_IDEMPOTENT_METHODS = frozenset({'POST', 'PUT', 'PATCH', 'DELETE'}) ## Instruction: Order HTTP methods in constant. ## Code After: #: Set of idempotent HTTP methods IDEMPOTENT_METHODS = frozenset({'GET', 'HEAD', 'OPTIONS', 'TRACE'}) #: Set of non-idempotent HTTP methods NON_IDEMPOTENT_METHODS = frozenset({'DELETE', 'PATCH', 'POST', 'PUT'})
fbb2c05aef76c02094c13f5edeaecd9b7428ff11
alignak_backend/models/uipref.py
alignak_backend/models/uipref.py
def get_name(): """ Get name of this resource :return: name of this resource :rtype: str """ return 'uipref' def get_schema(): """ Schema structure of this resource :return: schema dictionnary :rtype: dict """ return { 'allow_unknown': True, 'schema': { 'type': { 'type': 'string', 'ui': { 'title': "Preference's type", 'visible': True, 'orderable': True, 'searchable': True, "format": None }, 'default': '' }, 'user': { 'type': 'string', 'ui': { 'title': "User name", 'visible': True, 'orderable': True, 'searchable': True, "format": None }, 'default': '' }, 'data': { 'type': 'list', 'ui': { 'title': "User name", 'visible': True, 'orderable': True, 'searchable': True, "format": None }, 'default': [] }, } }
def get_name(): """ Get name of this resource :return: name of this resource :rtype: str """ return 'uipref' def get_schema(): """ Schema structure of this resource :return: schema dictionnary :rtype: dict """ return { 'allow_unknown': True, 'schema': { 'type': { 'type': 'string', 'ui': { 'title': "Preference's type", 'visible': True, 'orderable': True, 'searchable': True, "format": None }, 'default': '' }, 'user': { 'type': 'string', 'ui': { 'title': "User name", 'visible': True, 'orderable': True, 'searchable': True, "format": None }, 'default': '' }, 'data': { 'type': 'dict', 'ui': { 'title': "Preference's dictionary", 'visible': True, 'orderable': True, 'searchable': True, "format": None }, 'default': [] }, } }
Update UI preferences model (dict)
Update UI preferences model (dict)
Python
agpl-3.0
Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend
def get_name(): """ Get name of this resource :return: name of this resource :rtype: str """ return 'uipref' def get_schema(): """ Schema structure of this resource :return: schema dictionnary :rtype: dict """ return { 'allow_unknown': True, 'schema': { 'type': { 'type': 'string', 'ui': { 'title': "Preference's type", 'visible': True, 'orderable': True, 'searchable': True, "format": None }, 'default': '' }, 'user': { 'type': 'string', 'ui': { 'title': "User name", 'visible': True, 'orderable': True, 'searchable': True, "format": None }, 'default': '' }, 'data': { - 'type': 'list', + 'type': 'dict', 'ui': { - 'title': "User name", + 'title': "Preference's dictionary", 'visible': True, 'orderable': True, 'searchable': True, "format": None }, 'default': [] }, } }
Update UI preferences model (dict)
## Code Before: def get_name(): """ Get name of this resource :return: name of this resource :rtype: str """ return 'uipref' def get_schema(): """ Schema structure of this resource :return: schema dictionnary :rtype: dict """ return { 'allow_unknown': True, 'schema': { 'type': { 'type': 'string', 'ui': { 'title': "Preference's type", 'visible': True, 'orderable': True, 'searchable': True, "format": None }, 'default': '' }, 'user': { 'type': 'string', 'ui': { 'title': "User name", 'visible': True, 'orderable': True, 'searchable': True, "format": None }, 'default': '' }, 'data': { 'type': 'list', 'ui': { 'title': "User name", 'visible': True, 'orderable': True, 'searchable': True, "format": None }, 'default': [] }, } } ## Instruction: Update UI preferences model (dict) ## Code After: def get_name(): """ Get name of this resource :return: name of this resource :rtype: str """ return 'uipref' def get_schema(): """ Schema structure of this resource :return: schema dictionnary :rtype: dict """ return { 'allow_unknown': True, 'schema': { 'type': { 'type': 'string', 'ui': { 'title': "Preference's type", 'visible': True, 'orderable': True, 'searchable': True, "format": None }, 'default': '' }, 'user': { 'type': 'string', 'ui': { 'title': "User name", 'visible': True, 'orderable': True, 'searchable': True, "format": None }, 'default': '' }, 'data': { 'type': 'dict', 'ui': { 'title': "Preference's dictionary", 'visible': True, 'orderable': True, 'searchable': True, "format": None }, 'default': [] }, } }
53b9eff3ffc1768d3503021e7248351e24d59af7
tests/httpd.py
tests/httpd.py
import SimpleHTTPServer import BaseHTTPServer class Handler(SimpleHTTPServer.SimpleHTTPRequestHandler): def do_POST(s): s.send_response(200) s.end_headers() if __name__ == '__main__': server_class = BaseHTTPServer.HTTPServer httpd = server_class(('0.0.0.0', 8328), Handler) try: httpd.serve_forever() except KeyboardInterrupt: httpd.server_close()
import BaseHTTPServer class Handler(BaseHTTPServer.BaseHTTPRequestHandler): def do_POST(self): content_type = self.headers.getheader('content-type') content_length = int(self.headers.getheader('content-length')) self.send_response(200) self.send_header('Content-Type', content_type) self.send_header('Content-Length', str(content_length)) self.end_headers() self.wfile.write(self.rfile.read(content_length)) if __name__ == '__main__': server_class = BaseHTTPServer.HTTPServer httpd = server_class(('0.0.0.0', 8328), Handler) try: httpd.serve_forever() except KeyboardInterrupt: httpd.server_close()
Fix test http server, change to echo back request body
Fix test http server, change to echo back request body
Python
bsd-2-clause
chop-dbhi/django-webhooks,pombredanne/django-webhooks,pombredanne/django-webhooks,chop-dbhi/django-webhooks
- import SimpleHTTPServer import BaseHTTPServer - class Handler(SimpleHTTPServer.SimpleHTTPRequestHandler): + class Handler(BaseHTTPServer.BaseHTTPRequestHandler): - def do_POST(s): + def do_POST(self): + content_type = self.headers.getheader('content-type') + content_length = int(self.headers.getheader('content-length')) - s.send_response(200) + self.send_response(200) + self.send_header('Content-Type', content_type) + self.send_header('Content-Length', str(content_length)) - s.end_headers() + self.end_headers() + self.wfile.write(self.rfile.read(content_length)) if __name__ == '__main__': server_class = BaseHTTPServer.HTTPServer httpd = server_class(('0.0.0.0', 8328), Handler) try: httpd.serve_forever() except KeyboardInterrupt: httpd.server_close()
Fix test http server, change to echo back request body
## Code Before: import SimpleHTTPServer import BaseHTTPServer class Handler(SimpleHTTPServer.SimpleHTTPRequestHandler): def do_POST(s): s.send_response(200) s.end_headers() if __name__ == '__main__': server_class = BaseHTTPServer.HTTPServer httpd = server_class(('0.0.0.0', 8328), Handler) try: httpd.serve_forever() except KeyboardInterrupt: httpd.server_close() ## Instruction: Fix test http server, change to echo back request body ## Code After: import BaseHTTPServer class Handler(BaseHTTPServer.BaseHTTPRequestHandler): def do_POST(self): content_type = self.headers.getheader('content-type') content_length = int(self.headers.getheader('content-length')) self.send_response(200) self.send_header('Content-Type', content_type) self.send_header('Content-Length', str(content_length)) self.end_headers() self.wfile.write(self.rfile.read(content_length)) if __name__ == '__main__': server_class = BaseHTTPServer.HTTPServer httpd = server_class(('0.0.0.0', 8328), Handler) try: httpd.serve_forever() except KeyboardInterrupt: httpd.server_close()
b9143462c004af7d18a66fa92ad94585468751b9
IndexedRedis/fields/classic.py
IndexedRedis/fields/classic.py
from . import IRField, IR_NULL_STRINGS, irNull from ..compat_str import tobytes class IRClassicField(IRField): ''' IRClassicField - The IRField type which behaves like the "classic" IndexedRedis string-named fields. This will store and retrieve data encoding into the default encoding (@see IndexedRedis.compat_str.setDefaultIREncoding) and have a default value of empty string. ''' CAN_INDEX = True def __init__(self, name='', hashIndex=False): IRField.__init__(self, name=name, hashIndex=hashIndex, defaultValue='') def __new__(self, name='', hashIndex=False): return IRField.__new__(self, name) # vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
from . import IRField, IR_NULL_STRINGS, irNull from ..compat_str import tobytes, encoded_str_type class IRClassicField(IRField): ''' IRClassicField - The IRField type which behaves like the "classic" IndexedRedis string-named fields. This will store and retrieve data encoding into the default encoding (@see IndexedRedis.compat_str.setDefaultIREncoding) and have a default value of empty string. ''' CAN_INDEX = True def __init__(self, name='', hashIndex=False): IRField.__init__(self, name=name, valueType=encoded_str_type, hashIndex=hashIndex, defaultValue='') def __new__(self, name='', hashIndex=False): return IRField.__new__(self, name) # vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
Change IRFieldClassic to use 'encoded_str_type'
Change IRFieldClassic to use 'encoded_str_type'
Python
lgpl-2.1
kata198/indexedredis,kata198/indexedredis
from . import IRField, IR_NULL_STRINGS, irNull - from ..compat_str import tobytes + from ..compat_str import tobytes, encoded_str_type class IRClassicField(IRField): ''' IRClassicField - The IRField type which behaves like the "classic" IndexedRedis string-named fields. This will store and retrieve data encoding into the default encoding (@see IndexedRedis.compat_str.setDefaultIREncoding) and have a default value of empty string. ''' CAN_INDEX = True def __init__(self, name='', hashIndex=False): - IRField.__init__(self, name=name, hashIndex=hashIndex, defaultValue='') + IRField.__init__(self, name=name, valueType=encoded_str_type, hashIndex=hashIndex, defaultValue='') def __new__(self, name='', hashIndex=False): return IRField.__new__(self, name) # vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
Change IRFieldClassic to use 'encoded_str_type'
## Code Before: from . import IRField, IR_NULL_STRINGS, irNull from ..compat_str import tobytes class IRClassicField(IRField): ''' IRClassicField - The IRField type which behaves like the "classic" IndexedRedis string-named fields. This will store and retrieve data encoding into the default encoding (@see IndexedRedis.compat_str.setDefaultIREncoding) and have a default value of empty string. ''' CAN_INDEX = True def __init__(self, name='', hashIndex=False): IRField.__init__(self, name=name, hashIndex=hashIndex, defaultValue='') def __new__(self, name='', hashIndex=False): return IRField.__new__(self, name) # vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab : ## Instruction: Change IRFieldClassic to use 'encoded_str_type' ## Code After: from . import IRField, IR_NULL_STRINGS, irNull from ..compat_str import tobytes, encoded_str_type class IRClassicField(IRField): ''' IRClassicField - The IRField type which behaves like the "classic" IndexedRedis string-named fields. This will store and retrieve data encoding into the default encoding (@see IndexedRedis.compat_str.setDefaultIREncoding) and have a default value of empty string. ''' CAN_INDEX = True def __init__(self, name='', hashIndex=False): IRField.__init__(self, name=name, valueType=encoded_str_type, hashIndex=hashIndex, defaultValue='') def __new__(self, name='', hashIndex=False): return IRField.__new__(self, name) # vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
effa5f84fc93ced38ad9e5d3b0a16bea2d3914ef
caminae/common/templatetags/field_verbose_name.py
caminae/common/templatetags/field_verbose_name.py
from django import template register = template.Library() def field_verbose_name(obj, field): """Usage: {{ object|get_object_field }}""" return obj._meta.get_field(field).verbose_name register.filter(field_verbose_name) register.filter('verbose', field_verbose_name)
from django import template from django.db.models.fields.related import FieldDoesNotExist register = template.Library() def field_verbose_name(obj, field): """Usage: {{ object|get_object_field }}""" try: return obj._meta.get_field(field).verbose_name except FieldDoesNotExist: a = getattr(obj, '%s_verbose_name' % field) if a is None: raise return unicode(a) register.filter(field_verbose_name) register.filter('verbose', field_verbose_name)
Allow column to be a property
Allow column to be a property
Python
bsd-2-clause
makinacorpus/Geotrek,Anaethelion/Geotrek,mabhub/Geotrek,camillemonchicourt/Geotrek,Anaethelion/Geotrek,johan--/Geotrek,johan--/Geotrek,johan--/Geotrek,makinacorpus/Geotrek,camillemonchicourt/Geotrek,GeotrekCE/Geotrek-admin,mabhub/Geotrek,makinacorpus/Geotrek,Anaethelion/Geotrek,Anaethelion/Geotrek,GeotrekCE/Geotrek-admin,johan--/Geotrek,mabhub/Geotrek,makinacorpus/Geotrek,mabhub/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,camillemonchicourt/Geotrek
from django import template + from django.db.models.fields.related import FieldDoesNotExist register = template.Library() def field_verbose_name(obj, field): """Usage: {{ object|get_object_field }}""" - + try: - return obj._meta.get_field(field).verbose_name + return obj._meta.get_field(field).verbose_name + except FieldDoesNotExist: + a = getattr(obj, '%s_verbose_name' % field) + if a is None: + raise + return unicode(a) register.filter(field_verbose_name) register.filter('verbose', field_verbose_name)
Allow column to be a property
## Code Before: from django import template register = template.Library() def field_verbose_name(obj, field): """Usage: {{ object|get_object_field }}""" return obj._meta.get_field(field).verbose_name register.filter(field_verbose_name) register.filter('verbose', field_verbose_name) ## Instruction: Allow column to be a property ## Code After: from django import template from django.db.models.fields.related import FieldDoesNotExist register = template.Library() def field_verbose_name(obj, field): """Usage: {{ object|get_object_field }}""" try: return obj._meta.get_field(field).verbose_name except FieldDoesNotExist: a = getattr(obj, '%s_verbose_name' % field) if a is None: raise return unicode(a) register.filter(field_verbose_name) register.filter('verbose', field_verbose_name)
ca2b02d551e9bb4c8625ae79f7878892673fa731
corehq/apps/es/domains.py
corehq/apps/es/domains.py
from .es_query import HQESQuery from . import filters class DomainES(HQESQuery): index = 'domains' @property def builtin_filters(self): return [ real_domains, commconnect_domains, created, ] + super(DomainES, self).builtin_filters def real_domains(): return filters.term("is_test", False) def commconnect_domains(): return filters.term("commconnect_enabled", True) def created(gt=None, gte=None, lt=None, lte=None): return filters.date_range('date_created', gt, gte, lt, lte)
from .es_query import HQESQuery from . import filters class DomainES(HQESQuery): index = 'domains' @property def builtin_filters(self): return [ real_domains, commcare_domains, commconnect_domains, commtrack_domains, created, ] + super(DomainES, self).builtin_filters def real_domains(): return filters.term("is_test", False) def commcare_domains(): return filters.AND(filters.term("commconnect_enabled", False), filters.term("commtrack_enabled", False)) def commconnect_domains(): return filters.term("commconnect_enabled", True) def commtrack_domains(): return filters.term("commtrack_enabled", True) def created(gt=None, gte=None, lt=None, lte=None): return filters.date_range('date_created', gt, gte, lt, lte)
Add CommCare, CommTrack filters for DomainES
Add CommCare, CommTrack filters for DomainES
Python
bsd-3-clause
qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq
from .es_query import HQESQuery from . import filters class DomainES(HQESQuery): index = 'domains' @property def builtin_filters(self): return [ real_domains, + commcare_domains, commconnect_domains, + commtrack_domains, created, ] + super(DomainES, self).builtin_filters def real_domains(): return filters.term("is_test", False) + def commcare_domains(): + return filters.AND(filters.term("commconnect_enabled", False), + filters.term("commtrack_enabled", False)) + + def commconnect_domains(): return filters.term("commconnect_enabled", True) + + + def commtrack_domains(): + return filters.term("commtrack_enabled", True) def created(gt=None, gte=None, lt=None, lte=None): return filters.date_range('date_created', gt, gte, lt, lte)
Add CommCare, CommTrack filters for DomainES
## Code Before: from .es_query import HQESQuery from . import filters class DomainES(HQESQuery): index = 'domains' @property def builtin_filters(self): return [ real_domains, commconnect_domains, created, ] + super(DomainES, self).builtin_filters def real_domains(): return filters.term("is_test", False) def commconnect_domains(): return filters.term("commconnect_enabled", True) def created(gt=None, gte=None, lt=None, lte=None): return filters.date_range('date_created', gt, gte, lt, lte) ## Instruction: Add CommCare, CommTrack filters for DomainES ## Code After: from .es_query import HQESQuery from . import filters class DomainES(HQESQuery): index = 'domains' @property def builtin_filters(self): return [ real_domains, commcare_domains, commconnect_domains, commtrack_domains, created, ] + super(DomainES, self).builtin_filters def real_domains(): return filters.term("is_test", False) def commcare_domains(): return filters.AND(filters.term("commconnect_enabled", False), filters.term("commtrack_enabled", False)) def commconnect_domains(): return filters.term("commconnect_enabled", True) def commtrack_domains(): return filters.term("commtrack_enabled", True) def created(gt=None, gte=None, lt=None, lte=None): return filters.date_range('date_created', gt, gte, lt, lte)
91ff0fcb40d5d5318b71f0eb4b0873fb470265a0
migrations/versions/f0c9c797c230_populate_application_settings_with_.py
migrations/versions/f0c9c797c230_populate_application_settings_with_.py
# revision identifiers, used by Alembic. revision = 'f0c9c797c230' down_revision = '31850461ed3' from alembic import op import sqlalchemy as sa from puffin.core import docker, applications def upgrade(): running_applications = docker.get_all_running_applications() for running_application in running_applications: user = running_application[0] application = running_application[1] applications.set_application_started(user, application, True) def downgrade(): pass
# revision identifiers, used by Alembic. revision = 'f0c9c797c230' down_revision = '31850461ed3' from alembic import op import sqlalchemy as sa from puffin.core import docker, applications def upgrade(): running_applications = docker.get_all_running_applications() for a in running_applications: user = a[0] application = a[1] applications.set_application_started(user, application, True) def downgrade(): started_applications = applications.get_all_started_applications() for a in started_applications: user = a[0] application = a[1] applications.set_application_started(user, application, False)
Add downgrade started applications migration
Add downgrade started applications migration
Python
agpl-3.0
loomchild/puffin,loomchild/puffin,loomchild/puffin,puffinrocks/puffin,puffinrocks/puffin,loomchild/jenca-puffin,loomchild/puffin,loomchild/jenca-puffin,loomchild/puffin
# revision identifiers, used by Alembic. revision = 'f0c9c797c230' down_revision = '31850461ed3' from alembic import op import sqlalchemy as sa from puffin.core import docker, applications def upgrade(): running_applications = docker.get_all_running_applications() - for running_application in running_applications: - user = running_application[0] - application = running_application[1] + for a in running_applications: + user = a[0] + application = a[1] applications.set_application_started(user, application, True) def downgrade(): - pass + started_applications = applications.get_all_started_applications() + for a in started_applications: + user = a[0] + application = a[1] + applications.set_application_started(user, application, False)
Add downgrade started applications migration
## Code Before: # revision identifiers, used by Alembic. revision = 'f0c9c797c230' down_revision = '31850461ed3' from alembic import op import sqlalchemy as sa from puffin.core import docker, applications def upgrade(): running_applications = docker.get_all_running_applications() for running_application in running_applications: user = running_application[0] application = running_application[1] applications.set_application_started(user, application, True) def downgrade(): pass ## Instruction: Add downgrade started applications migration ## Code After: # revision identifiers, used by Alembic. revision = 'f0c9c797c230' down_revision = '31850461ed3' from alembic import op import sqlalchemy as sa from puffin.core import docker, applications def upgrade(): running_applications = docker.get_all_running_applications() for a in running_applications: user = a[0] application = a[1] applications.set_application_started(user, application, True) def downgrade(): started_applications = applications.get_all_started_applications() for a in started_applications: user = a[0] application = a[1] applications.set_application_started(user, application, False)
50ead4fe13eec7ad9760f0f577212beb8e8a51be
pombola/info/views.py
pombola/info/views.py
from django.views.generic import DetailView from models import InfoPage class InfoPageView(DetailView): """Show the page, or 'index' if no slug""" model = InfoPage
from django.views.generic import DetailView from models import InfoPage class InfoPageView(DetailView): """Show the page for the given slug""" model = InfoPage queryset = InfoPage.objects.filter(kind=InfoPage.KIND_PAGE)
Use a queryset to display only kind=page
Use a queryset to display only kind=page
Python
agpl-3.0
mysociety/pombola,mysociety/pombola,geoffkilpin/pombola,hzj123/56th,ken-muturi/pombola,mysociety/pombola,patricmutwiri/pombola,patricmutwiri/pombola,ken-muturi/pombola,ken-muturi/pombola,hzj123/56th,ken-muturi/pombola,ken-muturi/pombola,hzj123/56th,mysociety/pombola,hzj123/56th,geoffkilpin/pombola,geoffkilpin/pombola,hzj123/56th,ken-muturi/pombola,patricmutwiri/pombola,mysociety/pombola,geoffkilpin/pombola,patricmutwiri/pombola,patricmutwiri/pombola,mysociety/pombola,geoffkilpin/pombola,hzj123/56th,patricmutwiri/pombola,geoffkilpin/pombola
from django.views.generic import DetailView from models import InfoPage class InfoPageView(DetailView): - """Show the page, or 'index' if no slug""" + """Show the page for the given slug""" model = InfoPage + queryset = InfoPage.objects.filter(kind=InfoPage.KIND_PAGE)
Use a queryset to display only kind=page
## Code Before: from django.views.generic import DetailView from models import InfoPage class InfoPageView(DetailView): """Show the page, or 'index' if no slug""" model = InfoPage ## Instruction: Use a queryset to display only kind=page ## Code After: from django.views.generic import DetailView from models import InfoPage class InfoPageView(DetailView): """Show the page for the given slug""" model = InfoPage queryset = InfoPage.objects.filter(kind=InfoPage.KIND_PAGE)
411decbdb193b28bb3060e02e81bfa29483e85a9
staticgen_demo/blog/staticgen_views.py
staticgen_demo/blog/staticgen_views.py
from __future__ import unicode_literals from staticgen.staticgen_pool import staticgen_pool from staticgen.staticgen_views import StaticgenView from .models import Post class BlogPostListView(StaticgenView): is_paginated = True i18n = True def items(self): return ('blog:posts_list', ) def _get_paginator(self, url): response = self.client.get(url) print 'status_code: %s' % response.status_code if not response.status_code == 200: pass else: context = {} if hasattr(response, 'context_data'): context = response.context_data elif hasattr(response, 'context'): context = response.context print context try: return context['paginator'], context['is_paginated'] except KeyError: pass return None, False class BlogPostDetailView(StaticgenView): i18n = True def items(self): return Post.objects.all() staticgen_pool.register(BlogPostListView) staticgen_pool.register(BlogPostDetailView)
from __future__ import unicode_literals from staticgen.staticgen_pool import staticgen_pool from staticgen.staticgen_views import StaticgenView from .models import Post class BlogPostListView(StaticgenView): is_paginated = True i18n = True def items(self): return ('blog:posts_list', ) class BlogPostDetailView(StaticgenView): i18n = True def items(self): return Post.objects.all() staticgen_pool.register(BlogPostListView) staticgen_pool.register(BlogPostDetailView)
Remove debug code from staticgen views.
Remove debug code from staticgen views.
Python
bsd-3-clause
mishbahr/staticgen-demo,mishbahr/staticgen-demo,mishbahr/staticgen-demo
from __future__ import unicode_literals from staticgen.staticgen_pool import staticgen_pool from staticgen.staticgen_views import StaticgenView from .models import Post class BlogPostListView(StaticgenView): is_paginated = True i18n = True def items(self): return ('blog:posts_list', ) - def _get_paginator(self, url): - response = self.client.get(url) - print 'status_code: %s' % response.status_code - if not response.status_code == 200: - pass - else: - context = {} - if hasattr(response, 'context_data'): - context = response.context_data - elif hasattr(response, 'context'): - context = response.context - - print context - try: - return context['paginator'], context['is_paginated'] - except KeyError: - pass - return None, False - class BlogPostDetailView(StaticgenView): i18n = True def items(self): return Post.objects.all() staticgen_pool.register(BlogPostListView) staticgen_pool.register(BlogPostDetailView)
Remove debug code from staticgen views.
## Code Before: from __future__ import unicode_literals from staticgen.staticgen_pool import staticgen_pool from staticgen.staticgen_views import StaticgenView from .models import Post class BlogPostListView(StaticgenView): is_paginated = True i18n = True def items(self): return ('blog:posts_list', ) def _get_paginator(self, url): response = self.client.get(url) print 'status_code: %s' % response.status_code if not response.status_code == 200: pass else: context = {} if hasattr(response, 'context_data'): context = response.context_data elif hasattr(response, 'context'): context = response.context print context try: return context['paginator'], context['is_paginated'] except KeyError: pass return None, False class BlogPostDetailView(StaticgenView): i18n = True def items(self): return Post.objects.all() staticgen_pool.register(BlogPostListView) staticgen_pool.register(BlogPostDetailView) ## Instruction: Remove debug code from staticgen views. ## Code After: from __future__ import unicode_literals from staticgen.staticgen_pool import staticgen_pool from staticgen.staticgen_views import StaticgenView from .models import Post class BlogPostListView(StaticgenView): is_paginated = True i18n = True def items(self): return ('blog:posts_list', ) class BlogPostDetailView(StaticgenView): i18n = True def items(self): return Post.objects.all() staticgen_pool.register(BlogPostListView) staticgen_pool.register(BlogPostDetailView)
673d6cecfaeb0e919f30997f793ee2bb18e399ee
tempest/api_schema/response/compute/v2/hypervisors.py
tempest/api_schema/response/compute/v2/hypervisors.py
import copy from tempest.api_schema.response.compute import hypervisors hypervisors_servers = copy.deepcopy(hypervisors.common_hypervisors_detail) # Defining extra attributes for V3 show hypervisor schema hypervisors_servers['response_body']['properties']['hypervisors']['items'][ 'properties']['servers'] = { 'type': 'array', 'items': { 'type': 'object', 'properties': { # NOTE: Now the type of 'id' is integer, # but here allows 'string' also because we # will be able to change it to 'uuid' in # the future. 'id': {'type': ['integer', 'string']}, 'name': {'type': 'string'} } } } # In V2 API, if there is no servers (VM) on the Hypervisor host then 'servers' # attribute will not be present in response body So it is not 'required'.
import copy from tempest.api_schema.response.compute import hypervisors hypervisors_servers = copy.deepcopy(hypervisors.common_hypervisors_detail) # Defining extra attributes for V3 show hypervisor schema hypervisors_servers['response_body']['properties']['hypervisors']['items'][ 'properties']['servers'] = { 'type': 'array', 'items': { 'type': 'object', 'properties': { 'uuid': {'type': 'string'}, 'name': {'type': 'string'} } } } # In V2 API, if there is no servers (VM) on the Hypervisor host then 'servers' # attribute will not be present in response body So it is not 'required'.
Fix V2 hypervisor server schema attribute
Fix V2 hypervisor server schema attribute Nova v2 hypervisor server API return attribute "uuid" in response's server dict. Current response schema does not have this attribute instead it contain "id" which is wrong. This patch fix the above issue. NOTE- "uuid" attribute in this API response is always a uuid. Change-Id: I78c67834de930012b70874938f345524d69264ba
Python
apache-2.0
jaspreetw/tempest,openstack/tempest,Vaidyanath/tempest,vedujoshi/tempest,NexusIS/tempest,FujitsuEnablingSoftwareTechnologyGmbH/tempest,tonyli71/tempest,hayderimran7/tempest,xbezdick/tempest,akash1808/tempest,roopali8/tempest,tudorvio/tempest,alinbalutoiu/tempest,flyingfish007/tempest,manasi24/jiocloud-tempest-qatempest,flyingfish007/tempest,izadorozhna/tempest,afaheem88/tempest_neutron,queria/my-tempest,pczerkas/tempest,afaheem88/tempest,FujitsuEnablingSoftwareTechnologyGmbH/tempest,yamt/tempest,sebrandon1/tempest,bigswitch/tempest,masayukig/tempest,Tesora/tesora-tempest,manasi24/jiocloud-tempest-qatempest,hpcloud-mon/tempest,bigswitch/tempest,ebagdasa/tempest,openstack/tempest,neerja28/Tempest,izadorozhna/tempest,Tesora/tesora-tempest,NexusIS/tempest,jamielennox/tempest,eggmaster/tempest,roopali8/tempest,rzarzynski/tempest,yamt/tempest,queria/my-tempest,rzarzynski/tempest,vedujoshi/tempest,manasi24/tempest,redhat-cip/tempest,Juniper/tempest,varunarya10/tempest,redhat-cip/tempest,hpcloud-mon/tempest,rakeshmi/tempest,masayukig/tempest,JioCloud/tempest,Juniper/tempest,Juraci/tempest,cisco-openstack/tempest,dkalashnik/tempest,LIS/lis-tempest,rakeshmi/tempest,CiscoSystems/tempest,dkalashnik/tempest,nunogt/tempest,Lilywei123/tempest,tudorvio/tempest,tonyli71/tempest,pandeyop/tempest,danielmellado/tempest,neerja28/Tempest,Juraci/tempest,LIS/lis-tempest,JioCloud/tempest,danielmellado/tempest,zsoltdudas/lis-tempest,pczerkas/tempest,zsoltdudas/lis-tempest,eggmaster/tempest,manasi24/tempest,jamielennox/tempest,sebrandon1/tempest,afaheem88/tempest,varunarya10/tempest,afaheem88/tempest_neutron,Lilywei123/tempest,cisco-openstack/tempest,nunogt/tempest,pandeyop/tempest,hayderimran7/tempest,Vaidyanath/tempest,alinbalutoiu/tempest,ebagdasa/tempest,akash1808/tempest,xbezdick/tempest,jaspreetw/tempest,CiscoSystems/tempest
import copy from tempest.api_schema.response.compute import hypervisors hypervisors_servers = copy.deepcopy(hypervisors.common_hypervisors_detail) # Defining extra attributes for V3 show hypervisor schema hypervisors_servers['response_body']['properties']['hypervisors']['items'][ 'properties']['servers'] = { 'type': 'array', 'items': { 'type': 'object', 'properties': { - # NOTE: Now the type of 'id' is integer, - # but here allows 'string' also because we - # will be able to change it to 'uuid' in - # the future. - 'id': {'type': ['integer', 'string']}, + 'uuid': {'type': 'string'}, 'name': {'type': 'string'} } } } # In V2 API, if there is no servers (VM) on the Hypervisor host then 'servers' # attribute will not be present in response body So it is not 'required'.
Fix V2 hypervisor server schema attribute
## Code Before: import copy from tempest.api_schema.response.compute import hypervisors hypervisors_servers = copy.deepcopy(hypervisors.common_hypervisors_detail) # Defining extra attributes for V3 show hypervisor schema hypervisors_servers['response_body']['properties']['hypervisors']['items'][ 'properties']['servers'] = { 'type': 'array', 'items': { 'type': 'object', 'properties': { # NOTE: Now the type of 'id' is integer, # but here allows 'string' also because we # will be able to change it to 'uuid' in # the future. 'id': {'type': ['integer', 'string']}, 'name': {'type': 'string'} } } } # In V2 API, if there is no servers (VM) on the Hypervisor host then 'servers' # attribute will not be present in response body So it is not 'required'. ## Instruction: Fix V2 hypervisor server schema attribute ## Code After: import copy from tempest.api_schema.response.compute import hypervisors hypervisors_servers = copy.deepcopy(hypervisors.common_hypervisors_detail) # Defining extra attributes for V3 show hypervisor schema hypervisors_servers['response_body']['properties']['hypervisors']['items'][ 'properties']['servers'] = { 'type': 'array', 'items': { 'type': 'object', 'properties': { 'uuid': {'type': 'string'}, 'name': {'type': 'string'} } } } # In V2 API, if there is no servers (VM) on the Hypervisor host then 'servers' # attribute will not be present in response body So it is not 'required'.