input | instruction | output
---|---|---|
from django import forms
from .models import Department, Province, District
class DepartmentForm(forms.Form):
department = forms.ModelChoiceField(
queryset=Department.objects
)
class ProvinceForm(DepartmentForm):
province = forms.ModelChoiceField(
queryset=Province.objects.none()
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.is_bound:
department = self._get_field_value('department')
if department:
self.fields['province'].queryset = Province.objects.filter(
parent=department
)
def _get_field_value(self, name):
field = self.fields[name]
value = field.widget.value_from_datadict(
self.data,
self.files,
self.add_prefix(name)
)
try:
return field.clean(value)
except:
return None
class DistrictForm(ProvinceForm):
district = forms.ModelChoiceField(
queryset=District.objects.none()
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.is_bound:
province = self._get_field_value('province')
if province:
self.fields['district'].queryset = District.objects.filter(
parent=province
)
UbigeoForm = DistrictForm
| Add "all" to the queryset in DepartmentForm
| from django import forms
from .models import Department, Province, District
class DepartmentForm(forms.Form):
department = forms.ModelChoiceField(
queryset=Department.objects.all()
)
class ProvinceForm(DepartmentForm):
province = forms.ModelChoiceField(
queryset=Province.objects.none()
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.is_bound:
department = self._get_field_value('department')
if department:
self.fields['province'].queryset = Province.objects.filter(
parent=department
)
def _get_field_value(self, name):
field = self.fields[name]
value = field.widget.value_from_datadict(
self.data,
self.files,
self.add_prefix(name)
)
try:
return field.clean(value)
except:
return None
class DistrictForm(ProvinceForm):
district = forms.ModelChoiceField(
queryset=District.objects.none()
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.is_bound:
province = self._get_field_value('province')
if province:
self.fields['district'].queryset = District.objects.filter(
parent=province
)
UbigeoForm = DistrictForm
|
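The chained forms in the row above rely on each request re-binding the submitted parent value, so ProvinceForm and DistrictForm can narrow their querysets in __init__ before validation. A minimal sketch of how such a form is typically wired into a view, assuming a plain Django function view; the import path, view name, and template name are hypothetical:
# Hypothetical view wiring for the chained UbigeoForm above (sketch only).
from django.shortcuts import render

from .forms import UbigeoForm  # illustrative import path


def select_ubigeo(request):
    # Binding request.GET (or POST) is what lets the subclasses call
    # _get_field_value() on the submitted department/province and replace
    # the empty querysets before the form is rendered or validated.
    form = UbigeoForm(request.GET or None)
    if form.is_bound and form.is_valid():
        district = form.cleaned_data['district']
        # ... do something with the selected district ...
    return render(request, 'ubigeo.html', {'form': form})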
from werkzeug import routing
from werkzeug import serving
from werkzeug import wrappers
from . import routes
class Steinie(routes.Router):
def __init__(self, host="127.0.0.1", port=5151, debug=False):
self.host = host
self.port = port
self.debug = debug
super(Steinie, self).__init__()
def __call__(self, environ, start_response):
return self.wsgi_app(environ, start_response)
def wsgi_app(self, environ, start_response):
request = wrappers.Request(environ)
response = self.handle(request)
return wrappers.Response(response)(environ, start_response)
def run(self):
serving.run_simple(self.host, self.port, self, use_debugger=self.debug)
def use(self, route, router):
# if not route.endswith('/'):
# route += '/'
if route.startswith('/'):
route = route[1:]
submount = route
if not submount.startswith('/'):
submount = '/' + submount
rules = [a for a in router.map.iter_rules()]
mount = routing.EndpointPrefix(route, [routes.Submount(submount, rules)])
self.map.add(mount)
# import ipdb; ipdb.set_trace()
| Remove some commented out code
| from werkzeug import routing
from werkzeug import serving
from werkzeug import wrappers
from . import routes
class Steinie(routes.Router):
def __init__(self, host="127.0.0.1", port=5151, debug=False):
self.host = host
self.port = port
self.debug = debug
super(Steinie, self).__init__()
def __call__(self, environ, start_response):
return self.wsgi_app(environ, start_response)
def wsgi_app(self, environ, start_response):
request = wrappers.Request(environ)
response = self.handle(request)
return wrappers.Response(response)(environ, start_response)
def run(self):
serving.run_simple(self.host, self.port, self, use_debugger=self.debug)
def use(self, route, router):
if route.startswith('/'):
route = route[1:]
submount = route
if not submount.startswith('/'):
submount = '/' + submount
rules = [a for a in router.map.iter_rules()]
mount = routing.EndpointPrefix(route, [routes.Submount(submount, rules)])
self.map.add(mount)
|
# -*- coding: utf-8 -*-
import pytest
from ..hmap import HashMap
@pytest.fixture(params=[0,1,5,9])
def n(request):
"""Number of items"""
return request.param
@pytest.fixture
def items(n):
"""A list of strings."""
return ["item{0:d}".format(i) for i in range(n)]
@pytest.mark.skip
def test_hmap(items):
"""docstring for test"""
h = HashMap(10)
if len(items):
with pytest.raises(KeyError):
h[items[0]]
for item in items:
h.add(item)
assert len(h) == len(items)
for i, item in enumerate(items):
assert h[item] == i
assert repr(h) == "HashMap({0!r})".format(items)
if len(items):
item = items[0]
del h[item]
assert len(h) == len(items) - 1
assert item not in h
| Remove unused tests for hash map
| # -*- coding: utf-8 -*-
import pytest
from ..hmap import HashMap
@pytest.fixture(params=[0,1,5,9])
def n(request):
"""Number of items"""
return request.param
@pytest.fixture
def items(n):
"""A list of strings."""
return ["item{0:d}".format(i) for i in range(n)]
|
# Copyright (c) 2013 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from hy.models import HyObject
import sys
if sys.version_info[0] >= 3:
str_type = str
else:
str_type = unicode
class HyString(HyObject, str_type):
"""
Generic Hy String object. Helpful to store string literals from Hy
scripts. It's either a ``str`` or a ``unicode``, depending on the
Python version.
"""
def __new__(cls, value):
obj = str_type.__new__(cls, value)
return obj
| Revert "Revert "Remove useless code""
This reverts commit 262da59c7790cdadd60ea9612bc9e3c1616863fd.
Conflicts:
hy/models/string.py
| # Copyright (c) 2013 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from hy.models import HyObject
import sys
if sys.version_info[0] >= 3:
str_type = str
else:
str_type = unicode
class HyString(HyObject, str_type):
"""
Generic Hy String object. Helpful to store string literals from Hy
scripts. It's either a ``str`` or a ``unicode``, depending on the
Python version.
"""
pass
|
from setuptools import setup, find_packages
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
'interactive': ['jsonpatch@https://github.com/cjh1/python-json-patch/archive/tomviz.zip', 'marshmallow'],
'itk': ['itk'],
'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
| Fix flake8 line length issue
Signed-off-by: Chris Harris <a361e89d1eba6c570561222d75facbbf7aaeeafe@kitware.com>
| from setuptools import setup, find_packages
jsonpatch_uri \
= 'jsonpatch@https://github.com/cjh1/python-json-patch/archive/tomviz.zip'
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
'interactive': [
jsonpatch_uri, 'marshmallow'],
'itk': ['itk'],
'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
|
import unittest
from calexicon.internal.julian import distant_julian_to_gregorian
class TestJulian(unittest.TestCase):
def test_distant_julian_to_gregorian(self):
self.assertEqual(distant_julian_to_gregorian(9999, 12, 1), (10000, 2, 12))
| Add a test for julian_to_gregorian.
| import unittest
from calexicon.internal.julian import distant_julian_to_gregorian, julian_to_gregorian
class TestJulian(unittest.TestCase):
def test_distant_julian_to_gregorian(self):
self.assertEqual(distant_julian_to_gregorian(9999, 12, 1), (10000, 2, 12))
def test_julian_to_gregorian(self):
self.assertEqual(julian_to_gregorian(1984, 2, 29), (1984, 3, 13))
|
"""
Util classes
------------
Classes which represent data types useful for the package pySpatialTools.
"""
from spdesc_mapper import Sp_DescriptorMapper
from spatialelements import SpatialElementsCollection, Locations
from Membership import Membership
from general_mapper import General1_1Mapper
from mapper_vals_i import Map_Vals_i, create_mapper_vals_i
| Fix import of a deleted module.
|
"""
Util classes
------------
Classes which represent data types useful for the package pySpatialTools.
"""
from spdesc_mapper import Sp_DescriptorMapper
from spatialelements import SpatialElementsCollection, Locations
from Membership import Membership
from mapper_vals_i import Map_Vals_i, create_mapper_vals_i
|
import os, sys, subprocess, shutil
def check_for_module_builder():
if os.path.exists("voxel_native/scripts/"):
return
print("Downloading P3DModuleBuilder...")
cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
try:
output = subprocess.check_output(cmd, stderr=sys.stderr)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Couldn't download P3DModuleBuilder.")
sys.exit(-1)
def build_modules():
print("Building native modules...")
check_for_module_builder()
cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
try:
output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Error building the native modules.")
sys.exit(-1)
shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
if __name__ == "__main__":
build_modules()
| Update build script to work correctly on macOS and linux.
| import os, sys, subprocess, shutil
def check_for_module_builder():
if os.path.exists("voxel_native/scripts/"):
return
print("Downloading P3DModuleBuilder...")
cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
try:
output = subprocess.check_output(cmd, stderr=sys.stderr)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Couldn't download P3DModuleBuilder.")
sys.exit(-1)
def build_modules():
print("Building native modules...")
check_for_module_builder()
cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
try:
output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Error building the native modules.")
sys.exit(-1)
from voxel_native.scripts.common import is_macos, is_windows, is_linux
if is_windows():
shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
elif is_macos() or is_linux():
shutil.move("voxel_native/voxel_native.so", "voxel/voxel_native.so")
if __name__ == "__main__":
build_modules()
|
from uuid import uuid1
from django.conf import settings
from django.contrib.auth import models as auth_models
from django.db import models
import facepy
import simplejson
from facebook_auth import utils
class FacebookUser(auth_models.User):
user_id = models.BigIntegerField(unique=True)
access_token = models.TextField(blank=True, null=True)
app_friends = models.ManyToManyField('self')
@property
def graph(self):
return facepy.GraphAPI(self.access_token)
@property
def js_session(self):
return simplejson.dumps({
'access_token': self.access_token,
'uid': self.user_id
})
@property
def friends(self):
return utils.get_from_graph_api(self.graph, "me/friends")['data']
def update_app_friends(self):
friends = self.friends
friends_ids = [f['id'] for f in friends]
self.app_friends.clear()
self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids))
def get_auth_address(request, redirect_to, scope=''):
state = unicode(uuid1())
request.session['state'] = state
return 'https://www.facebook.com/dialog/oauth?client_id=%s&redirect_uri=%s&scope=%s&state=%s' % (
settings.FACEBOOK_APP_ID, redirect_to, scope, state
)
| Revert "Add support for server side authentication."
This reverts commit 10ae930f6f14c2840d0b87cbec17054b4cc318d2.
Change-Id: Ied52c31f6f28ad635a6e5dae2171df22dc91e42c
Reviewed-on: http://review.pozytywnie.pl:8080/5153
Reviewed-by: Tomasz Wysocki <f17c2dc043305c950a029074ababd304e0373387@gmail.com>
Tested-by: Tomasz Wysocki <f17c2dc043305c950a029074ababd304e0373387@gmail.com>
| from django.contrib.auth import models as auth_models
from django.db import models
import facepy
import simplejson
from facebook_auth import utils
class FacebookUser(auth_models.User):
user_id = models.BigIntegerField(unique=True)
access_token = models.TextField(blank=True, null=True)
app_friends = models.ManyToManyField('self')
@property
def graph(self):
return facepy.GraphAPI(self.access_token)
@property
def js_session(self):
return simplejson.dumps({
'access_token': self.access_token,
'uid': self.user_id
})
@property
def friends(self):
return utils.get_from_graph_api(self.graph, "me/friends")['data']
def update_app_friends(self):
friends = self.friends
friends_ids = [f['id'] for f in friends]
self.app_friends.clear()
self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids))
|
# coding=utf-8
import django
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation as OriginalDatabaseCreation
class DatabaseCreationMixin16(object):
def _create_test_db(self, verbosity, autoclobber):
self.connection.closeall()
return super(DatabaseCreationMixin16, self)._create_test_db(verbosity, autoclobber)
def _destroy_test_db(self, test_database_name, verbosity):
self.connection.closeall()
return super(DatabaseCreationMixin16, self)._destroy_test_db(test_database_name, verbosity)
class DatabaseCreationMixin17(object):
def _create_test_db(self, verbosity, autoclobber, keepdb=False):
self.connection.closeall()
return super(DatabaseCreationMixin17, self)._create_test_db(verbosity, autoclobber, keepdb)
def _destroy_test_db(self, test_database_name, verbosity, keepdb=False):
self.connection.closeall()
return super(DatabaseCreationMixin17, self)._destroy_test_db(test_database_name, verbosity, keepdb)
if django.VERSION >= (1, 7):
class DatabaseCreationMixin(DatabaseCreationMixin17):
pass
else:
class DatabaseCreationMixin(DatabaseCreationMixin16):
pass
class DatabaseCreation(DatabaseCreationMixin, OriginalDatabaseCreation):
pass
| Fix DatabaseCreation from django 1.7
| # coding=utf-8
import django
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation as OriginalDatabaseCreation
class DatabaseCreationMixin16(object):
def _create_test_db(self, verbosity, autoclobber):
self.connection.closeall()
return super(DatabaseCreationMixin16, self)._create_test_db(verbosity, autoclobber)
def _destroy_test_db(self, test_database_name, verbosity):
self.connection.closeall()
return super(DatabaseCreationMixin16, self)._destroy_test_db(test_database_name, verbosity)
class DatabaseCreationMixin17(object):
def _create_test_db(self, verbosity, autoclobber):
self.connection.closeall()
return super(DatabaseCreationMixin17, self)._create_test_db(verbosity, autoclobber)
def _destroy_test_db(self, test_database_name, verbosity):
self.connection.closeall()
return super(DatabaseCreationMixin17, self)._destroy_test_db(test_database_name, verbosity)
if django.VERSION >= (1, 7):
class DatabaseCreationMixin(DatabaseCreationMixin17):
pass
else:
class DatabaseCreationMixin(DatabaseCreationMixin16):
pass
class DatabaseCreation(DatabaseCreationMixin, OriginalDatabaseCreation):
pass
|
# coding: utf-8
"""
righteous.config
Settings object, lifted from https://github.com/kennethreitz/requests
"""
from requests.config import Settings
class RighteousSettings(Settings):
pass
settings = RighteousSettings()
settings.debug = False
settings.cookies = None
settings.username = None
settings.password = None
settings.account_id = None
| Copy the settings class from an old requests version
| # coding: utf-8
"""
righteous.config
Settings object, lifted from https://github.com/kennethreitz/requests
"""
class Settings(object):
_singleton = {}
# attributes with defaults
__attrs__ = []
def __init__(self, **kwargs):
super(Settings, self).__init__()
self.__dict__ = self._singleton
def __call__(self, *args, **kwargs):
# new instance of class to call
r = self.__class__()
# cache previous settings for __exit__
r.__cache = self.__dict__.copy()
map(self.__cache.setdefault, self.__attrs__)
# set new settings
self.__dict__.update(*args, **kwargs)
return r
def __enter__(self):
pass
def __exit__(self, *args):
# restore cached copy
self.__dict__.update(self.__cache.copy())
del self.__cache
def __getattribute__(self, key):
if key in object.__getattribute__(self, '__attrs__'):
try:
return object.__getattribute__(self, key)
except AttributeError:
return None
return object.__getattribute__(self, key)
settings = Settings()
settings.debug = False
settings.cookies = None
settings.username = None
settings.password = None
settings.account_id = None
|
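The Settings class in the row above keeps its state in a shared class-level dict (a borg-style singleton) and doubles as a context manager: calling it with overrides returns a handle whose __exit__ restores the cached previous values. A minimal usage sketch under that reading of the code; only settings itself comes from the row, the rest is illustrative:
# Sketch: temporarily overriding a value on the borg-style settings object.
settings.debug = False            # module-level default, shared by every instance

with settings(debug=True):        # __call__ caches the current dict, then applies overrides
    assert settings.debug is True
    # ... code that should see debug enabled ...

assert settings.debug is False    # __exit__ restored the cached copy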
# Needed to allow import
#
# Copyright (C) 2006 British Broadcasting Corporation and Kamaelia Contributors(1)
# All Rights Reserved.
#
# You may only modify and redistribute this under the terms of any of the
# following licenses(2): Mozilla Public License, V1.1, GNU General
# Public License, V2.0, GNU Lesser General Public License, V2.1
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://kamaelia.sourceforge.net/AUTHORS - please extend this file,
# not this notice.
# (2) Reproduced in the COPYING file, and at:
# http://kamaelia.sourceforge.net/COPYING
# Under section 3.5 of the MPL, we are using this text since we deem the MPL
# notice inappropriate for this file. As per MPL/GPL/LGPL removal of this
# notice is prohibited.
#
# Please contact us via: kamaelia-list-owner@lists.sourceforge.net
# to discuss alternative licensing.
# -------------------------------------------------------------------------
"""
This is a doc string, will it be of use?
"""
# RELEASE: MH, MPS
| Change license to Apache 2
| # -*- coding: utf-8 -*-
# Needed to allow import
#
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
"""
This is a doc string, will it be of use?
"""
# RELEASE: MH, MPS
|
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('debugger.views',
url(r'^$', 'index', name='index'),
url(r'^settings$', 'settings', name='settings'),
url(r'^scenario/(?P<scenario_id>[^/]+)$', 'show_scenario', name='scenario'),
url(r'^resource/(?P<package>[^/]+)/(?P<resource>.*)/?', 'package_resource', name='package_resource'),
url(r'^handler/(?P<usage_id>[^/]+)/(?P<handler>[^/]*)', 'handler', name='handler'),
# Examples:
# url(r'^$', 'debugger.views.home', name='home'),
# url(r'^debugger/', include('debugger.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
| Fix trailing slashes in URLconfs
| from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('debugger.views',
url(r'^$', 'index', name='index'),
url(r'^settings/$', 'settings', name='settings'),
url(r'^scenario/(?P<scenario_id>[^/]+)/$', 'show_scenario', name='scenario'),
url(r'^resource/(?P<package>[^/]+)/(?P<resource>[^/]*)/$', 'package_resource', name='package_resource'),
url(r'^handler/(?P<usage_id>[^/]+)/(?P<handler>[^/]*)/$', 'handler', name='handler'),
# Examples:
# url(r'^$', 'debugger.views.home', name='home'),
# url(r'^debugger/', include('debugger.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
|
import os
sender = str(raw_input("Your Username: "))
target = str(raw_input("Target's Username: "))
message = str(raw_input("Message: "))
#Messages are encoded like so "senderProgramVx.x##target##sender##message"
#Example: "linuxV1.8##person87##NickGeek##Hey mate! What do you think of this WiN thing?"
formattedMessage = "linuxVpre.release##"+target+"##"+sender+"##"+message
#Write to file
messageFile = open('msg.txt', 'w+')
messageFile.write(formattedMessage)
messageFile.close()
os.system("python server.py") | Store your username in a file
| import os
if os.path.exists("account.conf") is False:
sender = str(raw_input("Your Username: "))
accountFile = open('account.conf', 'w+')
accountFile.write(sender)
accountFile.close()
else:
accountFile = open('account.conf', 'r')
sender = accountFile.read()
accountFile.close()
target = str(raw_input("Target's Username: "))
message = str(raw_input("Message: "))
#Messages are encoded like so "senderProgramVx.x##target##sender##message"
#Example: "linuxV1.8##person87##NickGeek##Hey mate! What do you think of this WiN thing?"
formattedMessage = "linuxVpre.release##"+target+"##"+sender+"##"+message
#Write to file
messageFile = open('msg.txt', 'w+')
messageFile.write(formattedMessage)
messageFile.close()
os.system("python server.py") |
class CommandsError(Exception):
pass
class CheckFailureError(Exception):
def __init__(self, ctx, check):
self.ctx = ctx
self.check = check
def __repr__(self):
if isinstance(self.check, list):
return "The checks for {.name} failed.".format(self.ctx)
return "The check {.__name__} for {.name} failed.".format(self.check, self.ctx)
__str__ = __repr__
class MissingArgumentError(Exception):
def __init__(self, ctx, arg):
self.ctx = ctx
self.arg = arg
def __repr__(self):
return "Missing required argument {} in {.name}.".format(self.arg, self.ctx)
__str__ = __repr__
class CommandInvokeError(Exception):
def __init__(self, ctx):
self.ctx = ctx
def __repr__(self):
return "Command {.name} failed to invoke with error {}".format(self.ctx, self.__cause__)
__str__ = __repr__
class ConversionFailedError(Exception):
def __init__(self, ctx, arg: str, to_type: type):
self.ctx = ctx
self.arg = arg
self.to_type = to_type
def __repr__(self):
return "Cannot convert {} to type {.__name__}".format(self.arg, self.to_type)
__str__ = __repr__
| Add better __repr__s for commands errors.
| class CommandsError(Exception):
pass
class CheckFailureError(Exception):
def __init__(self, ctx, check):
self.ctx = ctx
self.check = check
def __repr__(self):
if isinstance(self.check, list):
return "The checks for `{.name}` failed.".format(self.ctx)
return "The check `{.__name__}` for `{.name}` failed.".format(self.check, self.ctx)
__str__ = __repr__
class MissingArgumentError(Exception):
def __init__(self, ctx, arg):
self.ctx = ctx
self.arg = arg
def __repr__(self):
return "Missing required argument `{}` in `{.name}`.".format(self.arg, self.ctx)
__str__ = __repr__
class CommandInvokeError(Exception):
def __init__(self, ctx):
self.ctx = ctx
def __repr__(self):
return "Command {.name} failed to invoke with error `{}`.".format(self.ctx, self.__cause__)
__str__ = __repr__
class ConversionFailedError(Exception):
def __init__(self, ctx, arg: str, to_type: type):
self.ctx = ctx
self.arg = arg
self.to_type = to_type
def __repr__(self):
return "Cannot convert `{}` to type `{.__name__}`.".format(self.arg, self.to_type)
__str__ = __repr__
|
# -------------------------------------------------------
import socket, traceback
import time
import json
host = ''
port = 2390
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
s.bind((host, port))
filein = open('saveUDP.txt', 'w')
t0 = time.time()
while time.time()-t0 < 200:
try:
message, address = s.recvfrom(4096)
print(message)
json.loads(message.decode("utf-8"))
filein.write('%s\n' % (message))
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
filein.close()
# -------------------------------------------------------
| Add computations of great roll, pitch and small yaw angle (kite angles)
| # -------------------------------------------------------
import socket, traceback
import time
import json
import numpy as np
from scipy.spatial.transform import Rotation as R
host = ''
port = 2390
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
s.bind((host, port))
filein = open('saveUDP.txt', 'w')
t0 = time.time()
# Place IMU x-axis into wind going direction when launching script
is_init_done = False
wind_yaw = 0
while time.time()-t0 < 200:
try:
message, address = s.recvfrom(4096)
#print(message)
msg = json.loads(message.decode("utf-8"))
if is_init_done==False:
wind_yaw = msg["Yaw"]
is_init_done = True
msg['Yaw'] = msg['Yaw']-wind_yaw
print(msg)
ypr = [msg['Yaw'], msg['Pitch'], msg['Roll']]
seq = 'ZYX' # small letters from intrinsic rotations
r = R.from_euler(seq, ypr, degrees=True)
# Compute coordinates in NED (could be useful to compare position with GPS position for example)
line_length = 10
base_to_kite = [0, 0, line_length]
base_to_kite_in_NED = r.apply(base_to_kite)
# Express kite coordinates as great roll, great pitch and small yaw angles
grpy=r.as_euler(seq="XYZ")
print(grpy*180/np.pi)
filein.write('%s\n' % (message))
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
filein.close()
# -------------------------------------------------------
|
#!/usr/bin/python3
import sys
if not sys.version_info >= (3, 5):
sys.stderr.write("Pegasus requires Python 3.5 or above\n")
sys.exit(1)
try:
pass
except:
sys.stderr.write("Pegasus requires the Python3 YAML module to be installed\n")
sys.exit(1)
| Add noqa comment so unused import does not get removed by code lint steps
| #!/usr/bin/python3
import sys
if not sys.version_info >= (3, 5):
sys.stderr.write("Pegasus requires Python 3.5 or above\n")
sys.exit(1)
try:
import yaml # noqa
except:
sys.stderr.write("Pegasus requires the Python3 YAML module to be installed\n")
sys.exit(1)
|
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from unittest import TestCase
from traitlets import TraitError
from ipywidgets import FileUpload
class TestFileUpload(TestCase):
def test_construction(self):
uploader = FileUpload()
# Default
assert uploader.accept == ''
assert not uploader.multiple
assert not uploader.disabled
def test_construction_with_params(self):
uploader = FileUpload(
accept='.txt', multiple=True, disabled=True)
assert uploader.accept == '.txt'
assert uploader.multiple
assert uploader.disabled
def test_empty_initial_value(self):
uploader = FileUpload()
assert uploader.value == []
| Test deserialization of comm message following upload
| # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from unittest import TestCase
from traitlets import TraitError
from ipywidgets import FileUpload
class TestFileUpload(TestCase):
def test_construction(self):
uploader = FileUpload()
# Default
assert uploader.accept == ''
assert not uploader.multiple
assert not uploader.disabled
def test_construction_with_params(self):
uploader = FileUpload(
accept='.txt', multiple=True, disabled=True)
assert uploader.accept == '.txt'
assert uploader.multiple
assert uploader.disabled
def test_empty_initial_value(self):
uploader = FileUpload()
assert uploader.value == []
def test_receive_single_file(self):
uploader = FileUpload()
content = memoryview(b"file content")
message = {
"value": [
{
"name": "file-name.txt",
"type": "text/plain",
"size": 20760,
"lastModified": 1578578296434,
"error": "",
"content": content,
}
]
}
uploader.set_state(message)
assert len(uploader.value) == 1
[uploaded_file] = uploader.value
assert uploaded_file.name == "file-name.txt"
assert uploaded_file.type == "text/plain"
assert uploaded_file.size == 20760
assert uploaded_file.content.tobytes() == b"file content"
|
from tagging.models import Tag, TaggedItem
from django.contrib.contenttypes.models import ContentType
from auxiliary.models import TagSuggestion
from django.db import IntegrityError
def approve(admin, request, tag_suggestions):
for tag_suggestion in tag_suggestions:
object = tag_suggestion.object
try:
tag = Tag.objects.create(name=tag_suggestion.name)
TaggedItem.objects.create(tag=tag, object=object)
except IntegrityError as e:
if str(e) != 'column name is not unique':
raise
tag_suggestion.delete()
| Make tag_suggestions test less flaky
Failed on Python 2.7.6 as it was dependent on the exact error string returned by the database
| from tagging.models import Tag, TaggedItem
from django.contrib.contenttypes.models import ContentType
def approve(admin, request, tag_suggestions):
for tag_suggestion in tag_suggestions:
obj = tag_suggestion.object
ct = ContentType.objects.get_for_model(obj)
tag, t_created = Tag.objects.get_or_create(name=tag_suggestion.name)
ti, ti_created = TaggedItem.objects.get_or_create(
tag=tag, object_id=obj.pk, content_type=ct)
tag_suggestion.delete()
|
#
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
__all__ = ["parse"]
from jasy.env.State import session
from jasy.core import Console
from bs4 import BeautifulSoup
def parse(filename):
""" HTML parser class for Konstrukteur """
page = {}
parsedContent = BeautifulSoup(open(filename, "rt").read())
body = parsedContent.find("body")
page["content"] = "".join([str(tag) for tag in body.contents])
page["title"] = parsedContent.title.string
page["summary"] = body.p.get_text()
for meta in parsedContent.find_all("meta"):
page[meta["name"].lower()] = meta["contents"]
return page
| Add detection of wrong meta data
| #
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
__all__ = ["parse"]
from jasy.env.State import session
from jasy.core import Console
from bs4 import BeautifulSoup
def parse(filename):
""" HTML parser class for Konstrukteur """
page = {}
parsedContent = BeautifulSoup(open(filename, "rt").read())
body = parsedContent.find("body")
page["content"] = "".join([str(tag) for tag in body.contents])
page["title"] = parsedContent.title.string
page["summary"] = body.p.get_text()
for meta in parsedContent.find_all("meta"):
if not hasattr(meta, "name") or not hasattr(meta, "content"):
raise RuntimeError("Meta elements must have attributes name and content : %s" % filename)
page[meta["name"].lower()] = meta["content"]
return page
|
from defprogramming.settings import *
ALLOWED_HOSTS = ['*']
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# Update database configuration with $DATABASE_URL.
import dj_database_url
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# MIDDLEWARE_CLASSES += ('sslify.middleware.SSLifyMiddleware',)
PREPEND_WWW = True
| Add secret key env var
| import os
from defprogramming.settings import *
ALLOWED_HOSTS = ['*']
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# Update database configuration with $DATABASE_URL.
import dj_database_url
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# MIDDLEWARE_CLASSES += ('sslify.middleware.SSLifyMiddleware',)
PREPEND_WWW = True
SECRET_KEY = os.environ['SECRET_KEY']
|
import pagoda.cooper
class Base(object):
def setUp(self):
self.world = pagoda.cooper.World()
class TestMarkers(Base):
def setUp(self):
super(TestMarkers, self).setUp()
self.markers = pagoda.cooper.Markers(self.world)
def test_c3d(self):
self.markers.load_c3d('examples/cooper-motion.c3d')
assert self.markers.num_frames == 343
assert len(self.markers.marker_bodies) == 41
assert len(self.markers.attach_bodies) == 0
assert len(self.markers.attach_offsets) == 0
assert len(self.markers.channels) == 41
def test_csv(self):
return # TODO
self.markers.load_csv('examples/cooper-motion.csv')
assert self.markers.num_frames == 343
assert len(self.markers.marker_bodies) == 41
assert len(self.markers.attach_bodies) == 0
assert len(self.markers.attach_offsets) == 0
assert len(self.markers.channels) == 41
| Fix style in cooper test.
| import pagoda.cooper
class Base(object):
def setUp(self):
self.world = pagoda.cooper.World()
class TestMarkers(Base):
def setUp(self):
super(TestMarkers, self).setUp()
self.markers = pagoda.cooper.Markers(self.world)
def test_c3d(self):
self.markers.load_c3d('examples/cooper-motion.c3d')
assert self.markers.num_frames == 343
assert len(self.markers.marker_bodies) == 41
assert len(self.markers.attach_bodies) == 0
assert len(self.markers.attach_offsets) == 0
assert len(self.markers.channels) == 41
def test_csv(self):
return # TODO
self.markers.load_csv('examples/cooper-motion.csv')
assert self.markers.num_frames == 343
assert len(self.markers.marker_bodies) == 41
assert len(self.markers.attach_bodies) == 0
assert len(self.markers.attach_offsets) == 0
assert len(self.markers.channels) == 41
|
SYNTAX_BUILDABLE_EXPRESSIBLE_BY_CONFORMANCES = {
'ExpressibleByConditionElement': [
'ExpressibleByConditionElementList'
],
'ExpressibleByDeclBuildable': [
'ExpressibleByCodeBlockItem',
'ExpressibleByMemberDeclListItem',
'ExpressibleBySyntaxBuildable'
],
'ExpressibleByStmtBuildable': [
'ExpressibleByCodeBlockItem',
'ExpressibleBySyntaxBuildable'
],
'ExpressibleByExprList': [
'ExpressibleByConditionElement',
'ExpressibleBySyntaxBuildable'
]
}
| Revert "[SwiftSyntax] Replace ExpressibleAs protocols by ExpressibleBy protocols"
| SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = {
'ExpressibleAsConditionElement': [
'ExpressibleAsConditionElementList'
],
'ExpressibleAsDeclBuildable': [
'ExpressibleAsCodeBlockItem',
'ExpressibleAsMemberDeclListItem',
'ExpressibleAsSyntaxBuildable'
],
'ExpressibleAsStmtBuildable': [
'ExpressibleAsCodeBlockItem',
'ExpressibleAsSyntaxBuildable'
],
'ExpressibleAsExprList': [
'ExpressibleAsConditionElement',
'ExpressibleAsSyntaxBuildable'
]
}
|
#!/usr/bin/env python
import os
import sys
path = os.path.join(os.path.dirname(__file__), os.path.pardir, 'common')
sys.path.append(path)
import chromium_utils
slaves = []
for master in chromium_utils.ListMasters():
masterbase = os.path.basename(master)
master_slaves = {}
execfile(os.path.join(master, 'slaves.cfg'), master_slaves)
for slave in master_slaves.get('slaves', []):
slave['master'] = masterbase
slaves.extend(master_slaves.get('slaves', []))
for slave in sorted(slaves, cmp=None, key=lambda x : x.get('hostname', '')):
slavename = slave.get('hostname')
if not slavename:
continue
osname = slave.get('os', '?')
print '%-30s %-35s %-10s' % (slavename, slave.get('master', '?'), osname)
| Tweak import statement to satisfy presubmit checks.
Review URL: http://codereview.chromium.org/8292004
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@105578 0039d316-1c4b-4281-b951-d872f2087c98
| #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Dumps a list of known slaves, along with their OS and master."""
import os
import sys
path = os.path.join(os.path.dirname(__file__), os.path.pardir)
sys.path.append(path)
from common import chromium_utils
slaves = []
for master in chromium_utils.ListMasters():
masterbase = os.path.basename(master)
master_slaves = {}
execfile(os.path.join(master, 'slaves.cfg'), master_slaves)
for slave in master_slaves.get('slaves', []):
slave['master'] = masterbase
slaves.extend(master_slaves.get('slaves', []))
for slave in sorted(slaves, cmp=None, key=lambda x : x.get('hostname', '')):
slavename = slave.get('hostname')
if not slavename:
continue
osname = slave.get('os', '?')
print '%-30s %-35s %-10s' % (slavename, slave.get('master', '?'), osname)
|
from nose.tools import istest, assert_equal
from spur import LocalShell
shell = LocalShell()
@istest
def output_of_run_is_stored():
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@istest
def cwd_of_run_can_be_set():
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
| Add test for LocalShell.run with update_env
| from nose.tools import istest, assert_equal
from spur import LocalShell
shell = LocalShell()
@istest
def output_of_run_is_stored():
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@istest
def cwd_of_run_can_be_set():
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
@istest
def environment_variables_can_be_added_for_run():
result = shell.run(["sh", "-c", "echo $NAME"], update_env={"NAME": "Bob"})
assert_equal("Bob\n", result.output)
|
from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
try:
import debug_toolbar
except ImportError:
pass
else:
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1', 'localhost')
DEBUG_TOOLBAR_CONFIG = {
'JQUERY_URL': '',
}
try:
from .local import *
except ImportError:
pass
try:
from .polygons import *
except ImportError:
pass
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
| Print tracebacks that happened in tasks
| from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
try:
import debug_toolbar
except ImportError:
pass
else:
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1', 'localhost')
DEBUG_TOOLBAR_CONFIG = {
'JQUERY_URL': '',
}
try:
from .local import *
except ImportError:
pass
try:
from .polygons import *
except ImportError:
pass
LOGGING = {
'version': 1,
'handlers': {
'console': {
'class': 'logging.StreamHandler'},
},
'loggers': {'background_task': {'handlers': ['console'], 'level': 'INFO'}}}
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
|
from os.path import join
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.condor as condor
import osgtest.library.osgunittest as osgunittest
import osgtest.library.service as service
personal_condor_config = '''
DAEMON_LIST = COLLECTOR, MASTER, NEGOTIATOR, SCHEDD, STARTD
CONDOR_HOST = $(FULL_HOSTNAME)
'''
class TestStartCondor(osgunittest.OSGTestCase):
def test_01_start_condor(self):
core.state['condor.running-service'] = False
core.skip_ok_unless_installed('condor')
core.config['condor.collectorlog'] = condor.config_val('COLLECTOR_LOG')
if service.is_running('condor'):
core.state['condor.running-service'] = True
return
core.config['condor.personal_condor'] = join(condor.config_val('LOCAL_CONFIG_DIR'), '99-personal-condor.conf')
files.write(core.config['condor.personal_condor'], personal_condor_config, owner='condor')
core.config['condor.collectorlog_stat'] = core.get_stat(core.config['condor.collectorlog'])
service.check_start('condor')
core.state['condor.started-service'] = True
core.state['condor.running-service'] = True
| Make the personal condor config world readable
| from os.path import join
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.condor as condor
import osgtest.library.osgunittest as osgunittest
import osgtest.library.service as service
personal_condor_config = '''
DAEMON_LIST = COLLECTOR, MASTER, NEGOTIATOR, SCHEDD, STARTD
CONDOR_HOST = $(FULL_HOSTNAME)
'''
class TestStartCondor(osgunittest.OSGTestCase):
def test_01_start_condor(self):
core.state['condor.running-service'] = False
core.skip_ok_unless_installed('condor')
core.config['condor.collectorlog'] = condor.config_val('COLLECTOR_LOG')
if service.is_running('condor'):
core.state['condor.running-service'] = True
return
core.config['condor.personal_condor'] = join(condor.config_val('LOCAL_CONFIG_DIR'), '99-personal-condor.conf')
files.write(core.config['condor.personal_condor'], personal_condor_config, owner='condor', chmod=0o644)
core.config['condor.collectorlog_stat'] = core.get_stat(core.config['condor.collectorlog'])
service.check_start('condor')
core.state['condor.started-service'] = True
core.state['condor.running-service'] = True
|
#! /usr/bin/env python3
# coding: utf-8
from collections import namedtuple
import matplotlib.pyplot as plt
BCand = namedtuple('BCand', ['m', 'merr', 'pt', 'p'])
bs = []
with open('B.txt') as f:
for line in f.readlines()[1:]:
bs.append(BCand(*[float(v) for v in line.strip().split(',')]))
masses = [b.m for b in bs]
plt.hist(masses, 60, histtype='stepfilled')
plt.xlabel(r'$m_B / \mathrm{GeV}$')
plt.savefig('mass.pdf')
| Use numpy for readin and add errorbars.
| #! /usr/bin/env python3
# coding: utf-8
from collections import namedtuple
import matplotlib.pyplot as plt
import numpy as np
BCand = namedtuple('BCand', ['m', 'merr', 'pt', 'p'])
bs = [BCand(*b) for b in np.genfromtxt('B.txt', skip_header=1, delimiter=',')]
masses = [b.m for b in bs]
ns, bins, _ = plt.hist(masses, 60, histtype='stepfilled', facecolor='r',
edgecolor='none')
centers = bins[:-1] + (bins[1:] - bins[:-1]) / 2
merr = np.sqrt(ns)
plt.errorbar(centers, ns, yerr=merr, fmt='b+')
plt.xlabel(r'$m_B / \mathrm{GeV}$')
plt.savefig('mass.pdf')
|
import webapp2
import settings
class SampleIndex(webapp2.RequestHandler):
"""Stub request handler"""
def get(self):
self.response.headers['Content-Type'] = 'text/plain'
self.response.out.write("helloworld")
application = webapp2.WSGIApplication([
('/', SampleIndex),
], debug=settings.DEBUG)
| Determine DEBUG flag at runtime - if we are under the SDK, we are debugging
| import webapp2
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
class SampleIndex(webapp2.RequestHandler):
"""Stub request handler"""
def get(self):
self.response.headers['Content-Type'] = 'text/plain'
self.response.out.write("helloworld")
application = webapp2.WSGIApplication([
('/', SampleIndex),
], debug=DEBUG)
|
# RUN: %{lit} -j 1 -v %{inputs}/test-data --output %t.results.out > %t.out
# RUN: FileCheck < %t.results.out %s
# CHECK: {
# CHECK: "__version__"
# CHECK: "elapsed"
# CHECK-NEXT: "tests": [
# CHECK-NEXT: {
# CHECK-NEXT: "code": "PASS",
# CHECK-NEXT: "elapsed": {{[0-9.]+}},
# CHECK-NEXT: "metrics": {
# CHECK-NEXT: "value0": 1,
# CHECK-NEXT: "value1": 2.3456
# CHECK-NEXT: }
# CHECK-NEXT: "name": "test-data :: bad&name.ini",
# CHECK-NEXT: "output": "& < > \""
# CHECK-NEXT: },
# CHECK-NEXT: {
# CHECK-NEXT: "code": "PASS",
# CHECK-NEXT: "elapsed": {{[0-9.]+}},
# CHECK-NEXT: "metrics": {
# CHECK-NEXT: "value0": 1,
# CHECK-NEXT: "value1": 2.3456
# CHECK-NEXT: }
# CHECK-NEXT: "name": "test-data :: metrics.ini",
# CHECK-NEXT: "output": "Test passed."
# CHECK-NEXT: }
# CHECK-NEXT: ]
# CHECK-NEXT: }
| Refactor test in case results are backwards
Looks like results can come in either way in this file. Loosen the ordering constraints.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@331945 91177308-0d34-0410-b5e6-96231b3b80d8
| # RUN: %{lit} -j 1 -v %{inputs}/test-data --output %t.results.out > %t.out
# RUN: FileCheck < %t.results.out %s
# CHECK: {
# CHECK: "__version__"
# CHECK: "elapsed"
# CHECK-NEXT: "tests": [
# CHECK-NEXT: {
# CHECK-NEXT: "code": "PASS",
# CHECK-NEXT: "elapsed": {{[0-9.]+}},
# CHECK-NEXT: "metrics": {
# CHECK-NEXT: "value0": 1,
# CHECK-NEXT: "value1": 2.3456
# CHECK-NEXT: }
# CHECK: "name": "test-data :: bad&name.ini",
# CHECK: "output": "& < > \""
# CHECK: ]
# CHECK-NEXT: }
|
#!/usr/bin/env python
"""
TODO: Modify module doc.
"""
from __future__ import division
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Virtual Lab"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "7/30/14"
import os
os.environ["FLAMYNGO"] = os.path.join(os.environ["HOME"], ".flamyngo.yaml")
from flamyngo import app
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
app.run(debug=True, host='0.0.0.0', port=port)
| Use argparse for more flexible usage.
| #!/usr/bin/env python
"""
TODO: Modify module doc.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Virtual Lab"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "7/30/14"
import os
import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="""flamyngo is a basic Flask frontend for querying MongoDB collections""",
epilog="Author: Shyue Ping Ong")
parser.add_argument(
"-c", "--config", dest="config", type=str, nargs="?",
default=os.path.join(os.environ["HOME"], ".flamyngo.yaml"),
help="YAML file where the config is stored")
args = parser.parse_args()
port = int(os.environ.get("PORT", 5000))
os.environ["FLAMYNGO"] = args.config
from flamyngo import app
app.run(debug=True, host='0.0.0.0', port=port)
|
from wsme import types as wtypes
from mirantas.resource import Resource
class BaseAPI(Resource):
# TODO: Does URI need a custom type?
uri = wtypes.text
name = wtypes.text
description = wtypes.text
id = wtypes.text
| [STORM-1] Implement Staction Controller
* Fixing mis-typed name.
| from wsme import types as wtypes
from mirantis.resource import Resource
class BaseAPI(Resource):
# TODO: Does URI need a custom type?
uri = wtypes.text
name = wtypes.text
description = wtypes.text
id = wtypes.text
|
import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
'name',
'type',
'state',
'customer_name',
'project_name',
'created',
'modified',
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
| Fix expert request filter by customer and project name.
| import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
|
# This is just a kludge so that bdist_rpm doesn't guess wrong about the
# distribution name and version, if the egg_info command is going to alter
# them, another kludge to allow you to build old-style non-egg RPMs.
from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
class bdist_rpm(_bdist_rpm):
def initialize_options(self):
_bdist_rpm.initialize_options(self)
self.no_egg = None
def run(self):
self.run_command('egg_info') # ensure distro name is up-to-date
_bdist_rpm.run(self)
def _make_spec_file(self):
version = self.distribution.get_version()
rpmversion = version.replace('-','_')
spec = _bdist_rpm._make_spec_file(self)
line23 = '%define version '+version
line24 = '%define version '+rpmversion
spec = [
line.replace(
"Source0: %{name}-%{version}.tar",
"Source0: %{name}-%{unmangled_version}.tar"
).replace(
"setup.py install ",
"setup.py install --single-version-externally-managed "
).replace(
"%setup",
"%setup -n %{name}-%{unmangled_version}"
).replace(line23,line24)
for line in spec
]
spec.insert(spec.index(line24)+1, "%define unmangled_version "+version)
return spec
| Adjust to match modern style conventions.
| # This is just a kludge so that bdist_rpm doesn't guess wrong about the
# distribution name and version, if the egg_info command is going to alter
# them, another kludge to allow you to build old-style non-egg RPMs.
from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
class bdist_rpm(_bdist_rpm):
def initialize_options(self):
_bdist_rpm.initialize_options(self)
self.no_egg = None
def run(self):
# ensure distro name is up-to-date
self.run_command('egg_info')
_bdist_rpm.run(self)
def _make_spec_file(self):
version = self.distribution.get_version()
rpmversion = version.replace('-','_')
spec = _bdist_rpm._make_spec_file(self)
line23 = '%define version ' + version
line24 = '%define version ' + rpmversion
spec = [
line.replace(
"Source0: %{name}-%{version}.tar",
"Source0: %{name}-%{unmangled_version}.tar"
).replace(
"setup.py install ",
"setup.py install --single-version-externally-managed "
).replace(
"%setup",
"%setup -n %{name}-%{unmangled_version}"
).replace(line23, line24)
for line in spec
]
insert_loc = spec.index(line24) + 1
unmangled_version = "%define unmangled_version " + version
spec.insert(insert_loc, unmangled_version)
return spec
|
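The bdist_rpm subclass in the row above only takes effect once setuptools is told to use it in place of the stock command. A minimal registration sketch, assuming the class is importable from a local helper module; package and module names are illustrative:
# Hypothetical setup.py registering the custom bdist_rpm command shown above.
from setuptools import setup

from build_helpers import bdist_rpm  # illustrative module path

setup(
    name='mypackage',
    version='1.0-dev1',                 # the '-' is what the rpmversion mangling rewrites to '_'
    cmdclass={'bdist_rpm': bdist_rpm},  # use the kludged command instead of the default one
)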
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, api, _
import stdnum.ar.cbu
def validate_cbu(cbu):
return stdnum.ar.cbu.validate(cbu)
class ResPartnerBank(models.Model):
_inherit = 'res.partner.bank'
@api.model
def _get_supported_account_types(self):
""" Add new account type named cbu used in Argentina """
res = super()._get_supported_account_types()
res.append(('cbu', _('CBU')))
return res
@api.model
def retrieve_acc_type(self, acc_number):
try:
validate_cbu(acc_number)
except Exception:
return super().retrieve_acc_type(acc_number)
return 'cbu'
| [FIX] l10n_ar: Fix ImportError: No module named 'stdnum.ar.cbu'
Since stdnum.ar.cbu is not available in the Odoo SaaS environment, which
uses an old version of the stdnum package, we add a try/except to catch
this and handle the error properly: raise an exception and leave a message
in the log telling the user that the CBU could not be validated.
closes odoo/odoo#40383
X-original-commit: 25d483fc3fc05fd47c72c3d96c02fed12b998b0d
Signed-off-by: Josse Colpaert <1f46e7f017caa89a77c9557ed26b800e8d5d7700@openerp.com>
| # Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, api, _
from odoo.exceptions import ValidationError
import stdnum.ar
import logging
_logger = logging.getLogger(__name__)
def validate_cbu(cbu):
try:
return stdnum.ar.cbu.validate(cbu)
except Exception as error:
msg = _("Argentinian CBU was not validated: %s" % repr(error))
_logger.log(25, msg)
raise ValidationError(msg)
class ResPartnerBank(models.Model):
_inherit = 'res.partner.bank'
@api.model
def _get_supported_account_types(self):
""" Add new account type named cbu used in Argentina """
res = super()._get_supported_account_types()
res.append(('cbu', _('CBU')))
return res
@api.model
def retrieve_acc_type(self, acc_number):
try:
validate_cbu(acc_number)
except Exception:
return super().retrieve_acc_type(acc_number)
return 'cbu'
|
# mode: run
# tag: generator
import cython
import sys
def test_generator_frame_cycle():
"""
>>> test_generator_frame_cycle()
("I'm done",)
"""
testit = []
def whoo():
try:
yield
except:
yield
finally:
testit.append("I'm done")
g = whoo()
next(g)
# Frame object cycle
eval('g.throw(ValueError)', {'g': g})
del g
if cython.compiled:
# FIXME: this should not be necessary, but I can't see how to do it...
import gc; gc.collect()
return tuple(testit)
def test_generator_frame_cycle_with_outer_exc():
"""
>>> test_generator_frame_cycle_with_outer_exc()
("I'm done",)
"""
testit = []
def whoo():
try:
yield
except:
yield
finally:
testit.append("I'm done")
g = whoo()
next(g)
try:
raise ValueError()
except ValueError as exc:
assert sys.exc_info()[1] is exc, sys.exc_info()
# Frame object cycle
eval('g.throw(ValueError)', {'g': g})
assert sys.exc_info()[1] is exc, sys.exc_info()
del g
assert sys.exc_info()[1] is exc, sys.exc_info()
if cython.compiled:
# FIXME: this should not be necessary, but I can't see how to do it...
import gc; gc.collect()
return tuple(testit)
| Fix a CPython comparison test in CPython 3.3 which was apparently fixed only in 3.4 and later.
| # mode: run
# tag: generator
import cython
import sys
def test_generator_frame_cycle():
"""
>>> test_generator_frame_cycle()
("I'm done",)
"""
testit = []
def whoo():
try:
yield
except:
yield
finally:
testit.append("I'm done")
g = whoo()
next(g)
# Frame object cycle
eval('g.throw(ValueError)', {'g': g})
del g
return tuple(testit)
def test_generator_frame_cycle_with_outer_exc():
"""
>>> test_generator_frame_cycle_with_outer_exc()
("I'm done",)
"""
testit = []
def whoo():
try:
yield
except:
yield
finally:
testit.append("I'm done")
g = whoo()
next(g)
try:
raise ValueError()
except ValueError as exc:
assert sys.exc_info()[1] is exc, sys.exc_info()
# Frame object cycle
eval('g.throw(ValueError)', {'g': g})
# CPython 3.3 handles this incorrectly itself :)
if cython.compiled or sys.version_info[:2] not in [(3, 2), (3, 3)]:
assert sys.exc_info()[1] is exc, sys.exc_info()
del g
if cython.compiled or sys.version_info[:2] not in [(3, 2), (3, 3)]:
assert sys.exc_info()[1] is exc, sys.exc_info()
return tuple(testit)
|
from __future__ import absolute_import, division, print_function
from panoptes_client.panoptes import PanoptesObject, LinkResolver
class User(PanoptesObject):
_api_slug = 'users'
_link_slug = 'users'
_edit_attributes = ()
def avatar(self):
return User.http_get('{}/avatar'.format(self.id))[0]
LinkResolver.register(User)
LinkResolver.register(User, 'owner')
| Change User.avatar to be a property
| from __future__ import absolute_import, division, print_function
from panoptes_client.panoptes import PanoptesObject, LinkResolver
class User(PanoptesObject):
_api_slug = 'users'
_link_slug = 'users'
_edit_attributes = ()
@property
def avatar(self):
return User.http_get('{}/avatar'.format(self.id))[0]
LinkResolver.register(User)
LinkResolver.register(User, 'owner')
|
# gnome-terminal script
import a11y
import speech
def onTextInserted (e):
if e.source.role != "terminal":
return
speech.say ("default", e.any_data)
def onTextDeleted (event):
"""Called whenever text is deleted from an object.
Arguments:
- event: the Event
"""
# Ignore text deletions from non-focused objects, unless the
# currently focused object is the parent of the object from which
# text was deleted
#
if (event.source != a11y.focusedObject) \
and (event.source.parent != a11y.focusedObject):
pass
else:
brlUpdateText (event.source)
| Call default.brlUpdateText instead of brlUpdateText (which was undefined)
| # gnome-terminal script
import a11y
import speech
import default
def onTextInserted (e):
if e.source.role != "terminal":
return
speech.say ("default", e.any_data)
def onTextDeleted (event):
"""Called whenever text is deleted from an object.
Arguments:
- event: the Event
"""
# Ignore text deletions from non-focused objects, unless the
# currently focused object is the parent of the object from which
# text was deleted
#
if (event.source != a11y.focusedObject) \
and (event.source.parent != a11y.focusedObject):
pass
else:
default.brlUpdateText (event.source)
|
from __future__ import absolute_import
from unittest import TestCase
| Add some :tiger2:s for `graph_objs_tools.py`. | from __future__ import absolute_import
from unittest import TestCase
from plotly.graph_objs import graph_objs as go
from plotly.graph_objs import graph_objs_tools as got
class TestGetRole(TestCase):
def test_get_role_no_value(self):
# this is a bit fragile, but we pick a few stable values
# the location in the figure matters for this test!
fig = go.Figure(data=[{}])
fig.data[0].marker.color = 'red'
fig.layout.title = 'some-title'
parent_key_role_tuples = [
(fig.data[0], 'x', 'data'),
(fig.data[0], 'marker', 'object'),
(fig.data[0].marker, 'color', 'style'),
(fig.layout, 'title', 'info'),
(fig, 'data', 'object'),
]
for parent, key, role in parent_key_role_tuples:
self.assertEqual(got.get_role(parent, key), role, msg=key)
def test_get_role_with_value(self):
# some attributes are conditionally considered data if they're arrays
# the location in the figure matters for this test!
fig = go.Figure(data=[{}])
fig.data[0].marker.color = 'red'
parent_key_value_role_tuples = [
(fig.data[0], 'x', 'wh0cares', 'data'),
(fig.data[0], 'marker', 'wh0cares', 'object'),
(fig.data[0].marker, 'color', 'red', 'style'),
(fig.data[0].marker, 'color', ['red'], 'data')
]
for parent, key, value, role in parent_key_value_role_tuples:
self.assertEqual(got.get_role(parent, key, value), role,
msg=(key, value))
|
from django import template
register = template.Library()
@register.inclusion_tag('speeches/_section_prev_next_links.html')
def section_prev_next_links(section):
next_section = section.get_next_node()
prev_section = section.get_previous_node()
return {
"next": next_section,
"previous": prev_section,
}
| [1119] Change next/prev finding logic to stay in same section
This uses code from speeches.models._get_next_previous_node.
Thanks to Matthew Somerville for that suggestion.
| import datetime
from django import template
from speeches.models import Section
register = template.Library()
# NOTE: this code is far from ideal. Sharing it with others in a pull request
# to get opinions about how to improve.
# TODO:
# - cache results of min_speech_datetime and section_prev_next_links (both of
# which will be called multiple times with same input)
@register.inclusion_tag('speeches/_section_prev_next_links.html')
def section_prev_next_links(section):
return {
"next": get_neighboring_section(section, +1),
"previous": get_neighboring_section(section, -1),
}
def get_neighboring_section(section, direction):
"""
This code is specific to the section hierarchy that is used for the
questions and hansard in the SayIt for ZA.
This is essentially:
hansard
2012
March
13
Some section (has speeches)
and
Questions
Minister of Foo
16 Oct 2009 (has speeches)
"""
# These lines lightly modified from https://github.com/mysociety/sayit/blob/master/speeches/models.py#L356-L369
# 'root' is set to be the section's parent, and s/self/section/, some
# formatting changes
if not section.parent:
return None
tree = section.parent.get_descendants
idx = tree.index(section)
lvl = tree[idx].level
same_level = [ s for s in tree if s.level == lvl ]
idx = same_level.index(section)
if direction == -1 and idx == 0:
return None
try:
return same_level[idx+direction]
except:
return None
|
from __future__ import unicode_literals
import json
from test_helpers import MockTrack, get_websocket, make_frontend, patched_bot
from mopidy_tachikoma import Extension
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[tachikoma]' in config
assert 'enabled = true' in config
assert 'slack_token = ' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'slack_token' in schema
@patched_bot
def test_can_connect():
make_frontend()
@patched_bot
def test_gets_events():
frontend = make_frontend()
frontend.doSlackLoop(
None, MockTrack(),
[{"type": "message", "channel": "mock_channel"}])
data = json.loads(get_websocket().data)
assert {
'channel': 'mock_channel',
'text': 'Now playing *foo* from *bar*',
'type': 'message'} == data
@patched_bot
def test_says_one_thing_per_channel():
frontend = make_frontend()
song = MockTrack()
frontend.doSlackLoop(
song, song, [{"type": "message", "channel": "mock_channel"}])
assert get_websocket().data is None # same song, no info
| Clear websocket data to try and fix Travis
| from __future__ import unicode_literals
import json
from test_helpers import MockTrack, get_websocket, make_frontend, patched_bot
from mopidy_tachikoma import Extension
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[tachikoma]' in config
assert 'enabled = true' in config
assert 'slack_token = ' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'slack_token' in schema
@patched_bot
def test_can_connect():
make_frontend()
@patched_bot
def test_gets_events():
frontend = make_frontend()
frontend.doSlackLoop(
None, MockTrack(),
[{"type": "message", "channel": "mock_channel"}])
data = json.loads(get_websocket().data)
assert {
'channel': 'mock_channel',
'text': 'Now playing *foo* from *bar*',
'type': 'message'} == data
@patched_bot
def test_says_one_thing_per_channel():
frontend = make_frontend()
song = MockTrack()
get_websocket().data = None # make sure it's cleared
frontend.doSlackLoop(
song, song, [{"type": "message", "channel": "mock_channel"}])
assert get_websocket().data is None # same song, no info
|
import json
from .load_data_from_file import load_data_from_file
from .get_old_dict_values import get_old_dict_values
from .log import log
from .paths import make_course_path
def load_previous(course_path):
try:
prior_data = load_data_from_file(course_path)
prior = json.loads(prior_data)
except FileNotFoundError:
prior = None
revisions = []
# print(course_path, revisions)
if prior and ('revisions' in prior):
revisions = prior['revisions']
del prior['revisions']
return (prior, revisions or [])
def check_for_revisions(course):
prior, revisions = load_previous(make_course_path(course['clbid']))
if not prior:
return None
diff = get_old_dict_values(prior, course)
if diff:
revisions.append(diff)
log('revision in %d:' % (course['clbid']), diff)
if revisions and (('revisions' not in course) or (revisions != course.get('revisions'))):
return revisions
return None
| Use an ordereddict for sorting revisions
| from collections import OrderedDict
import json
from .load_data_from_file import load_data_from_file
from .get_old_dict_values import get_old_dict_values
from .log import log
from .paths import make_course_path
def load_previous(course_path):
try:
prior_data = load_data_from_file(course_path)
prior = json.loads(prior_data)
except FileNotFoundError:
prior = None
revisions = []
# print(course_path, revisions)
if prior and ('revisions' in prior):
revisions = prior['revisions']
del prior['revisions']
return (prior, revisions or [])
def check_for_revisions(course):
prior, revisions = load_previous(make_course_path(course['clbid']))
if not prior:
return None
diff = get_old_dict_values(prior, course)
ordered_diff = OrderedDict()
for key in sorted(diff.keys()):
ordered_diff[key] = diff[key]
if ordered_diff:
revisions.append(ordered_diff)
log('revision in %d:' % (course['clbid']), ordered_diff)
if revisions and (('revisions' not in course) or (revisions != course.get('revisions'))):
return revisions
return None
|
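A minimal sketch of why the commit above rebuilds the diff as an OrderedDict over sorted keys: before CPython 3.7, plain dict ordering was arbitrary, so serialized revisions could differ between runs even when their contents were identical. The sample diff below is made up.
import json
from collections import OrderedDict

diff = {'title': 'New title', 'credits': 4, 'instructors': ['X']}
ordered_diff = OrderedDict((key, diff[key]) for key in sorted(diff))
print(json.dumps(ordered_diff))
# always: {"credits": 4, "instructors": ["X"], "title": "New title"}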
import re
from versions.software.utils import get_command_stderr, get_soup, \
get_text_between
def name():
"""Return the precise name for the software."""
return 'Zulu OpenJDK'
def installed_version():
"""Return the installed version of the jdk, or None if not installed."""
try:
version_string = get_command_stderr(('java', '-version'))
return get_text_between(version_string, '"', '"')
except FileNotFoundError:
pass
def downloadable_version(url):
"""Strip the version out of the Zulu OpenJDK manual download link."""
# example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip
filename = url[url.rfind('/') + 1:]
jdk_version = get_text_between(filename, '-jdk', '-')
version, update = jdk_version.rsplit('.', 1)
return f'1.{version}_{update}'
def latest_version():
"""Return the latest version of Zulu OpenJDK available for download."""
soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
if soup:
div = soup.find('div', class_='latest_area')
if div:
zip_filename = re.compile('\.zip$')
tag = div.find('a', class_='r-download', href=zip_filename)
if tag:
return downloadable_version(tag.attrs['href'])
return 'Unknown'
| Update OpenJDK version to support both 8 and 9.
| import re
from versions.software.utils import get_command_stderr, get_soup, \
get_text_between
def name():
"""Return the precise name for the software."""
return 'Zulu OpenJDK'
def installed_version():
"""Return the installed version of the jdk, or None if not installed."""
try:
version_string = get_command_stderr(('java', '-version'))
# "1.8.0_162" or "9.0.4.1" for example
return get_text_between(version_string, '"', '"')
except FileNotFoundError:
pass
def latest_version():
"""Return the latest version of Zulu OpenJDK available for download."""
installed = installed_version()
soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
if soup:
zip_filename = re.compile('\.zip$')
for tag in soup.find_all('a', class_='r-download', href=zip_filename):
filename = tag.attrs['href']
zulu = get_text_between(filename, 'bin/zulu', '-')
jdk = get_text_between(filename, 'jdk', '-')
if (installed is None) or (installed[0] == '9' and zulu[0] == '9'):
return zulu
elif installed[0] == '1' and jdk[0] == installed[2]:
version, update = jdk.rsplit('.', 1)
return f'1.{version}_{update}'
return 'Unknown'
|
"""
Template tags and helper functions for displaying breadcrumbs in page titles
based on the current micro site.
"""
from django import template
from django.conf import settings
from microsite_configuration.middleware import MicrositeConfiguration
register = template.Library()
def page_title_breadcrumbs(*crumbs, **kwargs):
"""
This function creates a suitable page title in the form:
Specific | Less Specific | General | edX
It will output the correct platform name for the request.
Pass in a `separator` kwarg to override the default of " | "
"""
separator = kwargs.get("separator", " | ")
if crumbs:
return '{}{}{}'.format(separator.join(crumbs), separator, platform_name())
else:
return platform_name()
@register.simple_tag(name="page_title_breadcrumbs", takes_context=True)
def page_title_breadcrumbs_tag(context, *crumbs):
"""
Django template that creates breadcrumbs for page titles:
{% page_title_breadcrumbs "Specific" "Less Specific" General %}
"""
return page_title_breadcrumbs(*crumbs)
@register.simple_tag(name="platform_name")
def platform_name():
"""
Django template tag that outputs the current platform name:
{% platform_name %}
"""
return MicrositeConfiguration.get_microsite_configuration_value('platform_name', settings.PLATFORM_NAME) | Fix unicode error in subsection
| """
Template tags and helper functions for displaying breadcrumbs in page titles
based on the current micro site.
"""
from django import template
from django.conf import settings
from microsite_configuration.middleware import MicrositeConfiguration
register = template.Library()
def page_title_breadcrumbs(*crumbs, **kwargs):
"""
This function creates a suitable page title in the form:
Specific | Less Specific | General | edX
It will output the correct platform name for the request.
Pass in a `separator` kwarg to override the default of " | "
"""
separator = kwargs.get("separator", " | ")
if crumbs:
return u'{}{}{}'.format(separator.join(crumbs), separator, platform_name())
else:
return platform_name()
@register.simple_tag(name="page_title_breadcrumbs", takes_context=True)
def page_title_breadcrumbs_tag(context, *crumbs):
"""
Django template that creates breadcrumbs for page titles:
{% page_title_breadcrumbs "Specific" "Less Specific" General %}
"""
return page_title_breadcrumbs(*crumbs)
@register.simple_tag(name="platform_name")
def platform_name():
"""
Django template tag that outputs the current platform name:
{% platform_name %}
"""
return MicrositeConfiguration.get_microsite_configuration_value('platform_name', settings.PLATFORM_NAME)
|
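A standalone sketch of the Python 2 pitfall the u'' prefix above works around (run under Python 2; the crumb value is made up): formatting a non-ASCII unicode value into a byte-string template forces an implicit ASCII encode, which typically raises UnicodeEncodeError.
crumb = u'Caf\xe9 courses'            # a non-ASCII breadcrumb
ok = u'{} | edX'.format(crumb)        # unicode template: result stays unicode
try:
    broken = '{} | edX'.format(crumb) # byte-string template: implicit ASCII encode
except UnicodeEncodeError as err:
    print('str template failed: %s' % err)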
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import PerObjectSettingsTool
from UM.i18n import i18nCatalog
i18n_catalog = i18nCatalog("uranium")
def getMetaData():
return {
"plugin": {
"name": i18n_catalog.i18nc("@label", "Settings Per Object Tool"),
"author": "Ultimaker",
"version": "1.0",
"description": i18n_catalog.i18nc("@info:whatsthis", "Provides the Per Object Settings."),
"api": 2
},
"tool": {
"name": i18n_catalog.i18nc("@label", "Per Object Settings"),
"description": i18n_catalog.i18nc("@info:tooltip", "Configure Settings Per Object"),
"icon": "setting_per_object",
"tool_panel": "PerObjectSettingsPanel.qml"
},
}
def register(app):
return { "tool": PerObjectSettingsTool.PerObjectSettingsTool() }
| Normalize strings for per object settings
| # Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import PerObjectSettingsTool
from UM.i18n import i18nCatalog
i18n_catalog = i18nCatalog("uranium")
def getMetaData():
return {
"plugin": {
"name": i18n_catalog.i18nc("@label", "Per Object Settings Tool"),
"author": "Ultimaker",
"version": "1.0",
"description": i18n_catalog.i18nc("@info:whatsthis", "Provides the Per Object Settings."),
"api": 2
},
"tool": {
"name": i18n_catalog.i18nc("@label", "Per Object Settings"),
"description": i18n_catalog.i18nc("@info:tooltip", "Configure Per Object Settings"),
"icon": "setting_per_object",
"tool_panel": "PerObjectSettingsPanel.qml"
},
}
def register(app):
return { "tool": PerObjectSettingsTool.PerObjectSettingsTool() }
|
from datetime import datetime
from django.core.management.base import BaseCommand
from django.contrib.sessions.models import Session
class Command(BaseCommand):
args = '<count count ...>'
help = "Delete old sessions"
def handle(self, *args, **options):
old_sessions = Session.objects.filter(expire_date__lt=datetime.now())
self.stdout.write("Deleting {0} expired sessions".format(
old_sessions.count()
)
)
for index, session in enumerate(old_sessions):
session.delete()
if str(index).endswith('000'):
self.stdout.write("{0} records deleted".format(index))
self.stdout.write("{0} expired sessions remaining".format(
Session.objects.filter(expire_date__lt=datetime.now())
)
)
| Add delete old sessions command
| from datetime import datetime
from django.core.management.base import NoArgsCommand
from django.contrib.sessions.models import Session
class Command(NoArgsCommand):
help = "Delete old sessions"
def handle_noargs(self, **options):
old_sessions = Session.objects.filter(expire_date__lt=datetime.now())
self.stdout.write("Deleting {0} expired sessions".format(
old_sessions.count()
)
)
for index, session in enumerate(old_sessions[:10000]):
session.delete()
if str(index).endswith('000'):
self.stdout.write("{0} records deleted".format(index))
self.stdout.write("{0} expired sessions remaining".format(
Session.objects.filter(expire_date__lt=datetime.now())
)
)
|
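A small aside on the cap in the loop above: an enumerate object is not subscriptable, so the limit has to be applied to the underlying iterable (as in enumerate(old_sessions[:10000])) or via itertools.islice. The stand-in data below is illustrative.
from itertools import islice

sessions = list(range(25000))                      # stand-in for the queryset
for index, session in islice(enumerate(sessions), 10000):
    pass                                           # delete the session here
# equivalent: for index, session in enumerate(sessions[:10000]): ...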
# -*- coding: utf-8 -*-
##############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
import cherrypy
from girder_large_image.girder_tilesource import GirderTileSource
from . import BioformatsFileTileSource, _stopJavabridge
cherrypy.engine.subscribe('stop', _stopJavabridge)
class BioformatsGirderTileSource(BioformatsFileTileSource, GirderTileSource):
"""
Provides tile access to Girder items that can be read with bioformats.
"""
cacheName = 'tilesource'
name = 'bioformats'
| Fix reading from hashed file names.
Bioformats expects file extensions to exist, so flag that we should
always appear as actual, fully-pathed files.
| # -*- coding: utf-8 -*-
##############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
import cherrypy
from girder_large_image.girder_tilesource import GirderTileSource
from . import BioformatsFileTileSource, _stopJavabridge
cherrypy.engine.subscribe('stop', _stopJavabridge)
class BioformatsGirderTileSource(BioformatsFileTileSource, GirderTileSource):
"""
Provides tile access to Girder items that can be read with bioformats.
"""
cacheName = 'tilesource'
name = 'bioformats'
def mayHaveAdjacentFiles(self, largeImageFile):
# bioformats uses extensions to determine how to open a file, so this
# needs to be set for all file formats.
return True
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""
Template tags related to the contact form.
"""
from django import template
try:
import honeypot
except ImportError: # pragma: no cover
honeypot = None
register = template.Library()
@register.inclusion_tag('envelope/contact_form.html', takes_context=True)
def render_contact_form(context):
"""
Renders the contact form which must be in the template context.
The most common use case for this template tag is to call it in the
template rendered by :class:`~envelope.views.ContactView`. The template
tag will then render a sub-template ``envelope/contact_form.html``.
.. versionadded:: 0.7.0
"""
form = context['form']
return {
'form': form,
}
@register.simple_tag
def antispam_fields():
"""
Returns the HTML for any spam filters available.
"""
content = ''
if honeypot:
t = template.Template('{% load honeypot %}{% render_honeypot_field %}')
content += t.render(template.Context({}))
return content
| Raise a more specific error when form is not passed to the template.
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""
Template tags related to the contact form.
"""
from django import template
try:
import honeypot
except ImportError: # pragma: no cover
honeypot = None
register = template.Library()
@register.inclusion_tag('envelope/contact_form.html', takes_context=True)
def render_contact_form(context):
"""
Renders the contact form which must be in the template context.
The most common use case for this template tag is to call it in the
template rendered by :class:`~envelope.views.ContactView`. The template
tag will then render a sub-template ``envelope/contact_form.html``.
.. versionadded:: 0.7.0
"""
try:
form = context['form']
except KeyError:
raise template.TemplateSyntaxError("There is no 'form' variable in the template context.")
return {
'form': form,
}
@register.simple_tag
def antispam_fields():
"""
Returns the HTML for any spam filters available.
"""
content = ''
if honeypot:
t = template.Template('{% load honeypot %}{% render_honeypot_field %}')
content += t.render(template.Context({}))
return content
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
from fnmatch import fnmatchcase
class AccountExtban(ModuleData):
implements(IPlugin, IModuleData)
name = "AccountExtban"
def actions(self):
return [ ("usermatchban-R", 1, self.matchBan),
("usermetadataupdate", 10, self.updateBansOnAccountChange) ]
def matchBan(self, user, matchNegated, mask):
if not user.metadataKeyExists("account"):
return matchNegated
userAccount = ircLower(user.metadataValue("account"))
if fnmatchcase(userAccount, mask):
return not matchNegated
return matchNegated
def updateBansOnAccountChange(self, user, key, oldValue, value, visibility, setByUser, fromServer = None):
if key != "account":
return
self.ircd.runActionStandard("updateuserbancache", user)
matchExtban = AccountExtban() | Fix matching users against R: extbans
| from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
from fnmatch import fnmatchcase
class AccountExtban(ModuleData):
implements(IPlugin, IModuleData)
name = "AccountExtban"
def actions(self):
return [ ("usermatchban-R", 1, self.matchBan),
("usermetadataupdate", 10, self.updateBansOnAccountChange) ]
def matchBan(self, user, matchNegated, mask):
if not user.metadataKeyExists("account"):
return matchNegated
userAccount = ircLower(user.metadataValue("account"))
lowerMask = ircLower(mask)
if fnmatchcase(userAccount, lowerMask):
return not matchNegated
return matchNegated
def updateBansOnAccountChange(self, user, key, oldValue, value, visibility, setByUser, fromServer = None):
if key != "account":
return
self.ircd.runActionStandard("updateuserbancache", user)
matchExtban = AccountExtban() |
import urllib
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
| Support conversion format as extension, instead of mimetype
| import urllib
from mimetypes import types_map
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
if '/' not in format:
extension = '.' + format if not format.startswith('.') else format
format = types_map[extension]
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
|
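The conversion above leans on the stdlib mimetypes table; a quick standalone look at that mapping (mimetypes.guess_type is a more forgiving alternative that returns None instead of raising KeyError for unknown extensions).
from mimetypes import guess_type, types_map

print(types_map['.pdf'])             # 'application/pdf'
print(guess_type('report.pdf')[0])   # 'application/pdf'
print(guess_type('report.unknown'))  # (None, None) -- no KeyError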
# -*- coding: utf-8 -*-
"""
Use behave4cmd0 step library (predecessor of behave4cmd).
"""
# -- REGISTER-STEPS:
import behave4cmd0.command_steps
| Use all behave4cmd0 steps now.
| # -*- coding: utf-8 -*-
"""
Use behave4cmd0 step library (predecessor of behave4cmd).
"""
# -- REGISTER-STEPS:
import behave4cmd0.__all_steps__
|
import os
import sys
# Disable django-pipeline when in test mode
PIPELINE_ENABLED = 'test' not in sys.argv
# Main project directory
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
STATIC_BASE_DIR = os.path.join(BASE_DIR, '../webroot')
# Static file dirs
STATIC_ROOT = os.path.join(STATIC_BASE_DIR, 'static')
MEDIA_ROOT = os.path.join(STATIC_BASE_DIR, 'media')
# Static file URLs
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
# django-pipeline settings
STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'
STATICFILES_FINDERS = (
'pipeline.finders.AppDirectoriesFinder',
'pipeline.finders.PipelineFinder',
)
PIPELINE_COMPILERS = (
'pipeline.compilers.stylus.StylusCompiler',
)
# Stylus configuration
PIPELINE_STYLUS_ARGUMENTS = ' '.join([
'--include {path}/common/static/styl', # Expose common styl lib dir
'--use kouto-swiss',
]).format(path=BASE_DIR)
# Packaging specs for CSS
PIPELINE_CSS = {
'app': {
'source_filenames': [
# ...
],
'output_filename': 'css/app.css',
}
}
# Packaging specs for JavaScript
PIPELINE_JS = {
}
| fix(set): Fix django-pipeline configuration for development/test
| import os
import sys
# Main project directory
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
STATIC_BASE_DIR = os.path.join(BASE_DIR, '../webroot')
# Static file dirs
STATIC_ROOT = os.path.join(STATIC_BASE_DIR, 'static')
MEDIA_ROOT = os.path.join(STATIC_BASE_DIR, 'media')
# Static file URLs
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
# django-pipeline settings
STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'
if 'test' in sys.argv:
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
STATICFILES_FINDERS = (
'pipeline.finders.AppDirectoriesFinder',
'pipeline.finders.PipelineFinder',
)
PIPELINE_COMPILERS = (
'pipeline.compilers.stylus.StylusCompiler',
)
# Stylus configuration
PIPELINE_STYLUS_ARGUMENTS = ' '.join([
'--include {path}/common/static/styl', # Expose common styl lib dir
'--use kouto-swiss',
]).format(path=BASE_DIR)
# Packaging specs for CSS
PIPELINE_CSS = {
'app': {
'source_filenames': [
# ...
],
'output_filename': 'css/app.css',
}
}
# Packaging specs for JavaScript
PIPELINE_JS = {
}
|
from django.conf.urls import url
from django_lti_tool_provider import views as lti_views
urlpatterns = [
url(r'', lti_views.LTIView.as_view(), name='lti')
]
| Adjust URL configuration based on changes introduced in Django 1.9:
- URL application namespace required if setting an instance namespace:
https://docs.djangoproject.com/en/2.1/releases/1.9/#url-application-namespace-required-if-setting-an-instance-namespace
| from django.conf.urls import url
from django_lti_tool_provider import views as lti_views
app_name = 'django_lti_tool_provider'
urlpatterns = [
url(r'', lti_views.LTIView.as_view(), name='lti')
]
|
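For context on the app_name added above: once the included URLconf declares an application namespace, a project can attach an instance namespace when including it (Django 1.9+). The prefix and namespace below are illustrative, not taken from any particular project.
# Hypothetical project-level urls.py
from django.conf.urls import include, url

urlpatterns = [
    url(r'^lti/', include('django_lti_tool_provider.urls', namespace='lti-provider')),
]
# reverse('lti-provider:lti') then resolves against this instance namespace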
from __future__ import unicode_literals
from ...models import Address
from ...models import AgencyDocument
from .agency_chain import AgencyChain
from ..base import Resource
from ..tour import Promotion
class Agency(Resource):
_resource_name = 'agencies'
_is_listable = False
_is_parent_resource = True
_as_is_fields = ['id', 'href', 'name', 'booking_currencies', 'latitude', 'longitude']
_date_time_fields_local = ['date_created']
_model_fields = [('address', Address)]
_resource_fields = [('agency_chain', AgencyChain)]
_model_collection_fields = [('documents', AgencyDocument)]
_resource_collection_fields = [
('bookings', 'Booking'),
('agents', 'Agent'),
('promotions', Promotion),
]
| Add new Agency resource fields
| from __future__ import unicode_literals
from ...models import Address
from ...models import AgencyDocument
from ...models.base import BaseModel
from .agency_chain import AgencyChain
from ..base import Resource
from ..tour import Promotion
class AgencyEmail(BaseModel):
_as_is_fields = ['type', 'address']
class Agency(Resource):
_resource_name = 'agencies'
_is_listable = False
_is_parent_resource = True
_as_is_fields = ['id', 'href', 'name', 'booking_currencies', 'latitude', 'longitude', 'transactional_email']
_date_time_fields_local = ['date_created']
_model_fields = [('address', Address)]
_resource_fields = [('agency_chain', AgencyChain)]
_model_collection_fields = [
('documents', AgencyDocument),
('emails', AgencyEmail),
]
_resource_collection_fields = [
('bookings', 'Booking'),
('agents', 'Agent'),
('promotions', Promotion),
]
|
# -*- coding: utf-8 -*-
from stations.models import *
from django.test import TestCase
from datetime import datetime
import pytz
class TestProducts(TestCase):
fixtures = [ 'initial_data.yaml', '*']
def setUp(self):
self.device = Device.objects.filter(product__name = 'CMP 11')[0]
def test_serialization(self):
# check if the __str__ method is defined to return the object serial_number and a device product name.
self.assertEquals(str(self.device), self.device.serial_number + " (" + self.device.product.name + ")") | stations: Correct the name of the devices' test case to TestDevices (copy&paste bug).
| # -*- coding: utf-8 -*-
from stations.models import *
from django.test import TestCase
from datetime import datetime
import pytz
class TestDevices(TestCase):
fixtures = [ 'initial_data.yaml', '*']
def setUp(self):
self.device = Device.objects.filter(product__name = 'CMP 11')[0]
def test_serialization(self):
# check if the __str__ method is defined to return the object serial_number and a device product name.
self.assertEquals(str(self.device), self.device.serial_number + " (" + str(self.device.product) + ")") |
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
from go.apps.surveys.tasks import export_vxpolls_data
class SendSurveyAction(ConversationAction):
action_name = 'send_survey'
action_display_name = 'Send Survey'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_survey', batch_id=self._conv.batch.key,
msg_options={}, delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
action_display_verb = 'Send CSV via e-mail'
def perform_action(self, action_data):
return export_vxpolls_data.delay(self._conv.user_account.key,
self._conv.key)
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'surveys'
actions = (
SendSurveyAction,
DownloadUserDataAction,
)
| Move survey action celery task import to method scope.
| from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class SendSurveyAction(ConversationAction):
action_name = 'send_survey'
action_display_name = 'Send Survey'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_survey', batch_id=self._conv.batch.key,
msg_options={}, delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
action_display_verb = 'Send CSV via e-mail'
def perform_action(self, action_data):
# This is Django-only, but the module get imported in vumi-land.
from go.apps.surveys.tasks import export_vxpolls_data
return export_vxpolls_data.delay(self._conv.user_account.key,
self._conv.key)
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'surveys'
actions = (
SendSurveyAction,
DownloadUserDataAction,
)
|
from sympy.core.compatibility import default_sort_key, as_int, ordered
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_ordered():
# Issue 7210 - this had been failing with python2/3 problems
assert (list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
# warnings should not be raised for identical items
l = [1, 1]
assert list(ordered(l, warn=True)) == l
l = [[1], [2], [1]]
assert list(ordered(l, warn=True)) == [[1], [1], [2]]
raises(ValueError, lambda: list(ordered(['a', 'ab'], keys=[lambda x: x[0]],
default=False, warn=True)))
| Test some basic properties of iterable()
| from sympy.core.compatibility import default_sort_key, as_int, ordered, iterable
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_iterable():
assert iterable(0) == False
assert iterable(1) == False
assert iterable(None) == False
def test_ordered():
# Issue 7210 - this had been failing with python2/3 problems
assert (list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
# warnings should not be raised for identical items
l = [1, 1]
assert list(ordered(l, warn=True)) == l
l = [[1], [2], [1]]
assert list(ordered(l, warn=True)) == [[1], [1], [2]]
raises(ValueError, lambda: list(ordered(['a', 'ab'], keys=[lambda x: x[0]],
default=False, warn=True)))
|
import i18n
from i18n.models import TranslatableModel
class Document(TranslatableModel):
charfield = i18n.LocalizedCharField(max_length=50)
textfield = i18n.LocalizedTextField(max_length=512)
filefield = i18n.LocalizedFileField(null=True, upload_to='files')
imagefield = i18n.LocalizedImageField(null=True, upload_to='images')
booleanfield = i18n.LocalizedBooleanField()
datefield = i18n.LocalizedDateField()
fkfield = i18n.LocalizedForeignKey('self', null=True, blank=True,
related_name='+')
urlfied = i18n.LocalizedURLField()
decimalfield = i18n.LocalizedDecimalField(max_digits=4, decimal_places=2)
integerfield = i18n.LocalizedIntegerField()
def __str__(self):
return '%d, %s' % (self.pk, self.charfield)
class Meta:
app_label = 'example'
| Make fields in example app non-required
| from django.db import models
import i18n
from i18n.models import TranslatableModel
class Document(TranslatableModel):
untranslated_charfield = models.CharField(max_length=50, blank=True)
charfield = i18n.LocalizedCharField(max_length=50)
textfield = i18n.LocalizedTextField(max_length=500, blank=True)
filefield = i18n.LocalizedFileField(null=True, upload_to='files', blank=True)
imagefield = i18n.LocalizedImageField(null=True, upload_to='images', blank=True)
booleanfield = i18n.LocalizedBooleanField()
datefield = i18n.LocalizedDateField(blank=True, null=True)
fkfield = i18n.LocalizedForeignKey('self', null=True, blank=True,
related_name='+')
urlfied = i18n.LocalizedURLField(null=True, blank=True)
decimalfield = i18n.LocalizedDecimalField(max_digits=4, decimal_places=2, null=True,
blank=True)
integerfield = i18n.LocalizedIntegerField(null=True, blank=True)
def __str__(self):
return '%d, %s' % (self.pk, self.charfield)
class Meta:
app_label = 'example'
|
'''
Created on May 3, 2016
@author: Martin Koerner <info@mkoerner.de>
'''
class CountGeneration(object):
def generate_counts(self,collected_features_array,feature_name):
feature_counts = {}
for instance in collected_features_array:
if feature_name in instance:
feature = instance[feature_name]
if feature in feature_counts:
feature_counts[feature] += 1
else:
feature_counts[feature] = 1
return feature_counts
def get_as_array(self,feature_counts):
feature_count_array = []
for label in feature_counts:
dict_for_label = {}
dict_for_label["label"] = label
dict_for_label["count"] = feature_counts[label]
feature_count_array.append(dict_for_label)
return feature_count_array | Add reverse sorting of count_array | '''
Created on May 3, 2016
@author: Martin Koerner <info@mkoerner.de>
'''
import operator
class CountGeneration(object):
def generate_counts(self,collected_features_array,feature_name):
feature_counts = {}
for instance in collected_features_array:
if feature_name in instance:
feature = instance[feature_name]
if feature in feature_counts:
feature_counts[feature] += 1
else:
feature_counts[feature] = 1
return feature_counts
def get_as_array(self,feature_counts):
feature_count_array = []
sorted_feature_counts = sorted(feature_counts.items(), key=operator.itemgetter(1),reverse=True)
for feature_count_tuple in sorted_feature_counts:
dict_for_label = {}
dict_for_label["label"] = feature_count_tuple[0]
dict_for_label["count"] = feature_count_tuple[1]
feature_count_array.append(dict_for_label)
return feature_count_array
|
"""Message broker that sends to Unix domain sockets."""
import os
import socket
import time
class MessageProducer(object):
"""Message broker that sends to Unix domain sockets."""
def __init__(self, message_type):
self._message_type = message_type
socket_address = os.sep.join(
('.', 'messaging', 'sockets', message_type)
)
if not os.path.exists(socket_address):
raise ValueError('Socket does not exist: {}'.format(socket_address))
self._socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
self._socket.connect(socket_address)
def publish(self, message):
"""Publishes a message."""
self._socket.send(message.encode('utf-8'))
def kill(self):
"""Kills all listening consumers."""
try:
self._socket.send(b'QUIT')
except ConnectionRefusedError: # pylint: disable=undefined-variable
pass
| Use sendall instead of send for socket messages
I kept getting Errno 111 connection refused errors; I hope this fixes
it.
| """Message broker that sends to Unix domain sockets."""
import os
import socket
import time
class MessageProducer(object):
"""Message broker that sends to Unix domain sockets."""
def __init__(self, message_type):
self._message_type = message_type
socket_address = os.sep.join(
('.', 'messaging', 'sockets', message_type)
)
if not os.path.exists(socket_address):
raise ValueError('Socket does not exist: {}'.format(socket_address))
self._socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
self._socket.connect(socket_address)
def publish(self, message):
"""Publishes a message."""
self._socket.sendall(message.encode('utf-8'))
def kill(self):
"""Kills all listening consumers."""
try:
self._socket.sendall(b'QUIT')
except ConnectionRefusedError: # pylint: disable=undefined-variable
pass
|
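Background on the send/sendall swap above, for stream sockets: send may transmit only part of the buffer and returns how many bytes went out, while sendall keeps writing until everything is sent or an error is raised. A rough sketch of sendall in terms of send; the helper name is made up.
def send_everything(sock, data):
    """Roughly what socket.sendall does for a stream socket."""
    total_sent = 0
    while total_sent < len(data):
        sent = sock.send(data[total_sent:])  # may write fewer bytes than requested
        if sent == 0:
            raise RuntimeError('socket connection broken')
        total_sent += sent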
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_press_releases', '0008_auto_20161128_1049'),
]
operations = [
migrations.AddField(
model_name='pressreleaselisting',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this item", blank=True),
),
migrations.AddField(
model_name='pressreleaselisting',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this item', blank=True),
),
]
| Change help text wording to follow WorkflowStateMixin
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_press_releases', '0008_auto_20161128_1049'),
]
operations = [
migrations.AddField(
model_name='pressreleaselisting',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this content", blank=True),
),
migrations.AddField(
model_name='pressreleaselisting',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this content', blank=True),
),
]
|
import os
import pytest
import salt.modules.win_servermanager as win_servermanager
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules():
return {win_servermanager: {}}
def test_install():
mock_out = {
"FeatureResult": {
}
}
with patch.object(win_servermanager, "_pshell_json", return_value=""):
| Add some unit tests for install
| import os
import pytest
import salt.modules.win_servermanager as win_servermanager
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules():
return {
win_servermanager: {
"__grains__": {"osversion": "6.2"}
}
}
def test_install():
mock_out = {
'Success': True,
'RestartNeeded': 1,
'FeatureResult': [
{
'Id': 338,
'Name': 'XPS-Viewer',
'DisplayName': 'XPS Viewer',
'Success': True,
'RestartNeeded': False,
'Message': '',
'SkipReason': 0
}
],
'ExitCode': 0
}
expected = {
"ExitCode": 0,
"RestartNeeded": False,
"Restarted": False,
"Features": {
"XPS-Viewer": {
"DisplayName": "XPS Viewer",
"Message": "",
"RestartNeeded": False,
"SkipReason": 0,
"Success": True
}
},
"Success": True}
mock_reboot = MagicMock(return_value=True)
with patch.object(win_servermanager, "_pshell_json", return_value=mock_out), \
patch.dict(win_servermanager.__salt__, {"system.reboot": mock_reboot}):
result = win_servermanager.install("XPS-Viewer")
assert result == expected
def test_install_restart():
mock_out = {
'Success': True,
'RestartNeeded': 1,
'FeatureResult': [
{
'Id': 338,
'Name': 'XPS-Viewer',
'DisplayName': 'XPS Viewer',
'Success': True,
'RestartNeeded': True,
'Message': '',
'SkipReason': 0
}
],
'ExitCode': 0
}
expected = {
"ExitCode": 0,
"RestartNeeded": True,
"Restarted": True,
"Features": {
"XPS-Viewer": {
"DisplayName": "XPS Viewer",
"Message": "",
"RestartNeeded": True,
"SkipReason": 0,
"Success": True
}
},
"Success": True}
mock_reboot = MagicMock(return_value=True)
with patch.object(win_servermanager, "_pshell_json", return_value=mock_out), \
patch.dict(win_servermanager.__salt__, {"system.reboot": mock_reboot}):
result = win_servermanager.install("XPS-Viewer", restart=True)
mock_reboot.assert_called_once()
assert result == expected
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql("""update `tabSales Order` set delivery_status = 'Not Delivered'
where delivery_status = 'Delivered' and ifnull(per_delivered, 0) = 0""")
frappe.db.sql("""update `tabSales Order` set billing_status = 'Not Billed'
where billing_status = 'Billed' and ifnull(per_billed, 0) = 0""") | Update delivery and billing status in SO
| # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql("""update `tabSales Order` set delivery_status = 'Not Delivered'
where delivery_status = 'Delivered' and ifnull(per_delivered, 0) = 0 and docstatus = 0""")
frappe.db.sql("""update `tabSales Order` set billing_status = 'Not Billed'
where billing_status = 'Billed' and ifnull(per_billed, 0) = 0 and docstatus = 0""") |
#
# HamperAuthenticator is the class to handle the authentication part of the provisioning portal.
# Instantiate with the email and password you want, it'll pass back the cookie jar if successful,
# or an error message on failure
#
from helpers.driver import HamperDriver
from helpers.error import HamperError
from termcolor import colored
class HamperAuthenticator(object):
def __init__(self):
super(HamperAuthenticator, self).__init__()
def sign_in(self, email=None, password=None):
# Grab the HamperDriver singleton
driver = HamperDriver()
print colored("Authenticating user...", "blue")
# Open the profile URL. This will forward to the sign in page if session is invalid
driver.get("https://developer.apple.com/account/ios/profile/")
email_element = driver.find_element_by_name("appleId")
email_element.send_keys(email)
password_element = driver.find_element_by_name("accountPassword")
password_element.send_keys(password)
driver.find_element_by_id("submitButton2").click()
if len(driver.find_elements_by_class_name("dserror")) > 0:
raise Exception(HamperError(HamperError.HECodeLogInError, driver.find_element_by_class_name("dserror").get_attribute("innerHTML"))) | Throw exception if no login credentials are provided
| #
# HamperAuthenticator is the class to handle the authentication part of the provisioning portal.
# Instantiate with the email and password you want, it'll pass back the cookie jar if successful,
# or an error message on failure
#
from helpers.driver import HamperDriver
from helpers.error import HamperError
from termcolor import colored
class HamperAuthenticator(object):
def __init__(self):
super(HamperAuthenticator, self).__init__()
def sign_in(self, email=None, password=None):
print colored("Authenticating user...", "blue")
# If no login credentials were provided
if not email or not password:
raise Exception(HamperError(HamperError.HECodeLogInError, "Either the email and/or password wasn't provided. Call 'hamper auth login' with the login credentials."))
# Grab the HamperDriver singleton
driver = HamperDriver()
# Open the profile URL. This will forward to the sign in page if session is invalid
driver.get("https://developer.apple.com/account/ios/profile/")
email_element = driver.find_element_by_name("appleId")
email_element.send_keys(email)
password_element = driver.find_element_by_name("accountPassword")
password_element.send_keys(password)
driver.find_element_by_id("submitButton2").click()
if len(driver.find_elements_by_class_name("dserror")) > 0:
raise Exception(HamperError(HamperError.HECodeLogInError, driver.find_element_by_class_name("dserror").get_attribute("innerHTML"))) |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def update_total_milestones(apps, schema_editor):
Project = apps.get_model("projects", "Project")
for project in Project.objects.filter(total_milestones__isnull=True):
project.total_milestones = 0
project.save()
class Migration(migrations.Migration):
dependencies = [
('projects', '0005_membership_invitation_extra_text'),
]
operations = [
migrations.RunPython(update_total_milestones),
migrations.AlterField(
model_name='project',
name='total_milestones',
field=models.IntegerField(verbose_name='total of milestones', default=0),
),
]
| Make 0006 migration of project more efficient.
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def update_total_milestones(apps, schema_editor):
Project = apps.get_model("projects", "Project")
qs = Project.objects.filter(total_milestones__isnull=True)
qs.update(total_milestones=0)
class Migration(migrations.Migration):
dependencies = [
('projects', '0005_membership_invitation_extra_text'),
]
operations = [
migrations.RunPython(update_total_milestones),
migrations.AlterField(
model_name='project',
name='total_milestones',
field=models.IntegerField(verbose_name='total of milestones', default=0),
),
]
|
from django.contrib import admin
from newswall.models import Source, Story
admin.site.register(Source,
list_display=('name', 'is_active', 'ordering'),
list_filter=('is_active',),
prepopulated_fields={'slug': ('name',)},
)
admin.site.register(Story,
date_hierarchy='timestamp',
list_display=('title', 'source', 'is_active', 'timestamp'),
list_filter=('source', 'is_active'),
search_fields=('object_url', 'title', 'author', 'content'),
)
| Make a few fields editable from the changelist
| from django.contrib import admin
from newswall.models import Source, Story
admin.site.register(Source,
list_display=('name', 'is_active', 'ordering'),
list_editable=('is_active', 'ordering'),
list_filter=('is_active',),
prepopulated_fields={'slug': ('name',)},
)
admin.site.register(Story,
date_hierarchy='timestamp',
list_display=('title', 'source', 'is_active', 'timestamp'),
list_editable=('is_active',),
list_filter=('source', 'is_active'),
search_fields=('object_url', 'title', 'author', 'content'),
)
|
#!/usr/bin/python
import ctypes
import errno
import os
import select
import traceback
import cepoll
import ctimerfd
def on_timer ():
pass
def main ():
spec = ctimerfd.itimerspec ()
spec.it_interval.tv_sec = 0
spec.it_interval.tv_nsec = long (1e9/60)
spec.it_value.tv_sec = 0
spec.it_value.tv_nsec = 1
t = ctimerfd.timerfd_create (ctimerfd.CLOCK_MONOTONIC, ctimerfd.TFD_CLOEXEC|ctimerfd.TFD_NONBLOCK)
ctimerfd.timerfd_settime (t, 0, ctypes.pointer (spec), None)
poll = select.epoll.fromfd (cepoll.epoll_create (cepoll.EPOLL_CLOEXEC))
poll.register (t, select.EPOLLIN)
while True:
try:
for fd, event in poll.poll ():
try:
if fd == t:
on_timer ()
except:
traceback.print_exc ()
except IOError, e:
if e.errno == errno.EINTR:
continue
raise
if __name__ == '__main__':
main ()
| Add wrapper functions to deal with EINTR and exceptions in dispatched-to-functions
| #!/usr/bin/python
import ctypes
import errno
import os
import select
import traceback
import cepoll
import ctimerfd
def on_timer ():
pass
def eintr_wrap (fn, *args, **kwargs):
while True:
try:
return fn (*args, **kwargs)
except IOError, e:
if e.errno == errno.EINTR:
continue
raise
def wrap (fn, *args, **kwargs):
try:
fn (*args, **kwargs)
except:
traceback.print_exc ()
def main ():
spec = ctimerfd.itimerspec ()
spec.it_interval.tv_sec = 0
spec.it_interval.tv_nsec = long (1e9/60)
spec.it_value.tv_sec = 0
spec.it_value.tv_nsec = 1
t = ctimerfd.timerfd_create (ctimerfd.CLOCK_MONOTONIC, ctimerfd.TFD_CLOEXEC|ctimerfd.TFD_NONBLOCK)
ctimerfd.timerfd_settime (t, 0, ctypes.pointer (spec), None)
epoll = select.epoll.fromfd (cepoll.epoll_create (cepoll.EPOLL_CLOEXEC))
epoll.register (t, select.EPOLLIN)
while True:
for fd, event in eintr_wrap (epoll.poll):
if fd == t:
os.read (t, 8)
wrap (on_timer)
if __name__ == '__main__':
main ()
|
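The eintr_wrap pattern above can also be written as a decorator; a sketch with made-up names. On Python 3.5+ this is largely unnecessary, since PEP 475 retries system calls interrupted by signals automatically.
import errno
import functools

def retry_on_eintr(fn):
    """Retry a blocking call whenever it is interrupted by a signal."""
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        while True:
            try:
                return fn(*args, **kwargs)
            except (IOError, OSError) as err:
                if err.errno != errno.EINTR:
                    raise
    return wrapper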
import os
from norc.settings import (NORC_LOG_DIR, BACKUP_SYSTEM,
AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
| Move AWS_ setting imports under the check for AmazonS3 so Norc doesn't break without them.
|
import os
from norc.settings import NORC_LOG_DIR, BACKUP_SYSTEM
if BACKUP_SYSTEM == 'AmazonS3':
from norc.norc_utils.aws import set_s3_key
from norc.settings import (AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
def s3_backup(fp, target):
NUM_TRIES = 3
for i in range(NUM_TRIES):
try:
set_s3_key(target, fp)
return True
except:
if i == NUM_TRIES - 1:
raise
return False
BACKUP_SYSTEMS = {
'AmazonS3': s3_backup,
}
def backup_log(rel_log_path):
log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
log_file = open(log_path, 'rb')
target = os.path.join('norc_logs/', rel_log_path)
try:
return _backup_file(log_file, target)
finally:
log_file.close()
def _backup_file(fp, target):
if BACKUP_SYSTEM:
return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
else:
return False
|
from robotsim import *
import atexit
atexit.register(destroy)
__all__ = ['WorldModel','RobotModel','RobotModelLink','RigidObjectModel','TerrainModel','Mass','ContactParameters',
'SimRobotController','SimRobotSensor','SimBody','Simulator',
'Geometry3D','Appearance','DistanceQuerySettings','DistanceQueryResult','TriangleMesh','PointCloud','GeometricPrimitive','VolumeGrid',
'IKObjective','IKSolver','GeneralizedIKObjective','GeneralizedIKSolver',
'model','math','io','plan','sim']
| Allow some compatibility between python2 and updated python 3 files
| from __future__ import print_function,division
from robotsim import *
import atexit
atexit.register(destroy)
__all__ = ['WorldModel','RobotModel','RobotModelLink','RigidObjectModel','TerrainModel','Mass','ContactParameters',
'SimRobotController','SimRobotSensor','SimBody','Simulator',
'Geometry3D','Appearance','DistanceQuerySettings','DistanceQueryResult','TriangleMesh','PointCloud','GeometricPrimitive','VolumeGrid',
'IKObjective','IKSolver','GeneralizedIKObjective','GeneralizedIKSolver',
'model','math','io','plan','sim']
|
def render_video(render_type,
out_frame_rate=30, overlay=None, extra_cmd=None):
import os
import subprocess
from rocketleaguereplayanalysis.render.do_render import get_video_prefix
from rocketleaguereplayanalysis.parser.frames import get_frames
from rocketleaguereplayanalysis.util.sync import get_sync_time_type
video_prefix = get_video_prefix()
cmd = ['ffmpeg',
'-loop', '1',
'-i', os.path.join('assets', overlay + '.png'),
'-t', str(get_frames()[-1]['time'][get_sync_time_type()])]
cmd += extra_cmd
cmd += ['-r', str(out_frame_rate), render_type + '.mp4', '-y']
print('FFmpeg Command:', cmd)
p = subprocess.Popen(cmd, cwd=video_prefix, stderr=subprocess.STDOUT)
p.communicate()
| Fix render output (missing crf value)
| def render_video(render_type,
out_frame_rate=30, overlay=None, extra_cmd=None):
import os
import subprocess
from rocketleaguereplayanalysis.render.do_render import get_video_prefix
from rocketleaguereplayanalysis.parser.frames import get_frames
from rocketleaguereplayanalysis.util.sync import get_sync_time_type
video_prefix = get_video_prefix()
cmd = ['ffmpeg',
'-loop', '1',
'-i', os.path.join('assets', overlay + '.png'),
'-t', str(get_frames()[-1]['time'][get_sync_time_type()])]
cmd += extra_cmd
cmd += ['-r', str(out_frame_rate),
'-crf', '18',
render_type + '.mp4', '-y']
print('FFmpeg Command:', cmd)
p = subprocess.Popen(cmd, cwd=video_prefix, stderr=subprocess.STDOUT)
p.communicate()
|
import enum
import json
from pyglab.exceptions import RequestError
import requests
@enum.unique
class RequestType(enum.Enum):
GET = 1
POST = 2
PUT = 3
DELETE = 4
class ApiRequest:
_request_creators = {
RequestType.GET: requests.get,
RequestType.POST: requests.post,
RequestType.PUT: requests.put,
RequestType.DELETE: requests.delete,
}
def __init__(self, request_type, url, token, params={}, sudo=None,
page=None, per_page=None):
# Build header
header = {'PRIVATE-TOKEN': token}
if sudo is not None:
header['SUDO', sudo]
# Build parameters
if page is not None:
params['page'] = page
if per_page is not None:
params['per_page'] = per_page
r = self._request_creators[request_type](url, params=params,
headers=header)
content = json.loads(r.text)
if RequestError.is_error(r.status_code):
raise RequestError.error_class(r.status_code)(content)
self._content = content
@property
def content(self):
return self._content
| Make RequestType a normal class, not an enum.
This removes the restriction of needing Python >= 3.4. RequestType is
now a normal class with class variables (fixes #19).
| import json
from pyglab.exceptions import RequestError
import requests
class RequestType(object):
GET = 1
POST = 2
PUT = 3
DELETE = 4
class ApiRequest:
_request_creators = {
RequestType.GET: requests.get,
RequestType.POST: requests.post,
RequestType.PUT: requests.put,
RequestType.DELETE: requests.delete,
}
def __init__(self, request_type, url, token, params={}, sudo=None,
page=None, per_page=None):
# Build header
header = {'PRIVATE-TOKEN': token}
if sudo is not None:
header['SUDO', sudo]
# Build parameters
if page is not None:
params['page'] = page
if per_page is not None:
params['per_page'] = per_page
r = self._request_creators[request_type](url, params=params,
headers=header)
content = json.loads(r.text)
if RequestError.is_error(r.status_code):
raise RequestError.error_class(r.status_code)(content)
self._content = content
@property
def content(self):
return self._content
|
# encoding: utf-8
from __future__ import unicode_literals
from web.auth import authenticate, deauthenticate
from web.core import config, url
from web.core.http import HTTPFound
from brave.api.client import API
log = __import__('logging').getLogger(__name__)
class AuthenticationMixIn(object):
def authorize(self):
# Perform the initial API call and direct the user.
api = API(config['api.endpoint'], config['api.identity'], config['api.private'], config['api.public'])
success = str(url.complete('/authorized'))
failure = str(url.complete('/nolove'))
result = api.core.authorize(success=success, failure=failure)
raise HTTPFound(location=result.location)
def ciao(self):
deauthenticate(True)
raise HTTPFound(location='/')
def authorized(self, token):
# Capture the returned token and use it to look up the user details.
# If we don't have this character, create them.
# Store the token against this user account.
# Note that our own 'sessions' may not last beyond the UTC date returned as 'expires'.
# (Though they can be shorter!)
# We request an authenticated session from the server.
authenticate(token)
raise HTTPFound(location='/')
def nolove(self, token):
return 'sound.irc.template.whynolove', dict()
| Fix a bug where user-agents could specify their own session ID.
| # encoding: utf-8
from __future__ import unicode_literals
from web.auth import authenticate, deauthenticate
from web.core import config, url, session
from web.core.http import HTTPFound
from brave.api.client import API
log = __import__('logging').getLogger(__name__)
class AuthenticationMixIn(object):
def authorize(self):
# Perform the initial API call and direct the user.
api = API(config['api.endpoint'], config['api.identity'], config['api.private'], config['api.public'])
success = str(url.complete('/authorized'))
failure = str(url.complete('/nolove'))
result = api.core.authorize(success=success, failure=failure)
raise HTTPFound(location=result.location)
def ciao(self):
deauthenticate(True)
raise HTTPFound(location='/')
def authorized(self, token):
# Capture the returned token and use it to look up the user details.
# If we don't have this character, create them.
# Store the token against this user account.
# Note that our own 'sessions' may not last beyond the UTC date returned as 'expires'.
# (Though they can be shorter!)
# Prevent users from specifying their session IDs (Some user-agents were sending null ids, leading to users
# authenticated with a session id of null
session.regenerate_id()
# We request an authenticated session from the server.
authenticate(token)
raise HTTPFound(location='/')
def nolove(self, token):
return 'sound.irc.template.whynolove', dict()
|
#!/usr/bin/env python3
import logging
import os
import sys
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, ZULIP_PATH)
from scripts.lib.setup_path import setup_path
from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, assert_not_running_as_root, parse_version_from
from version import ZULIP_VERSION as new_version
assert_not_running_as_root()
setup_path()
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"
import django
from django.db import connection
from django.db.migrations.loader import MigrationLoader
django.setup()
loader = MigrationLoader(connection)
missing = set(loader.applied_migrations)
for key, migration in loader.disk_migrations.items():
missing.discard(key)
missing.difference_update(migration.replaces)
if not missing:
sys.exit(0)
current_version = parse_version_from(os.path.join(DEPLOYMENTS_DIR, "current"))
logging.error(
"This is not an upgrade -- the current deployment (version %s) "
"contains database migrations which %s (version %s) does not.",
current_version,
len(missing),
ZULIP_PATH,
new_version,
)
sys.exit(1)
| scripts: Fix typo in logging statement.
| #!/usr/bin/env python3
import logging
import os
import sys
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, ZULIP_PATH)
from scripts.lib.setup_path import setup_path
from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, assert_not_running_as_root, parse_version_from
from version import ZULIP_VERSION as new_version
assert_not_running_as_root()
setup_path()
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"
import django
from django.db import connection
from django.db.migrations.loader import MigrationLoader
django.setup()
loader = MigrationLoader(connection)
missing = set(loader.applied_migrations)
for key, migration in loader.disk_migrations.items():
missing.discard(key)
missing.difference_update(migration.replaces)
if not missing:
sys.exit(0)
current_version = parse_version_from(os.path.join(DEPLOYMENTS_DIR, "current"))
logging.error(
"This is not an upgrade -- the current deployment (version %s) "
"contains %s database migrations which %s (version %s) does not.",
current_version,
len(missing),
ZULIP_PATH,
new_version,
)
sys.exit(1)
|
# -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from filer.models import File
from filer.models import Folder
@login_required
def documents(request):
files = File.objects.all()
folders = Folder.objects.all()
#print(files[0])
return render(request, 'intern/documents.html', {'files': files, 'folders': folders}) | Sort files by last modification
| # -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from filer.models import File
from filer.models import Folder
@login_required
def documents(request):
files = File.objects.all().order_by("-modified_at")
folders = Folder.objects.all()
#print(files[0])
return render(request, 'intern/documents.html', {'files': files, 'folders': folders}) |
import os
from kokki import *
Package("erlang")
Script("install-busket",
not_if = lambda:os.path.exists(env.config.busket.path),
cwd = "/usr/local/src",
code = (
"git clone git://github.com/samuel/busket.git busket\n"
"cd busket\n"
"make release\n"
"mv rel/busket {install_path}\n"
).format(install_path=env.config.busket.path)
)
| Install mercurial to install busket
|
import os
from kokki import *
Package("erlang")
Package("mercurial",
provider = "kokki.providers.package.easy_install.EasyInstallProvider")
Script("install-busket",
not_if = lambda:os.path.exists(env.config.busket.path),
cwd = "/usr/local/src",
code = (
"git clone git://github.com/samuel/busket.git busket\n"
"cd busket\n"
"make release\n"
"mv rel/busket {install_path}\n"
).format(install_path=env.config.busket.path)
)
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class InputTests(TranspileTestCase):
pass
class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["input"]
not_implemented = [
'test_bool',
'test_bytearray',
'test_bytes',
'test_class',
'test_complex',
'test_dict',
'test_float',
'test_frozenset',
'test_int',
'test_list',
'test_set',
'test_str',
'test_tuple',
]
| Disable builtin tests for input() as it hangs
| from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class InputTests(TranspileTestCase):
pass
# class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
# functions = ["input"]
# not_implemented = [
# 'test_bool',
# 'test_bytearray',
# 'test_bytes',
# 'test_class',
# 'test_complex',
# 'test_dict',
# 'test_float',
# 'test_frozenset',
# 'test_int',
# 'test_list',
# 'test_set',
# 'test_str',
# 'test_tuple',
# ]
|
from os import path
############# Configuration #############
DEBUG = True
SECRET_KEY = "development_key"
RESULTS_PATH = path.join(path.dirname(path.dirname(__file__)), 'results')
RESULTS_URL = '/upload'
NCBI_URL = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi'
NCBI_URL += '?db=nucleotide&email="%s"&tool="antiSMASH"&id=%s&rettype=gbwithparts'
NCBI_URL += '&retmode=text'
NCBI_PROT_URL = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi'
NCBI_PROT_URL += '?db=protein&email="%s"&tool="antiSMASH"&id=%s&rettype=fasta'
NCBI_PROT_URL += '&retmode=text'
# Flask-Mail settings
MAIL_SERVER = "smtpserv.uni-tuebingen.de"
DEFAULT_MAIL_SENDER = "kai.blin@biotech.uni-tuebingen.de"
DEFAULT_RECIPIENTS = ["kai.blin@biotech.uni-tuebingen.de"]
# Flask-SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../jobs.db'
# Flask-Downloader settings
DEFAULT_DOWNLOAD_DIR = RESULTS_PATH
#########################################
| settings: Add some error states for the NCBI download option
Signed-off-by: Kai Blin <94ddc6985b47aef772521e302594241f46a8f665@biotech.uni-tuebingen.de>
| from os import path
############# Configuration #############
DEBUG = True
SECRET_KEY = "development_key"
RESULTS_PATH = path.join(path.dirname(path.dirname(__file__)), 'results')
RESULTS_URL = '/upload'
NCBI_URL = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi'
NCBI_URL += '?db=nucleotide&email="%s"&tool="antiSMASH"&id=%s&rettype=gbwithparts'
NCBI_URL += '&retmode=text'
NCBI_PROT_URL = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi'
NCBI_PROT_URL += '?db=protein&email="%s"&tool="antiSMASH"&id=%s&rettype=fasta'
NCBI_PROT_URL += '&retmode=text'
# Flask-Mail settings
MAIL_SERVER = "smtpserv.uni-tuebingen.de"
DEFAULT_MAIL_SENDER = "kai.blin@biotech.uni-tuebingen.de"
DEFAULT_RECIPIENTS = ["kai.blin@biotech.uni-tuebingen.de"]
# Flask-SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../jobs.db'
# Flask-Downloader settings
DEFAULT_DOWNLOAD_DIR = RESULTS_PATH
BAD_CONTENT = ('Error reading from remote server', 'Bad gateway', 'Cannot process ID list', 'server is temporarily unable to service your request')
#########################################
|
#ODB2 datalogger
import obd
connection = obd.OBD()
while true:
request = connection.query(obd.commands.RPM)
if not r.is_null():
print(r.value) | Handle ctrl+c with signal
Fix more typos
| #ODB2 datalogger
import obd
import signal
import sys
#What to do when we receive a signal
def signal_handler(signal, frame):
connection.close()
sys.exit(0)
#Register our signal handler
signal.signal(signal.SIGINT, signal_handler)
#Find and connect OBD adapter
connection = obd.OBD()
while True:
request = connection.query(obd.commands.RPM)
if not request.is_null():
print(request.value) |
# Copyright 2016 Camptocamp SA
# Copyright 2018 Lorenzo Battistini <https://github.com/eLBati>
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl).
{
"name": "Account Fiscal Year",
"summary": "Create a menu for Account Fiscal Year",
"version": "13.0.1.0.0",
"development_status": "Beta",
"category": "Accounting",
"website": "https://github.com/OCA/account-financial-tools",
"author": "Agile Business Group, Camptocamp SA, "
"Odoo Community Association (OCA)",
"maintainers": ["eLBati"],
"license": "LGPL-3",
"application": False,
"installable": True,
"depends": ["account", "date_range"],
"data": ["data/date_range_type.xml", "views/account_views.xml"],
}
| [FIX] account_fiscal_year: Use AGPL license, as it depends on `date_range` that uses that
| # Copyright 2016 Camptocamp SA
# Copyright 2018 Lorenzo Battistini <https://github.com/eLBati>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/lgpl).
{
"name": "Account Fiscal Year",
"summary": "Create a menu for Account Fiscal Year",
"version": "13.0.1.0.0",
"development_status": "Beta",
"category": "Accounting",
"website": "https://github.com/OCA/account-financial-tools",
"author": "Agile Business Group, Camptocamp SA, "
"Odoo Community Association (OCA)",
"maintainers": ["eLBati"],
"license": "AGPL-3",
"application": False,
"installable": True,
"depends": ["account", "date_range"],
"data": ["data/date_range_type.xml", "views/account_views.xml"],
}
|
from rest_framework.views import exception_handler
from rest_framework_friendly_errors import settings
from rest_framework_friendly_errors.utils import is_pretty
def friendly_exception_handler(exc, context):
response = exception_handler(exc, context)
if response is not None:
if is_pretty(response):
return response
error_message = response.data['detail']
error_code = settings.FRIENDLY_EXCEPTION_DICT.get(
exc.__class__.__name__)
response.data.pop('detail', {})
response.data['code'] = error_code
response.data['message'] = error_message
response.data['status_code'] = response.status_code
# response.data['exception'] = exc.__class__.__name__
return response
| Build APIException all exceptions must be handled
| from rest_framework.views import exception_handler
from rest_framework.exceptions import APIException
from rest_framework_friendly_errors import settings
from rest_framework_friendly_errors.utils import is_pretty
def friendly_exception_handler(exc, context):
response = exception_handler(exc, context)
if not response and settings.FRIENDLY_CATCH_ALL_EXCEPTIONS:
response = exception_handler(APIException(exc), context)
if response is not None:
if is_pretty(response):
return response
error_message = response.data['detail']
error_code = settings.FRIENDLY_EXCEPTION_DICT.get(
exc.__class__.__name__)
response.data.pop('detail', {})
response.data['code'] = error_code
response.data['message'] = error_message
response.data['status_code'] = response.status_code
# response.data['exception'] = exc.__class__.__name__
return response
|
# -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import models, fields, api
class MedicalPrescriptionOrderLine(models.Model):
_inherit = 'medical.prescription.order.line'
disease_id = fields.Many2one(
string='Disease',
comodel_name='medical.patient.disease',
required=True,
help='Disease diagnosis related to prescription.',
)
@api.multi
@api.onchange('patient_id')
def _onchange_patient_id(self, ):
self.ensure_one()
return {
'domain': {
'disease_id': [('patient_id', '=', self.patient_id.id)],
'prescription_order_id': [
('patient_id', '=', self.patient_id.id)
],
}
}
@api.multi
@api.onchange('disease_id')
def _onchange_disease_id(self, ):
for rec_id in self:
rec_id.patient_id = rec_id.disease_id.patient_id.id
| Remove required from disease_id in medical_prescription_disease
| # -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import models, fields, api
class MedicalPrescriptionOrderLine(models.Model):
_inherit = 'medical.prescription.order.line'
disease_id = fields.Many2one(
string='Disease',
comodel_name='medical.patient.disease',
help='Disease diagnosis related to prescription.',
)
@api.multi
@api.onchange('patient_id')
def _onchange_patient_id(self, ):
self.ensure_one()
return {
'domain': {
'disease_id': [('patient_id', '=', self.patient_id.id)],
'prescription_order_id': [
('patient_id', '=', self.patient_id.id)
],
}
}
@api.multi
@api.onchange('disease_id')
def _onchange_disease_id(self, ):
for rec_id in self:
rec_id.patient_id = rec_id.disease_id.patient_id.id
|
# TestREPLThrowReturn.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2015 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that the REPL correctly handles the case that a called function throws."""
import os, time
import unittest2
import lldb
from lldbsuite.test.lldbrepl import REPLTest, load_tests
import lldbsuite.test.decorators as decorators
class REPLThrowReturnTestCase (REPLTest):
mydir = REPLTest.compute_mydir(__file__)
@decorators.swiftTest
@decorators.skipUnlessDarwin
@decorators.no_debug_info_test
@decorators.expectedFlakeyDarwin
def testREPL(self):
REPLTest.testREPL(self)
def doTest(self):
self.sendline('import Foundation; Data()')
self.sendline('enum VagueProblem: Error { case SomethingWentWrong }; func foo() throws -> Int { throw VagueProblem.SomethingWentWrong }')
self.promptSync()
self.command('foo()', patterns=['\\$E0', 'SomethingWentWrong'])
| Mark this test as xfail
| # TestREPLThrowReturn.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2015 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that the REPL correctly handles the case that a called function throws."""
import os, time
import unittest2
import lldb
from lldbsuite.test.lldbrepl import REPLTest, load_tests
import lldbsuite.test.decorators as decorators
class REPLThrowReturnTestCase (REPLTest):
mydir = REPLTest.compute_mydir(__file__)
@decorators.swiftTest
@decorators.skipUnlessDarwin
@decorators.no_debug_info_test
@decorators.expectedFailureAll(oslist=["macosx"], bugnumber="rdar://27648290")
def testREPL(self):
REPLTest.testREPL(self)
def doTest(self):
self.sendline('import Foundation; Data()')
self.sendline('enum VagueProblem: Error { case SomethingWentWrong }; func foo() throws -> Int { throw VagueProblem.SomethingWentWrong }')
self.promptSync()
self.command('foo()', patterns=['\\$E0', 'SomethingWentWrong'])
|
from tomviz._wrapping import PipelineStateManagerBase
class PipelineStateManager(PipelineStateManagerBase):
_instance = None
# Need to define a constructor as the implementation on the C++ side is
# static.
def __init__(self):
pass
def __call__(cls):
if cls._instance is None:
cls._instance = super(PipelineStateManager, cls).__call__()
return cls._instances | Fix singleton to work with wrapped manager class
Signed-off-by: Chris Harris <a361e89d1eba6c570561222d75facbbf7aaeeafe@kitware.com>
| from tomviz._wrapping import PipelineStateManagerBase
class PipelineStateManager(PipelineStateManagerBase):
_instance = None
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = PipelineStateManagerBase.__new__(cls, *args, **kwargs)
return cls._instance
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from openstack.telemetry.v2 import sample
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestSample(base.BaseFunctionalTest):
def test_list(self):
for meter in self.conn.telemetry.meters():
sot = next(self.conn.telemetry.samples(meter))
assert isinstance(sot, sample.Sample)
| Fix the telemetry sample test
This test works fine on devstack, but on the test gate not all
the meters have samples, so only iterate over them if there are
samples.
Partial-bug: #1665495
Change-Id: I8f327737a53194aeba08925391f1976f1b506aa0
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from openstack.telemetry.v2 import sample
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestSample(base.BaseFunctionalTest):
def test_list(self):
for meter in self.conn.telemetry.meters():
for sot in self.conn.telemetry.samples(meter):
assert isinstance(sot, sample.Sample)
|
__all__ = ('version', '__version__')
version = (0, 14, 0)
__version__ = '.'.join(str(x) for x in version)
| Bump version to 0.14.999.1 (next release on this branch will be 0.15.0)
20080110143356-53eee-be816768f9cc7e023de858d0d314cbbec894ffa1.gz
| __all__ = ('version', '__version__')
version = (0, 14, 999, 1)
__version__ = '.'.join(str(x) for x in version)
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class CheckDigitNumber:
def __init__ (self, number = None):
self.__set_number(number)
def get_check_digit (self):
if self:
return self.generate_from_int(self.number)
else:
return None
def has_valid_check_digit (self):
if self:
digit = self.number % 10
static = self.number // 10
return digit == self.generate_from_int(static)
else:
return False
def __bool__ (self):
return self.number is not None
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.number))
def __set_number (self, number):
if isinstance(number, int):
self.number = number
elif isinstance(number, str):
self.__try_to_extract_number_from_str(number)
else:
self.number = None
def __try_to_extract_number_from_str (self, number):
try:
self.number = int(number)
except ValueError:
self.number = None
| Make it clear that the user must implement generate_from_int
| # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class CheckDigitNumber:
def __init__ (self, number = None):
self.__set_number(number)
def generate_from_int (self, n):
raise NotImplementedError
def get_check_digit (self):
if self:
return self.generate_from_int(self.number)
else:
return None
def has_valid_check_digit (self):
if self:
digit = self.number % 10
static = self.number // 10
return digit == self.generate_from_int(static)
else:
return False
def __bool__ (self):
return self.number is not None
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.number))
def __set_number (self, number):
if isinstance(number, int):
self.number = number
elif isinstance(number, str):
self.__try_to_extract_number_from_str(number)
else:
self.number = None
def __try_to_extract_number_from_str (self, number):
try:
self.number = int(number)
except ValueError:
self.number = None
|
from __future__ import print_function, absolute_import, nested_scopes, generators, division, with_statement, unicode_literals
from skimage.external import tifffile
def hack(input_tif):
"""
This method allows to bypass the strange faulty behaviour of
skimage.external.tifffile.imread() when it gets a list of paths or
a glob pattern. This function extracts the image names and the path.
Then, one can os.chdir(path) and call tifffile.imread(name),
what will now behave well.
"""
name = []; path = str()
for i in input_tif:
name.append(i.split('/')[-1])
path_split = list(input_tif)[0].split('/')[0:-1]
for i in path_split:
path += i+'/'
return path, name | Fix tiffile hack to use os.path
| from __future__ import print_function, absolute_import, nested_scopes, generators, division, with_statement, unicode_literals
from skimage.external import tifffile
import os.path
def hack(input_tif):
"""
This method allows to bypass the strange faulty behaviour of
skimage.external.tifffile.imread() when it gets a list of paths or
a glob pattern. This function extracts the image names and the path.
Then, one can os.chdir(path) and call tifffile.imread(names),
what will now behave well.
"""
assert len(input_tif) > 0
names = []
path = str()
for i in input_tif:
names.append(os.path.basename(i))
path = os.path.dirname(input_tif[0])
return path, names |
"""Fix Folder, EASFolderSyncStatus unique constraints
Revision ID: 2525c5245cc2
Revises: 479b3b84a73e
Create Date: 2014-07-28 18:57:24.476123
"""
# revision identifiers, used by Alembic.
revision = '2525c5245cc2'
down_revision = '479b3b84a73e'
from alembic import op
import sqlalchemy as sa
from inbox.ignition import main_engine
engine = main_engine()
Base = sa.ext.declarative.declarative_base()
Base.metadata.reflect(engine)
def upgrade():
op.drop_constraint('folder_ibfk_1', 'folder', type_='foreignkey')
op.drop_constraint('account_id', 'folder', type_='unique')
op.create_foreign_key('folder_ibfk_1',
'folder', 'account',
['account_id'], ['id'])
op.create_unique_constraint('account_id',
'folder',
['account_id', 'name', 'canonical_name'])
if 'easfoldersyncstatus' in Base.metadata.tables:
op.create_unique_constraint('account_id_2',
'easfoldersyncstatus',
['account_id', 'eas_folder_id'])
def downgrade():
raise Exception('Unsupported, going back will break things.')
| Rename FK in migration 70 - For some reason, Gunks' db has it named differently than ours.
| """Fix Folder, EASFolderSyncStatus unique constraints
Revision ID: 2525c5245cc2
Revises: 479b3b84a73e
Create Date: 2014-07-28 18:57:24.476123
"""
# revision identifiers, used by Alembic.
revision = '2525c5245cc2'
down_revision = '479b3b84a73e'
from alembic import op
import sqlalchemy as sa
from inbox.ignition import main_engine
engine = main_engine()
Base = sa.ext.declarative.declarative_base()
Base.metadata.reflect(engine)
def upgrade():
op.drop_constraint('folder_fk1', 'folder', type_='foreignkey')
op.drop_constraint('account_id', 'folder', type_='unique')
op.create_foreign_key('folder_fk1',
'folder', 'account',
['account_id'], ['id'])
op.create_unique_constraint('account_id',
'folder',
['account_id', 'name', 'canonical_name'])
if 'easfoldersyncstatus' in Base.metadata.tables:
op.create_unique_constraint('account_id_2',
'easfoldersyncstatus',
['account_id', 'eas_folder_id'])
def downgrade():
raise Exception('Unsupported, going back will break things.')
|
# Generated by Django 2.2.13 on 2020-10-25 18:35
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0220_event_public_status'),
]
operations = [
migrations.AddField(
model_name='workshoprequest',
name='rq_jobs',
field=models.ManyToManyField(blank=True, help_text='This should be filled out by AMY itself.', to='autoemails.RQJob', verbose_name='Related Redis Queue jobs'),
),
]
| Fix migrations conflict after rebase
| # Generated by Django 2.2.13 on 2020-10-25 18:35
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0221_auto_20201025_1113'),
]
operations = [
migrations.AddField(
model_name='workshoprequest',
name='rq_jobs',
field=models.ManyToManyField(blank=True, help_text='This should be filled out by AMY itself.', to='autoemails.RQJob', verbose_name='Related Redis Queue jobs'),
),
]
|
#!/usr/bin/env python
from distutils.core import setup
setup(name="django-sanitizer",
version="0.3",
description="Django template filter application for sanitizing user submitted HTML",
author="Calvin Spealman",
url="http://github.com/caktus/django-sanitizer",
packages=['sanitizer', 'sanitizer.templatetags'],
)
| Make caktus the owner, listing myself as a maintainer.
| #!/usr/bin/env python
from distutils.core import setup
setup(name="django-sanitizer",
version="0.4",
description="Django template filter application for sanitizing user submitted HTML",
author="Caktus Consulting Group",
maintainer="Calvin Spealman",
maintainer_email="calvin@caktusgroup.com",
url="http://github.com/caktus/django-sanitizer",
packages=['sanitizer', 'sanitizer.templatetags'],
)
|
from setuptools import setup
with open('README.md') as f:
description = f.read()
from beewarn import VERSION
setup(name='beewarn',
version=VERSION,
description='Utility for warning about bees',
author='Alistair Lynn',
author_email='arplynn@gmail.com',
license='MIT',
long_description=description,
url='https://github.com/prophile/beewarn',
zip_safe=True,
setup_requires=['nose >=1.0, <2.0'],
entry_points = {
'console_scripts': [
'beewarn=beewarn.cli:run_cli'
]
},
packages=['beewarn'],
test_suite='nose.collector')
| Remove the README.md loading step
| from setuptools import setup
from beewarn import VERSION
setup(name='beewarn',
version=VERSION,
description='Utility for warning about bees',
author='Alistair Lynn',
author_email='arplynn@gmail.com',
license='MIT',
url='https://github.com/prophile/beewarn',
zip_safe=True,
setup_requires=['nose >=1.0, <2.0'],
entry_points = {
'console_scripts': [
'beewarn=beewarn.cli:run_cli'
]
},
packages=['beewarn'],
test_suite='nose.collector')
|
from setuptools import setup
setup(
name = 'brunnhilde',
version = '1.5.4',
url = 'https://github.com/timothyryanwalsh/brunnhilde',
author = 'Tim Walsh',
author_email = 'timothyryanwalsh@gmail.com',
py_modules = ['brunnhilde'],
scripts = ['brunnhilde.py'],
description = 'A Siegfried-based digital archives reporting tool for directories and disk images',
keywords = 'archives reporting characterization identification diskimages',
platforms = ['POSIX'],
test_suite='test',
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Topic :: Communications :: File Sharing',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Database',
'Topic :: System :: Archiving',
'Topic :: System :: Filesystems',
'Topic :: Utilities'
],
)
| Update for 1.6.0 - TODO: Add Windows | from setuptools import setup
setup(
name = 'brunnhilde',
version = '1.6.0',
url = 'https://github.com/timothyryanwalsh/brunnhilde',
author = 'Tim Walsh',
author_email = 'timothyryanwalsh@gmail.com',
py_modules = ['brunnhilde'],
scripts = ['brunnhilde.py'],
description = 'A Siegfried-based digital archives reporting tool for directories and disk images',
keywords = 'archives reporting characterization identification diskimages',
platforms = ['POSIX'],
test_suite='test',
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Topic :: Communications :: File Sharing',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Database',
'Topic :: System :: Archiving',
'Topic :: System :: Filesystems',
'Topic :: Utilities'
],
)
|
# I prefer Markdown to reStructuredText. PyPi does not. This allows people to
# install and not get any errors.
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
long_description = (
"Tavi (as in `Rikki Tikki Tavi "
"<http://en.wikipedia.org/wiki/Rikki-Tikki-Tavi>`_) "
"is an extremely thin Mongo object mapper for Python. It is a thin "
"abstraction over `pymongo <http://api.mongodb.org/python/current/>`_ "
"that allows you to easily model your applications and persist your "
"data in MongoDB. See `README.md <http://github.com/bnadlerjr/tavi>`_ "
"for more details."
)
from setuptools import setup
setup(
name='Tavi',
version='0.0.1',
author='Bob Nadler Jr.',
author_email='bnadlerjr@gmail.com',
packages=['tavi', 'tavi.test'],
url='http://pypi.python.org/pypi/Tavi/',
license='LICENSE.txt',
description='Super thin Mongo object mapper for Python.',
long_description=long_description,
install_requires=[
"inflection >= 0.2.0",
"pymongo >= 2.5.2"
]
)
| Update project URL to point to GitHub.
| # I prefer Markdown to reStructuredText. PyPi does not. This allows people to
# install and not get any errors.
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
long_description = (
"Tavi (as in `Rikki Tikki Tavi "
"<http://en.wikipedia.org/wiki/Rikki-Tikki-Tavi>`_) "
"is an extremely thin Mongo object mapper for Python. It is a thin "
"abstraction over `pymongo <http://api.mongodb.org/python/current/>`_ "
"that allows you to easily model your applications and persist your "
"data in MongoDB. See `README.md <http://github.com/bnadlerjr/tavi>`_ "
"for more details."
)
from setuptools import setup
setup(
name='Tavi',
version='0.0.1',
author='Bob Nadler Jr.',
author_email='bnadlerjr@gmail.com',
packages=['tavi', 'tavi.test'],
url='https://github.com/bnadlerjr/tavi',
license='LICENSE.txt',
description='Super thin Mongo object mapper for Python.',
long_description=long_description,
install_requires=[
"inflection >= 0.2.0",
"pymongo >= 2.5.2"
]
)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-05-08 19:56
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations
from corehq.sql_db.operations import HqRunPython
def _convert_emailed_to_array_field(apps, schema_editor):
BillingRecord = apps.get_model('accounting', 'BillingRecord')
for record in BillingRecord.objects.all():
if record.emailed_to != '':
record.emailed_to_list = record.emailed_to.split(',')
WireBillingRecord = apps.get_model('accounting', 'WireBillingRecord')
for wirerecord in WireBillingRecord.objects.all():
if wirerecord.emailed_to != '':
wirerecord.emailed_to_list = wirerecord.emailed_to.split(',')
class Migration(migrations.Migration):
dependencies = [
('accounting', '0025_auto_20180508_1952'),
]
operations = [
HqRunPython(_convert_emailed_to_array_field)
]
| Add noop to migration file
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-05-08 19:56
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations
from corehq.sql_db.operations import HqRunPython
def noop(*args, **kwargs):
pass
def _convert_emailed_to_array_field(apps, schema_editor):
BillingRecord = apps.get_model('accounting', 'BillingRecord')
for record in BillingRecord.objects.all():
if record.emailed_to != '':
record.emailed_to_list = record.emailed_to.split(',')
WireBillingRecord = apps.get_model('accounting', 'WireBillingRecord')
for wirerecord in WireBillingRecord.objects.all():
if wirerecord.emailed_to != '':
wirerecord.emailed_to_list = wirerecord.emailed_to.split(',')
class Migration(migrations.Migration):
dependencies = [
('accounting', '0025_auto_20180508_1952'),
]
operations = [
HqRunPython(_convert_emailed_to_array_field, reverse_code=noop)
]
|
from setuptools import setup
description = 'New testament greek app for django.'
long_desc = open('README.rst').read()
setup(
name='django-greekapp',
version='0.0.1',
url='https://github.com/honza/greekapp',
install_requires=['django', 'redis'],
description=description,
long_description=long_desc,
author='Honza Pokorny',
author_email='me@honza.ca',
maintainer='Honza Pokorny',
maintainer_email='me@honza.ca',
packages=['greekapp'],
package_data={
'greekapp': [
'templates/greekapp/index.html',
'static/greekapp.min.js',
'static/greekapp.css'
]
}
)
| Include nt.db with package data.
| from setuptools import setup
description = 'New testament greek app for django.'
long_desc = open('README.rst').read()
setup(
name='django-greekapp',
version='0.0.1',
url='https://github.com/honza/greekapp',
install_requires=['django', 'redis'],
description=description,
long_description=long_desc,
author='Honza Pokorny',
author_email='me@honza.ca',
maintainer='Honza Pokorny',
maintainer_email='me@honza.ca',
packages=['greekapp'],
package_data={
'greekapp': [
'templates/greekapp/index.html',
'static/greekapp.min.js',
'static/greekapp.css',
'managements/commands/nt.db'
]
}
)
|
import os
from setuptools import setup
readme_path = os.path.join(os.path.dirname(
os.path.abspath(__file__)),
'README.rst',
)
long_description = open(readme_path).read()
version_path = os.path.join(os.path.dirname(
os.path.abspath(__file__)),
'VERSION',
)
version = open(version_path).read()
setup(
name='flask-ldap3-login',
version=version,
packages=['flask_ldap3_login'],
author="Nick Whyte",
author_email='nick@nickwhyte.com',
description="LDAP Support for Flask in Python3/2",
long_description=long_description,
url='https://github.com/nickw444/flask-ldap3-login',
zip_safe=False,
install_requires=[
"ldap3",
"Flask",
"Flask-wtf",
"enum34"
],
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Environment :: Web Environment',
'Framework :: Flask',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2.6',
],
test_suite="flask_ldap3_login_tests",
)
| Fix for when using python3.5. Don't install enum34 if enum already exists (python35)
| import os
from setuptools import setup
readme_path = os.path.join(os.path.dirname(
os.path.abspath(__file__)),
'README.rst',
)
long_description = open(readme_path).read()
version_path = os.path.join(os.path.dirname(
os.path.abspath(__file__)),
'VERSION',
)
version = open(version_path).read()
requires = ['ldap3' ,'Flask', 'Flask-wtf']
try:
import enum
except Exception as e:
requires.append('enum34')
setup(
name='flask-ldap3-login',
version=version,
packages=['flask_ldap3_login'],
author="Nick Whyte",
author_email='nick@nickwhyte.com',
description="LDAP Support for Flask in Python3/2",
long_description=long_description,
url='https://github.com/nickw444/flask-ldap3-login',
zip_safe=False,
install_requires=requires,
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Environment :: Web Environment',
'Framework :: Flask',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2.6',
],
test_suite="flask_ldap3_login_tests",
)
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of gRPC Python's interaction with the python logging module"""
import unittest
import six
import grpc
import logging
class LoggingTest(unittest.TestCase):
def test_logger_not_occupied(self):
self.assertEqual(0, len(logging.getLogger().handlers))
if __name__ == '__main__':
unittest.main(verbosity=2)
| Add test for 'No handlers could be found' problem
| # Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of gRPC Python's interaction with the python logging module"""
import unittest
import six
from six.moves import reload_module
import logging
import grpc
import functools
import sys
class LoggingTest(unittest.TestCase):
def test_logger_not_occupied(self):
self.assertEqual(0, len(logging.getLogger().handlers))
def test_handler_found(self):
old_stderr = sys.stderr
sys.stderr = six.StringIO()
try:
reload_module(logging)
logging.basicConfig()
reload_module(grpc)
self.assertFalse("No handlers could be found" in sys.stderr.getvalue())
finally:
sys.stderr = old_stderr
reload_module(logging)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
import os
import unittest2 as unittest
from tkp.quality.restoringbeam import beam_invalid
from tkp.testutil.decorators import requires_data
from tkp import accessors
from tkp.testutil.data import DATAPATH
fits_file = os.path.join(DATAPATH,
'quality/noise/bad/home-pcarrol-msss-3C196a-analysis-band6.corr.fits')
@requires_data(fits_file)
class TestRestoringBeam(unittest.TestCase):
def test_header(self):
image = accessors.open(fits_file)
(semimaj, semimin, theta) = image.beam
self.assertFalse(beam_invalid(semimaj, semimin))
# TODO: this is for FOV calculation and checking
#data = tkp.quality.restoringbeam.parse_fits(image)
#frequency = image.freq_eff
#wavelength = scipy.constants.c/frequency
#d = 32.25
#fwhm = tkp.lofar.beam.fwhm(wavelength, d)
#fov = tkp.lofar.beam.fov(fwhm)
if __name__ == '__main__':
unittest.main()
| Test for infinite beam QC
| import os
import unittest2 as unittest
from tkp.quality.restoringbeam import beam_invalid
from tkp.testutil.decorators import requires_data
from tkp import accessors
from tkp.testutil.data import DATAPATH
fits_file = os.path.join(DATAPATH,
'quality/noise/bad/home-pcarrol-msss-3C196a-analysis-band6.corr.fits')
@requires_data(fits_file)
class TestRestoringBeam(unittest.TestCase):
def test_header(self):
image = accessors.open(fits_file)
(semimaj, semimin, theta) = image.beam
self.assertFalse(beam_invalid(semimaj, semimin))
# TODO: this is for FOV calculation and checking
#data = tkp.quality.restoringbeam.parse_fits(image)
#frequency = image.freq_eff
#wavelength = scipy.constants.c/frequency
#d = 32.25
#fwhm = tkp.lofar.beam.fwhm(wavelength, d)
#fov = tkp.lofar.beam.fov(fwhm)
def test_infinite(self):
smaj, smin, theta = float('inf'), float('inf'), float('inf')
self.assertTrue(beam_invalid(smaj, smin, theta))
if __name__ == '__main__':
unittest.main()
|
import unittest, argparse
from echolalia.formatter.csver import Formatter
class CsverTestCase(unittest.TestCase):
def setUp(self):
self.parser = argparse.ArgumentParser()
self.data = [{'char': chr(i), 'order': i - 96} for i in xrange(97, 100)]
self.formatter = Formatter()
def test_add_args(self):
new_parser = self.formatter.add_args(self.parser)
self.assertEqual(new_parser, self.parser)
args = new_parser.parse_args(['--with_header'])
self.assertTrue(args.with_header)
args = new_parser.parse_args([])
self.assertFalse(args.with_header)
def test_marshall_no_header(self):
new_parser = self.formatter.add_args(self.parser)
args = new_parser.parse_args([])
result = self.formatter.marshall(args, self.data)
expect = "a,1\r\nb,2\r\nc,3\r\n"
def test_marshall_with_header(self):
new_parser = self.formatter.add_args(self.parser)
args = new_parser.parse_args(['--with_header'])
result = self.formatter.marshall(args, self.data)
expect = "char,order\r\na,1\r\nb,2\r\nc,3\r\n"
self.assertEqual(result, expect)
| Fix no header test for csv formatter
| import unittest, argparse
from echolalia.formatter.csver import Formatter
class CsverTestCase(unittest.TestCase):
def setUp(self):
self.parser = argparse.ArgumentParser()
self.data = [{'char': chr(i), 'order': i - 96} for i in xrange(97, 100)]
self.formatter = Formatter()
def test_add_args(self):
new_parser = self.formatter.add_args(self.parser)
self.assertEqual(new_parser, self.parser)
args = new_parser.parse_args(['--with_header'])
self.assertTrue(args.with_header)
args = new_parser.parse_args([])
self.assertFalse(args.with_header)
def test_marshall_no_header(self):
new_parser = self.formatter.add_args(self.parser)
args = new_parser.parse_args([])
result = self.formatter.marshall(args, self.data)
expect = "a,1\r\nb,2\r\nc,3\r\n"
self.assertEqual(result, expect)
def test_marshall_with_header(self):
new_parser = self.formatter.add_args(self.parser)
args = new_parser.parse_args(['--with_header'])
result = self.formatter.marshall(args, self.data)
expect = "char,order\r\na,1\r\nb,2\r\nc,3\r\n"
self.assertEqual(result, expect)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
__version__ = '0.2.0.dev'
from folium.folium import Map, initialize_notebook
| Make features accessible from root
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
__version__ = '0.2.0.dev'
from folium.folium import Map, initialize_notebook, CircleMarker
from folium.map import FeatureGroup, FitBounds,Icon, LayerControl, Marker, Popup, TileLayer
from folium.features import (ClickForMarker, ColorScale, CustomIcon, DivIcon, GeoJson, GeoJsonStyle,
ImageOverlay, LatLngPopup, MarkerCluster, MultiPolyLine, PolyLine,
RegularPolygonMarker, TopoJson, Vega, WmsTileLayer)
|