input | instruction | output
---|---|---|
# Django settings for test_project project
DEBUG = True
# We are not really using a relational database, but tests fail without
# defining it because flush command is being run, which expects it
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
# Make this unique, and don't share it with anybody
SECRET_KEY = 'sq=uf!nqw=aibl+y1&5pp=)b7pc=c$4hnh$om*_c48r)^t!ob)'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'test_project.urls'
AUTHENTICATION_BACKENDS = (
'mongoengine.django.auth.MongoEngineBackend',
)
SESSION_ENGINE = 'mongoengine.django.sessions'
TEST_RUNNER = 'tastypie_mongoengine.test_runner.MongoEngineTestSuiteRunner'
INSTALLED_APPS = (
'tastypie',
'tastypie_mongoengine',
'test_project.test_app',
)
MONGO_DATABASE_NAME = 'test_project'
import mongoengine
mongoengine.connect(MONGO_DATABASE_NAME)
| Set Mongoengine user for Django 1.5 in tests.
| # Django settings for test_project project
DEBUG = True
# We are not really using a relational database, but tests fail without
# defining it because flush command is being run, which expects it
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
# Make this unique, and don't share it with anybody
SECRET_KEY = 'sq=uf!nqw=aibl+y1&5pp=)b7pc=c$4hnh$om*_c48r)^t!ob)'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'test_project.urls'
AUTHENTICATION_BACKENDS = (
'mongoengine.django.auth.MongoEngineBackend',
)
SESSION_ENGINE = 'mongoengine.django.sessions'
TEST_RUNNER = 'tastypie_mongoengine.test_runner.MongoEngineTestSuiteRunner'
INSTALLED_APPS = (
'mongoengine.django.mongo_auth',
'tastypie',
'tastypie_mongoengine',
'test_project.test_app',
)
AUTH_USER_MODEL = 'mongo_auth.MongoUser'
MONGO_DATABASE_NAME = 'test_project'
import mongoengine
mongoengine.connect(MONGO_DATABASE_NAME)
|
# -*- coding: utf-8 -*-
"""
.. module:: tests.test_pipeline.test_python
:synopsis: Tests for bundled python pipelines
"""
from mock import patch, PropertyMock
from .. import BaseTestCase
class TestPythonVirtualenv(BaseTestCase):
def setUp(self):
# Mocking State
patcher = patch('facio.pipeline.python.virtualenv.state',
new_callable=PropertyMock,
create=True)
self.mock_state = patcher.start()
self.mock_state.project_name = 'foo'
self.mock_state.context_variables = {
'PROJECT_NAME': 'foo'}
self.addCleanup(patcher.stop)
| Test for getting virtualenv name, prompting the user
| # -*- coding: utf-8 -*-
"""
.. module:: tests.test_pipeline.test_python
:synopsis: Tests for bundled python pipelines
"""
from facio.pipeline.python.virtualenv import Virtualenv
from mock import patch, PropertyMock
from .. import BaseTestCase
class TestPythonVirtualenv(BaseTestCase):
def setUp(self):
# Mocking State
patcher = patch('facio.state.state.state',
new_callable=PropertyMock,
create=True)
self.mock_state = patcher.start()
self.mock_state.project_name = 'foo'
self.mock_state.context_variables = {
'PROJECT_NAME': 'foo'}
self.addCleanup(patcher.stop)
@patch('facio.base.input')
def test_get_name(self, mock_input):
mock_input.return_value = 'bar'
i = Virtualenv()
name = i.get_name()
self.assertEqual(name, 'bar')
@patch('facio.base.input')
def test_get_name_default(self, mock_input):
mock_input.return_value = ''
i = Virtualenv()
name = i.get_name()
self.assertEqual(name, 'foo')
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side='right')
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
| Allow it to configure side
| # vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from libqtile.bar import CALCULATED
from libqtile.widget import TextBox
from powerline import Powerline
class QTilePowerline(Powerline):
def do_setup(self, obj):
obj.powerline = self
class PowerlineTextBox(TextBox):
def __init__(self, timeout=2, text=' ', width=CALCULATED, side='right', **config):
super(PowerlineTextBox, self).__init__(text, width, **config)
self.timeout_add(timeout, self.update)
self.side = side
powerline = QTilePowerline(ext='wm', renderer_module='pango_markup')
powerline.setup(self)
def update(self):
if not self.configured:
return True
self.text = self.powerline.render(side=self.side)
self.bar.draw()
return True
def cmd_update(self, text):
self.update(text)
def cmd_get(self):
return self.text
def _configure(self, qtile, bar):
super(PowerlineTextBox, self)._configure(qtile, bar)
self.layout = self.drawer.textlayout(
self.text,
self.foreground,
self.font,
self.fontsize,
self.fontshadow,
markup=True,
)
# TODO: Remove this at next major release
Powerline = PowerlineTextBox
|
import logging
from troposphere.query import only_current_tokens
logger = logging.getLogger(__name__)
def has_valid_token(user):
"""
Returns boolean indicating if there are non-expired authentication
tokens associated with the user.
"""
logger.info(hasattr(user, "auth_tokens"))
non_expired_tokens = user.auth_tokens.filter(only_current_tokens())
return len(non_expired_tokens) > 0
def get_current_tokens(user):
"""
Returns the non-expired authentication tokens.
"""
logger.info(hasattr(user, "auth_tokens"))
return user.auth_tokens.filter(only_current_tokens())
| Use exists() check from QuerySet; give logger-info context
| import logging
from troposphere.query import only_current_tokens
logger = logging.getLogger(__name__)
def has_valid_token(user):
"""
Returns boolean indicating if there are non-expired authentication
tokens associated with the user.
"""
logger.info("user has auth_tokens attributes? %s" %
(hasattr(user, "auth_tokens")))
return user.auth_tokens.filter(only_current_tokens()).exists()
def get_current_tokens(user):
"""
Returns the non-expired authentication tokens.
"""
logger.info("user has auth_tokens attributes? %s" %
(hasattr(user, "auth_tokens")))
return user.auth_tokens.filter(only_current_tokens())
|
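Note on the `.exists()` rewrite above: `len(queryset) > 0` evaluates the entire queryset just to test emptiness, while `.exists()` asks the database a yes/no question (effectively a `LIMIT 1` query). A minimal plain-Python sketch of the same distinction, using `any()` as the short-circuiting analogue:

```python
# Illustrative only: len(list(...)) materializes every element, while any(...)
# stops at the first hit; the same trade-off as len(qs) vs qs.exists().
def expensive_rows():
    for i in range(10 ** 6):
        yield i

assert len(list(expensive_rows())) > 0      # builds the full list first
assert any(True for _ in expensive_rows())  # consumes a single element
```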
from django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Performance,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
vs = Convention.objects.all()
for v in vs:
v.save()
ts = Contest.objects.all()
for t in ts:
t.save()
cs = Contestant.objects.all()
for c in cs:
c.save()
ps = Performance.objects.all()
for p in ps:
p.save()
for t in ts:
t.rank()
return "Done"
| Remove ranking from denormalization command
| from django.core.management.base import (
BaseCommand,
)
from apps.api.models import (
Convention,
Contest,
Contestant,
Performance,
)
class Command(BaseCommand):
help = "Command to denormailze data."
def handle(self, *args, **options):
vs = Convention.objects.all()
for v in vs:
v.save()
ts = Contest.objects.all()
for t in ts:
t.save()
cs = Contestant.objects.all()
for c in cs:
c.save()
ps = Performance.objects.all()
for p in ps:
p.save()
return "Done"
|
import cgen
from sympy import Symbol
__all__ = ['Dimension', 'x', 'y', 'z', 't', 'p']
class Dimension(Symbol):
"""Index object that represents a problem dimension and thus
defines a potential iteration space.
:param size: Optional, size of the array dimension.
:param buffered: Optional, boolean flag indicating whether to
buffer variables when iterating this dimension.
"""
def __new__(cls, name, **kwargs):
newobj = Symbol.__new__(cls, name)
newobj.size = kwargs.get('size', None)
newobj.buffered = kwargs.get('buffered', None)
newobj._count = 0
return newobj
def __str__(self):
return self.name
def get_varname(self):
"""Generates a new variables name based on an internal counter"""
name = "%s%d" % (self.name, self._count)
self._count += 1
return name
@property
def ccode(self):
"""C-level variable name of this dimension"""
return "%s_size" % self.name if self.size is None else "%d" % self.size
@property
def decl(self):
"""Variable declaration for C-level kernel headers"""
return cgen.Value("const int", self.ccode)
# Set of default dimensions for space and time
x = Dimension('x')
y = Dimension('y')
z = Dimension('z')
t = Dimension('t')
p = Dimension('p')
| Dimension: Add dtype of iteration variable
| import cgen
import numpy as np
from sympy import Symbol
__all__ = ['Dimension', 'x', 'y', 'z', 't', 'p']
class Dimension(Symbol):
"""Index object that represents a problem dimension and thus
defines a potential iteration space.
:param size: Optional, size of the array dimension.
:param buffered: Optional, boolean flag indicating whether to
buffer variables when iterating this dimension.
"""
def __new__(cls, name, **kwargs):
newobj = Symbol.__new__(cls, name)
newobj.size = kwargs.get('size', None)
newobj.buffered = kwargs.get('buffered', None)
newobj._count = 0
return newobj
def __str__(self):
return self.name
def get_varname(self):
"""Generates a new variables name based on an internal counter"""
name = "%s%d" % (self.name, self._count)
self._count += 1
return name
@property
def ccode(self):
"""C-level variable name of this dimension"""
return "%s_size" % self.name if self.size is None else "%d" % self.size
@property
def decl(self):
"""Variable declaration for C-level kernel headers"""
return cgen.Value("const int", self.ccode)
@property
def dtype(self):
"""The data type of the iteration variable"""
return np.int32
# Set of default dimensions for space and time
x = Dimension('x')
y = Dimension('y')
z = Dimension('z')
t = Dimension('t')
p = Dimension('p')
|
from django import forms
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = [] | Add validation on category parent field
| from django import forms
from django.utils.translation import ugettext_lazy as _
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
return parent
|
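The validation above only rejects making a category its own direct parent; assigning one of its descendants as the parent would still create a cycle. A hedged sketch of a stricter check, assuming `Category` exposes a nullable `parent` foreign key as the form implies:

```python
def clean_parent(self):
    # Sketch: walk the proposed parent's ancestor chain and reject the
    # assignment if the edited instance appears anywhere in it.
    parent = self.cleaned_data['parent']
    node = parent
    while node is not None:
        if node == self.instance:
            raise forms.ValidationError(
                _('A category may not be made a child of itself'))
        node = node.parent
    return parent
```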
# Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.2.0'
| Create a pre-release version for PyPI, to test the new readme format.
| # Package versioning solution originally found here:
# http://stackoverflow.com/q/458550
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module
__version__ = '1.3.0dev0'
|
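For reference, `1.3.0dev0` is a PEP 440 development pre-release: installers normalize it to `1.3.0.dev0`, and it sorts before the final `1.3.0`, which is what makes it safe for a PyPI test upload. A quick check, assuming the third-party `packaging` library is installed:

```python
# PEP 440 ordering check (pip install packaging).
from packaging.version import Version

pre = Version('1.3.0dev0')
assert str(pre) == '1.3.0.dev0'      # normalized canonical form
assert pre.is_prerelease
assert pre < Version('1.3.0')        # dev releases precede the final release
```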
#
# Copyright (c) SAS Institute, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
UpdateBot is a module for the automated creation and updating of a conary
packages from a yum or apt repository.
"""
from updatebot.bot import Bot
from updatebot.current import Bot as CurrentBot
from updatebot.native import Bot as NativeBot
from updatebot.config import UpdateBotConfig
| Remove import of missing module
| #
# Copyright (c) SAS Institute, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
UpdateBot is a module for the automated creation and updating of a conary
packages from a yum or apt repository.
"""
from updatebot.bot import Bot
from updatebot.current import Bot as CurrentBot
from updatebot.config import UpdateBotConfig
|
from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
| Use better exception for AUTH_USER_MODEL
If AUTH_USER_MODEL is improperly configured as 'project.customer.User',
the error is:
ValueError: too many values to unpack
Use rather standard Django's error:
ImproperlyConfigured: AUTH_USER_MODEL must be of the form
'app_label.model_name'
| from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
try:
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured("AUTH_USER_MODEL must be of the form 'app_label.model_name'")
|
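The `try/except ValueError` above works because tuple unpacking of a three-part split fails exactly as the commit message quotes; a two-line demonstration:

```python
# Reproduces the failure mode described in the commit message.
try:
    app_label, model_name = 'project.customer.User'.split('.')
except ValueError as exc:
    print(exc)  # 'too many values to unpack' (wording varies by Python version)
```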
SINGLE_BIT_MASK = [1 << x for x in range(7, -1, -1)]
class BitString(object):
def __init__(self, bytes, length=None, offset=None):
self.bytes = bytes
self.offset = offset if offset is not None else 0
self.length = length if length is not None else 8 * len(data) - self.offset
def __getitem__(self, key):
try:
start = key.start
stop = key.stop
except AttributeError:
if key < 0 or key >= length:
raise IndexError()
byte_index, bit_offset = divmod(self.offset + key), 8)
return self.bytes[byte_index] & SINGLE_BIT_MASK[bit_offset]
| Add closing paren to tuple expression
Under Python 2.7.6, this file didn't compile for me as-is. I still need to clone and rerun the test suite, but I thought I'd try Github's nifty "fork and edit online" feature. Will comment again when the tests pass. |
SINGLE_BIT_MASK = [1 << x for x in range(7, -1, -1)]
class BitString(object):
def __init__(self, bytes, length=None, offset=None):
self.bytes = bytes
self.offset = offset if offset is not None else 0
self.length = length if length is not None else 8 * len(data) - self.offset
def __getitem__(self, key):
try:
start = key.start
stop = key.stop
except AttributeError:
if key < 0 or key >= length:
raise IndexError()
byte_index, bit_offset = (divmod(self.offset + key), 8)
return self.bytes[byte_index] & SINGLE_BIT_MASK[bit_offset]
|
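Worth flagging: the merged line is still not what the arithmetic needs. `divmod` takes two arguments, so the intended expression is presumably `divmod(self.offset + key, 8)`, and the class also references undefined names (`data` in `__init__`, likely `self.bytes`, and `length` in `__getitem__`, likely `self.length`). A sketch of the corrected method, with the slice handling elided:

```python
# Presumed intent: divmod splits an absolute bit index into a
# (byte index, bit-within-byte) pair. Assumes `length` meant self.length.
def __getitem__(self, key):
    if key < 0 or key >= self.length:
        raise IndexError(key)
    byte_index, bit_offset = divmod(self.offset + key, 8)
    return self.bytes[byte_index] & SINGLE_BIT_MASK[bit_offset]
```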
from django.db.models import Manager
class ShipmentManager(Manager):
"""Returns Shipments that are not delivered"""
def undelivered(self):
return self.get_query_set().exclude(status__exact='delivered')
| Change filter syntax to be more direct.
| from django.db.models import Manager
class ShipmentManager(Manager):
"""Returns Shipments that are not delivered"""
def undelivered(self):
return self.get_query_set().exclude(status='delivered')
|
from setmagic.wrapper import SettingsWrapper
# Initialize the magic
settings = SettingsWrapper()
| Rename the built-in wrapper from "settings" to "setmagic"
| from setmagic.wrapper import SettingsWrapper
# Initialize the magic
setmagic = SettingsWrapper()
# Support for backwards compatibility
# @TODO: Drop at 0.4
settings = setmagic
|
# code support tractometric statistical analysis for dipy
| Add a warning about future changes that will happen in dipy.stats.
| # code support tractometric statistical analysis for dipy
import warnings
w_string = "The `dipy.stats` module is still under heavy development "
w_string += "and functionality, as well as the API is likely to change "
w_string += "in future versions of the software"
warnings.warn(w_string) |
"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_positions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
| Add note in hog test doc string
| """Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
There are already enough tests in skimage for this, just adding so to
document how many values are returned and why.
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_positions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
|
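The expected size in this test is easy to verify by hand with the fixture's numbers:

```python
# Sanity check of the hog result-size formula used in the test above.
pixels_per_cell, cells_per_block, orientations = 9, 8, 9
arr_dim = 2 * pixels_per_cell * cells_per_block                        # 144
block_positions = (arr_dim // pixels_per_cell) - cells_per_block + 1   # 9
assert block_positions ** 2 * cells_per_block ** 2 * orientations == 46656
```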
# -*- coding: utf-8 -*-
import os
import glob
import pvl
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data/')
PDS_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data', 'pds3')
def test_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
assert label == pvl.loads(pvl.dumps(label))
| Add tests for cube and isis encoders.
| # -*- coding: utf-8 -*-
import os
import glob
import pvl
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data/')
PDS_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data', 'pds3')
def test_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
assert label == pvl.loads(pvl.dumps(label))
def test_cube_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
encoder = pvl.encoder.IsisCubeLabelEncoder
assert label == pvl.loads(pvl.dumps(label, cls=encoder))
def test_pds_dump():
files = glob.glob(os.path.join(PDS_DATA_DIR, "*.lbl"))
for infile in files:
label = pvl.load(infile)
encoder = pvl.encoder.PDSLabelEncoder
assert label == pvl.loads(pvl.dumps(label, cls=encoder))
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
| Make client.write_parquet_dataset available for export
This commit adds write_parquet_dataset to the imports from .client in
__init__.py
Previously, `from json2parquet import write_parquet_dataset` would
result in an error: `ImportError: cannot import name
'write_parquet_dataset' from 'json2parquet' `
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .client import load_json, ingest_data, write_parquet, convert_json, write_parquet_dataset
__title__ = 'json2parquet'
__version__ = '0.0.24'
__all__ = ['load_json', 'ingest_data', 'write_parquet', 'convert_json', 'write_parquet_dataset']
|
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.security import UserMixin, RoleMixin
db = SQLAlchemy()
### Add models here
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
| Add missing cascade deletes on user/roles
| from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.security import UserMixin, RoleMixin
db = SQLAlchemy()
### Add models here
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id', ondelete='CASCADE')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id', ondelete='CASCADE')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
| Add with_statement import for python2.5.
See http://www.python.org/dev/peps/pep-0343/ which describes
the with statement.
Review URL: http://codereview.chromium.org/5690003
git-svn-id: e7e1075985beda50ea81ac4472467b4f6e91fc78@863 78cadc50-ecff-11dd-a971-7dbc132099af
| #!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
# Python 2.5 needs this for the with statement.
from __future__ import with_statement
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
|
# Copyright (c) 2015 Ansible, Inc..
# All Rights Reserved.
import json
from django.conf import settings as django_settings
from awx.main.models.configuration import TowerSettings
class TowerConfiguration(object):
def __getattr__(self, key):
ts = TowerSettings.objects.filter(key=key)
if not ts.exists():
return getattr(django_settings, key)
return ts[0].value_converted
def create(key, value):
settings_manifest = django_settings.TOWER_SETTINGS_MANIFEST
if key not in settings_manifest:
raise AttributeError("Tower Setting with key '{0}' does not exist".format(key))
settings_entry = settings_manifest[key]
setting_actual = TowerSettings.objects.filter(key=key)
if not settings_actual.exists():
settings_actual = TowerSettings(key=key,
description=settings_entry['description'],
category=settings_entry['category'],
value=value,
value_type=settings_entry['type'])
else:
settings_actual['value'] = value
settings_actual.save()
tower_settings = TowerConfiguration()
| Add a note about caching
| # Copyright (c) 2015 Ansible, Inc..
# All Rights Reserved.
import json
from django.conf import settings as django_settings
from awx.main.models.configuration import TowerSettings
class TowerConfiguration(object):
# TODO: Caching so we don't have to hit the database every time for settings
def __getattr__(self, key):
ts = TowerSettings.objects.filter(key=key)
if not ts.exists():
return getattr(django_settings, key)
return ts[0].value_converted
def create(key, value):
settings_manifest = django_settings.TOWER_SETTINGS_MANIFEST
if key not in settings_manifest:
raise AttributeError("Tower Setting with key '{0}' does not exist".format(key))
settings_entry = settings_manifest[key]
setting_actual = TowerSettings.objects.filter(key=key)
if not settings_actual.exists():
settings_actual = TowerSettings(key=key,
description=settings_entry['description'],
category=settings_entry['category'],
value=value,
value_type=settings_entry['type'])
else:
settings_actual['value'] = value
settings_actual.save()
tower_settings = TowerConfiguration()
|
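Beyond the caching TODO, the `create` method shown carries a few latent bugs worth noting: it is missing `self`, it assigns to `setting_actual` but reads `settings_actual`, and `settings_actual['value'] = value` uses item assignment on a Django model instance, which raises TypeError (models take attribute assignment). A hedged sketch with those fixed, assuming the surrounding `TowerSettings` model (`.first()` assumes Django 1.6 or later):

```python
# Sketch only: same logic with the name mismatch, missing self, and
# item-vs-attribute assignment corrected.
def create(self, key, value):
    settings_manifest = django_settings.TOWER_SETTINGS_MANIFEST
    if key not in settings_manifest:
        raise AttributeError("Tower Setting with key '{0}' does not exist".format(key))
    settings_entry = settings_manifest[key]
    setting = TowerSettings.objects.filter(key=key).first()
    if setting is None:
        setting = TowerSettings(key=key,
                                description=settings_entry['description'],
                                category=settings_entry['category'],
                                value=value,
                                value_type=settings_entry['type'])
    else:
        setting.value = value
    setting.save()
```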
"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'):
view.run_command("save")
| Add support for interactive rebase
| """Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo')
if path and any(path.endswith(name) for name in git_files):
return True
|
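A small aside on the helper introduced above: `is_git_file` returns `True` or falls through to an implicit `None`, which is fine in the boolean contexts where it is called, but returning the expression directly is tighter:

```python
def is_git_file(path):
    # Equivalent behaviour, always returning a real bool.
    git_files = ('COMMIT_EDITMSG', 'git-rebase-todo')
    return bool(path) and any(path.endswith(name) for name in git_files)
```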
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Diesel Sweeties (web)'
language = 'en'
url = 'http://www.dieselsweeties.com/'
start_date = '2000-01-01'
rights = 'Richard Stevens'
class Crawler(CrawlerBase):
history_capable_date = '2000-01-01'
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
def crawl(self, pub_date):
feed = self.parse_feed('http://www.dieselsweeties.com/ds-unifeed.xml')
for entry in feed.for_date(pub_date):
if not entry.summary:
continue
url = entry.summary.src('img[src*="/strips/"]')
title = entry.title
text = entry.summary.alt('img[src*="/strips/"]')
return CrawlerImage(url, title, text)
| Check if field exists, not if it's empty
| from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Diesel Sweeties (web)'
language = 'en'
url = 'http://www.dieselsweeties.com/'
start_date = '2000-01-01'
rights = 'Richard Stevens'
class Crawler(CrawlerBase):
history_capable_date = '2000-01-01'
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
def crawl(self, pub_date):
feed = self.parse_feed('http://www.dieselsweeties.com/ds-unifeed.xml')
for entry in feed.for_date(pub_date):
if not hasattr(entry, 'summary'):
continue
url = entry.summary.src('img[src*="/strips/"]')
title = entry.title
text = entry.summary.alt('img[src*="/strips/"]')
return CrawlerImage(url, title, text)
|
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
| Add docstrings to Gist integration tests
@esacteksab would be so proud
| # -*- coding: utf-8 -*-
"""Integration tests for methods implemented on Gist."""
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
"""Gist integration tests."""
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
"""Show that a user can iterate over the commits in a gist."""
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
"""Show that a user can iterate over the forks of a gist."""
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
|
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.conductor import resource as r
def create_cluster(name, tenant, plugin, version, node_groups, **kwargs):
dct = {'name': name, 'tenant_id': tenant, 'plugin_name': plugin,
'hadoop_version': version, 'node_groups': node_groups}
dct.update(kwargs)
return r.ClusterResource(dct)
def make_ng_dict(name, flavor, processes, count, instances=[], **kwargs):
dct = {'name': name, 'flavor_id': flavor, 'node_processes': processes,
'count': count, 'instances': instances}
dct.update(kwargs)
return dct
def make_inst_dict(inst_id, inst_name):
return {'instance_id': inst_id, 'instance_name': inst_name}
| Use immutable arg rather than mutable arg
Passing mutable objects as default args is a known Python pitfall.
We'd better avoid this. This commit changes mutable default args with
None, then use 'arg = arg or []'.
Change-Id: If3a10d58e6cd792a2011c177c49d3b865a7421ff
| # Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.conductor import resource as r
def create_cluster(name, tenant, plugin, version, node_groups, **kwargs):
dct = {'name': name, 'tenant_id': tenant, 'plugin_name': plugin,
'hadoop_version': version, 'node_groups': node_groups}
dct.update(kwargs)
return r.ClusterResource(dct)
def make_ng_dict(name, flavor, processes, count, instances=None, **kwargs):
instances = instances or []
dct = {'name': name, 'flavor_id': flavor, 'node_processes': processes,
'count': count, 'instances': instances}
dct.update(kwargs)
return dct
def make_inst_dict(inst_id, inst_name):
return {'instance_id': inst_id, 'instance_name': inst_name}
|
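The pitfall this commit avoids is easy to demonstrate: a mutable default is created once, at function definition time, and shared by every call that omits the argument:

```python
def append_bad(item, bucket=[]):
    bucket.append(item)          # mutates the single shared default
    return bucket

def append_good(item, bucket=None):
    bucket = bucket or []        # fresh list on every call, as in the fix
    bucket.append(item)
    return bucket

assert append_bad('a') == ['a']
assert append_bad('b') == ['a', 'b']   # state leaked between calls
assert append_good('a') == ['a']
assert append_good('b') == ['b']
```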
#-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
from sqlalchemy import Column, String, DateTime, Binary
from sqlalchemy.ext.declarative import declarative_base
SessionBase = declarative_base()
class Session(SessionBase):
__tablename__ = 'Sessions'
sess_id = Column(String(100), primary_key = True)
session_pool_id = Column(String(100), nullable = False)
start_date = Column(DateTime(), nullable = False)
latest_access = Column(DateTime())
latest_change = Column(DateTime())
session_obj = Column(Binary(), nullable = False)
def __init__(self, sess_id, session_pool_id, start_date, session_obj):
self.sess_id = sess_id
self.session_pool_id = session_pool_id
self.start_date = start_date
self.session_obj = session_obj
| Remove sqlalchemy 0.7 warning (Binary => LargeBinary)
| #-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
from sqlalchemy import Column, String, DateTime, LargeBinary
from sqlalchemy.ext.declarative import declarative_base
SessionBase = declarative_base()
class Session(SessionBase):
__tablename__ = 'Sessions'
sess_id = Column(String(100), primary_key = True)
session_pool_id = Column(String(100), nullable = False)
start_date = Column(DateTime(), nullable = False)
latest_access = Column(DateTime())
latest_change = Column(DateTime())
session_obj = Column(LargeBinary(), nullable = False)
def __init__(self, sess_id, session_pool_id, start_date, session_obj):
self.sess_id = sess_id
self.session_pool_id = session_pool_id
self.start_date = start_date
self.session_obj = session_obj
|
# -*- encoding:utf8 -*-
import os
from oauth2client.client import OAuth2WebServerFlow
class OAuth:
def __init__(self):
pass
def get_flow(self):
scope = 'https://www.googleapis.com/auth/drive'
try:
client_id = os.environ['GOOGLE_CLIENT_ID']
client_secret = os.environ['GOOGLE_CLIENT_SECRET']
base_url = os.environ['BOTNYAN_BASE_URL']
separator = "/"
if base_url.endswith("/"):
separator = ""
redirect_url = "{0}{1}drive/callback".format(base_url, separator)
flow = OAuth2WebServerFlow(client_id=client_id,
client_secret=client_secret,
scope=scope,
redirect_uri=redirect_url)
return flow
except:
return None
| Fix GoogleDrive OAuth callback URL in OAuth module.
| # -*- encoding:utf8 -*-
import os
from oauth2client.client import OAuth2WebServerFlow
class OAuth:
def __init__(self):
pass
def get_flow(self):
scope = 'https://www.googleapis.com/auth/drive'
try:
client_id = os.environ['GOOGLE_CLIENT_ID']
client_secret = os.environ['GOOGLE_CLIENT_SECRET']
base_url = os.environ['BOTNYAN_BASE_URL']
separator = "/"
if base_url.endswith("/"):
separator = ""
redirect_url = "{0}{1}api/drive/callback".format(base_url, separator)
flow = OAuth2WebServerFlow(client_id=client_id,
client_secret=client_secret,
scope=scope,
redirect_uri=redirect_url)
return flow
except:
return None
|
import ircbotframe
import sys
class Handler:
def __init__(self, host, port=6667, name="MediaWiki", description="MediaWiki recent changes bot", channels=[]):
self.channels = channels
self.bot = ircbotframe.ircBot(host, port, name, description)
self.bot.bind("376", self.endMOTD)
self.bot.start()
def endMOTD(self, sender, headers, message):
for chan in self.channels:
self.bot.joinchan(chan)
def main(cmd, args):
args = args[:]
parsemode = ["host"]
host = None
name = "MediaWiki"
channels = []
while len(args) > 0:
if len(parsemode) < 1:
if args[0] == "-n":
parsemode.insert(0, "name")
else:
channels.append(args[0])
else:
if parsemode[0] == "name":
name = args[0]
elif parsemode[0] == "host":
host = args[0]
parsemode = parsemode[1:]
args = args[1:]
if host == None:
print("Usage: " + cmd + " [-n <name>] <host> <channel> [<channel> ...]")
return
elif len(args) > 1:
Handler(host=host, name=name channels=channels)
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
| Remove unnecessary conditional in argument parsing | import ircbotframe
import sys
class Handler:
def __init__(self, host, port=6667, name="MediaWiki", description="MediaWiki recent changes bot", channels=[]):
self.channels = channels
self.bot = ircbotframe.ircBot(host, port, name, description)
self.bot.bind("376", self.endMOTD)
self.bot.start()
def endMOTD(self, sender, headers, message):
for chan in self.channels:
self.bot.joinchan(chan)
def main(cmd, args):
args = args[:]
parsemode = ["host"]
host = None
name = "MediaWiki"
channels = []
while len(args) > 0:
if len(parsemode) < 1:
if args[0] == "-n":
parsemode.insert(0, "name")
else:
channels.append(args[0])
else:
if parsemode[0] == "name":
name = args[0]
elif parsemode[0] == "host":
host = args[0]
parsemode = parsemode[1:]
args = args[1:]
if host == None:
print("Usage: " + cmd + " [-n <name>] <host> <channel> [<channel> ...]")
return
else:
Handler(host=host, name=name channels=channels)
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
|
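Note that the merged result still does not parse: both versions carry `Handler(host=host, name=name channels=channels)` with a missing comma, and the file nests two identical `__main__` guards. The corrected fragments of the script, for reference:

```python
# Keyword-argument comma restored:
Handler(host=host, name=name, channels=channels)

# And a single __main__ guard in place of the nested duplicate:
if __name__ == "__main__":
    main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
```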
from __future__ import absolute_import
import inspect
import os
import sys
import psutil
from collections import namedtuple
from functools import wraps
from pikos.abstract_monitors import AbstractMonitor
__all__ = [
'FunctionLogger',
'FunctionRecord',
]
FunctionRecord = namedtuple('FunctionRecord',
['type', 'filename', 'lineNo', 'function'])
class FunctionLogger(AbstractMonitor):
_fields = FunctionRecord._fields
def __init__(self, recorder):
''' Initialize the logger class.
Parameters
----------
function : callable
The callable to profile
output : str
The file in which to store profiling results.
'''
super(FunctionLogger, self).__init__(None)
self._recorder = recorder
self._process = None
self._old_profile_function = None
def __call__(self, function):
self._item = function
@wraps(function)
def wrapper(*args, **kwds):
return self.run(*args, **kwds)
return wrapper
def setup(self):
self._recorder.prepare(self._fields)
self._process = psutil.Process(os.getpid())
self._old_profile_function = sys.getprofile()
sys.setprofile(self.on_function_event)
def teardown(self):
sys.setprofile(self._old_profile_function)
self._process = None
self._recorder.finalize()
def on_function_event(self, frame, event, arg):
filename, lineno, function, _, _ = \
inspect.getframeinfo(frame, context=0)
if event.startswith('c_'):
function = arg.__name__
record = FunctionRecord(event, filename, lineno, function)
self._recorder.record(record)
| Make the FunctionLogger a context manager
| from __future__ import absolute_import
import inspect
from collections import namedtuple
from pikos._profile_functions import ProfileFunctions
from pikos._trace_functions import TraceFunctions
__all__ = [
'FunctionLogger',
'FunctionRecord',
]
FunctionRecord = namedtuple('FunctionRecord',
['type', 'filename', 'lineNo', 'function'])
class FunctionLogger(object):
_fields = FunctionRecord._fields
def __init__(self, recorder):
self._recorder = recorder
self._profiler = ProfileFunctions()
def __enter__(self):
self._recorder.prepare(self._fields)
self._profiler.set(self.on_function_event)
def __exit__(self, exc_type, exc_val, exc_tb):
self._profiler.unset()
self._recorder.finalize()
def on_function_event(self, frame, event, arg):
filename, lineno, function, _, _ = \
inspect.getframeinfo(frame, context=0)
if event.startswith('c_'):
function = arg.__name__
record = FunctionRecord(event, filename, lineno, function)
self._recorder.record(record)
|
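A usage sketch for the context-manager form above, with a throwaway in-memory recorder (hypothetical here; pikos ships its own recorder classes) and assuming pikos itself is importable:

```python
# Minimal recorder satisfying the prepare/record/finalize protocol the
# logger expects.
class ListRecorder(object):
    def __init__(self):
        self.records = []
    def prepare(self, fields):
        self.fields = fields
    def record(self, rec):
        self.records.append(rec)
    def finalize(self):
        pass

recorder = ListRecorder()
with FunctionLogger(recorder):
    sum(range(10))               # any code to be profiled
print(len(recorder.records), 'function events captured')
```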
"""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.join(_osp.dirname(__file__), 'data')
from version import version as __version__
def _setup_test():
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
return logging.getLogger(name)
| Use absolute path for scikits.image.data_dir.
| """Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
return logging.getLogger(name)
|
import moarjson as json
class User(object):
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
def full_name(self):
return '{} {}'.format(self.first_name, self.last_name)
json.register_with_fields(User, ('first_name', 'last_name', 'full_name',))
def test_fields():
user = User('James', 'Bond')
assert user.full_name() == 'James Bond'
test_dict = {'first_name': 'James',
'last_name': 'Bond',
'full_name': 'James Bond'}
json_dict = json.loads(json.dumps(user))
assert json_dict == test_dict
| Make test compatible with Python 2.6
| import moarjson as json
class User(object):
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
def full_name(self):
return '{0} {1}'.format(self.first_name, self.last_name)
json.register_with_fields(User, ('first_name', 'last_name', 'full_name',))
def test_fields():
user = User('James', 'Bond')
assert user.full_name() == 'James Bond'
test_dict = {'first_name': 'James',
'last_name': 'Bond',
'full_name': 'James Bond'}
json_dict = json.loads(json.dumps(user))
assert json_dict == test_dict
|
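Context for the change: auto-numbered `{}` fields were only added in Python 2.7 (and 3.1); on 2.6 the explicit indices are mandatory:

```python
'{0} {1}'.format('James', 'Bond')   # works on Python 2.6 and later
'{} {}'.format('James', 'Bond')     # ValueError on 2.6: zero-length field name
```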
"""
Start the celery daemon from the Django management command.
"""
from __future__ import absolute_import, unicode_literals
from celery.bin import worker
from djcelery.app import app
from djcelery.management.base import CeleryCommand
worker = worker.worker(app=app)
class Command(CeleryCommand):
"""Run the celery daemon."""
help = 'Old alias to the "celery worker" command.'
requires_model_validation = True
options = (CeleryCommand.options
+ worker.get_options()
+ worker.preload_options)
def handle(self, *args, **options):
worker.run(**options)
| Add requested call to check_args.
| """
Start the celery daemon from the Django management command.
"""
from __future__ import absolute_import, unicode_literals
from celery.bin import worker
from djcelery.app import app
from djcelery.management.base import CeleryCommand
worker = worker.worker(app=app)
class Command(CeleryCommand):
"""Run the celery daemon."""
help = 'Old alias to the "celery worker" command.'
requires_model_validation = True
options = (CeleryCommand.options
+ worker.get_options()
+ worker.preload_options)
def handle(self, *args, **options):
worker.check_args(args)
worker.run(**options)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign:
new['halign'] = image.halign
if image.valign:
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
| Fix 'has no attribute' error in templatetags image_obj
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
if getattr(image, 'flip'):
new['flip'] = image.flip
if getattr(image, 'flop'):
new['flop'] = image.flop
if getattr(image, 'halign'):
new['halign'] = image.halign
if getattr(image, 'valign'):
new['valign'] = image.valign
if getattr(image, 'fit_in'):
new['fit_in'] = image.fit_in
if getattr(image, 'smart'):
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
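One caveat on the fix above: two-argument `getattr(image, 'flip')` still raises AttributeError when the attribute is missing; only the three-argument form with a default makes the lookup safe, e.g. `getattr(image, 'flip', None)`. A quick demonstration:

```python
class Image(object):
    pass

img = Image()
try:
    getattr(img, 'flip')                      # raises: no default supplied
except AttributeError:
    pass
assert getattr(img, 'flip', None) is None     # three-arg form returns default
```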
"""Tests for string searches"""
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
| Add a skipped failing test for the result-repetition bug.
| """Tests for string searches"""
from nose import SkipTest
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
class RepeatedResultTests(SingleFileTestCase):
# Putting code on the first line triggers the bug:
source = """int main(int argc, char* argv[]) {
return 0;
}
"""
def test_repeated_results(self):
"""Make sure we don't get the same line back twice."""
raise SkipTest
self.found_lines_eq('int',
'<b>int</b> main(<b>int</b> argc, char* argv[]) {')
|
from powerline.segments.common import bat, sys
def system_load(pl, num_avgs=3):
return sys.system_load(pl)[:num_avgs]
def battery(pl, max_percent=101):
if bat._get_capacity(pl) < max_percent:
return bat.battery(pl)
return []
| Update powerline battery wrapper for new API
| from powerline.segments.common import bat, sys
def system_load(pl, num_avgs=3):
return sys.system_load(pl)[:num_avgs]
def battery(pl, max_percent=101):
capacity, ac_powered = bat._get_battery_status(pl)
if capacity < max_percent:
return bat.battery(pl)
return []
|
"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret, as_function
| Remove missing import (since b62968cd2666)
--HG--
branch : distribute
extra : rebase_source : d1190f895d794dfcb838f7eb40a60ab07b8b309e
| """Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret
|
#!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and images after command completion (regardless of success or failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Starting ephemeral cluster: {0}\n'.format(project))
try:
sys.exit(subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:]))
finally:
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
| Fix forwarding ephemeral cluster exit code.
Summary: Also improves logging a little bit.
Test Plan:
$ python ephemeral-cluster.py run --rm --entrypoint=bash pgshovel -c "exit 10"
$ test $? -eq 10
Reviewers: jeff, tail
Reviewed By: tail
Differential Revision: http://phabricator.local.disqus.net/D19564
| #!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
|
"""Utility for clearing all keys out of redis -- do not use in production!"""
import sys
from optparse import OptionParser
import redis
def option_parser():
parser = OptionParser()
parser.add_option("-d", "--db",
type="int", dest="db", default=1,
help="Redis DB to clear.")
parser.add_option("-f", "--force",
action="store_true", dest="force", default=False,
help="Don't ask for confirmation.")
return parser
def main():
parser = option_parser()
options, args = parser.parse_args()
if args:
parser.print_help()
return 1
if not options.force:
confirm = raw_input("About to delete ALL redis keys. "
"Press Y to confirm, N to exit: ")
if confirm.lower() != 'y':
return 1
r_server = redis.Redis(db=options.db)
keys = r_server.keys()
for key in keys:
r_server.delete(key)
print "Deleted %i keys." % len(keys)
return 0
if __name__ == "__main__":
sys.exit(main())
| Print which DB will be cleared.
| """Utility for clearing all keys out of redis -- do not use in production!"""
import sys
from optparse import OptionParser
import redis
def option_parser():
parser = OptionParser()
parser.add_option("-d", "--db",
type="int", dest="db", default=1,
help="Redis DB to clear.")
parser.add_option("-f", "--force",
action="store_true", dest="force", default=False,
help="Don't ask for confirmation.")
return parser
def main():
parser = option_parser()
options, args = parser.parse_args()
if args:
parser.print_help()
return 1
if not options.force:
confirm = raw_input("About to delete ALL redis keys in db %d. "
"Press Y to confirm, N to exit: " %
(options.db,))
if confirm.lower() != 'y':
return 1
r_server = redis.Redis(db=options.db)
keys = r_server.keys()
for key in keys:
r_server.delete(key)
print "Deleted %i keys." % len(keys)
return 0
if __name__ == "__main__":
sys.exit(main())
|
import os
from pathlib import Path
from string import Template
from dotenv import load_dotenv
load_dotenv() # helps with local dev
TEMPLATE_PATH = Path.cwd() / "email.md"
STARMINDER_COUNT = int(os.getenv("STARMINDER_COUNT"))
STARMINDER_RECIPIENT = os.getenv("STARMINDER_RECIPIENT")
STARMINDER_SUBJECT = Template("[Starminder] Reminders for $today")
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
AWS_FROM = os.getenv("AWS_FROM")
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL")
GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY")
GITHUB_FORK_URL = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
| Rename GH token env var
GitHub disallows user-set variables with the GITHUB_ prefix.
| import os
from pathlib import Path
from string import Template
from dotenv import load_dotenv
load_dotenv() # helps with local dev
TEMPLATE_PATH = Path.cwd() / "email.md"
STARMINDER_COUNT = int(os.getenv("STARMINDER_COUNT"))
STARMINDER_RECIPIENT = os.getenv("STARMINDER_RECIPIENT")
STARMINDER_SUBJECT = Template("[Starminder] Reminders for $today")
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
AWS_FROM = os.getenv("AWS_FROM")
GITHUB_TOKEN = os.getenv("GH_TOKEN")
GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL")
GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY")
GITHUB_FORK_URL = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
|
from __future__ import unicode_literals, print_function
import logging
import subprocess
from ..utils import _execute
from .base import VCSBase
logger = logging.getLogger(__name__)
class HgVCS(VCSBase):
"""
Mercurial support implementation
"""
name = 'hg'
def get_vcs(self):
"""
Get git binary executable path
"""
vcs = _execute('which hg', strict=True).strip()
self._check_extdiff_extension(vcs)
return vcs
def is_used(self):
"""
Determines if this VCS should be used
TODO: implement
"""
return True
def _check_extdiff_extension(self, vcs):
try:
return _execute('{vcs} extdiff'.format(vcs=vcs), strict=True)
except subprocess.CalledProcessError:
message = (
"Mercurial 'extdiff' extension is disabled.\n"
"Please add the following lines to your ~/.hgrc\n\n"
"[extensions]\n"
"extdiff = \n")
print(message)
raise Exception("Please enable 'extdiff' extension")
| Add changed_files support to Mercurial
| from __future__ import unicode_literals, print_function
import logging
import subprocess
from ..utils import _execute
from .base import VCSBase
logger = logging.getLogger(__name__)
class HgVCS(VCSBase):
"""
Mercurial support implementation
"""
name = 'hg'
def get_vcs(self):
"""
Get git binary executable path
"""
vcs = _execute('which hg', strict=True).strip()
self._check_extdiff_extension(vcs)
return vcs
def is_used(self):
"""
Determines if this VCS should be used
TODO: implement
"""
return True
def changed_files(self):
"""
Return a list of all changed files.
"""
commits = ['-r {}'.format(c) for c in self.commits]
command = [self.vcs, 'diff', '--stat'] + commits
result = _execute(' '.join(command))
lines = result.strip().split('\n')[:-1]
files = [
line.split('|')[0].strip()
for line in lines
]
return files
def _check_extdiff_extension(self, vcs):
try:
return _execute('{vcs} extdiff'.format(vcs=vcs), strict=True)
except subprocess.CalledProcessError:
message = (
"Mercurial 'extdiff' extension is disabled.\n"
"Please add the following lines to your ~/.hgrc\n\n"
"[extensions]\n"
"extdiff = \n")
print(message)
raise Exception("Please enable 'extdiff' extension")
|
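The changed_files implementation above leans on the fixed shape of hg diff --stat output: one "path | changes" row per file plus a trailing summary line, which the [:-1] slice discards. A standalone check against sample output:

sample = (
    "README.md   |  2 +-\n"
    "src/core.py | 10 ++++++++--\n"
    " 2 files changed, 9 insertions(+), 3 deletions(-)\n"
)
lines = sample.strip().split('\n')[:-1]  # drop the summary line
files = [line.split('|')[0].strip() for line in lines]
print(files)  # ['README.md', 'src/core.py']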
"""
pytest runner control plugin
"""
import pytest
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
| Move some fixtures into better places
Move datadir into the sipsecmon plugin and testname into
lab.runnerctl.
| """
pytest runner control plugin
"""
import pytest
import string
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
@pytest.fixture(scope='class')
def testname(request):
"""Pytest test node name with all unfriendly characters transformed
into underscores. The lifetime is class scoped since this name is
often used to provision remote sw profiles which live for the entirety
of a test suite.
"""
return request.node.name.translate(
string.maketrans('\[', '__')).strip(']')
|
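The translate/maketrans pair in the testname fixture maps the characters \ and [ to underscores before stripping a trailing ]; a quick standalone check with a parametrized node name:

import string

name = 'test_call[abc-1]'
print(name.translate(string.maketrans('\\[', '__')).strip(']'))
# test_call_abc-1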
"""Pytac: Python Toolkit for Accelerator Controls."""
# PV types
SP = 'setpoint'
RB = 'readback'
# Unit systems
ENG = 'engineering'
PHYS = 'physics'
# Model types.
SIM = 'simulation'
LIVE = 'live'
| Add modules to pytac namespace.
| """Pytac: Python Toolkit for Accelerator Controls."""
# PV types
SP = 'setpoint'
RB = 'readback'
# Unit systems
ENG = 'engineering'
PHYS = 'physics'
# Model types.
SIM = 'simulation'
LIVE = 'live'
from . import device, element, lattice, load_csv, utils
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from bs4 import BeautifulSoup
import pytest
import requests
def pytest_generate_tests(metafunc):
if 'not headless' in metafunc.config.option.markexpr:
return # test deselected by mark expression
base_url = metafunc.config.option.base_url
if not base_url:
pytest.skip(
'This test requires a base URL to be specified on the command '
'line or in a configuration file.')
paths = (
'/firefox/all/',
'/firefox/beta/all/',
'/firefox/developer/all/',
'/firefox/organizations/all/',
'/firefox/android/all/',
'/firefox/android/beta/all/')
argvalues = []
for path in paths:
r = requests.get(base_url + path)
soup = BeautifulSoup(r.content, 'html.parser')
table = soup.find('table', class_='build-table')
urls = [a['href'] for a in table.find_all('a')]
assert len(urls) > 0
argvalues.extend(urls)
metafunc.parametrize('url', argvalues)
@pytest.mark.headless
@pytest.mark.nondestructive
def test_localized_download_links(url):
r = requests.head(url, allow_redirects=True)
assert requests.codes.ok == r.status_code
| Add /firefox/nightly/all/ to download link tests
| # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from bs4 import BeautifulSoup
import pytest
import requests
def pytest_generate_tests(metafunc):
if 'not headless' in metafunc.config.option.markexpr:
return # test deselected by mark expression
base_url = metafunc.config.option.base_url
if not base_url:
pytest.skip(
'This test requires a base URL to be specified on the command '
'line or in a configuration file.')
paths = (
'/firefox/all/',
'/firefox/beta/all/',
'/firefox/developer/all/',
'/firefox/nightly/all/',
'/firefox/organizations/all/',
'/firefox/android/all/',
'/firefox/android/beta/all/')
argvalues = []
for path in paths:
r = requests.get(base_url + path)
soup = BeautifulSoup(r.content, 'html.parser')
table = soup.find('table', class_='build-table')
urls = [a['href'] for a in table.find_all('a')]
assert len(urls) > 0
argvalues.extend(urls)
metafunc.parametrize('url', argvalues)
@pytest.mark.headless
@pytest.mark.nondestructive
def test_localized_download_links(url):
r = requests.head(url, allow_redirects=True)
assert requests.codes.ok == r.status_code
|
major = 0
minor=0
patch=0
branch="dev"
timestamp=1376425015.74 | Tag commit for v0.0.10-master generated by gitmake.py
| major = 0
minor=0
patch=10
branch="master"
timestamp=1376502388.26 |
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
from twisted.trial.unittest import SynchronousTestCase as TestCase
from .._ssh._monkeypatch import _patch_7672_needed, patch_7672_applied
class Twisted7672Tests(TestCase):
""""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
def test_needsPatch(self):
"""
Check to see if patch is still required.
"""
self.assertTrue((not _patch_7672_needed()) or patch_7672_applied,
"Monkeypatch for twisted bug #7672 can be removed.")
| Fix monkey patch test condition.
| # Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
from twisted.trial.unittest import SynchronousTestCase as TestCase
from .._ssh._monkeypatch import _patch_7672_needed, patch_7672_applied
class Twisted7672Tests(TestCase):
""""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
def test_needsPatch(self):
"""
Check to see if patch is still required.
This will be the case if the patch is still needed, or if it has been
applied.
"""
self.assertFalse(
# The patch is still needed
_patch_7672_needed()
# Or the patch has already been applied
or patch_7672_applied,
"Monkeypatch for twisted bug #7672 can be removed.")
|
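The condition change is substantive rather than cosmetic: by De Morgan's laws the old assertion passed whenever not needed or applied held, while the new one passes only when not needed and not applied. A quick truth-table check:

for needed in (False, True):
    for applied in (False, True):
        old_passes = (not needed) or applied
        new_passes = not (needed or applied)
        print(needed, applied, old_passes, new_passes)
# The two assertions disagree exactly when applied is True, i.e. the new
# test also flags a patch that is no longer needed but still in place.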
from fabric import api
def raspberry_pi(name):
api.env.hosts = ["{0}.local".format(name)]
api.env.user = 'pi'
def deploy():
api.require('hosts', provided_by=[raspberry_pi])
with api.settings(warn_only=True):
api.sudo('service sensor-rpc stop')
with api.cd('~/Pi-Sensor-RPC-Service'):
api.run('git pull origin master')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
| Add install task to fab file
| from StringIO import StringIO
from fabric import api
from fabric.operations import prompt, put
UPSTART_TEMPLATE = """
description "Pi-Sensor-RPC-Service"
start on runlevel [2345]
stop on runlevel [06]
respawn
respawn limit 10 5
env LOGGLY_TOKEN={loggly_token}
env LOGGLY_SUBDOMAIN={loggly_domain}
env SERIAL_ADDRESS={serial_address}
env SERIAL_RATE={serial_rate}
env RABBIT_URL={rabbit_url}
script
cd /home/pi/Pi-Sensor-RPC-Service/app && node main.js
end script
"""
def raspberry_pi():
api.env.hosts = ["{0}.local".format(prompt("Raspberry Pi:"))]
api.env.user = 'pi'
def install():
api.require('hosts', provided_by=[raspberry_pi])
upstart_values = {}
upstart_values['loggly_token'] = prompt("Loggly token:")
upstart_values['loggly_domain'] = prompt("Loggly domain:")
upstart_values['serial_address'] = prompt("Serial address:")
upstart_values['serial_rate'] = prompt("Serial rate:")
upstart_values['rabbit_url'] = prompt("Rabbit URL:")
upstart_file = StringIO(UPSTART_TEMPLATE.format(**upstart_values))
with api.cd('/etc/init'):
upload = put(upstart_file, 'sensor-rpc.conf', use_sudo=True)
assert upload.succeeded
api.run('git clone https://github.com/projectweekend/Pi-Sensor-RPC-Service.git')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
def update():
api.require('hosts', provided_by=[raspberry_pi])
with api.settings(warn_only=True):
api.sudo('service sensor-rpc stop')
with api.cd('~/Pi-Sensor-RPC-Service'):
api.run('git pull origin master')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
|
Add forward propagation and network initialization functions
| import numpy as np
def initializeNetwork(layerSizes=[1, 2, 3, 4, 1]):
    l = len(layerSizes)
    parameters = {}
    for i in range(1, l):
        # Small random weights; zero biases sized to the current layer.
        parameters['W' + str(i)] = np.random.randn(layerSizes[i], layerSizes[i - 1]) * 0.1
        parameters['b' + str(i)] = np.zeros((layerSizes[i], 1))
    return parameters
def forwardProp(X, parameters):
    As = {}
    A = X
    l = len(parameters) // 2
    for i in range(1, l + 1):  # walk every layer, including the last
        A = np.dot(parameters['W' + str(i)], A) + parameters['b' + str(i)]
        As['A' + str(i)] = A
    return As
parameters = initializeNetwork()
forwardProp(np.array([[1,2,3,1]]),parameters)
|
|
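A quick shape check of the helpers above, assuming the default layerSizes=[1, 2, 3, 4, 1] and a batch of four one-feature samples:

parameters = initializeNetwork()
activations = forwardProp(np.array([[1, 2, 3, 1]]), parameters)
for name in sorted(activations):
    print(name, activations[name].shape)
# A1 (2, 4), A2 (3, 4), A3 (4, 4), A4 (1, 4): each W_i maps the 4-sample
# batch from layerSizes[i-1] rows to layerSizes[i] rows.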
from SVGGenerator import SVGGenerator
class Label(SVGGenerator):
def __init__(self, x, y, text):
SVGGenerator.__init__(self, './label.svg.mustache')
self.x = x
self.y = y
self.text = text
self.alignment = "start"
self.font_size = 12
self.color = "rgb(64,64,64)"
| Set default label color to black
| from SVGGenerator import SVGGenerator
class Label(SVGGenerator):
def __init__(self, x, y, text):
SVGGenerator.__init__(self, './label.svg.mustache')
self.x = x
self.y = y
self.text = text
self.alignment = "start"
self.font_size = 14
self.color = "rgb(0,0,0)"
|
VERSION = (0, 1, 1)
def get_version():
"Returns the version as a human-format string."
return '.'.join([str(i) for i in VERSION])
| Increment version to 0.1.2 to fix pip not updating to edge
|
VERSION = (0, 1, 2)
def get_version():
"Returns the version as a human-format string."
return '.'.join([str(i) for i in VERSION])
|
from fabric.api import *
env.hosts = [
'192.168.1.144'
]
env.user = 'pi'
def prepare_raspberry_pi():
pass
def remote_pull():
with cd('virtualenvs/queen/queen'):
run('git pull')
def deploy():
local('git commit -a')
local('git push origin')
remote_pull()
| Add fab commands to push and pull
| from fabric.api import *
env.hosts = [
'192.168.1.144'
]
env.user = 'pi'
def prepare_raspberry_pi():
pass
def remote_pull():
with cd('virtualenvs/queen/queen'):
run('git pull')
def commit():
local('git commit -a')
def push():
local('git push origin')
def deploy():
commit()
push()
remote_pull()
|
from __future__ import absolute_import, division, print_function
import pytest
from screen19.screen import Screen19
def test_screen19_command_line_help_does_not_crash():
Screen19().run([])
def test_screen19(dials_data, tmpdir):
data_dir = dials_data("x4wide").strpath
with tmpdir.as_cwd():
Screen19().run([data_dir])
logfile = tmpdir.join("screen19.log").read()
assert "screen19 successfully completed" in logfile
assert "photon incidence rate is outside the linear response region" in logfile
@pytest.mark.xfail(raises=ValueError, reason="LAPACK bug?")
def test_screen19_single_frame(dials_data, tmpdir):
# TODO Use a single frame with fewer than 80 reflections
image = dials_data("x4wide").join("X4_wide_M1S4_2_0001.cbf").strpath
with tmpdir.as_cwd():
Screen19().run([image])
logfile = tmpdir.join("screen19.log").read()
assert "screen19 successfully completed" in logfile
| Fix tests broken by new log configuration option
| from __future__ import absolute_import, division, print_function
import pytest
from screen19.screen import Screen19
def test_screen19_command_line_help_does_not_crash():
Screen19().run([])
def test_screen19(dials_data, tmpdir):
data_dir = dials_data("x4wide").strpath
with tmpdir.as_cwd():
Screen19().run([data_dir], set_up_logging=True)
logfile = tmpdir.join("screen19.log").read()
assert "screen19 successfully completed" in logfile
assert "photon incidence rate is outside the linear response region" in logfile
@pytest.mark.xfail(raises=ValueError, reason="LAPACK bug?")
def test_screen19_single_frame(dials_data, tmpdir):
# TODO Use a single frame with fewer than 80 reflections
image = dials_data("x4wide").join("X4_wide_M1S4_2_0001.cbf").strpath
with tmpdir.as_cwd():
Screen19().run([image], set_up_logging=True)
logfile = tmpdir.join("screen19.log").read()
assert "screen19 successfully completed" in logfile
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'DROP INDEX IF EXISTS idx_16977_product_id;'
)
except (ProgrammingError, OperationalError):
pass
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
)
except (ProgrammingError, OperationalError):
pass
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.RunPython(fix_fee_product_index),
migrations.DeleteModel(
name='Fee',
),
]
| Fix order of migration in operations
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'DROP INDEX IF EXISTS idx_16977_product_id;'
)
except (ProgrammingError, OperationalError):
pass
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
)
except (ProgrammingError, OperationalError):
pass
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
import plugin
import urllib2
import logging
import re
from httplib import HTTPException
from htmllib import HTMLParser
class Plugin(plugin.baseplugin):
def __init__(self, bot):
self._regex = re.compile("(https?://.+)\s?")
self._ua = "Mozilla/5.0 (X11; Linux x86_64; rv:2.0b8) Gecko/20100101 Firefox/4.0b8"
bot.register_regex(self._regex, self.title)
def unescape(self, s):
p = HTMLParser(None)
p.save_bgn()
p.feed(s)
return p.save_end()
def title(self, bot, user, channel, text, match_obj):
url = match_obj.groups()[0]
req = urllib2.Request(url)
req.add_header("User-Agent", self._ua)
try:
content = urllib2.urlopen(req).read()
except (urllib2.URLError, HTTPException), e:
logging.debug("%s - %s" % (e, url))
return
beg = content.find("<title>")
if beg != -1:
title = content[beg+7:content.find("</title>")].replace("\n","")
title = self.unescape(title)
bot.privmsg(channel, "Title: %s" % unicode(title, "utf-8"))
| Use the HTMLParser module instead of htmllib
| import plugin
import urllib2
import logging
import re
from httplib import HTTPException
import HTMLParser
class Plugin(plugin.baseplugin):
def __init__(self, bot):
self._regex = re.compile("(https?://.+)\s?")
self._ua = "Mozilla/5.0 (X11; Linux x86_64; rv:2.0b8) Gecko/20100101 Firefox/4.0b8"
bot.register_regex(self._regex, self.title)
def unescape(self, s):
p = HTMLParser.HTMLParser(None)
p.save_bgn()
p.feed(s)
return p.save_end()
def title(self, bot, user, channel, text, match_obj):
url = match_obj.groups()[0]
req = urllib2.Request(url)
req.add_header("User-Agent", self._ua)
try:
content = urllib2.urlopen(req).read()
except (urllib2.URLError, HTTPException), e:
logging.debug("%s - %s" % (e, url))
return
beg = content.find("<title>")
if beg != -1:
title = content[beg+7:content.find("</title>")].replace("\n","")
try:
title = self.unescape(title)
except HTMLParser.HTMLParseError, e:
logging.info("%s - %s" % (e.msg, url))
bot.privmsg(channel, "Title: %s" % unicode(title, "utf-8"))
|
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright © 2014-2016 Tim Bielawa <timbielawa@gmail.com>
# See GitHub Contributors Graph for more information
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sub-license, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Kept for backward compatibility
from .argparse import BitmathType
try:
from .progressbar import BitmathFileTransferSpeed
except ModuleNotFoundError:
# Ignore missing dependency as argparse integration will fail if
# progressbar is not installed (#86).
pass
| Fix build errors with Python <3.6
The `ModuleNotFoundError` was introduced in Python 3.6.
| # -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright © 2014-2016 Tim Bielawa <timbielawa@gmail.com>
# See GitHub Contributors Graph for more information
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sub-license, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Kept for backward compatibility
from .argparse import BitmathType
try:
from .progressbar import BitmathFileTransferSpeed
except ImportError:
# Ignore missing dependency as argparse integration will fail if
# progressbar is not installed (#86).
pass
|
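The swap works because ModuleNotFoundError (added in Python 3.6) subclasses ImportError, so catching ImportError behaves the same on every interpreter; a small check with a hypothetical missing module:

try:
    import progressbar_missing_hypothetical  # hypothetical absent module
except ImportError as error:
    # Prints ModuleNotFoundError on Python >= 3.6, ImportError before that.
    print(type(error).__name__)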
import curses
from .character import Character
from .console import Console
from .line import Line
from .attribute import RenditionAttribute, ColorAttribute
from .misc import ESCAPE_CHARS, DELETE_CHARS, BACKSPACE_CHARS, \
is_printable_char, char_with_control_key
__console = None
def turn_on_console(asciize=False, spaces_per_tab=4):
Line.ASCIIZE = asciize
Line.SPACES_PER_TAB = spaces_per_tab
window = curses.initscr()
curses.noecho()
curses.cbreak()
curses.start_color()
curses.use_default_colors()
ColorAttribute.initialize()
global __console
__console = Console(window)
return __console
def turn_off_console():
curses.nocbreak()
curses.echo()
curses.endwin()
| Remove an extra global variable in xorcise package
| import curses
from .character import Character
from .console import Console
from .line import Line
from .attribute import RenditionAttribute, ColorAttribute
from .misc import ESCAPE_CHARS, DELETE_CHARS, BACKSPACE_CHARS, \
is_printable_char, char_with_control_key
def turn_on_console(asciize=False, spaces_per_tab=4):
Line.ASCIIZE = asciize
Line.SPACES_PER_TAB = spaces_per_tab
window = curses.initscr()
curses.noecho()
curses.cbreak()
curses.start_color()
curses.use_default_colors()
ColorAttribute.initialize()
return Console(window)
def turn_off_console():
curses.nocbreak()
curses.echo()
curses.endwin()
|
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest
# Import Salt libs
import salt.utils.platform
@skipIf(not salt.utils.platform.is_windows(), 'windows test only')
class WinDNSTest(ModuleCase):
'''
Test for salt.modules.win_dns_client
'''
@destructiveTest
def test_add_remove_dns(self):
'''
Test adding and removing a dns server
'''
dns = '8.8.8.8'
interface = 'Ethernet'
# add dns server
self.assertTrue(self.run_function('win_dns_client.add_dns', [dns, interface], index=42))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertIn(dns, srvs)
# remove dns server
self.assertTrue(self.run_function('win_dns_client.rm_dns', [dns], interface=interface))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertNotIn(dns, srvs)
| Fix the failing dns test on Windows
Gets the name of the first interface on the system. Windows network
interfaces don't have the same name across Windows systems. You can even
go as far as naming them whatever you want. The test was failing because
the interface name was hard-coded as 'Ethernet'.
| # -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest
# Import Salt libs
import salt.utils.platform
@skipIf(not salt.utils.platform.is_windows(), 'windows test only')
class WinDNSTest(ModuleCase):
'''
Test for salt.modules.win_dns_client
'''
@destructiveTest
def test_add_remove_dns(self):
'''
Test adding and removing a dns server
'''
# Get a list of interfaces on the system
interfaces = self.run_function('network.interfaces_names')
if not interfaces:
    self.skipTest('This test requires a network interface')
interface = interfaces[0]
dns = '8.8.8.8'
# add dns server
self.assertTrue(self.run_function('win_dns_client.add_dns', [dns, interface], index=42))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertIn(dns, srvs)
# remove dns server
self.assertTrue(self.run_function('win_dns_client.rm_dns', [dns], interface=interface))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertNotIn(dns, srvs)
|
import os
from django.core.management.base import BaseCommand
from django.conf import settings
from fabtastic import db
from fabtastic.util.aws import get_s3_connection
class Command(BaseCommand):
help = 'Backs the DB up to S3. Make sure to run s3cmd --configure.'
def handle(self, *args, **options):
db_alias = getattr(settings, 'FABTASTIC_DUMP_DB_ALIAS', 'default')
# Get DB settings from settings.py.
database = db.util.get_db_setting_dict(db_alias=db_alias)
# Generate a temporary DB dump filename.
dump_filename = db.util.get_db_dump_filename(db_alias=db_alias)
# Carry out the DB dump.
dump_file_path = db.dump_db_to_file(dump_filename, database)
print "Uploading to S3."
conn = get_s3_connection()
bucket = conn.create_bucket(settings.S3_DB_BACKUP['BUCKET'])
key = bucket.new_key(dump_filename)
key.set_contents_from_filename(dump_file_path)
print "S3 DB backup complete."
# Clean up the temporary download file.
os.remove(dump_filename)
| Make backing up to S3 use a year/month/date structure for backups, for S3 clients that 'fake' directory structures.
| import os
import datetime
from django.core.management.base import BaseCommand
from django.conf import settings
from fabtastic import db
from fabtastic.util.aws import get_s3_connection
class Command(BaseCommand):
help = 'Backs the DB up to S3. Make sure to run s3cmd --configure.'
def handle(self, *args, **options):
db_alias = getattr(settings, 'FABTASTIC_DUMP_DB_ALIAS', 'default')
# Get DB settings from settings.py.
database = db.util.get_db_setting_dict(db_alias=db_alias)
# Generate a temporary DB dump filename.
dump_filename = db.util.get_db_dump_filename(db_alias=db_alias)
# Carry out the DB dump.
dump_file_path = db.dump_db_to_file(dump_filename, database)
print "Uploading to S3."
conn = get_s3_connection()
bucket = conn.create_bucket(settings.S3_DB_BACKUP['BUCKET'])
now = datetime.datetime.now()
s3_path = '%d/%d/%d/%s' % (
now.year,
now.month,
now.day,
dump_filename,
)
key = bucket.new_key(s3_path)
key.set_contents_from_filename(dump_file_path)
bucket.copy_key(
'latest_db.sql.tar.bz2',
settings.S3_DB_BACKUP['BUCKET'],
s3_path,
)
print "S3 DB backup complete."
# Clean up the temporary download file.
os.remove(dump_filename)
|
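One design note on the key layout above: the '%d/%d/%d/%s' template renders months and days without zero padding (e.g. 2019/3/7/...), so keys will not sort lexicographically across months. If padded prefixes are wanted, a strftime-based sketch:

import datetime

now = datetime.datetime(2019, 3, 7)
s3_path = '%s/%s' % (now.strftime('%Y/%m/%d'), 'dump.sql.tar.bz2')
print s3_path  # 2019/03/07/dump.sql.tar.bz2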
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from . import utils
from .main import Centerline
__all__ = ['utils', 'Centerline']
| Convert the list to a tuple
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from . import utils
from .main import Centerline
__all__ = ('utils', 'Centerline')
|
from django.core.management.base import BaseCommand
from nightreads.emails.models import Email
from nightreads.emails.email_service import send_email_obj
from nightreads.emails.views import get_subscriber_emails
class Command(BaseCommand):
help = 'Send the email to subscribers'
def handle(self, *args, **options):
email_obj = Email.objects.filter(is_sent=False).first()
if not email_obj:
return
email_obj.recipients = get_subscriber_emails(email_obj=email_obj)
send_email_obj(email_obj=email_obj)
email_obj.is_sent = True
email_obj.targetted_users = len(email_obj.recipients)
email_obj.save()
self.stdout.write(
self.style.SUCCESS('Successfully sent email {}'.format(email_obj)))
| Add a note if no emails are available to send
| from django.core.management.base import BaseCommand
from nightreads.emails.models import Email
from nightreads.emails.email_service import send_email_obj
from nightreads.emails.views import get_subscriber_emails
class Command(BaseCommand):
help = 'Send the email to subscribers'
def handle(self, *args, **options):
email_obj = Email.objects.filter(is_sent=False).first()
if not email_obj:
self.stdout.write(
self.style.SUCCESS('No emails available to send'))
return
email_obj.recipients = get_subscriber_emails(email_obj=email_obj)
send_email_obj(email_obj=email_obj)
email_obj.is_sent = True
email_obj.targetted_users = len(email_obj.recipients)
email_obj.save()
self.stdout.write(
self.style.SUCCESS('Successfully sent email {}'.format(email_obj)))
|
# This file is part of e-Giełda.
# Copyright (C) 2014 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda. If not, see <http://www.gnu.org/licenses/>.
from django.db.models import QuerySet
def new_exclude(self, *args, **kwargs):
new_kwargs = dict()
for key, value in kwargs.items():
if not ((isinstance(value, list) and not value) or (isinstance(value, QuerySet) and not value)):
new_kwargs[key] = value
if len(new_kwargs):
return old_exclude(self, *args, **new_kwargs)
else:
return self
def new_filter(self, *args, **kwargs):
new_kwargs = dict()
for key, value in kwargs.items():
if not ((isinstance(value, list) and not value) or (isinstance(value, QuerySet) and not value)):
new_kwargs[key] = value
if len(new_kwargs):
return old_filter(self, *args, **new_kwargs)
else:
return self
old_exclude = QuerySet.exclude
QuerySet.exclude = new_exclude
old_filter = QuerySet.filter
QuerySet.filter = new_filter | Fix buggy patched QuerySet methods
| # This file is part of e-Giełda.
# Copyright (C) 2014 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda. If not, see <http://www.gnu.org/licenses/>.
from django.db.models import QuerySet
def new_exclude(self, *args, **kwargs):
new_kwargs = dict()
for key, value in kwargs.items():
if not ((isinstance(value, list) and not value) or (isinstance(value, QuerySet) and not value)):
new_kwargs[key] = value
return old_exclude(self, *args, **new_kwargs)
def new_filter(self, *args, **kwargs):
new_kwargs = dict()
for key, value in kwargs.items():
if not ((isinstance(value, list) and not value) or (isinstance(value, QuerySet) and not value)):
new_kwargs[key] = value
return old_filter(self, *args, **new_kwargs)
old_exclude = QuerySet.exclude
QuerySet.exclude = new_exclude
old_filter = QuerySet.filter
QuerySet.filter = new_filter |
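The net effect of the patched methods is that empty list (or empty QuerySet) keyword arguments are dropped before reaching the ORM, while everything else passes through unchanged; a standalone emulation of the filtering rule:

def drop_empty(kwargs):
    # Mirror the patch: discard kwargs whose value is an empty list.
    return {k: v for k, v in kwargs.items()
            if not (isinstance(v, list) and not v)}

print(drop_empty({'id__in': [], 'name': 'x'}))  # {'name': 'x'}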
from api.caching.tasks import ban_url, logger
from framework.guid.model import Guid
from framework.tasks.handlers import enqueue_task
from modularodm import signals
@signals.save.connect
def log_object_saved(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
else:
# I don't think this should ever happen, but ... just in case.
guid_obj = Guid.load(instance._id)
if guid_obj is not None:
typedModel = guid_obj.referent
if hasattr(typedModel, 'absolute_api_v2_url'):
abs_url = typedModel.absolute_api_v2_url
if abs_url is not None:
enqueue_task(ban_url.s(abs_url))
else:
logger.error('Cannot ban None url for {} with id {}'.format(instance._name, instance._id))
| Remove logging. It will just break travis.
| from api.caching.tasks import ban_url, logger
from framework.guid.model import Guid
from framework.tasks.handlers import enqueue_task
from modularodm import signals
@signals.save.connect
def log_object_saved(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
else:
# I don't think this should ever happen, but ... just in case.
guid_obj = Guid.load(instance._id)
if guid_obj is not None:
typedModel = guid_obj.referent
if hasattr(typedModel, 'absolute_api_v2_url'):
abs_url = typedModel.absolute_api_v2_url
if abs_url is not None:
enqueue_task(ban_url.s(abs_url))
|
#!/usr/bin/env python
import actorcore.ICC
class OurActor(actorcore.ICC.ICC):
def __init__(self, name,
productName=None, configFile=None,
modelNames=('charis', 'hx'),
debugLevel=30):
""" Setup an Actor instance. See help for actorcore.Actor for details. """
# This sets up the connections to/from the hub, the logger, and the twisted reactor.
#
actorcore.ICC.ICC.__init__(self, name,
productName=productName,
configFile=configFile,
modelNames=modelNames)
#
# To work
def main():
theActor = OurActor('hx', productName='hxActor')
theActor.run()
if __name__ == '__main__':
main()
| Make sure we auto-connect to the IDL server
| #!/usr/bin/env python
import actorcore.ICC
class OurActor(actorcore.ICC.ICC):
def __init__(self, name,
productName=None, configFile=None,
modelNames=('charis', 'hx'),
debugLevel=30):
""" Setup an Actor instance. See help for actorcore.Actor for details. """
# This sets up the connections to/from the hub, the logger, and the twisted reactor.
#
actorcore.ICC.ICC.__init__(self, name,
productName=productName,
configFile=configFile,
modelNames=modelNames)
self.everConnected = False
def connectionMade(self):
if self.everConnected is False:
self.logger.info("Attaching all controllers...")
self.allControllers = [s.strip() for s in self.config.get(self.name, 'startingControllers').split(',')]
self.attachAllControllers()
self.everConnected = True
#
# To work
def main():
theActor = OurActor('hx', productName='hxActor')
theActor.run()
if __name__ == '__main__':
main()
|
from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from import_export.admin import ExportMixin
from .models import AdditionalSpeaker, TalkProposal, TutorialProposal
from .resources import TalkProposalResource
class AdditionalSpeakerInline(GenericTabularInline):
model = AdditionalSpeaker
fields = ['user', 'status', 'cancelled']
ct_field = 'proposal_type'
ct_fk_field = 'proposal_id'
extra = 0
class ProposalAdmin(admin.ModelAdmin):
fields = [
'conference', 'submitter', 'title', 'category', 'duration',
'language', 'abstract', 'python_level', 'objective',
'detailed_description', 'outline', 'supplementary',
'recording_policy', 'slide_link', 'cancelled',
]
readonly_fields = ['conference', 'submitter']
search_fields = ['title', 'abstract']
inlines = [AdditionalSpeakerInline]
@admin.register(TalkProposal)
class TalkProposalAdmin(ExportMixin, ProposalAdmin):
fields = ProposalAdmin.fields + ['accepted']
list_display = [
'title', 'category', 'duration', 'language',
'python_level', 'accepted',
]
list_filter = [
'category', 'duration', 'language', 'python_level', 'accepted',
]
resource_class = TalkProposalResource
@admin.register(TutorialProposal)
class TutorialProposalAdmin(ProposalAdmin):
list_display = ['title', 'category', 'language', 'python_level']
list_filter = ['category', 'language', 'python_level']
| Make ProposalAdmin work when creating
| from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from import_export.admin import ExportMixin
from .models import AdditionalSpeaker, TalkProposal, TutorialProposal
from .resources import TalkProposalResource
class AdditionalSpeakerInline(GenericTabularInline):
model = AdditionalSpeaker
fields = ['user', 'status', 'cancelled']
ct_field = 'proposal_type'
ct_fk_field = 'proposal_id'
extra = 0
class ProposalAdmin(admin.ModelAdmin):
fields = [
'conference', 'submitter', 'title', 'category', 'duration',
'language', 'abstract', 'python_level', 'objective',
'detailed_description', 'outline', 'supplementary',
'recording_policy', 'slide_link', 'cancelled',
]
search_fields = ['title', 'abstract']
inlines = [AdditionalSpeakerInline]
@admin.register(TalkProposal)
class TalkProposalAdmin(ExportMixin, ProposalAdmin):
fields = ProposalAdmin.fields + ['accepted']
list_display = [
'title', 'category', 'duration', 'language',
'python_level', 'accepted',
]
list_filter = [
'category', 'duration', 'language', 'python_level', 'accepted',
]
resource_class = TalkProposalResource
@admin.register(TutorialProposal)
class TutorialProposalAdmin(ProposalAdmin):
list_display = ['title', 'category', 'language', 'python_level']
list_filter = ['category', 'language', 'python_level']
|
#!/usr/bin/env python
from storage import Storage
NEW_REPORT = {'foo': 'bar', 'boo': 'baz'}
def main():
db_store = Storage.get_storage()
for key, value in db_store.__dict__.iteritems():
print '%s: %s' % (key, value)
print '\n'
# report_id = db_store.store(NEW_REPORT)
report_id = 'AVM0dGOF6iQbRONBw9yB'
print db_store.get_report(report_id)
print db_store.get_report(3)
# db_store.delete(report_id)
# print db_store.delete(2)
if __name__ == '__main__':
main()
| Add populate es function to test driver
| #!/usr/bin/env python
from storage import Storage
NEW_REPORT = {'foo': 'bar', 'boo': 'baz'}
REPORTS = [
{'report_id': 1, 'report': {"/tmp/example": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
{'report_id': 2, 'report': {"/opt/other_file": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]
def populate_es():
db_store = Storage.get_storage()
for report in REPORTS:
db_store.store(report)
def main():
db_store = Storage.get_storage()
for key, value in db_store.__dict__.iteritems():
print '%s: %s' % (key, value)
print '\n'
# report_id = db_store.store(NEW_REPORT)
report_id = 'AVM0dGOF6iQbRONBw9yB'
print db_store.get_report(report_id)
print db_store.get_report(3)
# db_store.delete(report_id)
# print db_store.delete(2)
if __name__ == '__main__':
main()
|
from datetime import timedelta
CELERYBEAT_SCHEDULE = {
"reddit-validations": {
"task": "reddit.tasks.process_validations",
"schedule": timedelta(minutes=10),
},
"eveapi-update": {
"task": "eve_api.tasks.account.queue_apikey_updates",
"schedule": timedelta(minutes=10),
},
"alliance-update": {
"task": "eve_api.tasks.alliance.import_alliance_details",
"schedule": timedelta(hours=6),
},
"api-log-clear": {
"task": "eve_proxy.tasks.clear_old_logs",
"schedule": timedelta(days=1),
},
"blacklist-check": {
"task": "hr.tasks.blacklist_check",
"schedule": timedelta(days=1),
},
"reddit-update": {
"task": "reddit.tasks.queue_account_updates",
"schedule": timedelta(minutes=15),
}
}
CELERY_ROUTES = {
"sso.tasks.update_service_groups": {'queue': 'bulk'},
}
| Add the blacklist check to the bulk queue
| from datetime import timedelta
CELERYBEAT_SCHEDULE = {
"reddit-validations": {
"task": "reddit.tasks.process_validations",
"schedule": timedelta(minutes=10),
},
"eveapi-update": {
"task": "eve_api.tasks.account.queue_apikey_updates",
"schedule": timedelta(minutes=10),
},
"alliance-update": {
"task": "eve_api.tasks.alliance.import_alliance_details",
"schedule": timedelta(hours=6),
},
"api-log-clear": {
"task": "eve_proxy.tasks.clear_old_logs",
"schedule": timedelta(days=1),
},
"blacklist-check": {
"task": "hr.tasks.blacklist_check",
"schedule": timedelta(days=1),
},
"reddit-update": {
"task": "reddit.tasks.queue_account_updates",
"schedule": timedelta(minutes=15),
}
}
CELERY_ROUTES = {
"sso.tasks.update_service_groups": {'queue': 'bulk'},
"hr.tasks.blacklist_check": {'queue': 'bulk'},
}
|
from sqlalchemy.sql import text
from c2corg_api.scripts.migration.migrate_base import MigrateBase
class UpdateSequences(MigrateBase):
sequences = [
('guidebook', 'documents_archives', 'id', 'documents_archives_id_seq'),
('guidebook', 'documents', 'document_id', 'documents_document_id_seq'),
('guidebook', 'documents_geometries_archives', 'id',
'documents_geometries_archives_id_seq'),
('guidebook', 'documents_locales_archives', 'id',
'documents_locales_archives_id_seq'),
('guidebook', 'documents_locales', 'id', 'documents_locales_id_seq'),
('guidebook', 'documents_versions', 'id', 'documents_versions_id_seq'),
('guidebook', 'history_metadata', 'id', 'history_metadata_id_seq'),
]
def migrate(self):
self.start('sequences')
stmt = "select setval('{0}.{1}', (select max({2}) from {0}.{3}));"
for schema, table, field, sequence in UpdateSequences.sequences:
self.session_target.execute(text(
stmt.format(schema, sequence, field, table)))
self.stop()
| Add missing user_id_seq in migration script
| from sqlalchemy.sql import text
from c2corg_api.scripts.migration.migrate_base import MigrateBase
class UpdateSequences(MigrateBase):
sequences = [
('guidebook', 'documents_archives', 'id', 'documents_archives_id_seq'),
('guidebook', 'documents', 'document_id', 'documents_document_id_seq'),
('guidebook', 'documents_geometries_archives', 'id',
'documents_geometries_archives_id_seq'),
('guidebook', 'documents_locales_archives', 'id',
'documents_locales_archives_id_seq'),
('guidebook', 'documents_locales', 'id', 'documents_locales_id_seq'),
('guidebook', 'documents_versions', 'id', 'documents_versions_id_seq'),
('guidebook', 'history_metadata', 'id', 'history_metadata_id_seq'),
('users', 'user', 'id', 'user_id_seq'),
]
def migrate(self):
self.start('sequences')
stmt = "select setval('{0}.{1}', (select max({2}) from {0}.{3}));"
for schema, table, field, sequence in UpdateSequences.sequences:
self.session_target.execute(text(
stmt.format(schema, sequence, field, table)))
self.stop()
|
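For the new users entry, the statement template renders to plain setval SQL that resets the sequence to the table's current maximum id:

stmt = "select setval('{0}.{1}', (select max({2}) from {0}.{3}));"
print(stmt.format('users', 'user_id_seq', 'id', 'user'))
# select setval('users.user_id_seq', (select max(id) from users.user));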
from django.views.generic import View
from django.http import JsonResponse
from chatterbot import ChatBot
from chatterbot.ext.django_chatterbot import settings
class ChatterBotView(View):
chatterbot = ChatBot(**settings.CHATTERBOT)
def post(self, request, *args, **kwargs):
input_statement = request.POST.get('text')
response_data = self.chatterbot.get_response(input_statement)
return JsonResponse(response_data, status=200)
def get(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.',
'name': self.chatterbot.name
}
# Return a method not allowed response
return JsonResponse(data, status=405)
def patch(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
def delete(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
| Return recent statement data in GET response.
| from django.views.generic import View
from django.http import JsonResponse
from chatterbot import ChatBot
from chatterbot.ext.django_chatterbot import settings
import json
class ChatterBotView(View):
chatterbot = ChatBot(**settings.CHATTERBOT)
def _serialize_recent_statements(self):
if self.chatterbot.recent_statements.empty():
return []
recent_statements = []
for statement, response in self.chatterbot.recent_statements:
recent_statements.append([statement.serialize(), response.serialize()])
return recent_statements
def post(self, request, *args, **kwargs):
if request.is_ajax():
data = json.loads(request.body)
input_statement = data.get('text')
else:
input_statement = request.POST.get('text')
response_data = self.chatterbot.get_response(input_statement)
return JsonResponse(response_data, status=200)
def get(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.',
'name': self.chatterbot.name,
'recent_statements': self._serialize_recent_statements()
}
# Return a method not allowed response
return JsonResponse(data, status=405)
def patch(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
def delete(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
|
from flask import Flask
from flask_login import LoginManager
from flask_misaka import Misaka
from flask_moment import Moment
from flask_security import Security, SQLAlchemyUserDatastore
from flask_sqlalchemy import SQLAlchemy
from flask_whooshalchemy import whoosh_index
from config import config
db = SQLAlchemy()
lm = LoginManager()
markdown = Misaka()
moment = Moment()
security = Security()
from app.models import Post, User, Role
user_datastore = SQLAlchemyUserDatastore(db, User, Role)
def create_app(config_name):
"""
Initializes a Flask app.
Args:
config_name: The configuration object to use.
Returns:
The Flask app object.
"""
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
markdown.init_app(app)
moment.init_app(app)
security.init_app(app, user_datastore)
whoosh_index(app, Post)
from app.main.views import main
from app.admin.views import admin
app.register_blueprint(main)
app.register_blueprint(admin, url_prefix='/admin')
return app
| Add AppSettings dict to app config and inject it
| from flask import Flask
from flask_login import LoginManager
from flask_misaka import Misaka
from flask_moment import Moment
from flask_security import Security, SQLAlchemyUserDatastore
from flask_sqlalchemy import SQLAlchemy
from flask_whooshalchemy import whoosh_index
from config import config
db = SQLAlchemy()
lm = LoginManager()
markdown = Misaka()
moment = Moment()
security = Security()
from app.models import Post, User, Role
user_datastore = SQLAlchemyUserDatastore(db, User, Role)
def create_app(config_name):
"""
Initializes a Flask app.
Args:
config_name: The configuration object to use.
Returns:
The Flask app object.
"""
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
markdown.init_app(app)
moment.init_app(app)
security.init_app(app, user_datastore)
whoosh_index(app, Post)
from app.utils.settings import AppSettings
with app.app_context():
app.config['SETTINGS'] = AppSettings()
@app.context_processor
def inject_settings():
return {'settings': app.config['SETTINGS']}
from app.main.views import main
from app.admin.views import admin
app.register_blueprint(main)
app.register_blueprint(admin, url_prefix='/admin')
return app
|
class Countable(object):
@classmethod
def count(cls, options={}):
return int(cls.get("count", **options))
class Metafields(object):
def metafields(self):
return Metafield.find(resource=self.__class__.plural, resource_id=self.id)
def add_metafield(self, metafield):
if self.is_new():
raise ValueError("You can only add metafields to a resource that has been saved")
metafield._prefix_options = dict(resource=self.__class__.plural, resource_id=self.id)
metafield.save()
return metafield
class Events(object):
def events(self):
return Event.find(resource=self.__class__.plural, resource_id=self.id)
| Allow count method to be used the same way as find.
| class Countable(object):
@classmethod
def count(cls, _options=None, **kwargs):
if _options is None:
_options = kwargs
return int(cls.get("count", **_options))
class Metafields(object):
def metafields(self):
return Metafield.find(resource=self.__class__.plural, resource_id=self.id)
def add_metafield(self, metafield):
if self.is_new():
raise ValueError("You can only add metafields to a resource that has been saved")
metafield._prefix_options = dict(resource=self.__class__.plural, resource_id=self.id)
metafield.save()
return metafield
class Events(object):
def events(self):
return Event.find(resource=self.__class__.plural, resource_id=self.id)
|
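With the reworked signature, an explicit options dict and plain keyword arguments both end up as the same options passed to cls.get; a standalone mimic of the dispatch rule:

def count(_options=None, **kwargs):
    # Explicit dict wins; otherwise fall back to the keyword arguments.
    if _options is None:
        _options = kwargs
    return _options

print(count({'since_id': 10}))  # {'since_id': 10}
print(count(since_id=10))       # {'since_id': 10}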
# coding: utf-8
from __future__ import unicode_literals
import six
__all__ = [
'collaboration',
'events',
'file',
'folder',
'group',
'group_membership',
'search',
'user',
]
if six.PY2:
__all__ = [unicode.encode(x, 'utf-8') for x in __all__]
| Change format of sub-module names in the object module to str
| # coding: utf-8
from __future__ import unicode_literals
from six.moves import map # pylint:disable=redefined-builtin
__all__ = list(map(str, ['collaboration', 'events', 'file', 'folder', 'group', 'group_membership', 'search', 'user']))
|
import pytest
from hypothesis import given
from hypothesis.strategies import lists, text
from datatyping.datatyping import validate
@given(ss=lists(text()))
def test_simple(ss):
assert validate([str], ss) is None
@given(s=text())
def test_simple_error(s):
with pytest.raises(TypeError):
validate([str], s)
@given(ss=lists(lists(text())))
def test_nested(ss):
assert validate([[str]], ss) is None
| Rewrite str tests with hypothesis
Remove lists from testing
| import pytest
from hypothesis import given
from hypothesis.strategies import integers, text
from datatyping.datatyping import validate
@given(string=text())
def test_simple(string):
assert validate(str, string) is None
@given(not_string=integers())
def test_simple_error(not_string):
with pytest.raises(TypeError):
validate(str, not_string)
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ilya Akhmadullin
# Copyright (c) 2013 Ilya Akhmadullin
#
# License: MIT
#
"""This module exports the jscs plugin class."""
from SublimeLinter.lint import Linter
class Jscs(Linter):
"""Provides an interface to jscs."""
syntax = ('javascript', 'html', 'html 5')
cmd = 'jscs -r checkstyle'
config_file = ('-c', '.jscs.json')
regex = (
r'^\s+?<error line="(?P<line>\d+)" '
r'column="(?P<col>\d+)" '
# jscs always reports with error severity; show as warning
r'severity="(?P<warning>error)" '
r'message="(?P<message>.+?)"'
)
multiline = True
selectors = {'html': 'source.js.embedded.html'}
tempfile_suffix = 'js'
| Use the full option name for clarity and also search in the user's home directory
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ilya Akhmadullin
# Copyright (c) 2013 Ilya Akhmadullin
#
# License: MIT
#
"""This module exports the jscs plugin class."""
from SublimeLinter.lint import Linter
class Jscs(Linter):
"""Provides an interface to jscs."""
syntax = ('javascript', 'html', 'html 5')
cmd = 'jscs -r checkstyle'
regex = (
r'^\s+?<error line="(?P<line>\d+)" '
r'column="(?P<col>\d+)" '
# jscs always reports with error severity; show as warning
r'severity="(?P<warning>error)" '
r'message="(?P<message>.+?)"'
)
multiline = True
selectors = {'html': 'source.js.embedded.html'}
tempfile_suffix = 'js'
config_file = ('--config', '.jscs.json', '~')
|
title="ScreenX TV: First Impressions"
description="""
My initial thoughts of [ScreenX TV](http://screenx.tv), a way to broadcast
your terminal to the world.
"""
time="2012-12-07 Fri 09:53 PM"
# related=[("Some article", "its/url")]
| Update timestamp on screenx post
| title="ScreenX TV: First Impressions"
description="""
My initial thoughts of [ScreenX TV](http://screenx.tv), a way to broadcast
your terminal to the world.
"""
time="2012-12-08 Sat 03:11 AM"
# related=[("Some article", "its/url")]
|
__description__ = "VOEvent Broker"
__url__ = "http://comet.transientskp.org/"
__author__ = "John Swinbank"
__contact__ = "swinbank@princeton.edu"
__version__ = "2.1.0-pre"
| Add an alias to the appropriate raw bytes type for this Python.
| __description__ = "VOEvent Broker"
__url__ = "http://comet.transientskp.org/"
__author__ = "John Swinbank"
__contact__ = "swinbank@princeton.edu"
__version__ = "2.1.0-pre"
import sys
if sys.version_info.major <= 2:
BINARY_TYPE = str
else:
BINARY_TYPE = bytes
|
import os
from rmc.config.flask_base import *
import rmc.shared.secrets as s
JS_DIR = 'js'
DEBUG = False
ENV = 'prod'
GA_PROPERTY_ID = 'UA-35073503-1'
LOG_DIR = '/home/rmc/logs'
LOG_PATH = os.path.join(LOG_DIR, 'server/server.log')
FB_APP_ID = '219309734863464'
FB_APP_SECRET = s.FB_APP_SECRET_PROD
| Revert "do not use minified js on prod"
This reverts commit 338f5f9c3ff8be9a87abff2b235408c8fed26346.
| import os
from rmc.config.flask_base import *
import rmc.shared.secrets as s
JS_DIR = 'js_prod'
DEBUG = False
ENV = 'prod'
GA_PROPERTY_ID = 'UA-35073503-1'
LOG_DIR = '/home/rmc/logs'
LOG_PATH = os.path.join(LOG_DIR, 'server/server.log')
FB_APP_ID = '219309734863464'
FB_APP_SECRET = s.FB_APP_SECRET_PROD
|
"""The vumi.application API."""
__all__ = ["ApplicationWorker", "SessionManager", "TagpoolManager",
"MessageStore"]
from vumi.application.base import ApplicationWorker
from vumi.application.session import SessionManager
from vumi.application.tagpool import TagpoolManager
from vumi.application.message_store import MessageStore
| Add HTTPRelayApplication to vumi.application package API.
| """The vumi.application API."""
__all__ = ["ApplicationWorker", "SessionManager", "TagpoolManager",
"MessageStore", "HTTPRelayApplication"]
from vumi.application.base import ApplicationWorker
from vumi.application.session import SessionManager
from vumi.application.tagpool import TagpoolManager
from vumi.application.message_store import MessageStore
from vumi.application.http_relay import HTTPRelayApplication
|
#!/usr/bin/env/python
import socket, fcntl, sys
#Lock to only allow one instance of this program to run
pid_file = '/tmp/send.pid'
fp = open(pid_file, 'w')
try:
fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
print 'An instance of this program is already running'
sys.exit(0)
import Adafruit_CharLCD as LCD
lcd = LCD.Adafruit_CharLCDPlate()
lcd.set_color(0,0,0)
listener = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_UDP)
number_packets_received = 0
def print_lcd():
lcd.clear()
lcd.message('# of packets\nreceived: ' + str(number_packets_received))
if __name__ == '__main__':
while True:
print_lcd()
print listener.recvfrom(7777), '\n', type(listener)
number_packets_received += 1
| Fix header of Python file
Now correctly points to the Python interpreter
| #!/usr/bin/env python
import socket, fcntl, sys
#Lock to only allow one instance of this program to run
pid_file = '/tmp/send.pid'
fp = open(pid_file, 'w')
try:
fcntl.lockf(fp, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
print 'An instance of this program is already running'
sys.exit(0)
import Adafruit_CharLCD as LCD
lcd = LCD.Adafruit_CharLCDPlate()
lcd.set_color(0,0,0)
listener = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_UDP)
number_packets_received = 0
def print_lcd():
lcd.clear()
lcd.message('# of packets\nreceived: ' + str(number_packets_received))
if __name__ == '__main__':
while True:
print_lcd()
print listener.recvfrom(7777)
number_packets_received += 1
|
import pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_product_ids',
'removed_store_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
def test_datasets_with_dataset_id():
resp = api.datasets(DATASET_ID)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
| Add test for latest datasets
| import pytest
from tests import api
RESULT_ATTRIBUTES = [
'id',
'total_products',
'total_stores',
'total_inventories',
'total_product_inventory_count',
'total_product_inventory_volume_in_milliliters',
'total_product_inventory_price_in_cents',
'store_ids',
'product_ids',
'added_product_ids',
'removed_product_ids',
'removed_store_ids',
'csv_dump',
'created_at',
'updated_at',
]
DATASET_ID = 800
def _check_result_attrs(result_set):
for attr in RESULT_ATTRIBUTES:
assert attr in result_set
def test_datasets_without_args():
resp = api.datasets()
assert resp['status'] == 200
assert 'pager' in resp
assert 'result' in resp
for res in resp['result']:
_check_result_attrs(res)
@pytest.mark.parametrize("test_input", [
"latest",
DATASET_ID,
])
def test_datasets_with_dataset_id(test_input):
resp = api.datasets(test_input)
assert resp['status'] == 200
assert 'pager' not in resp
assert 'result' in resp
_check_result_attrs(resp['result'])
|
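For reference, pytest's parametrize decorator expands each listed value into its own test case; a minimal, self-contained sketch of the mechanism used above (the test name is illustrative):
import pytest
@pytest.mark.parametrize("dataset_id", ["latest", 800])
def test_dataset_id_accepts_both_forms(dataset_id):
    # pytest runs this body once per value, generating the ids
    # test_dataset_id_accepts_both_forms[latest] and [...800].
    assert isinstance(dataset_id, (str, int))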
import unittest
# Remember:
# Import your package here
# Install AAAPT package to run the tests
class Test_{package_name}Command(unittest.TestCase):
pass | Add a way to reload the tests from the main command file
| # Remember to install AAAPT package to run the tests
import unittest
# Import your package here
# To reload your tests every time you save your command file, add the following to it:
# for test_file in glob.glob("tests/test_*.py"):
# key = "{package_name}." + test_file[:-3].replace("/", ".")
# if key in sys.modules:
# reload(sys.modules[key])
class Test_{package_name}Command(unittest.TestCase):
pass |
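The comment block above sketches the reload loop without its imports; a runnable version under stated assumptions ("MyPackage" stands in for the real package name, and on Python 3 reload must come from importlib):
import glob
import sys
try:
    from importlib import reload  # Python 3
except ImportError:
    pass  # Python 2: reload is a builtin
for test_file in glob.glob("tests/test_*.py"):
    key = "MyPackage." + test_file[:-3].replace("/", ".")
    if key in sys.modules:
        reload(sys.modules[key])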
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The `astropy.nddata` subpackage provides the `~astropy.nddata.NDData`
class and related tools to manage n-dimensional array-based data (e.g.
CCD images, IFU Data, grid-based simulation data, ...). This is more than
just `numpy.ndarray` objects, because it provides metadata that cannot
be easily provided by a single array.
"""
from .nddata import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'but the data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
| Add NDDataBase to package import
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The `astropy.nddata` subpackage provides the `~astropy.nddata.NDData`
class and related tools to manage n-dimensional array-based data (e.g.
CCD images, IFU Data, grid-based simulation data, ...). This is more than
just `numpy.ndarray` objects, because it provides metadata that cannot
be easily provided by a single array.
"""
from .nddata import *
from .nddatabase import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'but the data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
|
# -*- coding: utf-8 -*-
from .menu import Menu, MenuError
del menu
| Add factory functions for dmenu and rofi in root dynmen file
| # -*- coding: utf-8 -*-
from .menu import Menu, MenuError
del menu
def new_dmenu(**kwargs):
from .dmenu import DMenu
return DMenu(**kwargs)
def new_rofi(**kwargs):
from .rofi import Rofi
return Rofi(**kwargs)
|
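The factories above import their backend inside the function body; a small sketch of the same lazy-import pattern with a stdlib stand-in, showing why merely importing the package never touches the optional dependency:
def new_backend(**kwargs):
    # Import deferred to call time: the dependency is only loaded
    # when a caller actually asks for this backend.
    from collections import OrderedDict  # stand-in for an optional backend
    return OrderedDict(**kwargs)
backend = new_backend(a=1, b=2)  # import cost paid on first call only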
from django.contrib.auth import get_user_model
from django.db import models
from django.utils.six import python_2_unicode_compatible
User = get_user_model()
@python_2_unicode_compatible
class OutstandingToken(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
jti = models.UUIDField(unique=True)
token = models.TextField()
created_at = models.DateTimeField()
expires_at = models.DateTimeField()
class Meta:
ordering = ('user',)
def __str__(self):
return 'Token for {} ({})'.format(
self.user,
self.jti,
)
@python_2_unicode_compatible
class BlacklistedToken(models.Model):
token = models.OneToOneField(OutstandingToken, on_delete=models.CASCADE)
blacklisted_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return 'Blacklisted token for {}'.format(self.token.user)
| Fix broken tests in 1.8-1.10
| from django.conf import settings
from django.db import models
from django.utils.six import python_2_unicode_compatible
@python_2_unicode_compatible
class OutstandingToken(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
jti = models.UUIDField(unique=True)
token = models.TextField()
created_at = models.DateTimeField()
expires_at = models.DateTimeField()
class Meta:
ordering = ('user',)
def __str__(self):
return 'Token for {} ({})'.format(
self.user,
self.jti,
)
@python_2_unicode_compatible
class BlacklistedToken(models.Model):
token = models.OneToOneField(OutstandingToken, on_delete=models.CASCADE)
blacklisted_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return 'Blacklisted token for {}'.format(self.token.user)
|
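The fix works because the 'app_label.ModelName' string from settings is resolved lazily by Django's app registry, whereas calling get_user_model() at import time can run before the auth models are loaded, which is what broke on 1.8-1.10. A minimal sketch of the lazy form (requires a configured Django project):
from django.conf import settings
from django.db import models
class Example(models.Model):
    # String reference: resolved only once the app registry is ready.
    owner = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)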
# -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 2:
return 3
if elapsed_time < 5:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
| Adjust default times to 1.5 and 5 seconds
| # -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 1.5:
return 3
if elapsed_time < 5:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
|
# -*- coding: utf-8 -*-
try:
from configparser import RawConfigParser
except ImportError:
from ConfigParser import RawConfigParser
import logging
logger = logging.getLogger('tsstats')
DEFAULT_CONFIG = {
'General': {
'debug': False,
'log': '',
'output': 'output.html',
'idmap': '',
'onlinedc': True,
'template': 'template.html',
'datetimeformat': '%x %X %Z'
}
}
def load(path=None):
'''
parse config at `config_path`
:param config_path: path to config-file
:type config_path: str
:return: values of config
:rtype: tuple
'''
logger.debug('reading config')
config = RawConfigParser()
# use this way to set defaults, because ConfigParser.read_dict
# is not available < 3.2
for section, items in DEFAULT_CONFIG.items():
if section not in config.sections():
config.add_section(section)
for key, value in items.items():
config.set(section, key, str(value))
if path:
config.read(path)
return config
| Set default for General.output to tsstats.html
| # -*- coding: utf-8 -*-
try:
from configparser import RawConfigParser
except ImportError:
from ConfigParser import RawConfigParser
import logging
logger = logging.getLogger('tsstats')
DEFAULT_CONFIG = {
'General': {
'debug': False,
'log': '',
'output': 'tsstats.html',
'idmap': '',
'onlinedc': True,
'template': 'template.html',
'datetimeformat': '%x %X %Z'
}
}
def load(path=None):
'''
parse config at `config_path`
:param config_path: path to config-file
:type config_path: str
:return: values of config
:rtype: tuple
'''
logger.debug('reading config')
config = RawConfigParser()
# use this way to set defaults, because ConfigParser.read_dict
# is not available < 3.2
for section, items in DEFAULT_CONFIG.items():
if section not in config.sections():
config.add_section(section)
for key, value in items.items():
config.set(section, key, str(value))
if path:
config.read(path)
return config
|
"""proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
self.vars = dict_name_keys(vars)
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.vars, self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
| Delete unnecessary vars attr of Proj
| """proj.py: aospy.Proj class for organizing work in single project."""
import time
from .utils import dict_name_keys
class Proj(object):
"""Project parameters: models, regions, directories, etc."""
def __init__(self, name, vars={}, models={}, default_models={}, regions={},
direc_out='', nc_dir_struc=False, verbose=True):
self.verbose = verbose
if self.verbose:
print ("Initializing Project instance: %s (%s)"
% (name, time.ctime()))
self.name = name
self.direc_out = direc_out
self.nc_dir_struc = nc_dir_struc
if models:
self.models = dict_name_keys(models)
else:
self.models = {}
if default_models == 'all':
self.default_models = self.models
elif default_models:
self.default_models = dict_name_keys(default_models)
else:
self.default_models = {}
if regions:
self.regions = dict_name_keys(regions)
else:
self.regions = {}
for obj_dict in (self.models, self.regions):
for obj in obj_dict.values():
setattr(obj, 'proj', self)
def __str__(self):
return 'Project instance "' + self.name + '"'
__repr__ = __str__
|
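One caveat in the signature above: vars={}, models={} and friends are mutable default arguments, shared across every call in Python. Harmless here since they are never mutated, but the usual safer idiom is a None sentinel:
class Example(object):
    def __init__(self, name, models=None, regions=None):
        # Each call gets a fresh dict instead of one shared default.
        self.name = name
        self.models = models if models is not None else {}
        self.regions = regions if regions is not None else {}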
#!/usr/bin/env python
"""Handles decoding flac files"""
import subprocess
import config
HANDLES=['flac','fla']
def decode(inF, outF):
st = subprocess.call(["flac",
"--totally-silent", #No output
"-d" #Decode
"--force", #Overwrite files
"-o", outF, #Output
inF #input
])
if st == 0:
return outF
else:
return False
getMetadata = config.readExiftoolMetadata
| Use full name (-d => --decode), and needed a comma | #!/usr/bin/env python
"""Handles decoding flac files"""
import subprocess
import config
HANDLES=['flac','fla']
def decode(inF, outF):
st = subprocess.call(["flac",
"--totally-silent", #No output
"--decode", #Decode
"--force", #Overwrite files
"-o", outF, #Output
inF #input
])
if st == 0:
return outF
else:
return False
getMetadata = config.readExiftoolMetadata
|
from predicthq import Client
# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://docs.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'
phq = Client(access_token=ACCESS_TOKEN)
# The search() method returns an EventResultSet which allows you to iterate
# over the first page of Broadcast objects (10 events by default)
for broadcast in phq.broadcasts.search():
print(broadcast.to_dict())
# You can access the Broadcast object attributes directly.
# Broadcast fields and their description are available at
# https://docs.predicthq.com/resources/broadcasts/#broadcast-fields.
for broadcast in phq.broadcasts.search():
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.category, broadcast.start.strftime('%Y-%m-%d'))
# You can add parameters to filter your search further.
# The following example searches for the broadcasts
# with PHQ viewership gte 100 and with event (the physical event the broadcast links to) label 'nfl'.
# The full list of parameters is available at
# https://docs.predicthq.com/resources/broadcasts/#search-broadcasts
for broadcast in phq.broadcasts.search(phq_viewership__gte=100, event__label='nfl'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.labels, broadcast.dates.start.strftime('%Y-%m-%d'))
| Fix typo in broadcasts endpoint usecase
| from predicthq import Client
# Please copy paste your access token here
# or read our Quickstart documentation if you don't have a token yet
# https://docs.predicthq.com/guides/quickstart/
ACCESS_TOKEN = 'abc123'
phq = Client(access_token=ACCESS_TOKEN)
# The search() method returns an EventResultSet which allows you to iterate
# over the first page of Broadcast objects (10 events by default)
for broadcast in phq.broadcasts.search():
print(broadcast.to_dict())
# You can access the Broadcast object attributes directly.
# Broadcast fields and their description are available at
# https://docs.predicthq.com/resources/broadcasts/#broadcast-fields.
for broadcast in phq.broadcasts.search():
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.category, broadcast.dates.start.strftime('%Y-%m-%d'))
# You can add parameters to filter your search further.
# The following example searches for the broadcasts
# with PHQ viewership gte 100 and with event (the physical event the broadcast links to) label 'nfl'.
# The full list of parameters is available at
# https://docs.predicthq.com/resources/broadcasts/#search-broadcasts
for broadcast in phq.broadcasts.search(phq_viewership__gte=100, event__label='nfl'):
print(broadcast.event.title, broadcast.phq_viewership, broadcast.event.labels, broadcast.dates.start.strftime('%Y-%m-%d'))
|
from django.db import models
from datetime import date
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0)
isbn_13 = models.IntegerField(default=0)
description = models.TextField()
cover_image = models.ImageField('Cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
add_date = models.DateTimeField('date added', default=datetime.now)
def __str__(self):
return self.title + " by " + self.author
def was_added_recently(self):
return self.add_date >= timezone.now() - datetime.timedelta(days=30) | Fix date error in books model
Fix typos and the auto addition of date to the date_added field.
| from django.db import models
from datetime import datetime, timedelta
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0)
isbn_13 = models.IntegerField(default=0)
description = models.TextField()
cover_image = models.ImageField('cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
date_added = models.DateTimeField(default=datetime.now)
def __str__(self):
return self.title + " by " + self.author
def was_added_recently(self):
return self.date_added >= timezone.now() - timedelta(days=30) |
def graph_dfs(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
directions = ((0, 1), (0, -1), (1, 0), (-1, 0))
def dfs(i, j):
if (i, j) in visited:
return
visited.add((i, j))
# Traverse neighbors.
for direction in directions:
next_i, next_j = i + direction[0], j + direction[1]
if 0 <= next_i < rows and 0 <= next_j < cols: # Check boundary.
# Add any other checking here ^
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
graph_dfs([
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
])
| Add follow up with matrix traversal
| def graph_dfs(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
directions = ((0, 1), (0, -1), (1, 0), (-1, 0))
def dfs(i, j):
if (i, j) in visited:
return
visited.add((i, j))
# Traverse neighbors.
for direction in directions:
next_i, next_j = i + direction[0], j + direction[1]
if 0 <= next_i < rows and 0 <= next_j < cols: # Check boundary.
# Add any other checking here ^
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
# Follow up:
# 1) Diagonal cells are considered neighbors
# 2) View the matrix like Earth: the right boundary is adjacent to the left boundary, the top adjacent to the bottom, etc.
def graph_dfs_diagonals(matrix):
rows, cols = len(matrix), len(matrix[0])
visited = set()
# Change 1: Add 4 more diagonal directions.
directions = ((0, 1), (0, -1), (1, 0), (-1, 0), (-1, -1), (1, 1), (1, -1), (-1, 1))
def dfs(i, j):
if (i, j) in visited:
return
print(matrix[i][j])
visited.add((i, j))
for direction in directions:
# Change 2: No boundary check; use modulo so traversals that step past an edge wrap around.
next_i, next_j = (i + direction[0] + rows) % rows, (j + direction[1] + cols) % cols
dfs(next_i, next_j)
for i in range(rows):
for j in range(cols):
dfs(i, j)
graph_dfs([
[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
])
|
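The wraparound arithmetic in change 2 deserves a worked example. Adding rows (or cols) before taking the modulo keeps the operands non-negative, although Python's % already returns a non-negative result for a negative left operand, unlike C's:
rows, cols = 3, 4
assert (0 - 1 + rows) % rows == 2  # stepping up from the top row wraps to the bottom
assert (3 + 1 + cols) % cols == 0  # stepping right off the last column wraps to the first
assert (0 - 1) % rows == 2         # Python's % alone would also suffice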
"""
Tests utility scripts
"""
import os
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings
import pivot
from pivot.utils import get_latest_term, is_more_recent
from pivot.templatetags.pivot_extras import year_select_tab
TEST_CSV_PATH = os.path.join(os.path.dirname(pivot.__file__),
'test',
'test_resources',
'csvfiles/',)
class UtilsTest(TestCase):
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_get_latest_term(self):
self.assertEquals(get_latest_term(), 'au12')
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_true(self):
self.assertTrue(is_more_recent('au19', 'au18'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_pivot_extras(self):
template = """
<a href=".?num_qtrs=8&end_yr=12&end_qtr=AU">
<strong>Last 2 Years</strong> <br>
<span>
AU10 - AU12
</span>
</a>
"""
html = year_select_tab(8)
self.assertEqual(html, template)
| Add simple test for coverage.
| """
Tests utility scripts
"""
import os
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings
import pivot
from pivot.utils import get_latest_term, is_more_recent
from pivot.templatetags.pivot_extras import year_select_tab
TEST_CSV_PATH = os.path.join(os.path.dirname(pivot.__file__),
'test',
'test_resources',
'csvfiles/',)
class UtilsTest(TestCase):
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_get_latest_term(self):
self.assertEquals(get_latest_term(), 'au12')
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_true(self):
self.assertTrue(is_more_recent('au19', 'au18'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_is_more_recent_false(self):
self.assertFalse(is_more_recent('au18', 'au19'))
@override_settings(CSV_ROOT=TEST_CSV_PATH)
def test_pivot_extras(self):
template = """
<a href=".?num_qtrs=8&end_yr=12&end_qtr=AU">
<strong>Last 2 Years</strong> <br>
<span>
AU10 - AU12
</span>
</a>
"""
html = year_select_tab(8)
self.assertEqual(html, template)
|
from .base import *
DEBUG = False
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch',
'INDEX': 'wagtaildemo'
}
}
INSTALLED_APPS+= (
'djcelery',
'kombu.transport.django',
'gunicorn',
)
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'OPTIONS': {
'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
}
}
}
# CELERY SETTINGS
import djcelery
djcelery.setup_loader()
BROKER_URL = 'redis://'
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERYD_LOG_COLOR = False
try:
from .local import *
except ImportError:
pass
| Set a KEY_PREFIX for the Redis cache, so that we don't get collisions when running multiple sites on one server
| from .base import *
DEBUG = False
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch',
'INDEX': 'wagtaildemo'
}
}
INSTALLED_APPS+= (
'djcelery',
'kombu.transport.django',
'gunicorn',
)
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': '127.0.0.1:6379',
'KEY_PREFIX': 'wagtaildemo',
'OPTIONS': {
'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
}
}
}
# CELERY SETTINGS
import djcelery
djcelery.setup_loader()
BROKER_URL = 'redis://'
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERYD_LOG_COLOR = False
try:
from .local import *
except ImportError:
pass
|
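For context on why the prefix prevents collisions: Django's default KEY_FUNCTION builds every cache key as '<prefix>:<version>:<key>', so the prefix namespaces each site's entries:
# With KEY_PREFIX = 'wagtaildemo':
#   cache.set('homepage', html)  ->  Redis key 'wagtaildemo:1:homepage'
# A second site using KEY_PREFIX = 'othersite' writes
#   'othersite:1:homepage' instead, so the two never overwrite each other.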
#!/usr/bin/env python
import sys
import os
import csv
import time
from datetime import datetime
from collections import namedtuple
from pymongo import Connection
pid = os.getpid()
DOCKETS_QUERY = {'scraped': True}
DOCKET_FIELDS = ['docket_id', 'title', 'agency', 'year']
if __name__ == '__main__':
# set up options
from optparse import OptionParser
parser = OptionParser(usage="usage: %prog [options] host dbname file_prefix")
(options, args) = parser.parse_args()
# fetch options, args
host = args[0]
dbname = args[1]
prefix = args[2]
writer = csv.writer(open(sys.argv[3] + '_dockets.csv', 'w'))
writer.writerow(DOCKET_FIELDS)
cursor = Connection(host=host)[dbname].docs.find(DOCS_QUERY)
run_start = time.time()
print '[%s] Starting export...' % pid
for row in cursor:
csv.writerow([row[field] for field in DOCKET_FIELDS])
print '[%s] Completed export in %s seconds.' % (pid, time.time() - run_start)
| Make docket export work (done last week, but not committed for some reason).
| #!/usr/bin/env python
import sys
import os
import csv
import time
from datetime import datetime
from collections import namedtuple
from pymongo import Connection
pid = os.getpid()
DOCKETS_QUERY = {'scraped': True}
DOCKET_FIELDS = ['docket_id', 'title', 'agency', 'year']
def filter_for_postgres(v):
if v is None:
return '\N'
if isinstance(v, datetime):
return str(v)
return unicode(v).encode('utf8').replace("\.", ".")
if __name__ == '__main__':
# set up options
from optparse import OptionParser
parser = OptionParser(usage="usage: %prog [options] host dbname file_prefix")
(options, args) = parser.parse_args()
# fetch options, args
host = args[0]
dbname = args[1]
prefix = args[2]
writer = csv.writer(open(sys.argv[3] + '_dockets.csv', 'w'))
writer.writerow(DOCKET_FIELDS)
cursor = Connection(host=host)[dbname].dockets.find(DOCKETS_QUERY)
run_start = time.time()
print '[%s] Starting export...' % pid
for row in cursor:
writer.writerow([filter_for_postgres(row[field]) for field in DOCKET_FIELDS])
print '[%s] Completed export in %s seconds.' % (pid, time.time() - run_start)
|
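A quick sanity check of the helper's contract, shown as a hypothetical Python 2 session; '\\N' is the default NULL marker PostgreSQL's COPY expects:
# >>> filter_for_postgres(None)
# '\\N'
# >>> filter_for_postgres(datetime(2012, 1, 1))
# '2012-01-01 00:00:00'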
#!/usr/bin/env python
from __future__ import print_function
import json
import os
import sys
import requests
import scraperwiki
def main(argv=None):
if argv is None:
argv = sys.argv
arg = argv[1:]
if len(arg) > 0:
# Developers can supply URL as an argument...
url = arg[0]
else:
# ... but normally the URL comes from the allSettings.json file
with open(os.path.expanduser("~/allSettings.json")) as settings:
keywords = json.load(settings)['input']
return store_search(keywords)
def store_search(keywords):
"""
Store results of search to .
"""
base_url = "http://localhost:59742/blog/post/_search"
params = {'q': 'body:' + keywords, 'pretty': 'true'}
response = requests.get(base_url, params=params)
j = response.json()
scraperwiki.sql.execute('DROP TABLE IF EXISTS results')
hits = j['hits']['hits']
results = []
for hit in hits:
doc = hit['_source']['body']
score = hit['_score']
doc_id = hit['_id']
results.append(dict(doc=doc, score=score, doc_id=doc_id))
scraperwiki.sql.save(unique_keys=['doc_id'], data=results, table_name='results')
if __name__ == '__main__':
main()
| Fix url to keywords; fix cmd line usage
| #!/usr/bin/env python
from __future__ import print_function
import json
import os
import sys
import requests
import scraperwiki
def main(argv=None):
if argv is None:
argv = sys.argv
arg = argv[1:]
if len(arg) > 0:
# Developers can supply URL as an argument...
keywords = arg[0]
else:
# ... but normally the URL comes from the allSettings.json file
with open(os.path.expanduser("~/allSettings.json")) as settings:
keywords = json.load(settings)['input']
return store_search(keywords)
def store_search(keywords):
"""
Store results of search to .
"""
base_url = "http://localhost:59742/blog/post/_search"
params = {'q': 'body:' + keywords, 'pretty': 'true'}
response = requests.get(base_url, params=params)
j = response.json()
scraperwiki.sql.execute('DROP TABLE IF EXISTS results')
hits = j['hits']['hits']
results = []
for hit in hits:
doc = hit['_source']['body']
score = hit['_score']
doc_id = hit['_id']
results.append(dict(doc=doc, score=score, doc_id=doc_id))
scraperwiki.sql.save(unique_keys=['doc_id'], data=results, table_name='results')
if __name__ == '__main__':
main()
|
from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
class AdminThumbnail(object):
"""
A convenience utility for adding thumbnails to Django's admin change list.
"""
short_description = _('Thumbnail')
allow_tags = True
def __init__(self, image_field, template=None):
"""
:param image_field: The name of the ImageField or ImageSpecField on the
model to use for the thumbnail.
:param template: The template with which to render the thumbnail
"""
self.image_field = image_field
self.template = template
def __call__(self, obj):
try:
thumbnail = getattr(obj, self.image_field)
except AttributeError:
raise Exception('The property %s is not defined on %s.' % \
(self.image_field, obj.__class__.__name__))
original_image = getattr(thumbnail, 'source_file', None) or thumbnail
template = self.template or 'imagekit/admin/thumbnail.html'
return render_to_string(template, {
'model': obj,
'thumbnail': thumbnail,
'original_image': original_image,
})
| Allow callables for AdminThumbnail image_field arg
This allows images from related models to be displayed. Closes #138.
| from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
class AdminThumbnail(object):
"""
A convenience utility for adding thumbnails to Django's admin change list.
"""
short_description = _('Thumbnail')
allow_tags = True
def __init__(self, image_field, template=None):
"""
:param image_field: The name of the ImageField or ImageSpecField on the
model to use for the thumbnail.
:param template: The template with which to render the thumbnail
"""
self.image_field = image_field
self.template = template
def __call__(self, obj):
if callable(self.image_field):
thumbnail = self.image_field(obj)
else:
try:
thumbnail = getattr(obj, self.image_field)
except AttributeError:
raise Exception('The property %s is not defined on %s.' % \
(self.image_field, obj.__class__.__name__))
original_image = getattr(thumbnail, 'source_file', None) or thumbnail
template = self.template or 'imagekit/admin/thumbnail.html'
return render_to_string(template, {
'model': obj,
'thumbnail': thumbnail,
'original_image': original_image,
})
|
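A hedged usage sketch of the new callable form; the model and field names are illustrative, not part of the library:
# admin.py of a hypothetical project:
# class PostAdmin(admin.ModelAdmin):
#     # Reach through a relation, which the plain string form cannot do.
#     thumbnail = AdminThumbnail(lambda obj: obj.author.avatar_thumbnail)
#     list_display = ('title', 'thumbnail')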
import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: {}, {}, {}>".format(
self.name,
self.login,
self.comments
)
| Reformat string representation of Credentials
| import os
class Credential(object):
def __init__(self, name, login, password, comments):
self.name = name
self.login = login
self.password = password
self.comments = comments
def save(self, database_path):
credential_path = os.path.join(database_path, self.name)
os.makedirs(credential_path)
with open(os.path.join(credential_path, "login"), "w") as f:
f.write(self.login)
with open(os.path.join(credential_path, "password"), "w") as f:
f.write(self.password)
with open(os.path.join(credential_path, "comments"), "w") as f:
f.write(self.comments)
@classmethod
def from_path(cls, path):
return Credential(
name=os.path.basename(path),
login=open(path + "/login").read(),
password=open(path + "/password").read(),
comments=open(path + "/comments").read()
)
def __str__(self):
return "<Credential: name={}, login={}, password='...', {}>".format(
self.name,
self.login,
self.comments
)
|
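A hypothetical round trip through the class above (paths and values are illustrative):
# cred = Credential('github', 'alice', 's3cret', 'work account')
# cred.save('/tmp/credentials')  # writes /tmp/credentials/github/{login,password,comments}
# print(Credential.from_path('/tmp/credentials/github'))
# <Credential: name=github, login=alice, password='...', work account>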
import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message})
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
| Change carrot serialization from JSON to 'pickle'
| import logging
from carrot.messaging import Publisher
from carrot.connection import BrokerConnection
from lamson.routing import route, route_like, stateless
from config.settings import relay
from lamson import view, queue
@route("forge-list@(host)")
#@route("(post_name)@osb\\.(host)")
@stateless
def POSTING(message, post_name=None, host=None):
relay.deliver(message)
conn = BrokerConnection(hostname="localhost", port=5672,
userid="celeryuser", password="celerypw",
virtual_host="celeryvhost")
publisher = Publisher(connection=conn,
exchange="forge", routing_key="mail")
publisher.send({"message": message}, serializer="pickle")
publisher.close()
#index_q = queue.Queue("run/posts")
#index_q.push(message)
return POSTING
|
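Switching carrot's serializer from the JSON default to pickle lets the queue carry the full Lamson message object rather than only JSON-safe types, with the usual caveat that unpickling executes arbitrary code, so both ends of the queue must be trusted. The consumer side then decodes symmetrically, roughly as follows (hypothetical sketch, assuming carrot's Consumer API):
# consumer = Consumer(connection=conn, exchange="forge", routing_key="mail")
# for msg in consumer.iterqueue():
#     payload = msg.payload  # carrot unpickles based on the content type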
import numpy as np
import triangle
import astropy.io.ascii as ascii
import matplotlib.pyplot as plt
pyout = ascii.read('test.pyout')
idlout = ascii.read('test.idlout')
fig, axarr = plt.subplots(9, 9, figsize=(10, 10))
fig.suptitle("Black = python, red = IDL")
triangle.corner(np.array([pyout['alpha'], pyout['beta'], pyout['sigsqr'],
pyout['mu0'], pyout['usqr'], pyout['wsqr'],
pyout['ximean'], pyout['xisig'], pyout['corr']]).T,
labels=[r"$\alpha$", r"$\beta$", r"$\sigma^2$",
r"$\mu_0$", r"$u^2$", r"$w^2$",
r"$\bar{\xi}$", r"$\sigma_\xi$", r"$\rho_{\xi\eta}$"],
extents=[0.99]*9, plot_datapoints=False,
fig=fig)
triangle.corner(np.array([idlout['alpha'], idlout['beta'], idlout['sigsqr'],
idlout['mu00'], idlout['usqr'], idlout['wsqr'],
idlout['ximean'], idlout['xisig'], idlout['corr']]).T,
extents=[0.99]*9, plot_datapoints=False,
fig=fig, color='r')
fig.subplots_adjust(bottom=0.065, left=0.07)
plt.show()
| Use updated corner plot API
| import numpy as np
import corner
import astropy.io.ascii as ascii
import matplotlib.pyplot as plt
pyout = ascii.read('test.pyout')
idlout = ascii.read('test.idlout')
fig, axarr = plt.subplots(9, 9, figsize=(10, 10))
fig.suptitle("Black = python, red = IDL")
corner.corner(np.array([pyout['alpha'], pyout['beta'], pyout['sigsqr'],
pyout['mu0'], pyout['usqr'], pyout['wsqr'],
pyout['ximean'], pyout['xisig'], pyout['corr']]).T,
labels=[r"$\alpha$", r"$\beta$", r"$\sigma^2$",
r"$\mu_0$", r"$u^2$", r"$w^2$",
r"$\bar{\xi}$", r"$\sigma_\xi$", r"$\rho_{\xi\eta}$"],
range=[0.99]*9, plot_datapoints=False,
fig=fig)
corner.corner(np.array([idlout['alpha'], idlout['beta'], idlout['sigsqr'],
idlout['mu00'], idlout['usqr'], idlout['wsqr'],
idlout['ximean'], idlout['xisig'], idlout['corr']]).T,
range=[0.99]*9, plot_datapoints=False,
fig=fig, color='r')
fig.subplots_adjust(bottom=0.065, left=0.07)
plt.show()
|
"""
Interface for all launch-control-tool commands
"""
import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplemented()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
| Raise NotImplementedError instead of NotImplemented
| """
Interface for all launch-control-tool commands
"""
import inspect
from launch_control.utils.registry import RegistryBase
class Command(RegistryBase):
"""
Base class for all command line tool sub-commands.
"""
def __init__(self, parser, args):
"""
Prepare instance for executing commands.
This method is called immediately after all arguments are parsed
and results are available. This gives subclasses a chance to
configure themselves.
The default implementation does not do anything.
"""
pass
def invoke(self, args):
"""
Invoke command action.
"""
raise NotImplementedError()
@classmethod
def get_name(cls):
"""
Return the name of this command.
The default implementation strips any leading underscores
and replaces all other underscores with dashes.
"""
return cls.__name__.lstrip("_").replace("_", "-")
@classmethod
def get_help(cls):
"""
Return the help message of this command
"""
return inspect.getdoc(cls)
@classmethod
def register_arguments(cls, parser):
"""
Register arguments if required.
Subclasses can override this to add any arguments that will be
exposed to the command line interface.
"""
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import webbrowser
from Foundation import NSBundle
import rumps
import modules.google_calendar
#rumps.debug_mode(True) # turn on command line logging information for development - default is off
def about(sender):
webbrowser.open("https://github.com/hiroshi/quiet")
if __name__ == "__main__":
app = rumps.App("My Toolbar App", title='0')
app.title = "Q"
modules.google_calendar.start(app)
app.menu.add(None) # separator
version = NSBundle.mainBundle().infoDictionary()['CFBundleShortVersionString']
app.menu.add(rumps.MenuItem("quiet %s" % version, callback=about))
app.run()
| Change about link to github wiki
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import webbrowser
from Foundation import NSBundle
import rumps
import modules.google_calendar
#rumps.debug_mode(True) # turn on command line logging information for development - default is off
def about(sender):
webbrowser.open("https://github.com/hiroshi/quiet/wiki")
if __name__ == "__main__":
app = rumps.App("My Toolbar App", title='0')
app.title = "Q"
modules.google_calendar.start(app)
app.menu.add(None) # separator
version = NSBundle.mainBundle().infoDictionary()['CFBundleShortVersionString']
app.menu.add(rumps.MenuItem("quiet %s" % version, callback=about))
app.run()
|
import sys
from http.server import HTTPServer, BaseHTTPRequestHandler
class MyHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
client_ip = self.client_address[0]
if client_ip == '127.0.0.1' and 'X-Real-IP' in self.headers:
client_ip = self.headers['X-Real-IP']
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write(bytes(client_ip + '\n', 'utf8'))
return
def run(port):
server_address = ('', port)
httpd = HTTPServer(server_address, MyHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
run(int(sys.argv[1]) if len(sys.argv) == 2 else 9000)
| Add URI param that queries freegeoip
| import json
from string import capwords
import sys
from http.server import HTTPServer, BaseHTTPRequestHandler
import ssl
import urllib.request
class MyHTTPRequestHandler(BaseHTTPRequestHandler):
def __tabulate_results(self, json_obj):
table = ''
for k, v in json_obj.items():
table += '{:{width}} : {}\n'.format(
capwords(' '.join(k.split('_'))),
v,
width=len(max(json_obj, key=len))
)
return table
def __query_freegeoip(self, ip_address):
ssl_ctx = ssl.create_default_context()
ssl_ctx.check_hostname = False
ssl_ctx.verify_mode = ssl.CERT_OPTIONAL
data = urllib.request.urlopen(
'http://freegeoip.net/json/{}'.format(ip_address),
context=ssl_ctx,
).read().decode()
return data
def do_GET(self):
# Get the client IP. This is why this program exists.
client_ip = self.client_address[0]
# Casual check for proxied requests
if client_ip == '127.0.0.1' and 'X-Real-IP' in self.headers:
client_ip = self.headers['X-Real-IP']
data = None
response_code = 200
# Use freegeoip.net to query for more details if requested
if '?full' in self.path:
try:
data = self.__tabulate_results(
json.loads(
self.__query_freegeoip(client_ip)
)
)
except Exception as e:
response_code = 500
data = str(e)
else:
data = client_ip
# Prepare and deliver response
self.send_response(response_code)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write(bytes(data + '\n', 'utf8'))
return
def run(port):
server = HTTPServer(('', port), MyHTTPRequestHandler)
server.serve_forever()
if __name__ == '__main__':
run(int(sys.argv[1]) if len(sys.argv) == 2 else 9000)
|
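Hypothetical command-line usage once the server is listening on the default port:
# $ curl http://localhost:9000/          # plain client IP
# $ curl 'http://localhost:9000/?full'   # tabulated freegeoip lookup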
#!/usr/bin/env python
from distutils.core import setup
from setuptools import find_packages
import stackformation
import sys
if sys.version_info <= (2, 5):
error = "ERROR: stackformation requires Python Version 2.6 or above...exiting."
print >> sys.stderr, error
sys.exit(1)
setup(name="stackformation",
version=stackformation.__version__,
author="Steffen Opel",
packages=find_packages(),
license="Apache 2",
platforms="Posix; MacOS X; Windows",
install_requires=[
"boto >= 2.6.0",
"botocross >= 1.1.0",
],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
)
| Fix incomplete update to botocross 1.1.1
| #!/usr/bin/env python
from distutils.core import setup
from setuptools import find_packages
import stackformation
import sys
if sys.version_info <= (2, 5):
error = "ERROR: stackformation requires Python Version 2.6 or above...exiting."
print >> sys.stderr, error
sys.exit(1)
setup(name="stackformation",
version=stackformation.__version__,
author="Steffen Opel",
packages=find_packages(),
license="Apache 2",
platforms="Posix; MacOS X; Windows",
install_requires=[
"boto >= 2.6.0",
"botocross >= 1.1.1",
],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
)
|