Dataset schema (column, dtype, observed range):

    body_hash                stringlengths    64 to 64
    body                     stringlengths    23 to 109k
    docstring                stringlengths    1 to 57k
    path                     stringlengths    4 to 198
    name                     stringlengths    1 to 115
    repository_name          stringlengths    7 to 111
    repository_stars         float64          0 to 191k
    lang                     stringclasses    1 value (python)
    body_without_docstring   stringlengths    14 to 108k
    unified                  stringlengths    45 to 133k
f4ea1b1b9ec17424450a9ea6da24343b0da873334048e7e158e9f0943afcef39
def simFN(a, b, t, disp, I):
    """
    Integrates the FHN ODEs.

    Input:
        a: the shape of the cubic parabola
        b: describes the kinetics of the recovery variable w
        t: time to integrate over
        disp: (True/False) plot data
        I: input current

    Output:
        V: membrane voltage
        w: recovery variable that mimics activation of an outward current
    """
    # Requires module-level: import numpy as np; import matplotlib.pyplot as plt;
    # from scipy.integrate import odeint.
    def dALLdt(X, t):
        # State is (V, w); I enters through the closure. Note that a and b are
        # accepted by simFN but the standard FHN constants are hard-coded here.
        V, w = X
        dVdt = V - (V ** 3) / 3 - w + I
        dwdt = 0.08 * (V + 0.7 - 0.8 * w)
        return [dVdt, dwdt]

    X = odeint(dALLdt, [0, 0.05], t)  # initial state (V, w) = (0, 0.05)
    V = X[:, 0]
    w = X[:, 1]

    if disp:
        plt.subplot(211)
        plt.title('FitzHugh-Nagumo')
        plt.plot(t, V, 'r', label='v')
        plt.ylabel('V (mV)')
        plt.axis('tight')
        plt.subplot(212)
        plt.plot(t, w, 'g', label='w')
        plt.ylabel('w')
        plt.show()

    # Phase portrait: sample the vector field on a grid and draw a quiver plot.
    def f(Y, t):
        y1, y2 = Y
        return [y1 - (y1 ** 3) / 3 - y2, 0.08 * (y1 + 0.7 - 0.8 * y2)]

    y1 = np.linspace(-5.0, 5.0, 20)
    y2 = np.linspace(-20.0, 20.0, 20)
    Y1, Y2 = np.meshgrid(y1, y2)
    tau = 0
    u, v = np.zeros(Y1.shape), np.zeros(Y2.shape)
    NI, NJ = Y1.shape
    for i in range(NI):
        for j in range(NJ):
            x = Y1[i, j]
            y = Y2[i, j]
            yprime = f([x, y], tau)
            u[i, j] = yprime[0]
            v[i, j] = yprime[1]
    Q = plt.quiver(Y1, Y2, u, v, color='r')
    plt.xlabel('V')
    plt.ylabel('w')
    plt.title('Phase Portrait of FitzHugh-Nagumo System')
    plt.show()
    return (V, w)
Integrates the FHN ODEs.

Input:
    a: the shape of the cubic parabola
    b: describes the kinetics of the recovery variable w
    t: time to integrate over
    disp: (True/False) plot data
    I: input current

Output:
    V: membrane voltage
    w: recovery variable that mimics activation of an outward current
simulateFHN.py
simFN
thq80/auxiliary-particle-filter
1
python
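A minimal driver for the simFN row above, assuming the module-level imports the function relies on (numpy as np, matplotlib.pyplot as plt, and scipy.integrate.odeint); the time grid and parameter values below are illustrative, not from the source.

import numpy as np

# Hypothetical parameters: integrate 200 time units of the FHN model with a
# constant input current, and show the voltage/recovery plots.
t = np.linspace(0, 200, 2000)
V, w = simFN(a=0.7, b=0.8, t=t, disp=True, I=0.5)
print(V.shape, w.shape)  # both (2000,)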
c44e783b92300b7d79c47a1787ff587e502683d41cb86765b262992844d25fae
def test_reinforceio_homepage(self):
    """Code example from the homepage and README.md."""
    from tensorforce.agents import TRPOAgent

    agent = TRPOAgent(
        states=dict(shape=(10,), type='float'),
        actions=dict(type='int', num_actions=2),
        network=[dict(type='dense', size=50), dict(type='dense', size=50)],
        update_mode=dict(unit='episodes', batch_size=1, frequency=1),
        memory=dict(type='latest', include_next_states=False, capacity=100))

    client = TestTutorialCode.MyClient('http://127.0.0.1', 8080)
    state = client.get_state()
    action = agent.act(states=state)
    reward = client.execute(action)
    agent.observe(reward=reward, terminal=False)
    agent.close()
Code example from the homepage and README.md.
tensorforce/tests/test_tutorial_code.py
test_reinforceio_homepage
Jonathan-Livingston-Seagull/tensorforce
2
python
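The test above performs a single act/observe step; a natural extension (sketched here with an illustrative step count and terminal rule, not part of the test) runs a whole episode with the same client/agent pair.

# Hypothetical episode loop built from the calls shown in the test.
num_steps = 100
state = client.get_state()
for step in range(num_steps):
    action = agent.act(states=state)
    reward = client.execute(action)
    terminal = (step == num_steps - 1)  # end the episode on the last step
    agent.observe(reward=reward, terminal=terminal)
    state = client.get_state()
agent.close()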
efc4c8636b3e187a77d15cb0d258779e8414d04db02cc4f81329b7d44daa10e3
def test_blogpost_introduction(self):
    """Test of introduction blog post examples."""
    import tensorflow as tf
    from tensorforce.agents import DQNAgent

    # Simple two-layer dense network from the blog post.
    network_spec = [dict(type='dense', size=32), dict(type='dense', size=32)]
    states = dict(shape=(10,), type='float')
    actions = dict(type='int', num_actions=5)
    agent = DQNAgent(
        states=states, actions=actions, network=network_spec,
        update_mode=dict(unit='timesteps', batch_size=1, frequency=1),
        memory=dict(type='latest', include_next_states=True, capacity=100),
        target_sync_frequency=10)
    agent.close()

    states = dict(
        image=dict(shape=(64, 64, 3), type='float'),
        caption=dict(shape=(20,), type='int'))

    # Shown in the blog post: a customized observe() hook (not wired up in this test).
    def observe(self, reward, terminal):
        super(DQNAgent, self).observe(reward, terminal)
        if (self.timestep >= self.first_update
                and self.timestep % self.target_update_frequency == 0):
            self.model.update_target()

    # Network specs can also be given as JSON.
    network_json = """
    [
        {"type": "conv2d", "size": 32, "window": 8, "stride": 4},
        {"type": "conv2d", "size": 64, "window": 4, "stride": 2},
        {"type": "flatten"},
        {"type": "dense", "size": 512}
    ]
    """
    import json
    network_spec = json.loads(network_json)

    modified_dense = """
    [
        {"type": "dense", "size": 64, "bias": false,
         "activation": "selu", "l2_regularization": 0.001}
    ]
    """
    network_spec = json.loads(modified_dense)

    from tensorforce.core.networks import Layer

    class BatchNormalization(Layer):
        def __init__(self, variance_epsilon=1e-06, scope='batchnorm', summary_labels=None):
            super(BatchNormalization, self).__init__(scope=scope, summary_labels=summary_labels)
            self.variance_epsilon = variance_epsilon

        def tf_apply(self, x, update):
            mean, variance = tf.nn.moments(x, axes=tuple(range(x.shape.ndims - 1)))
            return tf.nn.batch_normalization(
                x=x, mean=mean, variance=variance, offset=None, scale=None,
                variance_epsilon=self.variance_epsilon)

    states = dict(shape=(10,), type='float')
    network_spec = [
        {'type': 'dense', 'size': 32},
        {'type': BatchNormalization, 'variance_epsilon': 1e-09}]
    agent = DQNAgent(
        states=states, actions=actions, network=network_spec,
        update_mode=dict(unit='timesteps', batch_size=8, frequency=4),
        memory=dict(type='replay', include_next_states=True, capacity=100))
    agent.close()

    from tensorforce.core.networks import Network

    class CustomNetwork(Network):
        def tf_apply(self, x, internals, update, return_internals=False):
            image = x['image']
            caption = x['caption']
            initializer = tf.random_normal_initializer(mean=0.0, stddev=0.01, dtype=tf.float32)

            # Convolutional branch for the image input.
            weights = tf.get_variable(name='W1', shape=(3, 3, 3, 16), initializer=initializer)
            image = tf.nn.conv2d(image, filter=weights, strides=(1, 1, 1, 1), padding='SAME')
            image = tf.nn.relu(image)
            image = tf.nn.max_pool(image, ksize=(1, 2, 2, 1), strides=(1, 2, 2, 1), padding='SAME')
            weights = tf.get_variable(name='W2', shape=(3, 3, 16, 32), initializer=initializer)
            image = tf.nn.conv2d(image, filter=weights, strides=(1, 1, 1, 1), padding='SAME')
            image = tf.nn.relu(image)
            image = tf.nn.max_pool(image, ksize=(1, 2, 2, 1), strides=(1, 2, 2, 1), padding='SAME')
            image = tf.reshape(image, shape=(-1, 16 * 16, 32))
            image = tf.reduce_mean(image, axis=1)

            # Embedding + LSTM branch for the caption input.
            weights = tf.get_variable(name='W3', shape=(30, 32), initializer=initializer)
            caption = tf.nn.embedding_lookup(params=weights, ids=caption)
            lstm = tf.contrib.rnn.LSTMCell(num_units=32)
            caption, _ = tf.nn.dynamic_rnn(cell=lstm, inputs=caption, dtype=tf.float32)
            caption = tf.reduce_mean(caption, axis=1)

            if return_internals:
                return tf.multiply(image, caption), list()
            else:
                return tf.multiply(image, caption)

    states = dict(
        image=dict(shape=(64, 64, 3), type='float'),
        caption=dict(shape=(20,), type='int'))
    agent = DQNAgent(
        states=states, actions=actions, network=CustomNetwork,
        memory=dict(type='replay', include_next_states=True, capacity=100))
    agent.close()

    from tensorforce.core.networks import Layer

    class Lstm(Layer):
        def __init__(self, size, scope='lstm', summary_labels=()):
            self.size = size
            super(Lstm, self).__init__(scope=scope, summary_labels=summary_labels)

        def tf_apply(self, x, update, state):
            state = tf.contrib.rnn.LSTMStateTuple(c=state[:, 0, :], h=state[:, 1, :])
            self.lstm_cell = tf.contrib.rnn.LSTMCell(num_units=self.size)
            x, state = self.lstm_cell(inputs=x, state=state)
            state = tf.stack(values=(state.c, state.h), axis=1)
            return x, dict(state=state)

        def internals_spec(self):
            return dict(state=dict(type='float', shape=(2, self.size), initialization='zeros'))

    states = dict(shape=(10,), type='float')
    network_spec = [{'type': 'flatten'}, {'type': Lstm, 'size': 10}]
    agent = DQNAgent(
        states=states, actions=actions, network=network_spec,
        update_mode=dict(unit='timesteps', batch_size=100, frequency=4),
        memory=dict(type='replay', include_next_states=True, capacity=100))
    agent.close()

    # State preprocessing pipeline.
    states = dict(shape=(84, 84, 3), type='float')
    states_preprocessing_spec = [
        dict(type='image_resize', width=84, height=84),
        dict(type='grayscale'),
        dict(type='normalize')]
    agent = DQNAgent(
        states=states, actions=actions, network=network_spec,
        memory=dict(type='replay', include_next_states=True, capacity=100),
        target_sync_frequency=50,
        states_preprocessing=states_preprocessing_spec)
    agent.close()

    # Exploration strategies.
    exploration = dict(type='ornstein_uhlenbeck', sigma=0.1, mu=0.0, theta=0.1)
    agent = DQNAgent(
        states=states, actions=actions, network=network_spec,
        memory=dict(type='replay', include_next_states=True, capacity=100),
        actions_exploration=exploration)
    agent.close()

    exploration = dict(type='epsilon_decay', initial_epsilon=1.0, final_epsilon=0.01, timesteps=1000000.0)
    agent = DQNAgent(
        states=states, actions=actions, network=network_spec,
        memory=dict(type='replay', include_next_states=True, capacity=100),
        actions_exploration=exploration)
    agent.close()
Test of introduction blog post examples.
tensorforce/tests/test_tutorial_code.py
test_blogpost_introduction
Jonathan-Livingston-Seagull/tensorforce
2
python
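The test above parses network specs from inline JSON strings; the same spec format can be read from a file, sketched below with a hypothetical path.

import json

# 'configs/network.json' is a made-up path; any file holding a JSON list of
# layer dicts (as in network_json above) works.
with open('configs/network.json') as f:
    network_spec = json.load(f)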
0223b196a61405d32e9809415093d55e6c56d6249c6a9e27cbf5651077436d76
@property
def id(self):
    """The ID of the entity."""
    return self.data[self.id_field]
The ID of the entity.
cartoframes/data/observatory/catalog/entity.py
id
CartoDB/cartoframes
236
python
37963d69424da678bdd5b774a6a4e99271d0f0ccc9938d1312b15b26d315b385
@property
def slug(self):
    """The slug (short ID) of the entity."""
    try:
        return self.data['slug']
    except KeyError:
        return None
The slug (short ID) of the entity.
cartoframes/data/observatory/catalog/entity.py
slug
CartoDB/cartoframes
236
python
37af31659259b9b8f399c0ca120bad0dd6165da59b54390637c282950d94534f
@classmethod
def get(cls, id_):
    """Get an instance of an entity by ID or slug.

    Args:
        id_ (str):
            ID or slug of a catalog entity.

    Raises:
        CatalogError: if there's a problem when connecting to the catalog
            or no entities are found.

    """
    return cls._entity_repo.get_by_id(id_)
Get an instance of an entity by ID or slug.

Args:
    id_ (str): ID or slug of a catalog entity.

Raises:
    CatalogError: if there's a problem when connecting to the catalog or no
        entities are found.
cartoframes/data/observatory/catalog/entity.py
get
CartoDB/cartoframes
236
python
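A short usage sketch for get() on a concrete catalog entity; Dataset is one such entity class in cartoframes, and the slug/ID strings below are illustrative, not from this file.

from cartoframes.data.observatory import Dataset

dataset = Dataset.get('acs_sociodemogr_b758e778')            # by slug (illustrative)
dataset = Dataset.get('carto-do.ags.demographics_acs_usa')   # or by full ID (illustrative)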
62228648d26fa6702751cb3da955493424fd84d15b9d9a44440396af99d68c3f
@classmethod
def get_all(cls, filters=None):
    """List all instances of an entity.

    Args:
        filters (dict, optional):
            Dict containing pairs of entity properties and their values to be
            used as filters to query the available entities. If none is
            provided, no filters will be applied to the query.

    """
    return cls._entity_repo.get_all(filters)
List all instances of an entity.

Args:
    filters (dict, optional): Dict containing pairs of entity properties and
        their values to be used as filters to query the available entities.
        If none is provided, no filters will be applied to the query.
cartoframes/data/observatory/catalog/entity.py
get_all
CartoDB/cartoframes
236
python
6a37c32b5bb404286096bb81301df57fa6cfa87a3d645a8063eaa03be4098a32
@classmethod
def get_list(cls, id_list):
    """Get a list of instances of an entity by a list of IDs or slugs.

    Args:
        id_list (list):
            List of IDs or slugs of catalog entities to retrieve instances for.

    Raises:
        CatalogError: if there's a problem when connecting to the catalog
            or no entities are found.

    """
    return cls._entity_repo.get_by_id_list(id_list)
Get a list of instances of an entity by a list of IDs or slugs.

Args:
    id_list (list): List of IDs or slugs of catalog entities to retrieve
        instances for.

Raises:
    CatalogError: if there's a problem when connecting to the catalog or no
        entities are found.
cartoframes/data/observatory/catalog/entity.py
get_list
CartoDB/cartoframes
236
python
7f793df3f847d450bfee289c0c03c7e7281c83f14bdf6d5f7e786307467682eb
def to_series(self):
    """Converts the entity instance to a pandas Series."""
    return pd.Series(self.data)
Converts the entity instance to a pandas Series.
cartoframes/data/observatory/catalog/entity.py
to_series
CartoDB/cartoframes
236
python
78627af9dc0cfaed43e2c3cbdc43967a0409402283c0b533c1e7632dae0ec3f4
def to_dict(self):
    """Converts the entity instance to a Python dict."""
    return {key: value for key, value in self.data.items()
            if key not in self.export_excluded_fields}
Converts the entity instance to a Python dict.
cartoframes/data/observatory/catalog/entity.py
to_dict
CartoDB/cartoframes
236
python
6b8ea859c42427502a4c07fea30fed1a0ae0664648ab821d6b8662ebc75d8efa
def is_subscribed(self, credentials, entity_type):
    """Check if the entity is subscribed."""
    return (self.is_public_data
            or self.id in subscriptions.get_subscription_ids(credentials, entity_type))
Check if the entity is subscribed
cartoframes/data/observatory/catalog/entity.py
is_subscribed
CartoDB/cartoframes
236
python
647feb966a8e04af493170dc2c01865d60caabf3500aaea4ef63ff0b8f6abe4b
def to_dataframe(self):
    """Converts a list to a pandas DataFrame.

    Examples:
        >>> catalog = Catalog()
        >>> catalog.categories.to_dataframe()

    """
    df = pd.DataFrame([item.data for item in self])
    if 'summary_json' in df:
        del df['summary_json']
    return df
Converts a list to a pandas DataFrame.

Examples:
    >>> catalog = Catalog()
    >>> catalog.categories.to_dataframe()
cartoframes/data/observatory/catalog/entity.py
to_dataframe
CartoDB/cartoframes
236
python
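Tying the list helpers together: get_all() returns a list-like of entities, and to_dataframe() (defined just above) flattens it; Category is used here as a representative entity class.

from cartoframes.data.observatory import Category

categories = Category.get_all()
df = categories.to_dataframe()
print(df.head())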
8c8d15f8582fa415657be605cbfab67f5517a83598def7d11bfeacd67801697f
@classmethod
def load_viz(cls, viz_name: str, *args, **kwargs) -> Viz:
    """Loads built in visualization class.
    Currently supports: "lorenz", "cylinder", "grayscott"

    Args:
        viz_name (str): Keyword/name of visualization class

    Raises:
        KeyError: If viz_name is not a supported visualization type

    Returns:
        (Viz): Initialized viz class
    """
    if viz_name in VIZ_MAPPING.keys():
        viz_class = VIZ_MAPPING[viz_name]
        return viz_class(*args, **kwargs)
    else:
        err_str = 'Provided viz name, {:s}, not found in existing visualization classes.'.format(viz_name)
        raise KeyError(err_str)
Loads built in visualization class.
Currently supports: "lorenz", "cylinder", "grayscott"

Args:
    viz_name (str): Keyword/name of visualization class

Raises:
    KeyError: If viz_name is not a supported visualization type

Returns:
    (Viz): Initialized viz class
trphysx/viz/viz_auto.py
load_viz
zabaras/transformer-physx
33
python
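A usage sketch for the factory method above; the enclosing class is called AutoViz here, which is an assumption based on the module name viz_auto.py, and the keyword argument is illustrative.

# Hypothetical caller: the class name and kwargs are assumptions, not from the source.
viz = AutoViz.load_viz('lorenz', plot_dir='./plots')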
42d942fc8535b6ae0a0e532b373e4941b29e4946c6c9001bae97ba06f56a37df
def get_label(self):
    """Gets the label of a traffic light color.

    Returns:
        :obj:`str`: The label string.
    """
    if self.value == 1:
        return 'red traffic light'
    elif self.value == 2:
        return 'yellow traffic light'
    elif self.value == 3:
        return 'green traffic light'
    else:
        return 'off traffic light'
Gets the label of a traffic light color.

Returns:
    :obj:`str`: The label string.
pylot/perception/detection/traffic_light.py
get_label
chirpyjh/pylot
231
python
061c6baa52d67ae1e3cf926dab52de841d26fef1f0718de1306316a406bd402c
@classmethod
def from_simulator_actor(cls, traffic_light):
    """Creates a TrafficLight from a simulator traffic light actor.

    Args:
        traffic_light: A simulator traffic light actor.

    Returns:
        :py:class:`.TrafficLight`: A traffic light.
    """
    from carla import TrafficLight, TrafficLightState
    if not isinstance(traffic_light, TrafficLight):
        raise ValueError('The traffic light must be a TrafficLight')
    transform = pylot.utils.Transform.from_simulator_transform(
        traffic_light.get_transform())
    trigger_volume_extent = pylot.utils.Vector3D(
        traffic_light.trigger_volume.extent.x,
        traffic_light.trigger_volume.extent.y,
        traffic_light.trigger_volume.extent.z)
    traffic_light_state = traffic_light.get_state()
    state = TrafficLightColor.OFF
    if traffic_light_state == TrafficLightState.Red:
        state = TrafficLightColor.RED
    elif traffic_light_state == TrafficLightState.Yellow:
        state = TrafficLightColor.YELLOW
    elif traffic_light_state == TrafficLightState.Green:
        state = TrafficLightColor.GREEN
    return cls(1.0, state, traffic_light.id, transform, trigger_volume_extent)
Creates a TrafficLight from a simulator traffic light actor.

Args:
    traffic_light: A simulator traffic light actor.

Returns:
    :py:class:`.TrafficLight`: A traffic light.
pylot/perception/detection/traffic_light.py
from_simulator_actor
chirpyjh/pylot
231
python
77e32acdd65b4ea2feca86bf784186f1fea7706836b8b4dbcc0b7314ba56ad95
def is_traffic_light_visible(self, camera_transform: pylot.utils.Transform,
                             town_name: str = None, distance_threshold: int = 70):
    """Checks if the traffic light is visible from the camera transform.

    Args:
        camera_transform (:py:class:`~pylot.utils.Transform`): Transform of
            the camera in the world frame of reference.
        distance_threshold (:obj:`int`): Maximum distance to the camera
            (in m).

    Returns:
        bool: True if the traffic light is visible from the camera
        transform.
    """
    # Project the light's forward vector (rotated 90 degrees around Z) onto
    # the camera's forward vector.
    prod = np.dot(
        [self.transform.forward_vector.y,
         -self.transform.forward_vector.x,
         self.transform.forward_vector.z],
        [camera_transform.forward_vector.x,
         camera_transform.forward_vector.y,
         camera_transform.forward_vector.z])
    if self.transform.location.distance(camera_transform.location) > distance_threshold:
        return prod > 0.4
    if town_name is None:
        return prod > -0.8
    elif town_name == 'Town01' or town_name == 'Town02':
        return prod > 0.3
    return prod > -0.8
Checks if the traffic light is visible from the camera transform.

Args:
    camera_transform (:py:class:`~pylot.utils.Transform`): Transform of the
        camera in the world frame of reference.
    distance_threshold (:obj:`int`): Maximum distance to the camera (in m).

Returns:
    bool: True if the traffic light is visible from the camera transform.
pylot/perception/detection/traffic_light.py
is_traffic_light_visible
chirpyjh/pylot
231
python
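The visibility test above reduces to a dot product between the light's forward vector (rotated 90 degrees around Z) and the camera's forward vector; a standalone numeric check with made-up unit vectors shows the head-on case.

import numpy as np

# Toy vectors only: a light with forward_vector (0, 1, 0) rotates to [1, 0, 0];
# a camera driving along +x then gives prod = 1.0, above every threshold used.
light_rotated = np.array([1.0, 0.0, 0.0])
camera_forward = np.array([1.0, 0.0, 0.0])
prod = np.dot(light_rotated, camera_forward)
print(prod > -0.8)  # True: visible under the default (no town) threshold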
b6e7172f69555e0261bd1b37f670263288ce77e6c5743fa4b991e7e33d88c97e
def get_all_detected_traffic_light_boxes(self, town_name: str, depth_frame, segmented_image):
    """Returns traffic lights for all boxes of a simulator traffic light.

    Note:
        All the traffic lights returned will have the same id and
        transform.

    Args:
        town_name (:obj:`str`): Name of the town in which the traffic light
            is.
        depth_frame (:py:class:`~pylot.perception.depth_frame.DepthFrame`):
            Depth frame.
        segmented_image: A segmented image np array used to refine the
            bounding boxes.

    Returns:
        list(:py:class:`~pylot.perception.detection.traffic_light.TrafficLight`):
            Detected traffic lights, one for each traffic light box.
    """
    traffic_lights = []
    bboxes = self._get_bboxes(town_name)
    for bbox in bboxes:
        # Project the 3D box corners into the camera view.
        bounding_box = [
            loc.to_camera_view(
                depth_frame.camera_setup.get_extrinsic_matrix(),
                depth_frame.camera_setup.get_intrinsic_matrix())
            for loc in bbox]
        bbox_2d = get_bounding_box_in_camera_view(
            bounding_box, depth_frame.camera_setup.width,
            depth_frame.camera_setup.height)
        if not bbox_2d:
            continue
        cropped_image = segmented_image[bbox_2d.y_min:bbox_2d.y_max,
                                        bbox_2d.x_min:bbox_2d.x_max]
        cropped_depth = depth_frame.frame[bbox_2d.y_min:bbox_2d.y_max,
                                          bbox_2d.x_min:bbox_2d.x_max]
        if cropped_image.size > 0:
            # Keep only pixels whose semantic class is 12 or 18.
            masked_image = np.zeros_like(cropped_image)
            masked_image[np.where(np.logical_or(cropped_image == 12, cropped_image == 18))] = 1
            if np.sum(masked_image) >= 0.2 * masked_image.size:
                masked_depth = cropped_depth[np.where(masked_image == 1)]
                mean_depth = np.mean(masked_depth) * 1000
                if abs(mean_depth - bounding_box[0].z) <= 2 and mean_depth < 150:
                    traffic_lights.append(
                        TrafficLight(1.0, self.state, self.id, self.transform,
                                     self.trigger_volume_extent, bbox_2d))
    return traffic_lights
Returns traffic lights for all boxes of a simulator traffic light.

Note:
    All the traffic lights returned will have the same id and transform.

Args:
    town_name (:obj:`str`): Name of the town in which the traffic light is.
    depth_frame (:py:class:`~pylot.perception.depth_frame.DepthFrame`):
        Depth frame.
    segmented_image: A segmented image np array used to refine the bounding
        boxes.

Returns:
    list(:py:class:`~pylot.perception.detection.traffic_light.TrafficLight`):
        Detected traffic lights, one for each traffic light box.
pylot/perception/detection/traffic_light.py
get_all_detected_traffic_light_boxes
chirpyjh/pylot
231
python
1d3b90f9e3fa7600e5950a657dff1c0d5fd259a304cfd3370880dec13e35e876
def _relative_to_traffic_light(self, points):
    """Transforms the bounding box specified in the points relative to the
    light.

    Args:
        points: An array of length 4 representing the 4 points of the
            rectangle.
    """
    def rotate(yaw, location):
        """Rotate a given 3D vector around the Z-axis."""
        rotation_matrix = np.identity(3)
        rotation_matrix[0, 0] = np.cos(yaw)
        rotation_matrix[0, 1] = -np.sin(yaw)
        rotation_matrix[1, 0] = np.sin(yaw)
        rotation_matrix[1, 1] = np.cos(yaw)
        location_vector = np.array([[location.x], [location.y], [location.z]])
        transformed = np.dot(rotation_matrix, location_vector)
        return pylot.utils.Location(x=transformed[0, 0],
                                    y=transformed[1, 0],
                                    z=transformed[2, 0])

    # Rotate each point by the light's yaw, then offset by its location.
    transformed_points = [
        rotate(np.radians(self.transform.rotation.yaw), point) for point in points]
    base_relative_points = [
        self.transform.location + point for point in transformed_points]
    return base_relative_points
Transforms the bounding box specified in the points relative to the light.

Args:
    points: An array of length 4 representing the 4 points of the rectangle.
pylot/perception/detection/traffic_light.py
_relative_to_traffic_light
chirpyjh/pylot
231
python
64b234550ab2734a5cf9407ba6e40b417006e92b4bbb19d781e6da33bc4f5a00
def rotate(yaw, location):
    """Rotate a given 3D vector around the Z-axis."""
    rotation_matrix = np.identity(3)
    rotation_matrix[0, 0] = np.cos(yaw)
    rotation_matrix[0, 1] = -np.sin(yaw)
    rotation_matrix[1, 0] = np.sin(yaw)
    rotation_matrix[1, 1] = np.cos(yaw)
    location_vector = np.array([[location.x], [location.y], [location.z]])
    transformed = np.dot(rotation_matrix, location_vector)
    return pylot.utils.Location(x=transformed[0, 0],
                                y=transformed[1, 0],
                                z=transformed[2, 0])
Rotate a given 3D vector around the Z-axis.
pylot/perception/detection/traffic_light.py
rotate
chirpyjh/pylot
231
python
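A quick standalone check of the Z-axis rotation above; pylot.utils.Location is stubbed with a namedtuple so the sketch runs without the pylot package.

import numpy as np
from collections import namedtuple

Location = namedtuple('Location', ['x', 'y', 'z'])  # stand-in for pylot.utils.Location

def rotate(yaw, location):
    # Same construction as in the row above: rotation around the Z-axis.
    rotation_matrix = np.identity(3)
    rotation_matrix[0, 0] = np.cos(yaw)
    rotation_matrix[0, 1] = -np.sin(yaw)
    rotation_matrix[1, 0] = np.sin(yaw)
    rotation_matrix[1, 1] = np.cos(yaw)
    vec = np.array([[location.x], [location.y], [location.z]])
    out = np.dot(rotation_matrix, vec)
    return Location(x=out[0, 0], y=out[1, 0], z=out[2, 0])

# Rotating (1, 0, 0) by 90 degrees yields (0, 1, 0) up to float error.
print(rotate(np.radians(90), Location(1.0, 0.0, 0.0)))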
b351e0c3e2f0932c0d9e92e3ab46f629f0c847755e20a5bae5e69e3b2e4eb5fe
def size_term(land_use, destination_choice_coeffs):
    """
    This method takes the land use data and multiplies various columns of the
    land use data by coefficients from the spec table in order to yield a
    size term (a linear combination of land use variables).

    Parameters
    ----------
    land_use : DataFrame
        A dataframe of land use attributes - the column names should match
        the index of destination_choice_coeffs
    destination_choice_coeffs : Series
        A series of coefficients for the land use attributes - the index
        describes the link to the land use table, and the values are
        floating point numbers used to do the linear combination

    Returns
    -------
    values : Series
        The index will be the same as land use, and the values will be the
        linear combination of the land use table columns specified by the
        coefficients series.
    """
    coeffs = destination_choice_coeffs

    # Warn about coefficients whose land-use column is missing.
    missing = coeffs[~coeffs.index.isin(land_use.columns)]
    if len(missing) > 0:
        logger.warning('%s missing columns in land use' % len(missing.index))
        for v in missing.index.values:
            logger.warning('missing: %s' % v)

    return land_use[coeffs.index].dot(coeffs)
This method takes the land use data and multiplies various columns of the land use data by coefficients from the spec table in order to yield a size term (a linear combination of land use variables). Parameters ---------- land_use : DataFrame A dataframe of land use attributes - the column names should match the index of destination_choice_coeffs destination_choice_coeffs : Series A series of coefficients for the land use attributes - the index describes the link to the land use table, and the values are floating point numbers used to do the linear combination Returns ------- values : Series The index will be the same as land use, and the values will be the linear combination of the land use table columns specified by the coefficients series.
activitysim/abm/tables/size_terms.py
size_term
mattwigway/activitysim
2
python
def size_term(land_use, destination_choice_coeffs): '\n This method takes the land use data and multiplies various columns of the\n land use data by coefficients from the spec table in order\n to yield a size term (a linear combination of land use variables).\n\n Parameters\n ----------\n land_use : DataFrame\n A dataframe of land use attributes - the column names should match\n the index of destination_choice_coeffs\n destination_choice_coeffs : Series\n A series of coefficients for the land use attributes - the index\n describes the link to the land use table, and the values are floating\n points numbers used to do the linear combination\n\n Returns\n -------\n values : Series\n The index will be the same as land use, and the values will the\n linear combination of the land use table columns specified by the\n coefficients series.\n ' coeffs = destination_choice_coeffs missing = coeffs[(~ coeffs.index.isin(land_use.columns))] if (len(missing) > 0): logger.warning(('%s missing columns in land use' % len(missing.index))) for v in missing.index.values: logger.warning(('missing: %s' % v)) return land_use[coeffs.index].dot(coeffs)
def size_term(land_use, destination_choice_coeffs): '\n This method takes the land use data and multiplies various columns of the\n land use data by coefficients from the spec table in order\n to yield a size term (a linear combination of land use variables).\n\n Parameters\n ----------\n land_use : DataFrame\n A dataframe of land use attributes - the column names should match\n the index of destination_choice_coeffs\n destination_choice_coeffs : Series\n A series of coefficients for the land use attributes - the index\n describes the link to the land use table, and the values are floating\n points numbers used to do the linear combination\n\n Returns\n -------\n values : Series\n The index will be the same as land use, and the values will the\n linear combination of the land use table columns specified by the\n coefficients series.\n ' coeffs = destination_choice_coeffs missing = coeffs[(~ coeffs.index.isin(land_use.columns))] if (len(missing) > 0): logger.warning(('%s missing columns in land use' % len(missing.index))) for v in missing.index.values: logger.warning(('missing: %s' % v)) return land_use[coeffs.index].dot(coeffs)<|docstring|>This method takes the land use data and multiplies various columns of the land use data by coefficients from the spec table in order to yield a size term (a linear combination of land use variables). Parameters ---------- land_use : DataFrame A dataframe of land use attributes - the column names should match the index of destination_choice_coeffs destination_choice_coeffs : Series A series of coefficients for the land use attributes - the index describes the link to the land use table, and the values are floating points numbers used to do the linear combination Returns ------- values : Series The index will be the same as land use, and the values will the linear combination of the land use table columns specified by the coefficients series.<|endoftext|>
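A self-contained sketch of the linear combination that size_term computes; the zone index, column names, and coefficient values below are invented for the demo:

import pandas as pd

land_use = pd.DataFrame({'households': [100, 200], 'retail_emp': [10, 40]}, index=[1, 2])
coeffs = pd.Series({'households': 0.5, 'retail_emp': 2.0})

# Same core operation as size_term: select the coefficient columns, then dot.
size = land_use[coeffs.index].dot(coeffs)
print(size)  # zone 1: 100*0.5 + 10*2.0 = 70.0; zone 2: 200*0.5 + 40*2.0 = 180.0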
f79c3395c77515b1fe339a1298971b96f9062310cad990b8b9e6bfa72b5fab43
def tour_destination_size_terms(land_use, size_terms, model_selector): "\n\n Parameters\n ----------\n land_use - pipeline table\n size_terms - pipeline table\n model_selector - str\n\n Returns\n -------\n\n ::\n\n pandas.dataframe\n one column per model_selector segment with index of land_use\n e.g. for model_selector 'workplace', columns will be work_low, work_med, ...\n and for model_selector 'trip', columns will be eatout, escort, othdiscr, ...\n\n work_low work_med work_high work_veryhigh\n TAZ ...\n 1 1267.00000 522.000 1108.000 1540.0000 ...\n 2 1991.00000 824.500 1759.000 2420.0000 ...\n ...\n " land_use = land_use.to_frame() size_terms = size_terms[(size_terms.model_selector == model_selector)].copy() del size_terms['model_selector'] df = pd.DataFrame({key: size_term(land_use, row) for (key, row) in size_terms.iterrows()}, index=land_use.index) assert (land_use.index.name == 'TAZ') df.index.name = land_use.index.name if (not (df.dtypes == 'float64').all()): logger.warning('Surprised to find that not all size_terms were float64!') return df
Parameters ---------- land_use - pipeline table size_terms - pipeline table model_selector - str Returns ------- :: pandas.dataframe one column per model_selector segment with index of land_use e.g. for model_selector 'workplace', columns will be work_low, work_med, ... and for model_selector 'trip', columns will be eatout, escort, othdiscr, ... work_low work_med work_high work_veryhigh TAZ ... 1 1267.00000 522.000 1108.000 1540.0000 ... 2 1991.00000 824.500 1759.000 2420.0000 ... ...
activitysim/abm/tables/size_terms.py
tour_destination_size_terms
mattwigway/activitysim
2
python
def tour_destination_size_terms(land_use, size_terms, model_selector): "\n\n Parameters\n ----------\n land_use - pipeline table\n size_terms - pipeline table\n model_selector - str\n\n Returns\n -------\n\n ::\n\n pandas.dataframe\n one column per model_selector segment with index of land_use\n e.g. for model_selector 'workplace', columns will be work_low, work_med, ...\n and for model_selector 'trip', columns will be eatout, escort, othdiscr, ...\n\n work_low work_med work_high work_veryhigh\n TAZ ...\n 1 1267.00000 522.000 1108.000 1540.0000 ...\n 2 1991.00000 824.500 1759.000 2420.0000 ...\n ...\n " land_use = land_use.to_frame() size_terms = size_terms[(size_terms.model_selector == model_selector)].copy() del size_terms['model_selector'] df = pd.DataFrame({key: size_term(land_use, row) for (key, row) in size_terms.iterrows()}, index=land_use.index) assert (land_use.index.name == 'TAZ') df.index.name = land_use.index.name if (not (df.dtypes == 'float64').all()): logger.warning('Surprised to find that not all size_terms were float64!') return df
def tour_destination_size_terms(land_use, size_terms, model_selector): "\n\n Parameters\n ----------\n land_use - pipeline table\n size_terms - pipeline table\n model_selector - str\n\n Returns\n -------\n\n ::\n\n pandas.dataframe\n one column per model_selector segment with index of land_use\n e.g. for model_selector 'workplace', columns will be work_low, work_med, ...\n and for model_selector 'trip', columns will be eatout, escort, othdiscr, ...\n\n work_low work_med work_high work_veryhigh\n TAZ ...\n 1 1267.00000 522.000 1108.000 1540.0000 ...\n 2 1991.00000 824.500 1759.000 2420.0000 ...\n ...\n " land_use = land_use.to_frame() size_terms = size_terms[(size_terms.model_selector == model_selector)].copy() del size_terms['model_selector'] df = pd.DataFrame({key: size_term(land_use, row) for (key, row) in size_terms.iterrows()}, index=land_use.index) assert (land_use.index.name == 'TAZ') df.index.name = land_use.index.name if (not (df.dtypes == 'float64').all()): logger.warning('Surprised to find that not all size_terms were float64!') return df<|docstring|>Parameters ---------- land_use - pipeline table size_terms - pipeline table model_selector - str Returns ------- :: pandas.dataframe one column per model_selector segment with index of land_use e.g. for model_selector 'workplace', columns will be work_low, work_med, ... and for model_selector 'trip', columns will be eatout, escort, othdiscr, ... work_low work_med work_high work_veryhigh TAZ ... 1 1267.00000 522.000 1108.000 1540.0000 ... 2 1991.00000 824.500 1759.000 2420.0000 ... ...<|endoftext|>
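A sketch of how the per-segment frame is assembled, reusing the same dot-product core; the segment names and spec values are invented for illustration:

import pandas as pd

land_use = pd.DataFrame({'households': [100, 200], 'retail_emp': [10, 40]},
                        index=pd.Index([1, 2], name='TAZ'))
# Hypothetical spec: one row per segment, one column per land-use attribute.
size_spec = pd.DataFrame({'households': [1.0, 0.0], 'retail_emp': [0.0, 3.0]},
                         index=['work_low', 'eatout'])

df = pd.DataFrame({seg: land_use[row.index].dot(row) for seg, row in size_spec.iterrows()},
                  index=land_use.index)
print(df)  # one float64 column per segment, indexed by TAZ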
d8f5698568976df605536b2f1935033bf690fc17ee4c067be0571933c449ded6
def __init__(__self__, resource_name, opts=None, curve=None, key_opts=None, key_size=None, key_type=None, key_vault_id=None, name=None, tags=None, vault_uri=None, __name__=None, __opts__=None): '\n Manages a Key Vault Key.\n \n :param str resource_name: The name of the resource.\n :param kulado.ResourceOptions opts: Options for the resource.\n :param kulado.Input[str] curve: Specifies the curve to use when creating an `EC` key. Possible values are `P-256`, `P-384`, `P-521`, and `SECP256K1`. This field will be required in a future release if `key_type` is `EC` or `EC-HSM`. The API will default to `P-256` if nothing is specified. Changing this forces a new resource to be created.\n :param kulado.Input[list] key_opts: A list of JSON web key operations. Possible values include: `decrypt`, `encrypt`, `sign`, `unwrapKey`, `verify` and `wrapKey`. Please note these values are case sensitive.\n :param kulado.Input[float] key_size: Specifies the Size of the RSA key to create in bytes. For example, 1024 or 2048. *Note*: This field is required if `key_type` is `RSA` or `RSA-HSM`. Changing this forces a new resource to be created.\n :param kulado.Input[str] key_type: Specifies the Key Type to use for this Key Vault Key. Possible values are `EC` (Elliptic Curve), `EC-HSM`, `Oct` (Octet), `RSA` and `RSA-HSM`. Changing this forces a new resource to be created.\n :param kulado.Input[str] key_vault_id: The ID of the Key Vault where the Key should be created. Changing this forces a new resource to be created.\n :param kulado.Input[str] name: Specifies the name of the Key Vault Key. Changing this forces a new resource to be created.\n :param kulado.Input[dict] tags: A mapping of tags to assign to the resource.\n\n > This content is derived from https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/website/docs/r/key_vault_key.html.markdown.\n ' if (__name__ is not None): warnings.warn('explicit use of __name__ is deprecated', DeprecationWarning) resource_name = __name__ if (__opts__ is not None): warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning) opts = __opts__ if (not resource_name): raise TypeError('Missing resource name argument (for URN creation)') if (not isinstance(resource_name, str)): raise TypeError('Expected resource name to be a string') if (opts and (not isinstance(opts, kulado.ResourceOptions))): raise TypeError('Expected resource options to be a ResourceOptions instance') __props__ = dict() __props__['curve'] = curve if (key_opts is None): raise TypeError("Missing required property 'key_opts'") __props__['key_opts'] = key_opts __props__['key_size'] = key_size if (key_type is None): raise TypeError("Missing required property 'key_type'") __props__['key_type'] = key_type __props__['key_vault_id'] = key_vault_id __props__['name'] = name __props__['tags'] = tags __props__['vault_uri'] = vault_uri __props__['e'] = None __props__['n'] = None __props__['version'] = None __props__['x'] = None __props__['y'] = None super(Key, __self__).__init__('azure:keyvault/key:Key', resource_name, __props__, opts)
Manages a Key Vault Key. :param str resource_name: The name of the resource. :param kulado.ResourceOptions opts: Options for the resource. :param kulado.Input[str] curve: Specifies the curve to use when creating an `EC` key. Possible values are `P-256`, `P-384`, `P-521`, and `SECP256K1`. This field will be required in a future release if `key_type` is `EC` or `EC-HSM`. The API will default to `P-256` if nothing is specified. Changing this forces a new resource to be created. :param kulado.Input[list] key_opts: A list of JSON web key operations. Possible values include: `decrypt`, `encrypt`, `sign`, `unwrapKey`, `verify` and `wrapKey`. Please note these values are case sensitive. :param kulado.Input[float] key_size: Specifies the Size of the RSA key to create in bytes. For example, 1024 or 2048. *Note*: This field is required if `key_type` is `RSA` or `RSA-HSM`. Changing this forces a new resource to be created. :param kulado.Input[str] key_type: Specifies the Key Type to use for this Key Vault Key. Possible values are `EC` (Elliptic Curve), `EC-HSM`, `Oct` (Octet), `RSA` and `RSA-HSM`. Changing this forces a new resource to be created. :param kulado.Input[str] key_vault_id: The ID of the Key Vault where the Key should be created. Changing this forces a new resource to be created. :param kulado.Input[str] name: Specifies the name of the Key Vault Key. Changing this forces a new resource to be created. :param kulado.Input[dict] tags: A mapping of tags to assign to the resource. > This content is derived from https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/website/docs/r/key_vault_key.html.markdown.
sdk/python/kulado_azure/keyvault/key.py
__init__
kulado/kulado-azure
0
python
def __init__(__self__, resource_name, opts=None, curve=None, key_opts=None, key_size=None, key_type=None, key_vault_id=None, name=None, tags=None, vault_uri=None, __name__=None, __opts__=None): '\n Manages a Key Vault Key.\n \n :param str resource_name: The name of the resource.\n :param kulado.ResourceOptions opts: Options for the resource.\n :param kulado.Input[str] curve: Specifies the curve to use when creating an `EC` key. Possible values are `P-256`, `P-384`, `P-521`, and `SECP256K1`. This field will be required in a future release if `key_type` is `EC` or `EC-HSM`. The API will default to `P-256` if nothing is specified. Changing this forces a new resource to be created.\n :param kulado.Input[list] key_opts: A list of JSON web key operations. Possible values include: `decrypt`, `encrypt`, `sign`, `unwrapKey`, `verify` and `wrapKey`. Please note these values are case sensitive.\n :param kulado.Input[float] key_size: Specifies the Size of the RSA key to create in bytes. For example, 1024 or 2048. *Note*: This field is required if `key_type` is `RSA` or `RSA-HSM`. Changing this forces a new resource to be created.\n :param kulado.Input[str] key_type: Specifies the Key Type to use for this Key Vault Key. Possible values are `EC` (Elliptic Curve), `EC-HSM`, `Oct` (Octet), `RSA` and `RSA-HSM`. Changing this forces a new resource to be created.\n :param kulado.Input[str] key_vault_id: The ID of the Key Vault where the Key should be created. Changing this forces a new resource to be created.\n :param kulado.Input[str] name: Specifies the name of the Key Vault Key. Changing this forces a new resource to be created.\n :param kulado.Input[dict] tags: A mapping of tags to assign to the resource.\n\n > This content is derived from https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/website/docs/r/key_vault_key.html.markdown.\n ' if (__name__ is not None): warnings.warn('explicit use of __name__ is deprecated', DeprecationWarning) resource_name = __name__ if (__opts__ is not None): warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning) opts = __opts__ if (not resource_name): raise TypeError('Missing resource name argument (for URN creation)') if (not isinstance(resource_name, str)): raise TypeError('Expected resource name to be a string') if (opts and (not isinstance(opts, kulado.ResourceOptions))): raise TypeError('Expected resource options to be a ResourceOptions instance') __props__ = dict() __props__['curve'] = curve if (key_opts is None): raise TypeError("Missing required property 'key_opts'") __props__['key_opts'] = key_opts __props__['key_size'] = key_size if (key_type is None): raise TypeError("Missing required property 'key_type'") __props__['key_type'] = key_type __props__['key_vault_id'] = key_vault_id __props__['name'] = name __props__['tags'] = tags __props__['vault_uri'] = vault_uri __props__['e'] = None __props__['n'] = None __props__['version'] = None __props__['x'] = None __props__['y'] = None super(Key, __self__).__init__('azure:keyvault/key:Key', resource_name, __props__, opts)
def __init__(__self__, resource_name, opts=None, curve=None, key_opts=None, key_size=None, key_type=None, key_vault_id=None, name=None, tags=None, vault_uri=None, __name__=None, __opts__=None): '\n Manages a Key Vault Key.\n \n :param str resource_name: The name of the resource.\n :param kulado.ResourceOptions opts: Options for the resource.\n :param kulado.Input[str] curve: Specifies the curve to use when creating an `EC` key. Possible values are `P-256`, `P-384`, `P-521`, and `SECP256K1`. This field will be required in a future release if `key_type` is `EC` or `EC-HSM`. The API will default to `P-256` if nothing is specified. Changing this forces a new resource to be created.\n :param kulado.Input[list] key_opts: A list of JSON web key operations. Possible values include: `decrypt`, `encrypt`, `sign`, `unwrapKey`, `verify` and `wrapKey`. Please note these values are case sensitive.\n :param kulado.Input[float] key_size: Specifies the Size of the RSA key to create in bytes. For example, 1024 or 2048. *Note*: This field is required if `key_type` is `RSA` or `RSA-HSM`. Changing this forces a new resource to be created.\n :param kulado.Input[str] key_type: Specifies the Key Type to use for this Key Vault Key. Possible values are `EC` (Elliptic Curve), `EC-HSM`, `Oct` (Octet), `RSA` and `RSA-HSM`. Changing this forces a new resource to be created.\n :param kulado.Input[str] key_vault_id: The ID of the Key Vault where the Key should be created. Changing this forces a new resource to be created.\n :param kulado.Input[str] name: Specifies the name of the Key Vault Key. Changing this forces a new resource to be created.\n :param kulado.Input[dict] tags: A mapping of tags to assign to the resource.\n\n > This content is derived from https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/website/docs/r/key_vault_key.html.markdown.\n ' if (__name__ is not None): warnings.warn('explicit use of __name__ is deprecated', DeprecationWarning) resource_name = __name__ if (__opts__ is not None): warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning) opts = __opts__ if (not resource_name): raise TypeError('Missing resource name argument (for URN creation)') if (not isinstance(resource_name, str)): raise TypeError('Expected resource name to be a string') if (opts and (not isinstance(opts, kulado.ResourceOptions))): raise TypeError('Expected resource options to be a ResourceOptions instance') __props__ = dict() __props__['curve'] = curve if (key_opts is None): raise TypeError("Missing required property 'key_opts'") __props__['key_opts'] = key_opts __props__['key_size'] = key_size if (key_type is None): raise TypeError("Missing required property 'key_type'") __props__['key_type'] = key_type __props__['key_vault_id'] = key_vault_id __props__['name'] = name __props__['tags'] = tags __props__['vault_uri'] = vault_uri __props__['e'] = None __props__['n'] = None __props__['version'] = None __props__['x'] = None __props__['y'] = None super(Key, __self__).__init__('azure:keyvault/key:Key', resource_name, __props__, opts)<|docstring|>Manages a Key Vault Key. :param str resource_name: The name of the resource. :param kulado.ResourceOptions opts: Options for the resource. :param kulado.Input[str] curve: Specifies the curve to use when creating an `EC` key. Possible values are `P-256`, `P-384`, `P-521`, and `SECP256K1`. This field will be required in a future release if `key_type` is `EC` or `EC-HSM`. The API will default to `P-256` if nothing is specified. Changing this forces a new resource to be created. :param kulado.Input[list] key_opts: A list of JSON web key operations. Possible values include: `decrypt`, `encrypt`, `sign`, `unwrapKey`, `verify` and `wrapKey`. Please note these values are case sensitive. :param kulado.Input[float] key_size: Specifies the Size of the RSA key to create in bytes. For example, 1024 or 2048. *Note*: This field is required if `key_type` is `RSA` or `RSA-HSM`. Changing this forces a new resource to be created. :param kulado.Input[str] key_type: Specifies the Key Type to use for this Key Vault Key. Possible values are `EC` (Elliptic Curve), `EC-HSM`, `Oct` (Octet), `RSA` and `RSA-HSM`. Changing this forces a new resource to be created. :param kulado.Input[str] key_vault_id: The ID of the Key Vault where the Key should be created. Changing this forces a new resource to be created. :param kulado.Input[str] name: Specifies the name of the Key Vault Key. Changing this forces a new resource to be created. :param kulado.Input[dict] tags: A mapping of tags to assign to the resource. > This content is derived from https://github.com/terraform-providers/terraform-provider-azurerm/blob/master/website/docs/r/key_vault_key.html.markdown.<|endoftext|>
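A sketch of instantiating this resource with the required properties from the signature above. The import path is inferred from the record's file path and the vault ID is a placeholder, so treat this as illustrative rather than verified against the kulado SDK:

import kulado_azure as azure

# Placeholder vault ID; a real ID would come from an azure.keyvault.KeyVault resource.
vault_id = '/subscriptions/<sub-id>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/<vault>'

generated = azure.keyvault.Key('generated-key',
    key_vault_id=vault_id,
    key_type='RSA',
    key_size=2048,
    key_opts=['decrypt', 'encrypt', 'sign', 'unwrapKey', 'verify', 'wrapKey'])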
dc88598542b233430eba10a2d3802bef6e9b92a189ee25d600a8484d19bfaa2f
def apply_weights(values: Sequence[float], weights: Sequence[float]): '\n Returns values[0] * weights[0] + values[1] * weights[1] + ... + values[n] * weights[n]\n ' return sum([(v[0] * v[1]) for v in zip(values, weights)])
Returns values[0] * weights[0] + values[1] * weights[1] + ... + values[n] * weights[n]
src/math_utils.py
apply_weights
gcardozo123/cg_playground
1
python
def apply_weights(values: Sequence[float], weights: Sequence[float]): '\n \n ' return sum([(v[0] * v[1]) for v in zip(values, weights)])
def apply_weights(values: Sequence[float], weights: Sequence[float]): '\n \n ' return sum([(v[0] * v[1]) for v in zip(values, weights)])<|docstring|>Returns values[0] * weights[0] + values[1] * weights[1] + ... + values[n] * weights[n]<|endoftext|>
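An equivalent, slightly more idiomatic formulation with a quick sanity check; note that zip truncates to the shorter of the two sequences:

from typing import Sequence

def apply_weights(values: Sequence[float], weights: Sequence[float]) -> float:
    # Tuple unpacking avoids the v[0]/v[1] indexing of the original.
    return sum(v * w for v, w in zip(values, weights))

print(apply_weights([1.0, 2.0, 3.0], [0.5, 0.5, 1.0]))  # 1*0.5 + 2*0.5 + 3*1.0 = 4.5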
5fc30c832a576ee5b57c9a0fb957f97bab78ddde39118072ba56f941cae2dc4e
def angle_to(self, other): '\n Returns the radian angles to other Vec2\n ' dot_prod = self.dot(other) mag = self.magnitude() other_mag = other.magnitude() return acos((dot_prod / (mag * other_mag)))
Returns the angle in radians to the other Vec2
src/math_utils.py
angle_to
gcardozo123/cg_playground
1
python
def angle_to(self, other): '\n \n ' dot_prod = self.dot(other) mag = self.magnitude() other_mag = other.magnitude() return acos((dot_prod / (mag * other_mag)))
def angle_to(self, other): '\n \n ' dot_prod = self.dot(other) mag = self.magnitude() other_mag = other.magnitude() return acos((dot_prod / (mag * other_mag)))<|docstring|>Returns the radian angles to other Vec2<|endoftext|>
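A standalone tuple-based sketch of the same dot-product identity; the clamp on the acos argument is an editorial hardening (the original can raise ValueError when floating-point drift pushes dot/mags just outside [-1, 1]):

from math import acos, sqrt

def angle_between(a, b):
    # a and b are (x, y) tuples.
    dot = a[0] * b[0] + a[1] * b[1]
    mags = sqrt(a[0] ** 2 + a[1] ** 2) * sqrt(b[0] ** 2 + b[1] ** 2)
    # Clamping guards acos against values marginally outside its domain.
    return acos(max(-1.0, min(1.0, dot / mags)))

print(angle_between((1.0, 0.0), (0.0, 1.0)))  # ~1.5708 (pi/2)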
ba69a62875edda163efe215901dce5355574e5c85aff31c3c9344c2f997805f5
@classmethod def obtain_barycentric_weights(cls, p, v0, v1, v2): '\n Returns the barycentric weights (reference: https://codeplea.com/triangular-interpolation)\n for a given point `p` inside a triangle (v0, v1, v2).\n ' denominator = (((v1.y - v2.y) * (v0.x - v2.x)) + ((v2.x - v1.x) * (v0.y - v2.y))) if (denominator == 0): return ((- 1), (- 1), (- 1)) w1 = ((((v1.y - v2.y) * (p.x - v2.x)) + ((v2.x - v1.x) * (p.y - v2.y))) / denominator) w2 = ((((v2.y - v0.y) * (p.x - v2.x)) + ((v0.x - v2.x) * (p.y - v2.y))) / denominator) w3 = ((1 - w1) - w2) return (w1, w2, w3)
Returns the barycentric weights (reference: https://codeplea.com/triangular-interpolation) for a given point `p` inside a triangle (v0, v1, v2).
src/math_utils.py
obtain_barycentric_weights
gcardozo123/cg_playground
1
python
@classmethod def obtain_barycentric_weights(cls, p, v0, v1, v2): '\n Returns the barycentric weights (reference: https://codeplea.com/triangular-interpolation)\n for a given point `p` inside a triangle (v0, v1, v2).\n ' denominator = (((v1.y - v2.y) * (v0.x - v2.x)) + ((v2.x - v1.x) * (v0.y - v2.y))) if (denominator == 0): return ((- 1), (- 1), (- 1)) w1 = ((((v1.y - v2.y) * (p.x - v2.x)) + ((v2.x - v1.x) * (p.y - v2.y))) / denominator) w2 = ((((v2.y - v0.y) * (p.x - v2.x)) + ((v0.x - v2.x) * (p.y - v2.y))) / denominator) w3 = ((1 - w1) - w2) return (w1, w2, w3)
@classmethod def obtain_barycentric_weights(cls, p, v0, v1, v2): '\n Returns the barycentric weights (reference: https://codeplea.com/triangular-interpolation)\n for a given point `p` inside a triangle (v0, v1, v2).\n ' denominator = (((v1.y - v2.y) * (v0.x - v2.x)) + ((v2.x - v1.x) * (v0.y - v2.y))) if (denominator == 0): return ((- 1), (- 1), (- 1)) w1 = ((((v1.y - v2.y) * (p.x - v2.x)) + ((v2.x - v1.x) * (p.y - v2.y))) / denominator) w2 = ((((v2.y - v0.y) * (p.x - v2.x)) + ((v0.x - v2.x) * (p.y - v2.y))) / denominator) w3 = ((1 - w1) - w2) return (w1, w2, w3)<|docstring|>Returns the barycentric weights (reference: https://codeplea.com/triangular-interpolation) for a given point `p` inside a triangle (v0, v1, v2).<|endoftext|>
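A standalone sketch of the same formulas, using SimpleNamespace points and finishing with the interpolation that the docstring's reference describes; the per-vertex values are invented:

from types import SimpleNamespace

def barycentric_weights(p, v0, v1, v2):
    d = (v1.y - v2.y) * (v0.x - v2.x) + (v2.x - v1.x) * (v0.y - v2.y)
    if d == 0:
        return (-1, -1, -1)  # degenerate triangle, mirroring the original's sentinel
    w1 = ((v1.y - v2.y) * (p.x - v2.x) + (v2.x - v1.x) * (p.y - v2.y)) / d
    w2 = ((v2.y - v0.y) * (p.x - v2.x) + (v0.x - v2.x) * (p.y - v2.y)) / d
    return (w1, w2, 1 - w1 - w2)

def P(x, y):
    return SimpleNamespace(x=x, y=y)

w = barycentric_weights(P(0.25, 0.25), P(0.0, 0.0), P(1.0, 0.0), P(0.0, 1.0))
print(w)  # (0.5, 0.25, 0.25); the weights always sum to 1
print(sum(wi * v for wi, v in zip(w, (10.0, 20.0, 30.0))))  # interpolated value: 17.5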
d20763c908a214eab9ced2eb49f7b097e6acfa0adc76e15148f2b63c729509ea
@classmethod def triangle_area(cls, p0, p1, p2): '\n Returns twice the area - just to avoid division - of the oriented triangle (p0, p1, p2).\n The area is positive if the triangle is oriented counterclockwise.\n ' return (((p1.x - p0.x) * (p2.y - p0.y)) - ((p1.y - p0.y) * (p2.x - p0.x)))
Returns twice the area - just to avoid division - of the oriented triangle (p0, p1, p2). The area is positive if the triangle is oriented counterclockwise.
src/math_utils.py
triangle_area
gcardozo123/cg_playground
1
python
@classmethod def triangle_area(cls, p0, p1, p2): '\n Returns twice the area - just to avoid division - of the oriented triangle (p0, p1, p2).\n The area is positive if the triangle is oriented counterclockwise.\n ' return (((p1.x - p0.x) * (p2.y - p0.y)) - ((p1.y - p0.y) * (p2.x - p0.x)))
@classmethod def triangle_area(cls, p0, p1, p2): '\n Returns twice the area - just to avoid division - of the oriented triangle (p0, p1, p2).\n The area is positive if the triangle is oriented counterclockwise.\n ' return (((p1.x - p0.x) * (p2.y - p0.y)) - ((p1.y - p0.y) * (p2.x - p0.x)))<|docstring|>Returns twice the area - just to avoid division - of the oriented triangle (p0, p1, p2). The area is positive if the triangle is oriented counterclockwise.<|endoftext|>
a690e77913a64f0776cbfe9b6b71bfd8b8d9d40235841956466ba2bb88f8c1a8
@classmethod def is_counterclockwise(cls, p0, p1, p2): '\n Returns true if the Vec2s p0, p1, p2 are in a counterclockwise order\n ' return (Vec3.triangle_area(p0, p1, p2) > 0)
Returns true if the Vec2s p0, p1, p2 are in a counterclockwise order
src/math_utils.py
is_counterclockwise
gcardozo123/cg_playground
1
python
@classmethod def is_counterclockwise(cls, p0, p1, p2): '\n \n ' return (Vec3.triangle_area(p0, p1, p2) > 0)
@classmethod def is_counterclockwise(cls, p0, p1, p2): '\n \n ' return (Vec3.triangle_area(p0, p1, p2) > 0)<|docstring|>Returns true if the Vec2s p0, p1, p2 are in a counterclockwise order<|endoftext|>
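The two records above pair naturally (note that the docstring mentions Vec2s while the code calls Vec3.triangle_area, so the host class is ambiguous); a tuple-based sketch of both the signed area and the orientation test:

def triangle_area2(p0, p1, p2):
    # Twice the signed area: the z-component of the 2D cross product.
    return (p1[0] - p0[0]) * (p2[1] - p0[1]) - (p1[1] - p0[1]) * (p2[0] - p0[0])

def is_ccw(p0, p1, p2):
    return triangle_area2(p0, p1, p2) > 0

a, b, c = (0.0, 0.0), (1.0, 0.0), (0.0, 1.0)
print(is_ccw(a, b, c))  # True: counterclockwise
print(is_ccw(a, c, b))  # False: the reversed winding is clockwise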
baf920a1674cc2e3f190f180a5d4568fca804354230959e45c16d3ee5f1de82d
@classmethod def _build_dependencies(cls, analyzer_names): 'Build a dependency list of analyzers.\n\n Args:\n analyzer_names (list): List of analyzer names.\n\n Returns:\n A list of sets of analyzer names. Each set represents\n one dependency group.\n\n Raises:\n KeyError: if class introduces circular dependencies.\n ' dependency_tree = [] dependencies = {} for analyzer_name in analyzer_names: analyzer_class = cls.get_analyzer(analyzer_name) dependencies[analyzer_name] = [x.lower() for x in analyzer_class.DEPENDENCIES] while dependencies: dependency_list = [] for value in iter(dependencies.values()): dependency_list.extend(value) dependency_set = (set(dependency_list) - set(dependencies.keys())) dependency_set.update((name for (name, dep) in iter(dependencies.items()) if (not dep))) if (not dependency_set): raise KeyError('Unable to build dependency tree, there is a circular dependency somewhere') dependency_tree.append(dependency_set) new_dependencies = {} for (analyzer_name, analyzer_dependencies) in dependencies.items(): if (not analyzer_dependencies): continue new_dependencies[analyzer_name] = list((set(analyzer_dependencies) - dependency_set)) dependencies = new_dependencies return dependency_tree
Build a dependency list of analyzers. Args: analyzer_names (list): List of analyzer names. Returns: A list of sets of analyzer names. Each set represents one dependency group. Raises: KeyError: if class introduces circular dependencies.
timesketch/lib/analyzers/manager.py
_build_dependencies
tomchop/timesketch
1810
python
@classmethod def _build_dependencies(cls, analyzer_names): 'Build a dependency list of analyzers.\n\n Args:\n analyzer_names (list): List of analyzer names.\n\n Returns:\n A list of sets of analyzer names. Each set represents\n one dependency group.\n\n Raises:\n KeyError: if class introduces circular dependencies.\n ' dependency_tree = [] dependencies = {} for analyzer_name in analyzer_names: analyzer_class = cls.get_analyzer(analyzer_name) dependencies[analyzer_name] = [x.lower() for x in analyzer_class.DEPENDENCIES] while dependencies: dependency_list = [] for value in iter(dependencies.values()): dependency_list.extend(value) dependency_set = (set(dependency_list) - set(dependencies.keys())) dependency_set.update((name for (name, dep) in iter(dependencies.items()) if (not dep))) if (not dependency_set): raise KeyError('Unable to build dependency tree, there is a circular dependency somewhere') dependency_tree.append(dependency_set) new_dependencies = {} for (analyzer_name, analyzer_dependencies) in dependencies.items(): if (not analyzer_dependencies): continue new_dependencies[analyzer_name] = list((set(analyzer_dependencies) - dependency_set)) dependencies = new_dependencies return dependency_tree
@classmethod def _build_dependencies(cls, analyzer_names): 'Build a dependency list of analyzers.\n\n Args:\n analyzer_names (list): List of analyzer names.\n\n Returns:\n A list of sets of analyzer names. Each set represents\n one dependency group.\n\n Raises:\n KeyError: if class introduces circular dependencies.\n ' dependency_tree = [] dependencies = {} for analyzer_name in analyzer_names: analyzer_class = cls.get_analyzer(analyzer_name) dependencies[analyzer_name] = [x.lower() for x in analyzer_class.DEPENDENCIES] while dependencies: dependency_list = [] for value in iter(dependencies.values()): dependency_list.extend(value) dependency_set = (set(dependency_list) - set(dependencies.keys())) dependency_set.update((name for (name, dep) in iter(dependencies.items()) if (not dep))) if (not dependency_set): raise KeyError('Unable to build dependency tree, there is a circular dependency somewhere') dependency_tree.append(dependency_set) new_dependencies = {} for (analyzer_name, analyzer_dependencies) in dependencies.items(): if (not analyzer_dependencies): continue new_dependencies[analyzer_name] = list((set(analyzer_dependencies) - dependency_set)) dependencies = new_dependencies return dependency_tree<|docstring|>Build a dependency list of analyzers. Args: analyzer_names (list): List of analyzer names. Returns: A list of sets of analyzer names. Each set represents one dependency group. Raises: KeyError: if class introduces circular dependencies.<|endoftext|>
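A simplified re-implementation of the same layered (Kahn-style) resolution on a plain dict, with the circular-dependency guard; the analyzer names are invented for the demo:

def build_layers(dependencies):
    # dependencies: name -> list of names it depends on.
    deps = {k: set(v) for k, v in dependencies.items()}
    layers = []
    while deps:
        # Ready = every key with no unresolved dependencies, plus names that
        # only ever appear as dependencies (external, like the original's set math).
        external = set().union(*deps.values()) - set(deps)
        ready = {k for k, v in deps.items() if not v} | external
        if not ready:
            raise KeyError('circular dependency detected')
        layers.append(ready)
        deps = {k: v - ready for k, v in deps.items() if v - ready or k not in ready}
    return layers

print(build_layers({'c': ['b'], 'b': ['a'], 'a': []}))  # [{'a'}, {'b'}, {'c'}]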
1c850c85424f349ef7955691fcd7de484986f3bdff5c293a382bad737d77ddcf
@classmethod def clear_registration(cls): 'Clears all analyzer registration.' cls._class_ordering = [] cls._class_registry = {}
Clears all analyzer registration.
timesketch/lib/analyzers/manager.py
clear_registration
tomchop/timesketch
1810
python
@classmethod def clear_registration(cls): cls._class_ordering = [] cls._class_registry = {}
@classmethod def clear_registration(cls): cls._class_ordering = [] cls._class_registry = {}<|docstring|>Clears all analyzer registration.<|endoftext|>
39632ef98512edcb1dc1303486552a1862523ebda5c31944755bd072a3ea629f
@classmethod def get_analyzers(cls, analyzer_names=None): 'Retrieves the registered analyzers.\n\n Args:\n analyzer_names (list): List of analyzer names.\n\n Yields:\n tuple: containing:\n str: the uniquely identifying name of the analyzer\n type: the analyzer class.\n ' if (not analyzer_names): analyzer_names = cls._class_registry.keys() completed_analyzers = set() for cluster in cls._build_dependencies(analyzer_names): for analyzer_name in cluster: if (analyzer_name in completed_analyzers): continue analyzer_class = cls.get_analyzer(analyzer_name) (yield (analyzer_name, analyzer_class)) completed_analyzers.add(analyzer_name)
Retrieves the registered analyzers. Args: analyzer_names (list): List of analyzer names. Yields: tuple: containing: str: the uniquely identifying name of the analyzer type: the analyzer class.
timesketch/lib/analyzers/manager.py
get_analyzers
tomchop/timesketch
1810
python
@classmethod def get_analyzers(cls, analyzer_names=None): 'Retrieves the registered analyzers.\n\n Args:\n analyzer_names (list): List of analyzer names.\n\n Yields:\n tuple: containing:\n str: the uniquely identifying name of the analyzer\n type: the analyzer class.\n ' if (not analyzer_names): analyzer_names = cls._class_registry.keys() completed_analyzers = set() for cluster in cls._build_dependencies(analyzer_names): for analyzer_name in cluster: if (analyzer_name in completed_analyzers): continue analyzer_class = cls.get_analyzer(analyzer_name) (yield (analyzer_name, analyzer_class)) completed_analyzers.add(analyzer_name)
@classmethod def get_analyzers(cls, analyzer_names=None): 'Retrieves the registered analyzers.\n\n Args:\n analyzer_names (list): List of analyzer names.\n\n Yields:\n tuple: containing:\n str: the uniquely identifying name of the analyzer\n type: the analyzer class.\n ' if (not analyzer_names): analyzer_names = cls._class_registry.keys() completed_analyzers = set() for cluster in cls._build_dependencies(analyzer_names): for analyzer_name in cluster: if (analyzer_name in completed_analyzers): continue analyzer_class = cls.get_analyzer(analyzer_name) (yield (analyzer_name, analyzer_class)) completed_analyzers.add(analyzer_name)<|docstring|>Retrieves the registered analyzers. Args: analyzer_names (list): List of analyzer names. Yields: tuple: containing: str: the uniquely identifying name of the analyzer type: the analyzer class.<|endoftext|>
862ae692c5106e3e05232a8939985c98df5dad946859bff200b2d7dcaa81fd33
@classmethod def get_analyzer(cls, analyzer_name): 'Retrieves a class object of a specific analyzer.\n\n Args:\n analyzer_name (str): name of the analyzer to retrieve.\n\n Returns:\n Analyzer class object.\n ' return cls._class_registry[analyzer_name.lower()]
Retrieves a class object of a specific analyzer. Args: analyzer_name (str): name of the analyzer to retrieve. Returns: Analyzer class object.
timesketch/lib/analyzers/manager.py
get_analyzer
tomchop/timesketch
1810
python
@classmethod def get_analyzer(cls, analyzer_name): 'Retrieves a class object of a specific analyzer.\n\n Args:\n analyzer_name (str): name of the analyzer to retrieve.\n\n Returns:\n Analyzer class object.\n ' return cls._class_registry[analyzer_name.lower()]
@classmethod def get_analyzer(cls, analyzer_name): 'Retrieves a class object of a specific analyzer.\n\n Args:\n analyzer_name (str): name of the analyzer to retrieve.\n\n Returns:\n Analyzer class object.\n ' return cls._class_registry[analyzer_name.lower()]<|docstring|>Retrieves a class object of a specific analyzer. Args: analyzer_name (str): name of the analyzer to retrieve. Returns: Analyzer class object.<|endoftext|>
47eb791fe2b72c00d0331d55dbb71052be1c632e4242fe8c1068d665d6ff471f
@classmethod def register_analyzer(cls, analyzer_class): 'Registers an analyzer class.\n\n The analyzer classes are identified by their lower case name.\n\n Args:\n analyzer_class (type): the analyzer class to register.\n\n Raises:\n KeyError: if class is already set for the corresponding name.\n ' analyzer_name = analyzer_class.NAME.lower() if (analyzer_name in cls._class_registry): raise KeyError('Class already set for name: {0:s}.'.format(analyzer_class.NAME)) cls._class_registry[analyzer_name] = analyzer_class
Registers an analyzer class. The analyzer classes are identified by their lower case name. Args: analyzer_class (type): the analyzer class to register. Raises: KeyError: if class is already set for the corresponding name.
timesketch/lib/analyzers/manager.py
register_analyzer
tomchop/timesketch
1810
python
@classmethod def register_analyzer(cls, analyzer_class): 'Registers an analyzer class.\n\n The analyzer classes are identified by their lower case name.\n\n Args:\n analyzer_class (type): the analyzer class to register.\n\n Raises:\n KeyError: if class is already set for the corresponding name.\n ' analyzer_name = analyzer_class.NAME.lower() if (analyzer_name in cls._class_registry): raise KeyError('Class already set for name: {0:s}.'.format(analyzer_class.NAME)) cls._class_registry[analyzer_name] = analyzer_class
@classmethod def register_analyzer(cls, analyzer_class): 'Registers an analyzer class.\n\n The analyzer classes are identified by their lower case name.\n\n Args:\n analyzer_class (type): the analyzer class to register.\n\n Raises:\n KeyError: if class is already set for the corresponding name.\n ' analyzer_name = analyzer_class.NAME.lower() if (analyzer_name in cls._class_registry): raise KeyError('Class already set for name: {0:s}.'.format(analyzer_class.NAME)) cls._class_registry[analyzer_name] = analyzer_class<|docstring|>Registers an analyzer class. The analyzer classes are identified by their lower case name. Args: analyzer_class (type): the analyzer class to register. Raises: KeyError: if class is already set for the corresponding name.<|endoftext|>
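A toy version of the registration pattern these manager methods implement; the BrowserAnalyzer class is hypothetical and exists only for the demo:

class AnalyzerManager:
    _registry = {}

    @classmethod
    def register_analyzer(cls, analyzer_class):
        name = analyzer_class.NAME.lower()
        if name in cls._registry:
            raise KeyError('Class already set for name: {0:s}.'.format(analyzer_class.NAME))
        cls._registry[name] = analyzer_class

    @classmethod
    def get_analyzer(cls, name):
        return cls._registry[name.lower()]

class BrowserAnalyzer:  # hypothetical analyzer, for the demo only
    NAME = 'Browser'
    DEPENDENCIES = []

AnalyzerManager.register_analyzer(BrowserAnalyzer)
print(AnalyzerManager.get_analyzer('browser') is BrowserAnalyzer)  # True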
0870b5e1c3e3bf2d1338759dd87e6ead1efa383a771cad1ace87fef13c9cf866
def __init__(self, **kwargs): '\n Parameters\n -----------------------------------------------------------------------\n xL_0 Local plane x-value of the origin point\n xL Local plane x-value of the destination point\n yL_0 Local plane y-value of the origin point\n yL Local plane y-value of the destination point\n zL_0 Local plane z-value of the origin point\n zL Local plane z-value of the destination point\n alphaL_0 Local plane forward azimuth from the origin point (in\n radians)\n zetaL_0 Local plane zenith angle at the origin point (in radians)\n sL Local plane distance between origin and desintation points\n -----------------------------------------------------------------------\n\n Calculated Values\n -----------------------------------------------------------------------\n xL Local plane x-value of the destination point (if not in\n parameters)\n yL Local plane y-value of the destination point (if not in\n parameters)\n zL Local plane z-value of the destination point (if not in\n parameters)\n alphaL_0 Local plane forward azimuth from the origin point (in\n radians; if not in parameters)\n alphaL Local plane back azimuth to the origin point (in radians;\n if not in parameters)\n zetaL_0 Local plane zenith angle at the origin point (in radians;\n if not in parameters)\n sL Local plane distance between origin and desintation points\n (if not in parameters)\n -----------------------------------------------------------------------\n\n Usage\n -----------------------------------------------------------------------\n Instantiation of a local_polar object will perform calculations based\n on provided parameters.\n\n Use of the local_polar.data() method provides parameters and calulcated\n values.\n \n Manual calculation is also available through object attribute\n assignment and use of the local_polar.build() method.\n -----------------------------------------------------------------------\n ' self.params = kwargs self.__build()
Parameters ----------------------------------------------------------------------- xL_0 Local plane x-value of the origin point xL Local plane x-value of the destination point yL_0 Local plane y-value of the origin point yL Local plane y-value of the destination point zL_0 Local plane z-value of the origin point zL Local plane z-value of the destination point alphaL_0 Local plane forward azimuth from the origin point (in radians) zetaL_0 Local plane zenith angle at the origin point (in radians) sL Local plane distance between origin and destination points ----------------------------------------------------------------------- Calculated Values ----------------------------------------------------------------------- xL Local plane x-value of the destination point (if not in parameters) yL Local plane y-value of the destination point (if not in parameters) zL Local plane z-value of the destination point (if not in parameters) alphaL_0 Local plane forward azimuth from the origin point (in radians; if not in parameters) alphaL Local plane back azimuth to the origin point (in radians; if not in parameters) zetaL_0 Local plane zenith angle at the origin point (in radians; if not in parameters) sL Local plane distance between origin and destination points (if not in parameters) ----------------------------------------------------------------------- Usage ----------------------------------------------------------------------- Instantiation of a local_polar object will perform calculations based on provided parameters. Use of the local_polar.data() method provides parameters and calculated values. Manual calculation is also available through object attribute assignment and use of the local_polar.build() method. -----------------------------------------------------------------------
local_polar.py
__init__
stannielson/c3m_3d
0
python
def __init__(self, **kwargs): '\n Parameters\n -----------------------------------------------------------------------\n xL_0 Local plane x-value of the origin point\n xL Local plane x-value of the destination point\n yL_0 Local plane y-value of the origin point\n yL Local plane y-value of the destination point\n zL_0 Local plane z-value of the origin point\n zL Local plane z-value of the destination point\n alphaL_0 Local plane forward azimuth from the origin point (in\n radians)\n zetaL_0 Local plane zenith angle at the origin point (in radians)\n sL Local plane distance between origin and desintation points\n -----------------------------------------------------------------------\n\n Calculated Values\n -----------------------------------------------------------------------\n xL Local plane x-value of the destination point (if not in\n parameters)\n yL Local plane y-value of the destination point (if not in\n parameters)\n zL Local plane z-value of the destination point (if not in\n parameters)\n alphaL_0 Local plane forward azimuth from the origin point (in\n radians; if not in parameters)\n alphaL Local plane back azimuth to the origin point (in radians;\n if not in parameters)\n zetaL_0 Local plane zenith angle at the origin point (in radians;\n if not in parameters)\n sL Local plane distance between origin and desintation points\n (if not in parameters)\n -----------------------------------------------------------------------\n\n Usage\n -----------------------------------------------------------------------\n Instantiation of a local_polar object will perform calculations based\n on provided parameters.\n\n Use of the local_polar.data() method provides parameters and calulcated\n values.\n \n Manual calculation is also available through object attribute\n assignment and use of the local_polar.build() method.\n -----------------------------------------------------------------------\n ' self.params = kwargs self.__build()
def __init__(self, **kwargs): '\n Parameters\n -----------------------------------------------------------------------\n xL_0 Local plane x-value of the origin point\n xL Local plane x-value of the destination point\n yL_0 Local plane y-value of the origin point\n yL Local plane y-value of the destination point\n zL_0 Local plane z-value of the origin point\n zL Local plane z-value of the destination point\n alphaL_0 Local plane forward azimuth from the origin point (in\n radians)\n zetaL_0 Local plane zenith angle at the origin point (in radians)\n sL Local plane distance between origin and desintation points\n -----------------------------------------------------------------------\n\n Calculated Values\n -----------------------------------------------------------------------\n xL Local plane x-value of the destination point (if not in\n parameters)\n yL Local plane y-value of the destination point (if not in\n parameters)\n zL Local plane z-value of the destination point (if not in\n parameters)\n alphaL_0 Local plane forward azimuth from the origin point (in\n radians; if not in parameters)\n alphaL Local plane back azimuth to the origin point (in radians;\n if not in parameters)\n zetaL_0 Local plane zenith angle at the origin point (in radians;\n if not in parameters)\n sL Local plane distance between origin and desintation points\n (if not in parameters)\n -----------------------------------------------------------------------\n\n Usage\n -----------------------------------------------------------------------\n Instantiation of a local_polar object will perform calculations based\n on provided parameters.\n\n Use of the local_polar.data() method provides parameters and calulcated\n values.\n \n Manual calculation is also available through object attribute\n assignment and use of the local_polar.build() method.\n -----------------------------------------------------------------------\n ' self.params = kwargs self.__build()<|docstring|>Parameters ----------------------------------------------------------------------- xL_0 Local plane x-value of the origin point xL Local plane x-value of the destination point yL_0 Local plane y-value of the origin point yL Local plane y-value of the destination point zL_0 Local plane z-value of the origin point zL Local plane z-value of the destination point alphaL_0 Local plane forward azimuth from the origin point (in radians) zetaL_0 Local plane zenith angle at the origin point (in radians) sL Local plane distance between origin and desintation points ----------------------------------------------------------------------- Calculated Values ----------------------------------------------------------------------- xL Local plane x-value of the destination point (if not in parameters) yL Local plane y-value of the destination point (if not in parameters) zL Local plane z-value of the destination point (if not in parameters) alphaL_0 Local plane forward azimuth from the origin point (in radians; if not in parameters) alphaL Local plane back azimuth to the origin point (in radians; if not in parameters) zetaL_0 Local plane zenith angle at the origin point (in radians; if not in parameters) sL Local plane distance between origin and desintation points (if not in parameters) ----------------------------------------------------------------------- Usage ----------------------------------------------------------------------- Instantiation of a local_polar object will perform calculations based on provided parameters. Use of the local_polar.data() method provides parameters and calulcated values. Manual calculation is also available through object attribute assignment and use of the local_polar.build() method. -----------------------------------------------------------------------<|endoftext|>
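A sketch of the forward polar-to-Cartesian step such a class might perform. The formulas below assume the usual surveying convention (azimuth measured from the +y axis, zenith measured from the +z axis); that convention is an assumption on my part, not something stated in the source:

from math import sin, cos, radians

def polar_to_local(xL_0, yL_0, zL_0, alphaL_0, zetaL_0, sL):
    # Assumed convention: azimuth from +y (north), zenith from +z (vertical).
    horiz = sL * sin(zetaL_0)           # horizontal component of the slope distance
    xL = xL_0 + horiz * sin(alphaL_0)
    yL = yL_0 + horiz * cos(alphaL_0)
    zL = zL_0 + sL * cos(zetaL_0)
    return xL, yL, zL

# A level shot (zenith 90 degrees) due east (azimuth 90 degrees), 10 units out.
print(polar_to_local(0.0, 0.0, 0.0, radians(90), radians(90), 10.0))  # ~(10.0, 0.0, 0.0)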
271d9e1c948a103d33769e5a4d78f0865253ab531170d304b2338382087ad3ef
def get_init_state(self, user_id): 'Get init state' state = {'length': InitValues.LENGTH, 'width': InitValues.WIDTH, 'id': user_id} message = dict() for entities in self.get_entities(): for entity in entities: if (entity.get_name() in message): message[entity.get_name()].append(entity.get_state()) else: message[entity.get_name()] = [entity.get_state()] if message: state.update(message) return json.dumps({'type': 'init', **state})
Get init state
game_board.py
get_init_state
Exocen/Bomberman
2
python
def get_init_state(self, user_id): state = {'length': InitValues.LENGTH, 'width': InitValues.WIDTH, 'id': user_id} message = dict() for entities in self.get_entities(): for entity in entities: if (entity.get_name() in message): message[entity.get_name()].append(entity.get_state()) else: message[entity.get_name()] = [entity.get_state()] if message: state.update(message) return json.dumps({'type': 'init', **state})
def get_init_state(self, user_id): state = {'length': InitValues.LENGTH, 'width': InitValues.WIDTH, 'id': user_id} message = dict() for entities in self.get_entities(): for entity in entities: if (entity.get_name() in message): message[entity.get_name()].append(entity.get_state()) else: message[entity.get_name()] = [entity.get_state()] if message: state.update(message) return json.dumps({'type': 'init', **state})<|docstring|>Get init state<|endoftext|>
23927ebcdf2e8f9b3f38883a3693500181a7b256b12ef5b052fa73bbbe923d8c
def create_message(self): 'Create message to notify users about map update\n\n Returns:\n dict : {{"type": "map"}{ updated : entities }} OR None\n ' message = dict() new_game_map = self.create_map() def _update_message(entity): if (entity.get_name() in message): message[entity.get_name()].append(entity.get_state()) else: message[entity.get_name()] = [entity.get_state()] for position in new_game_map: if (self.game_map[position] != new_game_map[position]): if new_game_map[position]: for e in new_game_map[position]: _update_message(e) else: _update_message(Entity(position, self.mailbox)) self.game_map = new_game_map if message: message.update({'type': 'map'}) return json.dumps(message) return None
Create message to notify users about map update Returns: dict : {{"type": "map"}{ updated : entities }} OR None
game_board.py
create_message
Exocen/Bomberman
2
python
def create_message(self): 'Create message to notify users about map update\n\n Returns:\n dict : {{"type": "map"}{ updated : entities }} OR None\n ' message = dict() new_game_map = self.create_map() def _update_message(entity): if (entity.get_name() in message): message[entity.get_name()].append(entity.get_state()) else: message[entity.get_name()] = [entity.get_state()] for position in new_game_map: if (self.game_map[position] != new_game_map[position]): if new_game_map[position]: for e in new_game_map[position]: _update_message(e) else: _update_message(Entity(position, self.mailbox)) self.game_map = new_game_map if message: message.update({'type': 'map'}) return json.dumps(message) return None
def create_message(self): 'Create message to notify users about map update\n\n Returns:\n dict : {{"type": "map"}{ updated : entities }} OR None\n ' message = dict() new_game_map = self.create_map() def _update_message(entity): if (entity.get_name() in message): message[entity.get_name()].append(entity.get_state()) else: message[entity.get_name()] = [entity.get_state()] for position in new_game_map: if (self.game_map[position] != new_game_map[position]): if new_game_map[position]: for e in new_game_map[position]: _update_message(e) else: _update_message(Entity(position, self.mailbox)) self.game_map = new_game_map if message: message.update({'type': 'map'}) return json.dumps(message) return None<|docstring|>Create message to notify users about map update Returns: dict : {{"type": "map"}{ updated : entities }} OR None<|endoftext|>
3e95d4ac900fa47f1b938770fc399c7b8733c7939607a63138cc86b14a148153
async def notify(self, message): '\n Notify all users about game updates\n ' try: (await asyncio.gather(*[user.ws.send(message) for user in self.users])) except websockets.exceptions.ConnectionClosed: logging.debug('Connection lost') except Exception: logging.exception('Connection lost for unexpected reasons :') raise
Notify all users about game updates
game_board.py
notify
Exocen/Bomberman
2
python
async def notify(self, message): '\n \n ' try: (await asyncio.gather(*[user.ws.send(message) for user in self.users])) except websockets.exceptions.ConnectionClosed: logging.debug('Connection lost') except Exception: logging.exception('Connection lost for unexpected reasons :') raise
async def notify(self, message): '\n \n ' try: (await asyncio.gather(*[user.ws.send(message) for user in self.users])) except websockets.exceptions.ConnectionClosed: logging.debug('Connection lost') except Exception: logging.exception('Connection lost for unexpected reasons :') raise<|docstring|>Notify all users about game updates<|endoftext|>
65d5533a6041a1555b9ea534993d08ce93201173bc660bfa02c62adaf8b25a19
def is_position_free(self, position): "Check if position doesn't contain a blockable entity" for entities in self.get_entities(): if (entities and next(iter(entities)).BLOCKABLE): for entity in entities: if (entity.get_position() == position): return False return True
Check if position doesn't contain a blockable entity
game_board.py
is_position_free
Exocen/Bomberman
2
python
def is_position_free(self, position): for entities in self.get_entities(): if (entities and next(iter(entities)).BLOCKABLE): for entity in entities: if (entity.get_position() == position): return False return True
def is_position_free(self, position): for entities in self.get_entities(): if (entities and next(iter(entities)).BLOCKABLE): for entity in entities: if (entity.get_position() == position): return False return True<|docstring|>Check if position doesn't contain a blockable entity<|endoftext|>
b3f799d80349a3b50650e7a9841a0271018cb143cf96ab4c7dd100940e4ccfcc
def random_spawn(self, nb=1): 'Return nb available positions\n\n Args:\n nb (int, optional): nb positions. Defaults to 1.\n\n Returns:\n [Position]: positions list\n ' x_choices = range(InitValues.LENGTH) y_choices = range(InitValues.WIDTH) pos_choices = set() for x in x_choices: for y in y_choices: pos_choices.add(Position(x, y)) blocked_positions = set() for entities in self.get_entities(): for entity in entities: blocked_positions.add(entity.get_position()) new_pos = (pos_choices - blocked_positions) return random.choices(list(new_pos), k=nb)
Return nb available positions Args: nb (int, optional): nb positions. Defaults to 1. Returns: [Position]: positions list
game_board.py
random_spawn
Exocen/Bomberman
2
python
def random_spawn(self, nb=1): 'Return nb available positions\n\n Args:\n nb (int, optional): nb positions. Defaults to 1.\n\n Returns:\n [Position]: positions list\n ' x_choices = range(InitValues.LENGTH) y_choices = range(InitValues.WIDTH) pos_choices = set() for x in x_choices: for y in y_choices: pos_choices.add(Position(x, y)) blocked_positions = set() for entities in self.get_entities(): for entity in entities: blocked_positions.add(entity.get_position()) new_pos = (pos_choices - blocked_positions) return random.choices(list(new_pos), k=nb)
def random_spawn(self, nb=1): 'Return nb available positions\n\n Args:\n nb (int, optional): nb positions. Defaults to 1.\n\n Returns:\n [Position]: positions list\n ' x_choices = range(InitValues.LENGTH) y_choices = range(InitValues.WIDTH) pos_choices = set() for x in x_choices: for y in y_choices: pos_choices.add(Position(x, y)) blocked_positions = set() for entities in self.get_entities(): for entity in entities: blocked_positions.add(entity.get_position()) new_pos = (pos_choices - blocked_positions) return random.choices(list(new_pos), k=nb)<|docstring|>Return nb available positions Args: nb (int, optional): nb positions. Defaults to 1. Returns: [Position]: positions list<|endoftext|>
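A standalone sketch of the free-position sampling. One design note: random.choices samples with replacement, so the original can return the same free cell more than once; random.sample(free, nb) would guarantee distinct positions:

import random

def random_free_positions(length, width, blocked, nb=1):
    free = [(x, y) for x in range(length) for y in range(width) if (x, y) not in blocked]
    # Samples WITH replacement, mirroring the original's random.choices call.
    return random.choices(free, k=nb)

random.seed(0)
print(random_free_positions(3, 3, blocked={(0, 0)}, nb=2))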
4805944dd867e42c3e50dc16b9ed711b36059876894087aca6ec9ebf8d8371c1
async def game_loop(self): '\n Update and clean all entities and notify users\n ' while True: (await asyncio.sleep(InitValues.TICKS)) if self.users: self.mailbox.drop_key(EntitiesNames.BOARD) message_queue = self.mailbox.get(EntitiesNames.BOARD) if message_queue: tasks = [] if (Messages.BOOM in message_queue): boom_pos_list = message_queue.pop(Messages.BOOM) tasks += [self.boom(pos) for pos in boom_pos_list] if (Messages.MOVE in message_queue): move_mess = message_queue.pop(Messages.MOVE) tasks += [self.move_user(m[0], m[1]) for m in move_mess] if (Messages.BOMB in message_queue): bomb_mess = message_queue.pop(Messages.BOMB) tasks += [self.put_bomb(u) for u in bomb_mess] if tasks: (await asyncio.gather(*tasks)) self.mailbox.drop() entities_task_update = [] for entities in self.get_entities(): for entity in entities: entities_task_update.append(entity.update()) (await asyncio.gather(*entities_task_update)) message = self.create_message() if message: (await self.notify(message)) (await self.clean_entities()) (await self.send_logs()) else: (await asyncio.sleep(1))
Update and clean all entities and notify users
game_board.py
game_loop
Exocen/Bomberman
2
python
async def game_loop(self): '\n \n ' while True: (await asyncio.sleep(InitValues.TICKS)) if self.users: self.mailbox.drop_key(EntitiesNames.BOARD) message_queue = self.mailbox.get(EntitiesNames.BOARD) if message_queue: tasks = [] if (Messages.BOOM in message_queue): boom_pos_list = message_queue.pop(Messages.BOOM) tasks += [self.boom(pos) for pos in boom_pos_list] if (Messages.MOVE in message_queue): move_mess = message_queue.pop(Messages.MOVE) tasks += [self.move_user(m[0], m[1]) for m in move_mess] if (Messages.BOMB in message_queue): bomb_mess = message_queue.pop(Messages.BOMB) tasks += [self.put_bomb(u) for u in bomb_mess] if tasks: (await asyncio.gather(*tasks)) self.mailbox.drop() entities_task_update = [] for entities in self.get_entities(): for entity in entities: entities_task_update.append(entity.update()) (await asyncio.gather(*entities_task_update)) message = self.create_message() if message: (await self.notify(message)) (await self.clean_entities()) (await self.send_logs()) else: (await asyncio.sleep(1))
async def game_loop(self): '\n \n ' while True: (await asyncio.sleep(InitValues.TICKS)) if self.users: self.mailbox.drop_key(EntitiesNames.BOARD) message_queue = self.mailbox.get(EntitiesNames.BOARD) if message_queue: tasks = [] if (Messages.BOOM in message_queue): boom_pos_list = message_queue.pop(Messages.BOOM) tasks += [self.boom(pos) for pos in boom_pos_list] if (Messages.MOVE in message_queue): move_mess = message_queue.pop(Messages.MOVE) tasks += [self.move_user(m[0], m[1]) for m in move_mess] if (Messages.BOMB in message_queue): bomb_mess = message_queue.pop(Messages.BOMB) tasks += [self.put_bomb(u) for u in bomb_mess] if tasks: (await asyncio.gather(*tasks)) self.mailbox.drop() entities_task_update = [] for entities in self.get_entities(): for entity in entities: entities_task_update.append(entity.update()) (await asyncio.gather(*entities_task_update)) message = self.create_message() if message: (await self.notify(message)) (await self.clean_entities()) (await self.send_logs()) else: (await asyncio.sleep(1))<|docstring|>Update and clean all entities and notify users<|endoftext|>
8ae1316097767ebc9cf746808d54d06c86e452db3aba4c1998aab45ea0249114
@staticmethod async def clean_entity_list(entity_set, lock): 'Remove dead entities\n\n Args:\n entity_set (set): entity list to clean\n lock (async lock): entity lock\n ' async with lock: entity_set -= {e for e in entity_set if e.is_dead()}
Remove dead entities Args: entity_set (set): entity list to clean lock (async lock): entity lock
game_board.py
clean_entity_list
Exocen/Bomberman
2
python
@staticmethod async def clean_entity_list(entity_set, lock): 'Remove dead entities\n\n Args:\n entity_set (set): entity list to clean\n lock (async lock): entity lock\n ' async with lock: entity_set -= {e for e in entity_set if e.is_dead()}
@staticmethod async def clean_entity_list(entity_set, lock): 'Remove dead entities\n\n Args:\n entity_set (set): entity list to clean\n lock (async lock): entity lock\n ' async with lock: entity_set -= {e for e in entity_set if e.is_dead()}<|docstring|>Remove dead entities Args: entity_set (set): entity list to clean lock (async lock): entity lock<|endoftext|>
ab2bb79c243f7f82afe36b861190cc2258a504abe620d9a8005a65aa15e2fdb4
def _identity(self): 'Identity element.' raise NotImplementedError('Not yet implemented')
Identity element.
sympy/microlie/se3.py
_identity
evbernardes/sympy
0
python
def _identity(self): raise NotImplementedError('Not yet implemented')
def _identity(self): raise NotImplementedError('Not yet implemented')<|docstring|>Identity element.<|endoftext|>
df0a41e109ec080056945bba32765224d01311cdef058f151b182fdcbed19458
def _inverse(self): 'Inverse element.' return SE3(((- self.rot.T) * self.pos), self.rot.T)
Inverse element.
sympy/microlie/se3.py
_inverse
evbernardes/sympy
0
python
def _inverse(self): ' ' return SE3(((- self.rot.T) * self.pos), self.rot.T)
def _inverse(self): ' ' return SE3(((- self.rot.T) * self.pos), self.rot.T)<|docstring|>Inverse element.<|endoftext|>
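For reference, the inverse returned above is the standard rigid-body identity; written in LaTeX (textbook SE(3) algebra, not taken from this repository) and checked against the composition rule of _compose below:

(p, R)^{-1} = (-R^\top p, \; R^\top), \qquad (p_1, R_1) \cdot (p_2, R_2) = (p_1 + R_1 p_2, \; R_1 R_2)

(p, R) \cdot (-R^\top p, R^\top) = (p + R(-R^\top p), \; R R^\top) = (0, \; I)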
6123ef61b4eb4aa00c01478878855d6355d330db6f59c16500819151d1aa975f
def _compose(self, Elem): 'With another Group element.' if (not isinstance(Elem, SE3)): raise NotImplementedError R1 = self.rot R2 = Elem.rot p1 = self.pos p2 = Elem.pos return SE3((p1 + (R1 * p2)), (R1 * R2))
With another Group element.
sympy/microlie/se3.py
_compose
evbernardes/sympy
0
python
def _compose(self, Elem): if (not isinstance(Elem, SE3)): raise NotImplementedError R1 = self.rot R2 = Elem.rot p1 = self.pos p2 = Elem.pos return SE3((p1 + (R1 * p2)), (R1 * R2))
def _compose(self, Elem): if (not isinstance(Elem, SE3)): raise NotImplementedError R1 = self.rot R2 = Elem.rot p1 = self.pos p2 = Elem.pos return SE3((p1 + (R1 * p2)), (R1 * R2))<|docstring|>With another Group element.<|endoftext|>
d80cc29b9665022e4d71059d15b9aecab3532031b494a60db19e64677934e994
def _act(self, v): 'Act on a vector.' if (v.shape == (1, 3)): return (self.pos + (self.rot * v.T)) elif (v.shape == (3, 1)): return (self.pos + (self.rot * v)) else: raise ValueError('Vector must have length == 3')
Act on a vector.
sympy/microlie/se3.py
_act
evbernardes/sympy
0
python
def _act(self, v): if (v.shape == (1, 3)): return (self.pos + (self.rot * v.T)) elif (v.shape == (3, 1)): return (self.pos + (self.rot * v)) else: raise ValueError('Vector must have length == 3')
def _act(self, v): if (v.shape == (1, 3)): return (self.pos + (self.rot * v.T)) elif (v.shape == (3, 1)): return (self.pos + (self.rot * v)) else: raise ValueError('Vector must have length == 3')<|docstring|>Act on a vector.<|endoftext|>
1b6a690215b91d79e2fce8e0f90075b316bb9a04ee1a639a15ec5ac5b91cffac
def _log(self): 'Lift to tangent space.' ang = vee(ln(self.rot)) lin = (Vinv(ang) * self.pos) return TgSE3(lin, ang)
Lift to tangent space.
sympy/microlie/se3.py
_log
evbernardes/sympy
0
python
def _log(self): ang = vee(ln(self.rot)) lin = (Vinv(ang) * self.pos) return TgSE3(lin, ang)
def _log(self): ang = vee(ln(self.rot)) lin = (Vinv(ang) * self.pos) return TgSE3(lin, ang)<|docstring|>Lift to tangent space.<|endoftext|>
752aeb21b3dca81a9a55d3246f70a3bb1247b81bea8a6a3fd1f4d93bc7ee9eb2
def _hat(self): 'Lie Algebra isomorphism.' if self.is_symbolic: return BlockMatrix([[self.ang.hat(), self.lin], [ZeroMatrix(1, 3), ZeroMatrix(1, 1)]]) else: return Matrix([[self.ang.hat(), self.lin], [0, 0, 0, 0]])
Lie Algebra isomorphism.
sympy/microlie/se3.py
_hat
evbernardes/sympy
0
python
def _hat(self): if self.is_symbolic: return BlockMatrix([[self.ang.hat(), self.lin], [ZeroMatrix(1, 3), ZeroMatrix(1, 1)]]) else: return Matrix([[self.ang.hat(), self.lin], [0, 0, 0, 0]])
def _hat(self): if self.is_symbolic: return BlockMatrix([[self.ang.hat(), self.lin], [ZeroMatrix(1, 3), ZeroMatrix(1, 1)]]) else: return Matrix([[self.ang.hat(), self.lin], [0, 0, 0, 0]])<|docstring|>Lie Algebra isomorphism.<|endoftext|>
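_hat above packs a twist (\rho, \theta) into the usual 4x4 homogeneous form of \mathfrak{se}(3); for reference (standard convention, matching the BlockMatrix layout in the code):

\hat\xi = \begin{pmatrix} \hat\theta & \rho \\ 0_{1\times 3} & 0 \end{pmatrix} \in \mathfrak{se}(3)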
eac315601c6d43bf291f8a95c61df13ce0f426aaa4dbed8ae5859c2dd39c6d1b
def _exp(self): 'Retract to group element.' theta = self.ang pos = (V(theta) * self.lin) rot = exp(theta.hat()) return SE3(pos, rot)
Retract to group element.
sympy/microlie/se3.py
_exp
evbernardes/sympy
0
python
def _exp(self): theta = self.ang pos = (V(theta) * self.lin) rot = exp(theta.hat()) return SE3(pos, rot)
def _exp(self): theta = self.ang pos = (V(theta) * self.lin) rot = exp(theta.hat()) return SE3(pos, rot)<|docstring|>Retract to group element.<|endoftext|>
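_log and _exp above are mutual inverses away from the rotation singularities: _log yields \theta = \mathrm{vee}(\ln R) and \rho = V(\theta)^{-1} p, and _exp rebuilds (V(\theta)\rho, \; e^{\hat\theta}). Assuming V denotes the usual SO(3) left Jacobian (an assumption; only its call sites are visible in this file), it is

V(\theta) = I + \frac{1 - \cos\|\theta\|}{\|\theta\|^{2}}\,\hat\theta + \frac{\|\theta\| - \sin\|\theta\|}{\|\theta\|^{3}}\,\hat\theta^{2}

which is invertible for \|\theta\| < 2\pi, so \exp(\log(X)) = X on that neighbourhood.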
3d7f74b2e025059bf531dd37950c7f1d1db5db9ca70a05edf693284cc67cce9d
def test_init(): 'Test Initialization method.' arps = AdjustingRPSAgent() assert (arps.weight == 1) assert (arps.original_strategy == arps.strategy) assert (arps.counts == [(1 / 3), (1 / 3), (1 / 3)]) arps_int = AdjustingRPSAgent(weight=6) assert (arps_int.weight == 6) assert (arps_int.counts == [2, 2, 2]) arps_float = AdjustingRPSAgent(weight=2) assert (arps_float.weight == 2) assert (arps_float.counts == [(2 / 3), (2 / 3), (2 / 3)]) arps_nstd = AdjustingRPSAgent(strategy_in='rock') assert (arps_nstd.weight == 1) assert (arps_nstd.counts == [1, 0, 0]) arps_nstd_weight = AdjustingRPSAgent(strategy_in='rock', weight=5) assert (arps_nstd_weight.counts == [5, 0, 0])
Test Initialization method.
tests/agent_tests/adjusting_rps_agent_test.py
test_init
aturfah/cmplxsys530-final
4
python
def test_init(): arps = AdjustingRPSAgent() assert (arps.weight == 1) assert (arps.original_strategy == arps.strategy) assert (arps.counts == [(1 / 3), (1 / 3), (1 / 3)]) arps_int = AdjustingRPSAgent(weight=6) assert (arps_int.weight == 6) assert (arps_int.counts == [2, 2, 2]) arps_float = AdjustingRPSAgent(weight=2) assert (arps_float.weight == 2) assert (arps_float.counts == [(2 / 3), (2 / 3), (2 / 3)]) arps_nstd = AdjustingRPSAgent(strategy_in='rock') assert (arps_nstd.weight == 1) assert (arps_nstd.counts == [1, 0, 0]) arps_nstd_weight = AdjustingRPSAgent(strategy_in='rock', weight=5) assert (arps_nstd_weight.counts == [5, 0, 0])
def test_init(): arps = AdjustingRPSAgent() assert (arps.weight == 1) assert (arps.original_strategy == arps.strategy) assert (arps.counts == [(1 / 3), (1 / 3), (1 / 3)]) arps_int = AdjustingRPSAgent(weight=6) assert (arps_int.weight == 6) assert (arps_int.counts == [2, 2, 2]) arps_float = AdjustingRPSAgent(weight=2) assert (arps_float.weight == 2) assert (arps_float.counts == [(2 / 3), (2 / 3), (2 / 3)]) arps_nstd = AdjustingRPSAgent(strategy_in='rock') assert (arps_nstd.weight == 1) assert (arps_nstd.counts == [1, 0, 0]) arps_nstd_weight = AdjustingRPSAgent(strategy_in='rock', weight=5) assert (arps_nstd_weight.counts == [5, 0, 0])<|docstring|>Test Initialization method.<|endoftext|>
ada993843cb136dbda7681b6069f41e2df33a1b59b51c1bef51d52525482a6bb
def test_update_info(): 'Test that update_info works properly.' arps = AdjustingRPSAgent() assert (arps.strategy == [(1 / 3), (1 / 3), (1 / 3)]) arps.update_info(opp_move=0) assert (arps.counts == [(1 / 3), (4 / 3), (1 / 3)]) assert (arps.original_strategy == [(1 / 3), (1 / 3), (1 / 3)]) assert ([round(prob, 4) for prob in arps.strategy] == [0.1667, 0.6667, 0.1667]) arps_weight = AdjustingRPSAgent(weight=9) arps_weight.update_info(opp_move=1) assert (arps_weight.counts == [3, 3, 4]) assert (arps_weight.strategy == [0.3, 0.3, 0.4])
Test that update_info works properly.
tests/agent_tests/adjusting_rps_agent_test.py
test_update_info
aturfah/cmplxsys530-final
4
python
def test_update_info(): arps = AdjustingRPSAgent() assert (arps.strategy == [(1 / 3), (1 / 3), (1 / 3)]) arps.update_info(opp_move=0) assert (arps.counts == [(1 / 3), (4 / 3), (1 / 3)]) assert (arps.original_strategy == [(1 / 3), (1 / 3), (1 / 3)]) assert ([round(prob, 4) for prob in arps.strategy] == [0.1667, 0.6667, 0.1667]) arps_weight = AdjustingRPSAgent(weight=9) arps_weight.update_info(opp_move=1) assert (arps_weight.counts == [3, 3, 4]) assert (arps_weight.strategy == [0.3, 0.3, 0.4])
def test_update_info(): arps = AdjustingRPSAgent() assert (arps.strategy == [(1 / 3), (1 / 3), (1 / 3)]) arps.update_info(opp_move=0) assert (arps.counts == [(1 / 3), (4 / 3), (1 / 3)]) assert (arps.original_strategy == [(1 / 3), (1 / 3), (1 / 3)]) assert ([round(prob, 4) for prob in arps.strategy] == [0.1667, 0.6667, 0.1667]) arps_weight = AdjustingRPSAgent(weight=9) arps_weight.update_info(opp_move=1) assert (arps_weight.counts == [3, 3, 4]) assert (arps_weight.strategy == [0.3, 0.3, 0.4])<|docstring|>Test that update_info works properly.<|endoftext|>
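The weighted case above is easy to verify by hand, and it pins down the update rule the tests imply: the count that receives the +1 belongs to the move that beats the observed one. A hedged sketch of that arithmetic (the real AdjustingRPSAgent may differ internally):

weight = 9
counts = [weight / 3] * 3           # [3.0, 3.0, 3.0]
opp_move = 1
counts[(opp_move + 1) % 3] += 1     # the counter-move gets the increment -> [3.0, 3.0, 4.0]
strategy = [c / sum(counts) for c in counts]
assert strategy == [0.3, 0.3, 0.4]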
4bcdbc8e7ca4ef6e2b28a5c59b543cc62f881266e1141c26955775405afd5c76
def test_reset_state(): 'Test that reset_state resets state.' arps = AdjustingRPSAgent() arps.update_info(opp_move=0) arps.reset_state() assert (arps.strategy == [(1 / 3), (1 / 3), (1 / 3)]) assert (arps.counts == [(1 / 3), (1 / 3), (1 / 3)]) arps_weight = AdjustingRPSAgent(weight=9) arps_weight.update_info(opp_move=0) arps_weight.reset_state() assert (arps_weight.strategy == [(1 / 3), (1 / 3), (1 / 3)]) assert (arps_weight.counts == [3, 3, 3])
Test that reset_state resets state.
tests/agent_tests/adjusting_rps_agent_test.py
test_reset_state
aturfah/cmplxsys530-final
4
python
def test_reset_state(): arps = AdjustingRPSAgent() arps.update_info(opp_move=0) arps.reset_state() assert (arps.strategy == [(1 / 3), (1 / 3), (1 / 3)]) assert (arps.counts == [(1 / 3), (1 / 3), (1 / 3)]) arps_weight = AdjustingRPSAgent(weight=9) arps_weight.update_info(opp_move=0) arps_weight.reset_state() assert (arps_weight.strategy == [(1 / 3), (1 / 3), (1 / 3)]) assert (arps_weight.counts == [3, 3, 3])
def test_reset_state(): arps = AdjustingRPSAgent() arps.update_info(opp_move=0) arps.reset_state() assert (arps.strategy == [(1 / 3), (1 / 3), (1 / 3)]) assert (arps.counts == [(1 / 3), (1 / 3), (1 / 3)]) arps_weight = AdjustingRPSAgent(weight=9) arps_weight.update_info(opp_move=0) arps_weight.reset_state() assert (arps_weight.strategy == [(1 / 3), (1 / 3), (1 / 3)]) assert (arps_weight.counts == [3, 3, 3])<|docstring|>Test that reset_state resets state.<|endoftext|>
29610dce9921ba1ed72443e4bca880273cbb4ef5dcfb808fbe962015679d4557
@bp.route('/send', methods=['POST']) def send(): 'Send message to telegram bot.\n\n :returns: bot response\n ' data = request.get_json() try: message = data['message'] chat = data['chat'] except KeyError: abort(404) (response, status) = telegram_bot.send(chat, message) return (response, status)
Send message to telegram bot. :returns: bot response
app/telegram.py
send
plyq/plyq-binance-telegram-connector
0
python
@bp.route('/send', methods=['POST']) def send(): 'Send message to telegram bot.\n\n :returns: bot response\n ' data = request.get_json() try: message = data['message'] chat = data['chat'] except KeyError: abort(404) (response, status) = telegram_bot.send(chat, message) return (response, status)
@bp.route('/send', methods=['POST']) def send(): 'Send message to telegram bot.\n\n :returns: bot response\n ' data = request.get_json() try: message = data['message'] chat = data['chat'] except KeyError: abort(404) (response, status) = telegram_bot.send(chat, message) return (response, status)<|docstring|>Send message to telegram bot. :returns: bot response<|endoftext|>
890568673183e7c244ac7d6f959efe1e488a1ed426077b97670b1a8a6dcac96b
@bp.route('/check', methods=['GET']) def check(): 'Get telegram bot info.\n\n :returns: bot response\n ' (response, status) = telegram_bot.check() return (response, status)
Get telegram bot info. :returns: bot response
app/telegram.py
check
plyq/plyq-binance-telegram-connector
0
python
@bp.route('/check', methods=['GET']) def check(): 'Get telegram bot info.\n\n :returns: bot response\n ' (response, status) = telegram_bot.check() return (response, status)
@bp.route('/check', methods=['GET']) def check(): 'Get telegram bot info.\n\n :returns: bot response\n ' (response, status) = telegram_bot.check() return (response, status)<|docstring|>Get telegram bot info. :returns: bot response<|endoftext|>
144a2069cebab3f25106121423a9d275d4737eadff0c02a4f4c15a8a1d42b953
def is_process_running(pid): '\n Check if the provided pid is running\n ' try: os.kill(pid, 0) except OSError: return False else: return True
Check if the provided pid is running
src/catcher/utils.py
is_process_running
gavin-anders/callback-catcher
2
python
def is_process_running(pid): '\n \n ' try: os.kill(pid, 0) except OSError: return False else: return True
def is_process_running(pid): '\n \n ' try: os.kill(pid, 0) except OSError: return False else: return True<|docstring|>Check if the provided pid is running<|endoftext|>
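The os.kill(pid, 0) idiom above relies on signal number 0 performing error checking only; no signal is delivered. One caveat the broad except OSError glosses over: on POSIX a live process owned by another user raises PermissionError (EPERM), which the function then misreports as not running. A hedged refinement (not part of this codebase):

import os

def is_process_running(pid):
    # Signal 0 checks existence/permissions without delivering a signal.
    try:
        os.kill(pid, 0)
    except PermissionError:
        return True   # process exists but belongs to another user (EPERM)
    except OSError:
        return False  # typically ESRCH: no such process
    return True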
ef947fe51e03e935db8ac4ec1e04f4332f24f031848f4c87d8f4685ad0da2c05
@API.public def factory(f: Optional[T]=None, *, singleton: Optional[bool]=None, scope: Optional[Scope]=Scope.sentinel(), wiring: Optional[Wiring]=Wiring()) -> Union[(Callable[([T], T)], T)]: "\n .. deprecated:: 1.4\n Use :py:func:`.lazy` instead for external classes or :py:func:`.injectable` with\n the :code:`factory_method` argument for classes you own.\n\n .. admonition:: MIGRATION\n\n For classes you own you should use :py:func:`.injectable`:\n\n .. doctest:: factory_migration\n\n >>> from antidote import injectable, world\n >>> @injectable(factory_method='build')\n ... class MyDatabase:\n ... @classmethod\n ... def build(cls) -> 'MyDatabase':\n ... return MyDatabase()\n >>> world.get(MyDatabase)\n <MyDatabase object at ...>\n\n For the other cases you can use :py:func:`.lazy`:\n\n .. doctest:: factory_migration\n\n >>> from antidote import lazy, inject\n >>> class External:\n ... pass\n >>> @lazy\n ... def my_external() -> External:\n ... return External()\n >>> @inject\n ... def build(ext: External = my_external()) -> External:\n ... return ext\n >>> build()\n <External object at ...>\n >>> # type hint only necessary for correct typing.\n ... build() is world.get[External](my_external())\n True\n\n In both cases you can inject the build function:\n\n .. doctest:: factory_migration\n\n >>> from antidote import injectable\n >>> @injectable\n ... class MyFactory:\n ... def create_external(self) -> External:\n ... return External()\n >>> @lazy\n ... def current_external(factory: MyFactory = inject.me()) -> External:\n ... return factory.create_external()\n >>> @injectable(factory_method='build')\n ... class MyExternal:\n ... @classmethod\n ... def build(cls, external: External = current_external()) -> 'MyExternal':\n ... return MyExternal()\n >>> world.get(MyExternal)\n <MyExternal object at ...>\n\n Registers a factory which provides a single dependency, defined through the return\n type annotation.\n\n .. doctest:: factory\n\n >>> from antidote import factory\n >>> class Database:\n ... pass\n >>> @factory\n ... def load_db() -> Database:\n ... return Database()\n\n Now to retrieve the dependency:\n\n .. doctest:: factory\n\n >>> from antidote import inject, world\n >>> @inject\n ... def f(db: Database = inject.me(source=load_db)) -> Database:\n ... return db\n >>> f() is world.get(Database, source=load_db)\n True\n\n :py:func:`.inject` supports two other alternatives:\n\n .. doctest:: factory\n\n >>> from typing import Annotated\n >>> from antidote import From, Get\n >>> @inject\n ... def f(db: Annotated[Database, From(load_db)]) -> Database:\n ... return db\n >>> @inject({'db': Get(Database, source=load_db)})\n ... def f(db: Database) -> Database:\n ... return db\n\n It's also possible to have a stateful factory using a class. The class will be instantiated\n only once.\n\n .. doctest:: factory\n\n >>> @factory\n ... class DatabaseFactory:\n ... def __call__(self) -> Database:\n ... return Database()\n\n\n\n Args:\n f: Factory function or class which builds the dependency.\n singleton: Whether the returned dependency is a singleton or not. If so,\n the factory will be called at most once and the result re-used. Mutually\n exclusive with :code:`scope`. Defaults to :py:obj:`True`.\n scope: Scope of the returned dependency. Mutually exclusive with\n :code:`singleton`. The scope defines if and how long the returned dependency\n will be cached. See :py:class:`~.core.container.Scope`.\n Defaults to\n :py:meth:`~.core.container.Scope.singleton`.\n wiring: :py:class:`.Wiring` to be used on the class. By default will apply\n a simple :py:func:`.inject` on all methods, so only annotated type hints are\n taken into account. Can be deactivated by specifying :py:obj:`None`. If the\n factory is a function, it'll only be injected if not :py:obj:`None`.\n\n Returns:\n The factory or the decorator.\n\n " scope = validated_scope(scope, singleton, default=Scope.singleton()) if ((wiring is not None) and (not isinstance(wiring, Wiring))): raise TypeError(f'wiring must be a Wiring or None, not a {type(wiring)!r}') @inject def register_factory(func: T, factory_provider: FactoryProvider=inject.me()) -> T: from .service import service if (callable(func) and inspect.isfunction(func)): output: object = get_type_hints(func).get('return') if (output is None): raise ValueError('A return type hint is necessary. It is used as the dependency.') if (not isinstance(output, type)): raise TypeError(f'The return type hint is expected to be a class, not {type(output)}.') if (wiring is not None): try: func = cast(T, inject(func, dependencies=wiring.dependencies)) except DoubleInjectionError: pass func = cast(T, FactoryWrapper(wrapped=cast(Callable[(..., object)], func), output=output)) factory_provider.register(factory=cast(Callable[(..., object)], func), scope=scope, output=output) elif isinstance(func, type): output = get_type_hints(func.__call__).get('return') if (output is None): raise ValueError('A return type hint is necessary. It is used as the dependency.') if (not isinstance(output, type)): raise TypeError(f'The return type hint is expected to be a class, not {type(output)}.') service(func, singleton=True, wiring=wiring) factory_provider.register(output=output, scope=scope, factory_dependency=func) else: raise TypeError(f'Factory must be either a class or a function, not a {type(func)}') return func return ((f and register_factory(f)) or register_factory)
.. deprecated:: 1.4 Use :py:func:`.lazy` instead for external classes or :py:func:`.injectable` with the :code:`factory_method` argument for classes you own. .. admonition:: MIGRATION For classes you own you should use :py:func:`.injectable`: .. doctest:: factory_migration >>> from antidote import injectable, world >>> @injectable(factory_method='build') ... class MyDatabase: ... @classmethod ... def build(cls) -> 'MyDatabase': ... return MyDatabase() >>> world.get(MyDatabase) <MyDatabase object at ...> For the other cases you can use :py:func:`.lazy`: .. doctest:: factory_migration >>> from antidote import lazy, inject >>> class External: ... pass >>> @lazy ... def my_external() -> External: ... return External() >>> @inject ... def build(ext: External = my_external()) -> External: ... return ext >>> build() <External object at ...> >>> # type hint only necessary for correct typing. ... build() is world.get[External](my_external()) True In both cases you can inject the build function: .. doctest:: factory_migration >>> from antidote import injectable >>> @injectable ... class MyFactory: ... def create_external(self) -> External: ... return External() >>> @lazy ... def current_external(factory: MyFactory = inject.me()) -> External: ... return factory.create_external() >>> @injectable(factory_method='build') ... class MyExternal: ... @classmethod ... def build(cls, external: External = current_external()) -> 'MyExternal': ... return MyExternal() >>> world.get(MyExternal) <MyExternal object at ...> Registers a factory which provides a single dependency, defined through the return type annotation. .. doctest:: factory >>> from antidote import factory >>> class Database: ... pass >>> @factory ... def load_db() -> Database: ... return Database() Now to retrieve the dependency: .. doctest:: factory >>> from antidote import inject, world >>> @inject ... def f(db: Database = inject.me(source=load_db)) -> Database: ... return db >>> f() is world.get(Database, source=load_db) True :py:func:`.inject` supports two other alternatives: .. doctest:: factory >>> from typing import Annotated >>> from antidote import From, Get >>> @inject ... def f(db: Annotated[Database, From(load_db)]) -> Database: ... return db >>> @inject({'db': Get(Database, source=load_db)}) ... def f(db: Database) -> Database: ... return db It's also possible to have a stateful factory using a class. The class will be instantiated only once. .. doctest:: factory >>> @factory ... class DatabaseFactory: ... def __call__(self) -> Database: ... return Database() Args: f: Factory function or class which builds the dependency. singleton: Whether the returned dependency is a singleton or not. If so, the factory will be called at most once and the result re-used. Mutually exclusive with :code:`scope`. Defaults to :py:obj:`True`. scope: Scope of the returned dependency. Mutually exclusive with :code:`singleton`. The scope defines if and how long the returned dependency will be cached. See :py:class:`~.core.container.Scope`. Defaults to :py:meth:`~.core.container.Scope.singleton`. wiring: :py:class:`.Wiring` to be used on the class. By default will apply a simple :py:func:`.inject` on all methods, so only annotated type hints are taken into account. Can be deactivated by specifying :py:obj:`None`. If the factory is a function, it'll only be injected if not :py:obj:`None`. Returns: The factory or the decorator.
src/antidote/factory.py
factory
Finistere/dependency_manager
0
python
@API.public def factory(f: Optional[T]=None, *, singleton: Optional[bool]=None, scope: Optional[Scope]=Scope.sentinel(), wiring: Optional[Wiring]=Wiring()) -> Union[(Callable[([T], T)], T)]: "\n .. deprecated:: 1.4\n Use :py:func:`.lazy` instead for external classes or :py:func:`.injectable` with\n the :code:`factory_method` argument for classes you own.\n\n .. admonition:: MIGRATION\n\n For classes you own you should use :py:func:`.injectable`:\n\n .. doctest:: factory_migration\n\n >>> from antidote import injectable, world\n >>> @injectable(factory_method='build')\n ... class MyDatabase:\n ... @classmethod\n ... def build(cls) -> 'MyDatabase':\n ... return MyDatabase()\n >>> world.get(MyDatabase)\n <MyDatabase object at ...>\n\n For the other cases you can use :py:func:`.lazy`:\n\n .. doctest:: factory_migration\n\n >>> from antidote import lazy, inject\n >>> class External:\n ... pass\n >>> @lazy\n ... def my_external() -> External:\n ... return External()\n >>> @inject\n ... def build(ext: External = my_external()) -> External:\n ... return ext\n >>> build()\n <External object at ...>\n >>> # type hint only necessary for correct typing.\n ... build() is world.get[External](my_external())\n True\n\n In both cases you can inject the build function:\n\n .. doctest:: factory_migration\n\n >>> from antidote import injectable\n >>> @injectable\n ... class MyFactory:\n ... def create_external(self) -> External:\n ... return External()\n >>> @lazy\n ... def current_external(factory: MyFactory = inject.me()) -> External:\n ... return factory.create_external()\n >>> @injectable(factory_method='build')\n ... class MyExternal:\n ... @classmethod\n ... def build(cls, external: External = current_external()) -> 'MyExternal':\n ... return MyExternal()\n >>> world.get(MyExternal)\n <MyExternal object at ...>\n\n Registers a factory which provides a single dependency, defined through the return\n type annotation.\n\n .. doctest:: factory\n\n >>> from antidote import factory\n >>> class Database:\n ... pass\n >>> @factory\n ... def load_db() -> Database:\n ... return Database()\n\n Now to retrieve the dependency:\n\n .. doctest:: factory\n\n >>> from antidote import inject, world\n >>> @inject\n ... def f(db: Database = inject.me(source=load_db)) -> Database:\n ... return db\n >>> f() is world.get(Database, source=load_db)\n True\n\n :py:func:`.inject` supports two other alternatives:\n\n .. doctest:: factory\n\n >>> from typing import Annotated\n >>> from antidote import From, Get\n >>> @inject\n ... def f(db: Annotated[Database, From(load_db)]) -> Database:\n ... return db\n >>> @inject({'db': Get(Database, source=load_db)})\n ... def f(db: Database) -> Database:\n ... return db\n\n It's also possible to have a stateful factory using a class. The class will be instantiated\n only once.\n\n .. doctest:: factory\n\n >>> @factory\n ... class DatabaseFactory:\n ... def __call__(self) -> Database:\n ... return Database()\n\n\n\n Args:\n f: Factory function or class which builds the dependency.\n singleton: Whether the returned dependency is a singleton or not. If so,\n the factory will be called at most once and the result re-used. Mutually\n exclusive with :code:`scope`. Defaults to :py:obj:`True`.\n scope: Scope of the returned dependency. Mutually exclusive with\n :code:`singleton`. The scope defines if and how long the returned dependency\n will be cached. See :py:class:`~.core.container.Scope`.\n Defaults to\n :py:meth:`~.core.container.Scope.singleton`.\n wiring: :py:class:`.Wiring` to be used on the class. By default will apply\n a simple :py:func:`.inject` on all methods, so only annotated type hints are\n taken into account. Can be deactivated by specifying :py:obj:`None`. If the\n factory is a function, it'll only be injected if not :py:obj:`None`.\n\n Returns:\n The factory or the decorator.\n\n " scope = validated_scope(scope, singleton, default=Scope.singleton()) if ((wiring is not None) and (not isinstance(wiring, Wiring))): raise TypeError(f'wiring must be a Wiring or None, not a {type(wiring)!r}') @inject def register_factory(func: T, factory_provider: FactoryProvider=inject.me()) -> T: from .service import service if (callable(func) and inspect.isfunction(func)): output: object = get_type_hints(func).get('return') if (output is None): raise ValueError('A return type hint is necessary. It is used as the dependency.') if (not isinstance(output, type)): raise TypeError(f'The return type hint is expected to be a class, not {type(output)}.') if (wiring is not None): try: func = cast(T, inject(func, dependencies=wiring.dependencies)) except DoubleInjectionError: pass func = cast(T, FactoryWrapper(wrapped=cast(Callable[(..., object)], func), output=output)) factory_provider.register(factory=cast(Callable[(..., object)], func), scope=scope, output=output) elif isinstance(func, type): output = get_type_hints(func.__call__).get('return') if (output is None): raise ValueError('A return type hint is necessary. It is used as the dependency.') if (not isinstance(output, type)): raise TypeError(f'The return type hint is expected to be a class, not {type(output)}.') service(func, singleton=True, wiring=wiring) factory_provider.register(output=output, scope=scope, factory_dependency=func) else: raise TypeError(f'Factory must be either a class or a function, not a {type(func)}') return func return ((f and register_factory(f)) or register_factory)
@API.public def factory(f: Optional[T]=None, *, singleton: Optional[bool]=None, scope: Optional[Scope]=Scope.sentinel(), wiring: Optional[Wiring]=Wiring()) -> Union[(Callable[([T], T)], T)]: "\n .. deprecated:: 1.4\n Use :py:func:`.lazy` instead for external classes or :py:func:`.injectable` with\n the :code:`factory_method` argument for classes you own.\n\n .. admonition:: MIGRATION\n\n For classes you own you should use :py:func:`.injectable`:\n\n .. doctest:: factory_migration\n\n >>> from antidote import injectable, world\n >>> @injectable(factory_method='build')\n ... class MyDatabase:\n ... @classmethod\n ... def build(cls) -> 'MyDatabase':\n ... return MyDatabase()\n >>> world.get(MyDatabase)\n <MyDatabase object at ...>\n\n For the other cases you can use :py:func:`.lazy`:\n\n .. doctest:: factory_migration\n\n >>> from antidote import lazy, inject\n >>> class External:\n ... pass\n >>> @lazy\n ... def my_external() -> External:\n ... return External()\n >>> @inject\n ... def build(ext: External = my_external()) -> External:\n ... return ext\n >>> build()\n <External object at ...>\n >>> # type hint only necessary for correct typing.\n ... build() is world.get[External](my_external())\n True\n\n In both cases you can inject the build function:\n\n .. doctest:: factory_migration\n\n >>> from antidote import injectable\n >>> @injectable\n ... class MyFactory:\n ... def create_external(self) -> External:\n ... return External()\n >>> @lazy\n ... def current_external(factory: MyFactory = inject.me()) -> External:\n ... return factory.create_external()\n >>> @injectable(factory_method='build')\n ... class MyExternal:\n ... @classmethod\n ... def build(cls, external: External = current_external()) -> 'MyExternal':\n ... return MyExternal()\n >>> world.get(MyExternal)\n <MyExternal object at ...>\n\n Registers a factory which provides a single dependency, defined through the return\n type annotation.\n\n .. doctest:: factory\n\n >>> from antidote import factory\n >>> class Database:\n ... pass\n >>> @factory\n ... def load_db() -> Database:\n ... return Database()\n\n Now to retrieve the dependency:\n\n .. doctest:: factory\n\n >>> from antidote import inject, world\n >>> @inject\n ... def f(db: Database = inject.me(source=load_db)) -> Database:\n ... return db\n >>> f() is world.get(Database, source=load_db)\n True\n\n :py:func:`.inject` supports two other alternatives:\n\n .. doctest:: factory\n\n >>> from typing import Annotated\n >>> from antidote import From, Get\n >>> @inject\n ... def f(db: Annotated[Database, From(load_db)]) -> Database:\n ... return db\n >>> @inject({'db': Get(Database, source=load_db)})\n ... def f(db: Database) -> Database:\n ... return db\n\n It's also possible to have a stateful factory using a class. The class will be instantiated\n only once.\n\n .. doctest:: factory\n\n >>> @factory\n ... class DatabaseFactory:\n ... def __call__(self) -> Database:\n ... return Database()\n\n\n\n Args:\n f: Factory function or class which builds the dependency.\n singleton: Whether the returned dependency is a singleton or not. If so,\n the factory will be called at most once and the result re-used. Mutually\n exclusive with :code:`scope`. Defaults to :py:obj:`True`.\n scope: Scope of the returned dependency. Mutually exclusive with\n :code:`singleton`. The scope defines if and how long the returned dependency\n will be cached. See :py:class:`~.core.container.Scope`.\n Defaults to\n :py:meth:`~.core.container.Scope.singleton`.\n wiring: :py:class:`.Wiring` to be used on the class. By default will apply\n a simple :py:func:`.inject` on all methods, so only annotated type hints are\n taken into account. Can be deactivated by specifying :py:obj:`None`. If the\n factory is a function, it'll only be injected if not :py:obj:`None`.\n\n Returns:\n The factory or the decorator.\n\n " scope = validated_scope(scope, singleton, default=Scope.singleton()) if ((wiring is not None) and (not isinstance(wiring, Wiring))): raise TypeError(f'wiring must be a Wiring or None, not a {type(wiring)!r}') @inject def register_factory(func: T, factory_provider: FactoryProvider=inject.me()) -> T: from .service import service if (callable(func) and inspect.isfunction(func)): output: object = get_type_hints(func).get('return') if (output is None): raise ValueError('A return type hint is necessary. It is used as the dependency.') if (not isinstance(output, type)): raise TypeError(f'The return type hint is expected to be a class, not {type(output)}.') if (wiring is not None): try: func = cast(T, inject(func, dependencies=wiring.dependencies)) except DoubleInjectionError: pass func = cast(T, FactoryWrapper(wrapped=cast(Callable[(..., object)], func), output=output)) factory_provider.register(factory=cast(Callable[(..., object)], func), scope=scope, output=output) elif isinstance(func, type): output = get_type_hints(func.__call__).get('return') if (output is None): raise ValueError('A return type hint is necessary. It is used as the dependency.') if (not isinstance(output, type)): raise TypeError(f'The return type hint is expected to be a class, not {type(output)}.') service(func, singleton=True, wiring=wiring) factory_provider.register(output=output, scope=scope, factory_dependency=func) else: raise TypeError(f'Factory must be either a class or a function, not a {type(func)}') return func return ((f and register_factory(f)) or register_factory)<|docstring|>.. deprecated:: 1.4 Use :py:func:`.lazy` instead for external classes or :py:func:`.injectable` with the :code:`factory_method` argument for classes you own. .. admonition:: MIGRATION For classes you own you should use :py:func:`.injectable`: .. doctest:: factory_migration >>> from antidote import injectable, world >>> @injectable(factory_method='build') ... class MyDatabase: ... @classmethod ... def build(cls) -> 'MyDatabase': ... return MyDatabase() >>> world.get(MyDatabase) <MyDatabase object at ...> For the other cases you can use :py:func:`.lazy`: .. doctest:: factory_migration >>> from antidote import lazy, inject >>> class External: ... pass >>> @lazy ... def my_external() -> External: ... return External() >>> @inject ... def build(ext: External = my_external()) -> External: ... return ext >>> build() <External object at ...> >>> # type hint only necessary for correct typing. ... build() is world.get[External](my_external()) True In both cases you can inject the build function: .. doctest:: factory_migration >>> from antidote import injectable >>> @injectable ... class MyFactory: ... def create_external(self) -> External: ... return External() >>> @lazy ... def current_external(factory: MyFactory = inject.me()) -> External: ... return factory.create_external() >>> @injectable(factory_method='build') ... class MyExternal: ... @classmethod ... def build(cls, external: External = current_external()) -> 'MyExternal': ... return MyExternal() >>> world.get(MyExternal) <MyExternal object at ...> Registers a factory which provides a single dependency, defined through the return type annotation. .. doctest:: factory >>> from antidote import factory >>> class Database: ... pass >>> @factory ... def load_db() -> Database: ... return Database() Now to retrieve the dependency: .. doctest:: factory >>> from antidote import inject, world >>> @inject ... def f(db: Database = inject.me(source=load_db)) -> Database: ... return db >>> f() is world.get(Database, source=load_db) True :py:func:`.inject` supports two other alternatives: .. doctest:: factory >>> from typing import Annotated >>> from antidote import From, Get >>> @inject ... def f(db: Annotated[Database, From(load_db)]) -> Database: ... return db >>> @inject({'db': Get(Database, source=load_db)}) ... def f(db: Database) -> Database: ... return db It's also possible to have a stateful factory using a class. The class will be instantiated only once. .. doctest:: factory >>> @factory ... class DatabaseFactory: ... def __call__(self) -> Database: ... return Database() Args: f: Factory function or class which builds the dependency. singleton: Whether the returned dependency is a singleton or not. If so, the factory will be called at most once and the result re-used. Mutually exclusive with :code:`scope`. Defaults to :py:obj:`True`. scope: Scope of the returned dependency. Mutually exclusive with :code:`singleton`. The scope defines if and how long the returned dependency will be cached. See :py:class:`~.core.container.Scope`. Defaults to :py:meth:`~.core.container.Scope.singleton`. wiring: :py:class:`.Wiring` to be used on the class. By default will apply a simple :py:func:`.inject` on all methods, so only annotated type hints are taken into account. Can be deactivated by specifying :py:obj:`None`. If the factory is a function, it'll only be injected if not :py:obj:`None`. Returns: The factory or the decorator.<|endoftext|>
ce7cdea14653a714bcd0c8a053a565da5c37586d96848101d23db8ad32dd650c
def __init__(self, *, wiring: Optional[Wiring]=Wiring(), singleton: Optional[bool]=None, scope: Optional[Scope]=Scope.sentinel(), parameters: Optional[Iterable[str]]=None): '\n\n Args:\n wiring: Wiring to be applied on the factory. By default only\n :code:`__init__()` and :code:`__call__()` will be wired. To deactivate\n any wiring at all use :py:obj:`None`.\n singleton: Whether the returned dependency is a singleton or not. If yes,\n the factory will be called at most once and the result re-used.\n Mutually exclusive with :code:`scope`. Defaults to :py:obj:`True`\n scope: Scope of the returned dependency. Mutually exclusive with\n :code:`singleton`. The scope defines if and how long the returned\n dependency will be cached. See :py:class:`~.core.container.Scope`.\n Defaults to :py:meth:`~.core.container.Scope.singleton`.\n ' if (not ((wiring is None) or isinstance(wiring, Wiring))): raise TypeError(f'wiring must be a Wiring or None, not {type(wiring)}') super().__init__(wiring=wiring, scope=validated_scope(scope, singleton, default=Scope.singleton()), parameters=validated_parameters(parameters))
Args: wiring: Wiring to be applied on the factory. By default only :code:`__init__()` and :code:`__call__()` will be wired. To deactivate any wiring at all use :py:obj:`None`. singleton: Whether the returned dependency is a singleton or not. If yes, the factory will be called at most once and the result re-used. Mutually exclusive with :code:`scope`. Defaults to :py:obj:`True` scope: Scope of the returned dependency. Mutually exclusive with :code:`singleton`. The scope defines if and how long the returned dependency will be cached. See :py:class:`~.core.container.Scope`. Defaults to :py:meth:`~.core.container.Scope.singleton`.
src/antidote/factory.py
__init__
Finistere/dependency_manager
0
python
def __init__(self, *, wiring: Optional[Wiring]=Wiring(), singleton: Optional[bool]=None, scope: Optional[Scope]=Scope.sentinel(), parameters: Optional[Iterable[str]]=None): '\n\n Args:\n wiring: Wiring to be applied on the factory. By default only\n :code:`__init__()` and :code:`__call__()` will be wired. To deactivate\n any wiring at all use :py:obj:`None`.\n singleton: Whether the returned dependency is a singleton or not. If yes,\n the factory will be called at most once and the result re-used.\n Mutually exclusive with :code:`scope`. Defaults to :py:obj:`True`\n scope: Scope of the returned dependency. Mutually exclusive with\n :code:`singleton`. The scope defines if and how long the returned\n dependency will be cached. See :py:class:`~.core.container.Scope`.\n Defaults to :py:meth:`~.core.container.Scope.singleton`.\n ' if (not ((wiring is None) or isinstance(wiring, Wiring))): raise TypeError(f'wiring must be a Wiring or None, not {type(wiring)}') super().__init__(wiring=wiring, scope=validated_scope(scope, singleton, default=Scope.singleton()), parameters=validated_parameters(parameters))
def __init__(self, *, wiring: Optional[Wiring]=Wiring(), singleton: Optional[bool]=None, scope: Optional[Scope]=Scope.sentinel(), parameters: Optional[Iterable[str]]=None): '\n\n Args:\n wiring: Wiring to be applied on the factory. By default only\n :code:`__init__()` and :code:`__call__()` will be wired. To deactivate\n any wiring at all use :py:obj:`None`.\n singleton: Whether the returned dependency is a singleton or not. If yes,\n the factory will be called at most once and the result re-used.\n Mutually exclusive with :code:`scope`. Defaults to :py:obj:`True`\n scope: Scope of the returned dependency. Mutually exclusive with\n :code:`singleton`. The scope defines if and how long the returned\n dependency will be cached. See :py:class:`~.core.container.Scope`.\n Defaults to :py:meth:`~.core.container.Scope.singleton`.\n ' if (not ((wiring is None) or isinstance(wiring, Wiring))): raise TypeError(f'wiring must be a Wiring or None, not {type(wiring)}') super().__init__(wiring=wiring, scope=validated_scope(scope, singleton, default=Scope.singleton()), parameters=validated_parameters(parameters))<|docstring|>Args: wiring: Wiring to be applied on the factory. By default only :code:`__init__()` and :code:`__call__()` will be wired. To deactivate any wiring at all use :py:obj:`None`. singleton: Whether the returned dependency is a singleton or not. If yes, the factory will be called at most once and the result re-used. Mutually exclusive with :code:`scope`. Defaults to :py:obj:`True` scope: Scope of the returned dependency. Mutually exclusive with :code:`singleton`. The scope defines if and how long the returned dependency will be cached. See :py:class:`~.core.container.Scope`. Defaults to :py:meth:`~.core.container.Scope.singleton`.<|endoftext|>
b0eeb35272bacb8b942c66c801656f1b2042c172973ba2eecfabb2af52a31408
def copy(self, *, wiring: Union[(Optional[Wiring], Copy)]=Copy.IDENTICAL, singleton: Union[(bool, Copy)]=Copy.IDENTICAL, scope: Union[(Optional[Scope], Copy)]=Copy.IDENTICAL, parameters: Union[(Optional[Iterable[str]], Copy)]=Copy.IDENTICAL) -> Factory.Conf: '\n .. deprecated:: 1.1\n\n Copies current configuration and overrides only specified arguments.\n Accepts the same arguments as :code:`__init__`\n ' if (not ((singleton is Copy.IDENTICAL) or (scope is Copy.IDENTICAL))): raise TypeError('Use either singleton or scope argument, not both.') if isinstance(singleton, bool): scope = (Scope.singleton() if singleton else None) return Copy.immutable(self, wiring=wiring, scope=scope, parameters=parameters)
.. deprecated:: 1.1 Copies current configuration and overrides only specified arguments. Accepts the same arguments as :code:`__init__`
src/antidote/factory.py
copy
Finistere/dependency_manager
0
python
def copy(self, *, wiring: Union[(Optional[Wiring], Copy)]=Copy.IDENTICAL, singleton: Union[(bool, Copy)]=Copy.IDENTICAL, scope: Union[(Optional[Scope], Copy)]=Copy.IDENTICAL, parameters: Union[(Optional[Iterable[str]], Copy)]=Copy.IDENTICAL) -> Factory.Conf: '\n .. deprecated:: 1.1\n\n Copies current configuration and overrides only specified arguments.\n Accepts the same arguments as :code:`__init__`\n ' if (not ((singleton is Copy.IDENTICAL) or (scope is Copy.IDENTICAL))): raise TypeError('Use either singleton or scope argument, not both.') if isinstance(singleton, bool): scope = (Scope.singleton() if singleton else None) return Copy.immutable(self, wiring=wiring, scope=scope, parameters=parameters)
def copy(self, *, wiring: Union[(Optional[Wiring], Copy)]=Copy.IDENTICAL, singleton: Union[(bool, Copy)]=Copy.IDENTICAL, scope: Union[(Optional[Scope], Copy)]=Copy.IDENTICAL, parameters: Union[(Optional[Iterable[str]], Copy)]=Copy.IDENTICAL) -> Factory.Conf: '\n .. deprecated:: 1.1\n\n Copies current configuration and overrides only specified arguments.\n Accepts the same arguments as :code:`__init__`\n ' if (not ((singleton is Copy.IDENTICAL) or (scope is Copy.IDENTICAL))): raise TypeError('Use either singleton or scope argument, not both.') if isinstance(singleton, bool): scope = (Scope.singleton() if singleton else None) return Copy.immutable(self, wiring=wiring, scope=scope, parameters=parameters)<|docstring|>.. deprecated:: 1.1 Copies current configuration and overrides only specified arguments. Accepts the same arguments as :code:`__init__`<|endoftext|>
10c530f7d5e98d6810c6e958534ee5ba67c189e384232271e484e1898d6dc740
def lbeta(x, name='lbeta'): 'Computes `ln(|Beta(x)|)`, reducing along the last dimension.\n\n Given one-dimensional `z = [z_0,...,z_{K-1}]`, we define\n\n ```Beta(z) = \\prod_j Gamma(z_j) / Gamma(\\sum_j z_j)```\n\n And for `n + 1` dimensional `x` with shape `[N1, ..., Nn, K]`, we define\n `lbeta(x)[i1, ..., in] = Log(|Beta(x[i1, ..., in, :])|)`. In other words,\n the last dimension is treated as the `z` vector.\n\n Note that if `z = [u, v]`, then\n `Beta(z) = int_0^1 t^{u-1} (1 - t)^{v-1} dt`, which defines the traditional\n bivariate beta function.\n\n Args:\n x: A rank `n + 1` `Tensor` with type `float`, or `double`.\n name: A name for the operation (optional).\n\n Returns:\n The logarithm of `|Beta(x)|` reducing along the last dimension.\n\n Raises:\n ValueError: If `x` is empty with rank one or less.\n ' with ops.name_scope(name, values=[x]): x = ops.convert_to_tensor(x, name='x') x = control_flow_ops.with_dependencies([check_ops.assert_rank_at_least(x, 1)], x) is_empty = math_ops.equal(0, array_ops.size(x)) def nonempty_lbeta(): log_prod_gamma_x = math_ops.reduce_sum(math_ops.lgamma(x), reduction_indices=[(- 1)]) sum_x = math_ops.reduce_sum(x, reduction_indices=[(- 1)]) log_gamma_sum_x = math_ops.lgamma(sum_x) result = (log_prod_gamma_x - log_gamma_sum_x) return result def empty_lbeta(): assertion = check_ops.assert_rank_at_least(x, 2) with ops.control_dependencies([assertion]): return array_ops.squeeze(x, squeeze_dims=[0]) static_size = x.get_shape().num_elements() if (static_size is not None): if (static_size > 0): return nonempty_lbeta() else: return empty_lbeta() else: return control_flow_ops.cond(is_empty, empty_lbeta, nonempty_lbeta)
Computes `ln(|Beta(x)|)`, reducing along the last dimension. Given one-dimensional `z = [z_0,...,z_{K-1}]`, we define ```Beta(z) = \prod_j Gamma(z_j) / Gamma(\sum_j z_j)``` And for `n + 1` dimensional `x` with shape `[N1, ..., Nn, K]`, we define `lbeta(x)[i1, ..., in] = Log(|Beta(x[i1, ..., in, :])|)`. In other words, the last dimension is treated as the `z` vector. Note that if `z = [u, v]`, then `Beta(z) = int_0^1 t^{u-1} (1 - t)^{v-1} dt`, which defines the traditional bivariate beta function. Args: x: A rank `n + 1` `Tensor` with type `float`, or `double`. name: A name for the operation (optional). Returns: The logarithm of `|Beta(x)|` reducing along the last dimension. Raises: ValueError: If `x` is empty with rank one or less.
tensorflow/python/ops/special_math_ops.py
lbeta
ssameerr/tensorflow
2
python
def lbeta(x, name='lbeta'): 'Computes `ln(|Beta(x)|)`, reducing along the last dimension.\n\n Given one-dimensional `z = [z_0,...,z_{K-1}]`, we define\n\n ```Beta(z) = \\prod_j Gamma(z_j) / Gamma(\\sum_j z_j)```\n\n And for `n + 1` dimensional `x` with shape `[N1, ..., Nn, K]`, we define\n `lbeta(x)[i1, ..., in] = Log(|Beta(x[i1, ..., in, :])|)`. In other words,\n the last dimension is treated as the `z` vector.\n\n Note that if `z = [u, v]`, then\n `Beta(z) = int_0^1 t^{u-1} (1 - t)^{v-1} dt`, which defines the traditional\n bivariate beta function.\n\n Args:\n x: A rank `n + 1` `Tensor` with type `float`, or `double`.\n name: A name for the operation (optional).\n\n Returns:\n The logarithm of `|Beta(x)|` reducing along the last dimension.\n\n Raises:\n ValueError: If `x` is empty with rank one or less.\n ' with ops.name_scope(name, values=[x]): x = ops.convert_to_tensor(x, name='x') x = control_flow_ops.with_dependencies([check_ops.assert_rank_at_least(x, 1)], x) is_empty = math_ops.equal(0, array_ops.size(x)) def nonempty_lbeta(): log_prod_gamma_x = math_ops.reduce_sum(math_ops.lgamma(x), reduction_indices=[(- 1)]) sum_x = math_ops.reduce_sum(x, reduction_indices=[(- 1)]) log_gamma_sum_x = math_ops.lgamma(sum_x) result = (log_prod_gamma_x - log_gamma_sum_x) return result def empty_lbeta(): assertion = check_ops.assert_rank_at_least(x, 2) with ops.control_dependencies([assertion]): return array_ops.squeeze(x, squeeze_dims=[0]) static_size = x.get_shape().num_elements() if (static_size is not None): if (static_size > 0): return nonempty_lbeta() else: return empty_lbeta() else: return control_flow_ops.cond(is_empty, empty_lbeta, nonempty_lbeta)
def lbeta(x, name='lbeta'): 'Computes `ln(|Beta(x)|)`, reducing along the last dimension.\n\n Given one-dimensional `z = [z_0,...,z_{K-1}]`, we define\n\n ```Beta(z) = \\prod_j Gamma(z_j) / Gamma(\\sum_j z_j)```\n\n And for `n + 1` dimensional `x` with shape `[N1, ..., Nn, K]`, we define\n `lbeta(x)[i1, ..., in] = Log(|Beta(x[i1, ..., in, :])|)`. In other words,\n the last dimension is treated as the `z` vector.\n\n Note that if `z = [u, v]`, then\n `Beta(z) = int_0^1 t^{u-1} (1 - t)^{v-1} dt`, which defines the traditional\n bivariate beta function.\n\n Args:\n x: A rank `n + 1` `Tensor` with type `float`, or `double`.\n name: A name for the operation (optional).\n\n Returns:\n The logarithm of `|Beta(x)|` reducing along the last dimension.\n\n Raises:\n ValueError: If `x` is empty with rank one or less.\n ' with ops.name_scope(name, values=[x]): x = ops.convert_to_tensor(x, name='x') x = control_flow_ops.with_dependencies([check_ops.assert_rank_at_least(x, 1)], x) is_empty = math_ops.equal(0, array_ops.size(x)) def nonempty_lbeta(): log_prod_gamma_x = math_ops.reduce_sum(math_ops.lgamma(x), reduction_indices=[(- 1)]) sum_x = math_ops.reduce_sum(x, reduction_indices=[(- 1)]) log_gamma_sum_x = math_ops.lgamma(sum_x) result = (log_prod_gamma_x - log_gamma_sum_x) return result def empty_lbeta(): assertion = check_ops.assert_rank_at_least(x, 2) with ops.control_dependencies([assertion]): return array_ops.squeeze(x, squeeze_dims=[0]) static_size = x.get_shape().num_elements() if (static_size is not None): if (static_size > 0): return nonempty_lbeta() else: return empty_lbeta() else: return control_flow_ops.cond(is_empty, empty_lbeta, nonempty_lbeta)<|docstring|>Computes `ln(|Beta(x)|)`, reducing along the last dimension. Given one-dimensional `z = [z_0,...,z_{K-1}]`, we define ```Beta(z) = \prod_j Gamma(z_j) / Gamma(\sum_j z_j)``` And for `n + 1` dimensional `x` with shape `[N1, ..., Nn, K]`, we define `lbeta(x)[i1, ..., in] = Log(|Beta(x[i1, ..., in, :])|)`. In other words, the last dimension is treated as the `z` vector. Note that if `z = [u, v]`, then `Beta(z) = int_0^1 t^{u-1} (1 - t)^{v-1} dt`, which defines the traditional bivariate beta function. Args: x: A rank `n + 1` `Tensor` with type `float`, or `double`. name: A name for the operation (optional). Returns: The logarithm of `|Beta(x)|` reducing along the last dimension. Raises: ValueError: If `x` is empty with rank one or less.<|endoftext|>
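The identity lbeta computes is easy to sanity-check outside TensorFlow; a small NumPy/SciPy sketch (mine, not from the file above) of exactly the reduction nonempty_lbeta performs:

import numpy as np
from scipy.special import gammaln

def lbeta_np(x):
    # ln|Beta(x)| = sum_j lgamma(x_j) - lgamma(sum_j x_j), over the last axis
    x = np.asarray(x, dtype=float)
    return gammaln(x).sum(axis=-1) - gammaln(x.sum(axis=-1))

print(lbeta_np([1.0, 1.0]))                                 # 0.0, since Beta(1, 1) = 1
print(np.isclose(lbeta_np([2.0, 2.0]), np.log(1.0 / 6.0)))  # True: Beta(2, 2) = 1/6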
18169c53cfba38bb838dbc7f3135516ec1898acde056685f05f47f228dabb2d9
def einsum(axes, *inputs): '\n A generalized contraction between tensors of arbitrary dimension.\n\n Like numpy.einsum.\n ' match = re.match('([a-z,]+)->([a-z]+)', axes) assert match, ('Indices have incorrect format: %s' % axes) inputs = list(inputs) idx_in = match.group(1).split(',') idx_out = match.group(2) idx_all = set(''.join(idx_in)) assert (len(idx_in) == len(inputs)), ('Expected %d inputs but only got %d' % (len(idx_in), len(inputs))) for (i, (input_, axes_)) in enumerate(zip(inputs, idx_in)): assert (input_.get_shape().ndims == len(axes_)), ('Input %d with axes %s has incorrect number of dimensions (expected %d, got %d)' % (i, axes_, len(axes_), input_.get_shape().ndims)) sorted_idx = sorted(axes_) if (list(axes_) != sorted_idx): permuted = [axes_.find(ax) for ax in sorted_idx] inputs[i] = array_ops.transpose(input_, permuted) idx_in[i] = sorted_idx missing_idx = set(idx_out).difference(idx_all) assert (not missing_idx), ('Unknown output axes: %s' % missing_idx) reduction_idx = [] shapes = [[(dim if dim else (- 1)) for dim in tensor.get_shape().as_list()] for tensor in inputs] for (j, ax) in enumerate(sorted(idx_all)): dims = [] for (i, idx) in enumerate(idx_in): if (ax not in idx): shapes[i].insert(j, 1) else: dim = shapes[i][j] if (isinstance(dim, int) and (dim > 1)): dims.append(dim) assert (len(set(dims)) <= 1), ('Dimension mismatch on axis: %s' % ax) if (ax not in idx_out): reduction_idx.append(j) expanded_inputs = [array_ops.reshape(input_, shape) for (input_, shape) in zip(inputs, shapes)] expanded_output = 1 for input_ in expanded_inputs: expanded_output *= input_ return math_ops.reduce_sum(expanded_output, reduction_idx)
A generalized contraction between tensors of arbitrary dimension. Like numpy.einsum.
tensorflow/python/ops/special_math_ops.py
einsum
ssameerr/tensorflow
2
python
def einsum(axes, *inputs): '\n A generalized contraction between tensors of arbitrary dimension.\n\n Like numpy.einsum.\n ' match = re.match('([a-z,]+)->([a-z]+)', axes) assert match, ('Indices have incorrect format: %s' % axes) inputs = list(inputs) idx_in = match.group(1).split(',') idx_out = match.group(2) idx_all = set(''.join(idx_in)) assert (len(idx_in) == len(inputs)), ('Expected %d inputs but only got %d' % (len(idx_in), len(inputs))) for (i, (input_, axes_)) in enumerate(zip(inputs, idx_in)): assert (input_.get_shape().ndims == len(axes_)), ('Input %d with axes %s has incorrect number of dimensions (expected %d, got %d)' % (i, axes_, len(axes_), input_.get_shape().ndims)) sorted_idx = sorted(axes_) if (list(axes_) != sorted_idx): permuted = [axes_.find(ax) for ax in sorted_idx] inputs[i] = array_ops.transpose(input_, permuted) idx_in[i] = sorted_idx missing_idx = set(idx_out).difference(idx_all) assert (not missing_idx), ('Unknown output axes: %s' % missing_idx) reduction_idx = [] shapes = [[(dim if dim else (- 1)) for dim in tensor.get_shape().as_list()] for tensor in inputs] for (j, ax) in enumerate(sorted(idx_all)): dims = [] for (i, idx) in enumerate(idx_in): if (ax not in idx): shapes[i].insert(j, 1) else: dim = shapes[i][j] if (isinstance(dim, int) and (dim > 1)): dims.append(dim) assert (len(set(dims)) <= 1), ('Dimension mismatch on axis: %s' % ax) if (ax not in idx_out): reduction_idx.append(j) expanded_inputs = [array_ops.reshape(input_, shape) for (input_, shape) in zip(inputs, shapes)] expanded_output = 1 for input_ in expanded_inputs: expanded_output *= input_ return math_ops.reduce_sum(expanded_output, reduction_idx)
def einsum(axes, *inputs): '\n A generalized contraction between tensors of arbitrary dimension.\n\n Like numpy.einsum.\n ' match = re.match('([a-z,]+)->([a-z]+)', axes) assert match, ('Indices have incorrect format: %s' % axes) inputs = list(inputs) idx_in = match.group(1).split(',') idx_out = match.group(2) idx_all = set(''.join(idx_in)) assert (len(idx_in) == len(inputs)), ('Expected %d inputs but only got %d' % (len(idx_in), len(inputs))) for (i, (input_, axes_)) in enumerate(zip(inputs, idx_in)): assert (input_.get_shape().ndims == len(axes_)), ('Input %d with axes %s has incorrect number of dimensions (expected %d, got %d)' % (i, axes_, len(axes_), input_.get_shape().ndims)) sorted_idx = sorted(axes_) if (list(axes_) != sorted_idx): permuted = [axes_.find(ax) for ax in sorted_idx] inputs[i] = array_ops.transpose(input_, permuted) idx_in[i] = sorted_idx missing_idx = set(idx_out).difference(idx_all) assert (not missing_idx), ('Unknown output axes: %s' % missing_idx) reduction_idx = [] shapes = [[(dim if dim else (- 1)) for dim in tensor.get_shape().as_list()] for tensor in inputs] for (j, ax) in enumerate(sorted(idx_all)): dims = [] for (i, idx) in enumerate(idx_in): if (ax not in idx): shapes[i].insert(j, 1) else: dim = shapes[i][j] if (isinstance(dim, int) and (dim > 1)): dims.append(dim) assert (len(set(dims)) <= 1), ('Dimension mismatch on axis: %s' % ax) if (ax not in idx_out): reduction_idx.append(j) expanded_inputs = [array_ops.reshape(input_, shape) for (input_, shape) in zip(inputs, shapes)] expanded_output = 1 for input_ in expanded_inputs: expanded_output *= input_ return math_ops.reduce_sum(expanded_output, reduction_idx)<|docstring|>A generalized contraction between tensors of arbitrary dimension. Like numpy.einsum.<|endoftext|>
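Since einsum above mirrors np.einsum for plain lower-case subscripts, the quickest way to see what an equation string computes is against NumPy; a usage sketch (NumPy stands in for the TF tensors):

import numpy as np

a = np.random.rand(2, 3)
b = np.random.rand(3, 4)
# 'ij,jk->ik' is matrix multiplication; the TF helper takes the same
# equation string as its first argument: einsum('ij,jk->ik', a, b)
assert np.allclose(np.einsum('ij,jk->ik', a, b), a @ b)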
d89a8c858d196172029e6fdb089c8760aeddd2988e674ea7eba0080d1246231e
def timestamp2date(timestamp): '\n Convert a timestamp to a date\n Parameters\n ----------\n timestamp: int or str\n Unix timestamp to convert\n\n Returns\n -------\n datetime:\n formatted date: YYYY-MM-DD HH:MM:SS\n ' time_array = time.localtime(int(timestamp)) datetime = time.strftime('%Y-%m-%d %H:%M:%S', time_array) return datetime
Convert a timestamp to a date Parameters ---------- timestamp: int or str Unix timestamp to convert Returns ------- datetime: formatted date: YYYY-MM-DD HH:MM:SS
wechatarticles/tools.py
timestamp2date
pengziliu/wechat_articles_spider
1
python
def timestamp2date(timestamp): '\n Convert a timestamp to a date\n Parameters\n ----------\n timestamp: int or str\n Unix timestamp to convert\n\n Returns\n -------\n datetime:\n formatted date: YYYY-MM-DD HH:MM:SS\n ' time_array = time.localtime(int(timestamp)) datetime = time.strftime('%Y-%m-%d %H:%M:%S', time_array) return datetime
def timestamp2date(timestamp): '\n Convert a timestamp to a date\n Parameters\n ----------\n timestamp: int or str\n Unix timestamp to convert\n\n Returns\n -------\n datetime:\n formatted date: YYYY-MM-DD HH:MM:SS\n ' time_array = time.localtime(int(timestamp)) datetime = time.strftime('%Y-%m-%d %H:%M:%S', time_array) return datetime<|docstring|>Convert a timestamp to a date Parameters ---------- timestamp: int or str Unix timestamp to convert Returns ------- datetime: formatted date: YYYY-MM-DD HH:MM:SS<|endoftext|>
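A quick usage sketch for timestamp2date; the exact output depends on the host's local timezone, since time.localtime is used:

print(timestamp2date(1546300800))    # '2019-01-01 08:00:00' on a UTC+8 host
print(timestamp2date('1546300800'))  # string input also works, via int()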
ba02547884dce9cee98e1282a2c025f5d3566564e34ed8a8f0d03e9c0baab020
def save_mongo(data, host=None, port=None, name=None, password='', dbname=None, collname=None): '\n Save data to MongoDB\n Parameters\n ----------\n data: list\n data to insert\n host: str\n hostname (defaults to the local database)\n port: int\n port exposed by the MongoDB host, default 27017\n name: str\n username\n password: str\n user password\n dbname: str\n name of the remote database\n collname: str\n name of the collection to insert into\n Returns\n -------\n None\n ' HOST = 'localhost' PORT = 27017 host = (HOST if (host is None) else host) port = (PORT if (port is None) else port) assert isinstance(host, str) assert isinstance(name, str) assert isinstance(password, str) assert isinstance(dbname, str) assert isinstance(collname, str) if (not isinstance(port, int)): raise TypeError('port must be an instance of int') from pymongo import MongoClient client = MongoClient(host, port) db_auth = client.admin db_auth.authenticate(name, password) coll = client[dbname][collname] coll.insert_many(data)
Save data to MongoDB Parameters ---------- data: list data to insert host: str hostname (defaults to the local database) port: int port exposed by the MongoDB host, default 27017 name: str username password: str user password dbname: str name of the remote database collname: str name of the collection to insert into Returns ------- None
wechatarticles/tools.py
save_mongo
pengziliu/wechat_articles_spider
1
python
def save_mongo(data, host=None, port=None, name=None, password='', dbname=None, collname=None): '\n Save data to MongoDB\n Parameters\n ----------\n data: list\n data to insert\n host: str\n hostname (defaults to the local database)\n port: int\n port exposed by the MongoDB host, default 27017\n name: str\n username\n password: str\n user password\n dbname: str\n name of the remote database\n collname: str\n name of the collection to insert into\n Returns\n -------\n None\n ' HOST = 'localhost' PORT = 27017 host = (HOST if (host is None) else host) port = (PORT if (port is None) else port) assert isinstance(host, str) assert isinstance(name, str) assert isinstance(password, str) assert isinstance(dbname, str) assert isinstance(collname, str) if (not isinstance(port, int)): raise TypeError('port must be an instance of int') from pymongo import MongoClient client = MongoClient(host, port) db_auth = client.admin db_auth.authenticate(name, password) coll = client[dbname][collname] coll.insert_many(data)
def save_mongo(data, host=None, port=None, name=None, password='', dbname=None, collname=None): '\n Save data to MongoDB\n Parameters\n ----------\n data: list\n data to insert\n host: str\n hostname (defaults to the local database)\n port: int\n port exposed by the MongoDB host, default 27017\n name: str\n username\n password: str\n user password\n dbname: str\n name of the remote database\n collname: str\n name of the collection to insert into\n Returns\n -------\n None\n ' HOST = 'localhost' PORT = 27017 host = (HOST if (host is None) else host) port = (PORT if (port is None) else port) assert isinstance(host, str) assert isinstance(name, str) assert isinstance(password, str) assert isinstance(dbname, str) assert isinstance(collname, str) if (not isinstance(port, int)): raise TypeError('port must be an instance of int') from pymongo import MongoClient client = MongoClient(host, port) db_auth = client.admin db_auth.authenticate(name, password) coll = client[dbname][collname] coll.insert_many(data)<|docstring|>Save data to MongoDB Parameters ---------- data: list data to insert host: str hostname (defaults to the local database) port: int port exposed by the MongoDB host, default 27017 name: str username password: str user password dbname: str name of the remote database collname: str name of the collection to insert into Returns ------- None<|endoftext|>
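A usage sketch for save_mongo with placeholder credentials (every value below is hypothetical). Note the design: the function authenticates against the admin database via Database.authenticate, an API that PyMongo removed in 4.0; with a modern client you would pass username/password to MongoClient instead.

docs = [{'title': 'demo', 'url': 'https://example.com'}]
save_mongo(docs, host='127.0.0.1', port=27017,
           name='admin_user', password='s3cret',
           dbname='wechat', collname='articles')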
9fa0dc134cc1b10afc8de93e63f9823ea98404dd1a813db592f5342ca75ed322
def save_json(fname, data): '\n Save data as JSON Lines\n Parameters\n ----------\n fname: str\n output filename (must end in .json)\n data: list\n scraped data\n Returns\n -------\n None\n ' assert isinstance(fname, str) if ('.json' not in fname): raise IOError('fname must be json', fname) with open(fname, 'a+') as f: for item in data: f.write(json.dumps(item)) f.write('\n')
Save data as JSON Lines Parameters ---------- fname: str output filename (must end in .json) data: list scraped data Returns ------- None
wechatarticles/tools.py
save_json
pengziliu/wechat_articles_spider
1
python
def save_json(fname, data): '\n Save data as JSON Lines\n Parameters\n ----------\n fname: str\n output filename (must end in .json)\n data: list\n scraped data\n Returns\n -------\n None\n ' assert isinstance(fname, str) if ('.json' not in fname): raise IOError('fname must be json', fname) with open(fname, 'a+') as f: for item in data: f.write(json.dumps(item)) f.write('\n')
def save_json(fname, data): '\n Save data as JSON Lines\n Parameters\n ----------\n fname: str\n output filename (must end in .json)\n data: list\n scraped data\n Returns\n -------\n None\n ' assert isinstance(fname, str) if ('.json' not in fname): raise IOError('fname must be json', fname) with open(fname, 'a+') as f: for item in data: f.write(json.dumps(item)) f.write('\n')<|docstring|>Save data as JSON Lines Parameters ---------- fname: str output filename (must end in .json) data: list scraped data Returns ------- None<|endoftext|>
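save_json appends one JSON object per line (JSON Lines) in 'a+' mode rather than writing a single array; a round-trip sketch:

save_json('articles.json', [{'title': 'a'}, {'title': 'b'}])

import json
with open('articles.json') as f:
    items = [json.loads(line) for line in f]  # read the JSON Lines back

Since json.dumps defaults to ensure_ascii=True, Chinese text is stored as \uXXXX escapes in the file.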
5edbcf12464cc48461a45909b8a2084678b2151007a92a06457bd5b1595e888a
def process(self, text: str, verbose=False) -> str: '\n String-to-string coreference resolution through textual replacements.\n ' doc = self.nlp(text) if (not doc._.has_coref): return text processed_text = [] if verbose: print('Coreferencing replacements:') for token in doc: if token._.in_coref: main_subj_token = token._.coref_clusters[0].main main_subj_text = main_subj_token.text if ((not main_subj_text.isalpha()) or (main_subj_text.lower() in SUBJ_BLACKLIST)): processed_text.append(token.text_with_ws) else: if verbose: print('*{}*'.format(main_subj_token.text)) replacement = main_subj_text if (token.text.lower() in SUBJ_POSSESSION): replacement += "'s" if token.text_with_ws.endswith(' '): replacement += ' ' processed_text.append(replacement) else: processed_text.append(token.text_with_ws) if verbose: print() return ''.join(processed_text)
String-to-string coreference resolution through textual replacements.
src/extractor_service/coref.py
process
ansonmiu0214/aspect-based-sentiment-analysis
4
python
def process(self, text: str, verbose=False) -> str: '\n \n ' doc = self.nlp(text) if (not doc._.has_coref): return text processed_text = [] if verbose: print('Coreferencing replacements:') for token in doc: if token._.in_coref: main_subj_token = token._.coref_clusters[0].main main_subj_text = main_subj_token.text if ((not main_subj_text.isalpha()) or (main_subj_text.lower() in SUBJ_BLACKLIST)): processed_text.append(token.text_with_ws) else: if verbose: print('*{}*'.format(main_subj_token.text)) replacement = main_subj_text if (token.text.lower() in SUBJ_POSSESSION): replacement += "'s" if token.text_with_ws.endswith(' '): replacement += ' ' processed_text.append(replacement) else: processed_text.append(token.text_with_ws) if verbose: print() return ''.join(processed_text)
def process(self, text: str, verbose=False) -> str: '\n \n ' doc = self.nlp(text) if (not doc._.has_coref): return text processed_text = [] if verbose: print('Coreferencing replacements:') for token in doc: if token._.in_coref: main_subj_token = token._.coref_clusters[0].main main_subj_text = main_subj_token.text if ((not main_subj_text.isalpha()) or (main_subj_text.lower() in SUBJ_BLACKLIST)): processed_text.append(token.text_with_ws) else: if verbose: print('*{}*'.format(main_subj_token.text)) replacement = main_subj_text if (token.text.lower() in SUBJ_POSSESSION): replacement += "'s" if token.text_with_ws.endswith(' '): replacement += ' ' processed_text.append(replacement) else: processed_text.append(token.text_with_ws) if verbose: print() return ''.join(processed_text)<|docstring|>String-to-string coreference resolution through textual replacements.<|endoftext|>
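The process method belongs to a wrapper class whose self.nlp is a spaCy pipeline with neuralcoref attached (that extension supplies doc._.has_coref, token._.in_coref and token._.coref_clusters); SUBJ_BLACKLIST and SUBJ_POSSESSION are module constants not shown in the record. A minimal pipeline sketch, with hypothetical constant values:

import spacy
import neuralcoref  # registers the ._.coref_* extensions on Doc and Token

SUBJ_BLACKLIST = {'it', 'this', 'that'}   # hypothetical contents
SUBJ_POSSESSION = {'its', 'his', 'her'}   # hypothetical contents

nlp = spacy.load('en_core_web_sm')
neuralcoref.add_to_pipe(nlp)

doc = nlp('The screen is sharp but it drains the battery.')
if doc._.has_coref:
    print(doc._.coref_resolved)  # neuralcoref's built-in resolution, for comparison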
c3d64cd5947152e7285956425cdddb42c2ff655473e88c7f5b0c24f3300582a1
def get_globals(): 'Context variables that are available for every template rendered by\n OSFWebRenderer.\n ' user = _get_current_user() set_status_message(user) user_institutions = ([{'id': inst._id, 'name': inst.name, 'logo_path': inst.logo_path_rounded_corners} for inst in user.affiliated_institutions.all()] if user else []) location = (geolite2.reader().get(request.remote_addr) if request.remote_addr else None) if (request.host_url != settings.DOMAIN): try: inst_id = Institution.objects.get(domains__icontains=request.host, is_deleted=False)._id request_login_url = '{}institutions/{}'.format(settings.DOMAIN, inst_id) except Institution.DoesNotExist: request_login_url = request.url.replace(request.host_url, settings.DOMAIN) else: request_login_url = request.url return {'private_link_anonymous': is_private_link_anonymous_view(), 'user_name': (user.username if user else ''), 'user_full_name': (user.fullname if user else ''), 'user_id': (user._id if user else ''), 'user_locale': (user.locale if (user and user.locale) else ''), 'user_timezone': (user.timezone if (user and user.timezone) else ''), 'user_url': (user.url if user else ''), 'user_profile_image': (get_profile_image_url(user=user, size=25) if user else ''), 'user_email_verifications': (user.unconfirmed_email_info if user else []), 'user_api_url': (user.api_url if user else ''), 'user_entry_point': (metrics.get_entry_point(user) if user else ''), 'user_institutions': (user_institutions if user else None), 'display_name': (user.fullname if user else ''), 'anon': {'continent': (location or {}).get('continent', {}).get('code', None), 'country': (location or {}).get('country', {}).get('iso_code', None)}, 'use_cdn': settings.USE_CDN_FOR_CLIENT_LIBS, 'sentry_dsn_js': (settings.SENTRY_DSN_JS if sentry.enabled else None), 'dev_mode': settings.DEV_MODE, 'allow_login': settings.ALLOW_LOGIN, 'cookie_name': settings.COOKIE_NAME, 'status': status.pop_status_messages(), 'prev_status': status.pop_previous_status_messages(), 'domain': settings.DOMAIN, 'api_domain': settings.API_DOMAIN, 'disk_saving_mode': settings.DISK_SAVING_MODE, 'language': language, 'noteworthy_links_node': settings.NEW_AND_NOTEWORTHY_LINKS_NODE, 'popular_links_node': settings.POPULAR_LINKS_NODE, 'web_url_for': util.web_url_for, 'api_url_for': util.api_url_for, 'api_v2_url': util.api_v2_url, 'api_v2_domain': settings.API_DOMAIN, 'api_v2_base': util.api_v2_url(''), 'sanitize': sanitize, 'sjson': (lambda s: sanitize.safe_json(s)), 'webpack_asset': paths.webpack_asset, 'osf_url': settings.INTERNAL_DOMAIN, 'waterbutler_url': settings.WATERBUTLER_URL, 'login_url': cas.get_login_url(request_login_url), 'sign_up_url': util.web_url_for('auth_register', _absolute=True, next=request_login_url), 'reauth_url': util.web_url_for('auth_logout', redirect_url=request.url, reauth=True), 'profile_url': cas.get_profile_url(), 'enable_institutions': settings.ENABLE_INSTITUTIONS, 'keen': {'public': {'project_id': settings.KEEN['public']['project_id'], 'write_key': settings.KEEN['public']['write_key']}, 'private': {'project_id': settings.KEEN['private']['project_id'], 'write_key': settings.KEEN['private']['write_key']}}, 'institutional_landing_flag': flag_is_active(request, features.INSTITUTIONAL_LANDING_FLAG), 'maintenance': maintenance.get_maintenance(), 'recaptcha_site_key': settings.RECAPTCHA_SITE_KEY, 'custom_citations': settings.CUSTOM_CITATIONS, 'osf_support_email': settings.OSF_SUPPORT_EMAIL, 'osf_contact_email': settings.OSF_CONTACT_EMAIL, 'footer_links': settings.FOOTER_LINKS, 'features': 
features, 'waffle': waffle, 'csrf_cookie_name': api_settings.CSRF_COOKIE_NAME, 'permissions': permissions}
Context variables that are available for every template rendered by OSFWebRenderer.
website/routes.py
get_globals
felliott/osf.io
628
python
def get_globals(): 'Context variables that are available for every template rendered by\n OSFWebRenderer.\n ' user = _get_current_user() set_status_message(user) user_institutions = ([{'id': inst._id, 'name': inst.name, 'logo_path': inst.logo_path_rounded_corners} for inst in user.affiliated_institutions.all()] if user else []) location = (geolite2.reader().get(request.remote_addr) if request.remote_addr else None) if (request.host_url != settings.DOMAIN): try: inst_id = Institution.objects.get(domains__icontains=request.host, is_deleted=False)._id request_login_url = '{}institutions/{}'.format(settings.DOMAIN, inst_id) except Institution.DoesNotExist: request_login_url = request.url.replace(request.host_url, settings.DOMAIN) else: request_login_url = request.url return {'private_link_anonymous': is_private_link_anonymous_view(), 'user_name': (user.username if user else ''), 'user_full_name': (user.fullname if user else ''), 'user_id': (user._id if user else ''), 'user_locale': (user.locale if (user and user.locale) else ''), 'user_timezone': (user.timezone if (user and user.timezone) else ''), 'user_url': (user.url if user else ''), 'user_profile_image': (get_profile_image_url(user=user, size=25) if user else ''), 'user_email_verifications': (user.unconfirmed_email_info if user else []), 'user_api_url': (user.api_url if user else ''), 'user_entry_point': (metrics.get_entry_point(user) if user else ''), 'user_institutions': (user_institutions if user else None), 'display_name': (user.fullname if user else ''), 'anon': {'continent': (location or {}).get('continent', {}).get('code', None), 'country': (location or {}).get('country', {}).get('iso_code', None)}, 'use_cdn': settings.USE_CDN_FOR_CLIENT_LIBS, 'sentry_dsn_js': (settings.SENTRY_DSN_JS if sentry.enabled else None), 'dev_mode': settings.DEV_MODE, 'allow_login': settings.ALLOW_LOGIN, 'cookie_name': settings.COOKIE_NAME, 'status': status.pop_status_messages(), 'prev_status': status.pop_previous_status_messages(), 'domain': settings.DOMAIN, 'api_domain': settings.API_DOMAIN, 'disk_saving_mode': settings.DISK_SAVING_MODE, 'language': language, 'noteworthy_links_node': settings.NEW_AND_NOTEWORTHY_LINKS_NODE, 'popular_links_node': settings.POPULAR_LINKS_NODE, 'web_url_for': util.web_url_for, 'api_url_for': util.api_url_for, 'api_v2_url': util.api_v2_url, 'api_v2_domain': settings.API_DOMAIN, 'api_v2_base': util.api_v2_url(''), 'sanitize': sanitize, 'sjson': (lambda s: sanitize.safe_json(s)), 'webpack_asset': paths.webpack_asset, 'osf_url': settings.INTERNAL_DOMAIN, 'waterbutler_url': settings.WATERBUTLER_URL, 'login_url': cas.get_login_url(request_login_url), 'sign_up_url': util.web_url_for('auth_register', _absolute=True, next=request_login_url), 'reauth_url': util.web_url_for('auth_logout', redirect_url=request.url, reauth=True), 'profile_url': cas.get_profile_url(), 'enable_institutions': settings.ENABLE_INSTITUTIONS, 'keen': {'public': {'project_id': settings.KEEN['public']['project_id'], 'write_key': settings.KEEN['public']['write_key']}, 'private': {'project_id': settings.KEEN['private']['project_id'], 'write_key': settings.KEEN['private']['write_key']}}, 'institutional_landing_flag': flag_is_active(request, features.INSTITUTIONAL_LANDING_FLAG), 'maintenance': maintenance.get_maintenance(), 'recaptcha_site_key': settings.RECAPTCHA_SITE_KEY, 'custom_citations': settings.CUSTOM_CITATIONS, 'osf_support_email': settings.OSF_SUPPORT_EMAIL, 'osf_contact_email': settings.OSF_CONTACT_EMAIL, 'footer_links': settings.FOOTER_LINKS, 'features': features, 'waffle': 
waffle, 'csrf_cookie_name': api_settings.CSRF_COOKIE_NAME, 'permissions': permissions}
def get_globals(): 'Context variables that are available for every template rendered by\n OSFWebRenderer.\n ' user = _get_current_user() set_status_message(user) user_institutions = ([{'id': inst._id, 'name': inst.name, 'logo_path': inst.logo_path_rounded_corners} for inst in user.affiliated_institutions.all()] if user else []) location = (geolite2.reader().get(request.remote_addr) if request.remote_addr else None) if (request.host_url != settings.DOMAIN): try: inst_id = Institution.objects.get(domains__icontains=request.host, is_deleted=False)._id request_login_url = '{}institutions/{}'.format(settings.DOMAIN, inst_id) except Institution.DoesNotExist: request_login_url = request.url.replace(request.host_url, settings.DOMAIN) else: request_login_url = request.url return {'private_link_anonymous': is_private_link_anonymous_view(), 'user_name': (user.username if user else ''), 'user_full_name': (user.fullname if user else ''), 'user_id': (user._id if user else ''), 'user_locale': (user.locale if (user and user.locale) else ''), 'user_timezone': (user.timezone if (user and user.timezone) else ''), 'user_url': (user.url if user else ''), 'user_profile_image': (get_profile_image_url(user=user, size=25) if user else ''), 'user_email_verifications': (user.unconfirmed_email_info if user else []), 'user_api_url': (user.api_url if user else ''), 'user_entry_point': (metrics.get_entry_point(user) if user else ''), 'user_institutions': (user_institutions if user else None), 'display_name': (user.fullname if user else ''), 'anon': {'continent': (location or {}).get('continent', {}).get('code', None), 'country': (location or {}).get('country', {}).get('iso_code', None)}, 'use_cdn': settings.USE_CDN_FOR_CLIENT_LIBS, 'sentry_dsn_js': (settings.SENTRY_DSN_JS if sentry.enabled else None), 'dev_mode': settings.DEV_MODE, 'allow_login': settings.ALLOW_LOGIN, 'cookie_name': settings.COOKIE_NAME, 'status': status.pop_status_messages(), 'prev_status': status.pop_previous_status_messages(), 'domain': settings.DOMAIN, 'api_domain': settings.API_DOMAIN, 'disk_saving_mode': settings.DISK_SAVING_MODE, 'language': language, 'noteworthy_links_node': settings.NEW_AND_NOTEWORTHY_LINKS_NODE, 'popular_links_node': settings.POPULAR_LINKS_NODE, 'web_url_for': util.web_url_for, 'api_url_for': util.api_url_for, 'api_v2_url': util.api_v2_url, 'api_v2_domain': settings.API_DOMAIN, 'api_v2_base': util.api_v2_url(''), 'sanitize': sanitize, 'sjson': (lambda s: sanitize.safe_json(s)), 'webpack_asset': paths.webpack_asset, 'osf_url': settings.INTERNAL_DOMAIN, 'waterbutler_url': settings.WATERBUTLER_URL, 'login_url': cas.get_login_url(request_login_url), 'sign_up_url': util.web_url_for('auth_register', _absolute=True, next=request_login_url), 'reauth_url': util.web_url_for('auth_logout', redirect_url=request.url, reauth=True), 'profile_url': cas.get_profile_url(), 'enable_institutions': settings.ENABLE_INSTITUTIONS, 'keen': {'public': {'project_id': settings.KEEN['public']['project_id'], 'write_key': settings.KEEN['public']['write_key']}, 'private': {'project_id': settings.KEEN['private']['project_id'], 'write_key': settings.KEEN['private']['write_key']}}, 'institutional_landing_flag': flag_is_active(request, features.INSTITUTIONAL_LANDING_FLAG), 'maintenance': maintenance.get_maintenance(), 'recaptcha_site_key': settings.RECAPTCHA_SITE_KEY, 'custom_citations': settings.CUSTOM_CITATIONS, 'osf_support_email': settings.OSF_SUPPORT_EMAIL, 'osf_contact_email': settings.OSF_CONTACT_EMAIL, 'footer_links': settings.FOOTER_LINKS, 'features': features, 'waffle': 
waffle, 'csrf_cookie_name': api_settings.CSRF_COOKIE_NAME, 'permissions': permissions}<|docstring|>Context variables that are available for every template rendered by OSFWebRenderer.<|endoftext|>
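One reusable pattern in get_globals is the anonymized geolocation entry: look the request IP up in a bundled GeoLite2 database and keep only continent and country codes. A standalone sketch, assuming the maxminddb-geolite2 package that supplies this geolite2 object:

from geolite2 import geolite2

location = geolite2.reader().get('8.8.8.8') or {}  # .get() returns None on a miss
anon = {'continent': location.get('continent', {}).get('code'),
        'country': location.get('country', {}).get('iso_code')}
print(anon)  # e.g. {'continent': 'NA', 'country': 'US'}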
c49c64b3fe19377588c9171e31e6c99e22cf9114881e20638faf685ead89b5fd
def robots(): 'Serves the robots.txt file.' if os.path.exists(os.path.join(settings.STATIC_FOLDER, 'robots.local.txt')): robots_file = 'robots.local.txt' else: robots_file = 'robots.txt' return send_from_directory(settings.STATIC_FOLDER, robots_file, mimetype='html')
Serves the robots.txt file.
website/routes.py
robots
felliott/osf.io
628
python
def robots(): if os.path.exists(os.path.join(settings.STATIC_FOLDER, 'robots.local.txt')): robots_file = 'robots.local.txt' else: robots_file = 'robots.txt' return send_from_directory(settings.STATIC_FOLDER, robots_file, mimetype='html')
def robots(): if os.path.exists(os.path.join(settings.STATIC_FOLDER, 'robots.local.txt')): robots_file = 'robots.local.txt' else: robots_file = 'robots.txt' return send_from_directory(settings.STATIC_FOLDER, robots_file, mimetype='html')<|docstring|>Serves the robots.txt file.<|endoftext|>
cd230d9ba6c8418165658e847a3dc3642c9995e34ecc8347fa9846295f092123
def sitemap_file(path): 'Serves the sitemap/* files.' if path.endswith('.xml.gz'): mime = 'application/x-gzip' elif path.endswith('.xml'): mime = 'text/xml' else: raise HTTPError(http_status.HTTP_404_NOT_FOUND) return send_from_directory((settings.STATIC_FOLDER + '/sitemaps/'), path, mimetype=mime)
Serves the sitemap/* files.
website/routes.py
sitemap_file
felliott/osf.io
628
python
def sitemap_file(path): if path.endswith('.xml.gz'): mime = 'application/x-gzip' elif path.endswith('.xml'): mime = 'text/xml' else: raise HTTPError(http_status.HTTP_404_NOT_FOUND) return send_from_directory((settings.STATIC_FOLDER + '/sitemaps/'), path, mimetype=mime)
def sitemap_file(path): if path.endswith('.xml.gz'): mime = 'application/x-gzip' elif path.endswith('.xml'): mime = 'text/xml' else: raise HTTPError(http_status.HTTP_404_NOT_FOUND) return send_from_directory((settings.STATIC_FOLDER + '/sitemaps/'), path, mimetype=mime)<|docstring|>Serves the sitemap/* files.<|endoftext|>
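sitemap_file's only logic is an extension-to-MIME mapping before delegating to send_from_directory; the mapping in isolation:

def sitemap_mime(path):
    if path.endswith('.xml.gz'):
        return 'application/x-gzip'  # pre-compressed sitemap chunk
    if path.endswith('.xml'):
        return 'text/xml'            # sitemap index or uncompressed chunk
    return None                      # the view raises a 404 in this case

assert sitemap_mime('sitemap_index.xml') == 'text/xml'
assert sitemap_mime('sitemap-001.xml.gz') == 'application/x-gzip'
assert sitemap_mime('sitemap.txt') is None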
3047a3abf48ca8e4cab749bdad5191703fba9652159429ada71af24a9a931b25
def ember_app(path=None): 'Serve the contents of the ember application' ember_app_folder = None fp = (path or 'index.html') ember_app = None for k in EXTERNAL_EMBER_APPS.keys(): if request.path.strip('/').startswith(k): ember_app = EXTERNAL_EMBER_APPS[k] break if (not ember_app): raise HTTPError(http_status.HTTP_404_NOT_FOUND) if settings.PROXY_EMBER_APPS: path = request.path[len(ember_app['path']):] url = urljoin(ember_app['server'], path) resp = requests.get(url, stream=True, timeout=EXTERNAL_EMBER_SERVER_TIMEOUT, headers={'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'}) excluded_headers = ['content-encoding', 'content-length', 'transfer-encoding', 'connection'] headers = [(name, value) for (name, value) in resp.raw.headers.items() if (name.lower() not in excluded_headers)] return Response(resp.content, resp.status_code, headers) ember_app_folder = os.path.abspath(os.path.join(os.getcwd(), ember_app['path'])) if (not ember_app_folder): raise HTTPError(http_status.HTTP_404_NOT_FOUND) if (not os.path.abspath(os.path.join(ember_app_folder, fp)).startswith(ember_app_folder)): raise HTTPError(http_status.HTTP_404_NOT_FOUND) if (not os.path.isfile(os.path.join(ember_app_folder, fp))): fp = 'index.html' return send_from_directory(ember_app_folder, fp)
Serve the contents of the ember application
website/routes.py
ember_app
felliott/osf.io
628
python
def ember_app(path=None): ember_app_folder = None fp = (path or 'index.html') ember_app = None for k in EXTERNAL_EMBER_APPS.keys(): if request.path.strip('/').startswith(k): ember_app = EXTERNAL_EMBER_APPS[k] break if (not ember_app): raise HTTPError(http_status.HTTP_404_NOT_FOUND) if settings.PROXY_EMBER_APPS: path = request.path[len(ember_app['path']):] url = urljoin(ember_app['server'], path) resp = requests.get(url, stream=True, timeout=EXTERNAL_EMBER_SERVER_TIMEOUT, headers={'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'}) excluded_headers = ['content-encoding', 'content-length', 'transfer-encoding', 'connection'] headers = [(name, value) for (name, value) in resp.raw.headers.items() if (name.lower() not in excluded_headers)] return Response(resp.content, resp.status_code, headers) ember_app_folder = os.path.abspath(os.path.join(os.getcwd(), ember_app['path'])) if (not ember_app_folder): raise HTTPError(http_status.HTTP_404_NOT_FOUND) if (not os.path.abspath(os.path.join(ember_app_folder, fp)).startswith(ember_app_folder)): raise HTTPError(http_status.HTTP_404_NOT_FOUND) if (not os.path.isfile(os.path.join(ember_app_folder, fp))): fp = 'index.html' return send_from_directory(ember_app_folder, fp)
def ember_app(path=None): ember_app_folder = None fp = (path or 'index.html') ember_app = None for k in EXTERNAL_EMBER_APPS.keys(): if request.path.strip('/').startswith(k): ember_app = EXTERNAL_EMBER_APPS[k] break if (not ember_app): raise HTTPError(http_status.HTTP_404_NOT_FOUND) if settings.PROXY_EMBER_APPS: path = request.path[len(ember_app['path']):] url = urljoin(ember_app['server'], path) resp = requests.get(url, stream=True, timeout=EXTERNAL_EMBER_SERVER_TIMEOUT, headers={'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'}) excluded_headers = ['content-encoding', 'content-length', 'transfer-encoding', 'connection'] headers = [(name, value) for (name, value) in resp.raw.headers.items() if (name.lower() not in excluded_headers)] return Response(resp.content, resp.status_code, headers) ember_app_folder = os.path.abspath(os.path.join(os.getcwd(), ember_app['path'])) if (not ember_app_folder): raise HTTPError(http_status.HTTP_404_NOT_FOUND) if (not os.path.abspath(os.path.join(ember_app_folder, fp)).startswith(ember_app_folder)): raise HTTPError(http_status.HTTP_404_NOT_FOUND) if (not os.path.isfile(os.path.join(ember_app_folder, fp))): fp = 'index.html' return send_from_directory(ember_app_folder, fp)<|docstring|>Serve the contents of the ember application<|endoftext|>
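Two details of ember_app are worth restating. The local-serving branch guards against path traversal by resolving the joined path and requiring it to stay under the app folder; a standalone sketch of that check:

import os

def safe_resolve(root, requested):
    root = os.path.abspath(root)
    full = os.path.abspath(os.path.join(root, requested))
    if not full.startswith(root):  # '..' segments resolve outside root
        raise ValueError('path escapes ' + root)
    return full

The proxy branch, meanwhile, strips content-encoding, content-length, transfer-encoding and connection from the upstream headers, because requests has already decoded the body and Flask recomputes the length. A hardened variant of the traversal check would compare against root + os.sep so that a sibling directory sharing a prefix cannot slip through.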
26f7a9e1cdfbba7e6fdf85c5264947ad3d627e6c5ccb027ca9489185b6498bb0
def make_url_map(app): 'Set up all the routes for the OSF app.\n\n :param app: A Flask/Werkzeug app to bind the rules to.\n ' process_rules(app, [Rule('/<path:_>', ['get', 'post'], HTTPError(http_status.HTTP_404_NOT_FOUND), OsfWebRenderer('', render_mako_string, trust=False)), Rule('/api/v1/<path:_>', ['get', 'post'], HTTPError(http_status.HTTP_404_NOT_FOUND), json_renderer)]) process_rules(app, [Rule(['/<guid>/', '/<guid>/<path:suffix>'], ['get', 'post', 'put', 'patch', 'delete'], website_views.resolve_guid, notemplate), Rule(['/api/v1/<guid>/', '/api/v1/<guid>/<path:suffix>'], ['get', 'post', 'put', 'patch', 'delete'], website_views.resolve_guid, json_renderer)]) process_rules(app, [Rule('/favicon.ico', 'get', favicon, json_renderer), Rule('/robots.txt', 'get', robots, json_renderer), Rule('/sitemaps/<path>', 'get', sitemap_file, json_renderer)]) if settings.USE_EXTERNAL_EMBER: for prefix in EXTERNAL_EMBER_APPS.keys(): process_rules(app, [Rule(['/<provider>/<guid>/download', '/<provider>/<guid>/download/'], ['get', 'post', 'put', 'patch', 'delete'], website_views.resolve_guid_download, notemplate, endpoint_suffix=('__' + prefix))], prefix=('/' + prefix)) process_rules(app, [Rule(['/', '/<path:path>'], 'get', ember_app, json_renderer, endpoint_suffix=('__' + prefix))], prefix=('/' + prefix)) if EXTERNAL_EMBER_APPS.get('ember_osf_web'): process_rules(app, [Rule(ember_osf_web_views.routes, 'get', ember_osf_web_views.use_ember_app, notemplate)]) if ('routes' in EXTERNAL_EMBER_APPS['ember_osf_web']): for route in EXTERNAL_EMBER_APPS['ember_osf_web']['routes']: process_rules(app, [Rule(['/', '/<path:path>'], 'get', ember_osf_web_views.use_ember_app, notemplate, endpoint_suffix=('__' + route))], prefix=('/' + route)) process_rules(app, [Rule('/dashboard/', 'get', website_views.dashboard, notemplate), Rule('/myprojects/', 'get', website_views.my_projects, OsfWebRenderer('my_projects.mako', trust=False)), Rule('/reproducibility/', 'get', website_views.reproducibility, notemplate), Rule('/about/', 'get', website_views.redirect_about, notemplate), Rule('/help/', 'get', website_views.redirect_help, notemplate), Rule('/faq/', 'get', website_views.redirect_faq, notemplate), Rule(['/getting-started/', '/getting-started/email/', '/howosfworks/'], 'get', website_views.redirect_getting_started, notemplate), Rule(['/messages/'], 'get', {}, OsfWebRenderer('public/comingsoon.mako', trust=False)), Rule('/meetings/<meeting>/', 'get', conference_views.conference_results, OsfWebRenderer('public/pages/meeting.mako', trust=False)), Rule('/view/<meeting>/', 'get', conference_views.redirect_to_conference_results, notemplate), Rule('/view/<meeting>/plain/', 'get', conference_views.conference_results, OsfWebRenderer('public/pages/meeting_plain.mako', trust=False), endpoint_suffix='__plain'), Rule('/api/v1/view/<meeting>/', 'get', conference_views.conference_data, json_renderer), Rule('/meetings/', 'get', conference_views.conference_view, OsfWebRenderer('public/pages/meeting_landing.mako', trust=False)), Rule('/api/v1/meetings/submissions/', 'get', conference_views.conference_submissions, json_renderer), Rule('/presentations/', 'get', conference_views.redirect_to_meetings, json_renderer), Rule('/news/', 'get', website_views.redirect_to_cos_news, notemplate), Rule(['/rr/', '/registeredreports/', '/registeredreport/'], 'get', registries_views.registered_reports_landing, OsfWebRenderer('registered_reports_landing.mako', trust=False)), Rule('/erpc/', 'get', closed_challenges_views.erpc_landing_page, 
OsfWebRenderer('erpc_landing_page.mako', trust=False)), Rule('/prereg/', 'get', prereg.prereg_landing_page, OsfWebRenderer('prereg_landing_page.mako', trust=False)), Rule('/preprints/', 'get', preprint_views.preprint_landing_page, OsfWebRenderer('public/pages/preprint_landing.mako', trust=False)), Rule('/registries/', 'get', registries_views.registries_landing_page, OsfWebRenderer('public/pages/registries_landing.mako', trust=False)), Rule('/reviews/', 'get', reviews_views.reviews_landing_page, OsfWebRenderer('public/pages/reviews_landing.mako', trust=False)), Rule('/preprint/', 'get', preprint_views.preprint_redirect, notemplate), Rule(['/api/v1/<campaign>/draft_registrations/', '/api/v1/draft_registrations/'], 'get', registries_views.draft_registrations, json_renderer)]) process_rules(app, [Rule('/citations/styles/', 'get', citation_views.list_citation_styles, json_renderer)], prefix='/api/v1') process_rules(app, [Rule(['/project/<pid>/<addon>/settings/disable/', '/project/<pid>/node/<nid>/<addon>/settings/disable/'], 'post', addon_views.disable_addon, json_renderer), Rule('/profile/<uid>/<addon>/settings/', 'get', addon_views.get_addon_user_config, json_renderer)], prefix='/api/v1') process_rules(app, [Rule('/oauth/connect/<service_name>/', 'get', oauth_views.oauth_connect, json_renderer), Rule('/oauth/callback/<service_name>/', 'get', oauth_views.oauth_callback, OsfWebRenderer('util/oauth_complete.mako', trust=False))]) process_rules(app, [Rule(['/oauth/accounts/<external_account_id>/'], 'delete', oauth_views.oauth_disconnect, json_renderer)], prefix='/api/v1') process_rules(app, [Rule('/confirmed_emails/', 'put', auth_views.unconfirmed_email_add, json_renderer), Rule('/confirmed_emails/', 'delete', auth_views.unconfirmed_email_remove, json_renderer)], prefix='/api/v1') process_rules(app, [Rule(['/project/<pid>/comments/timestamps/', '/project/<pid>/node/<nid>/comments/timestamps/'], 'put', project_views.comment.update_comments_timestamp, json_renderer), Rule(['/project/<pid>/citation/', '/project/<pid>/node/<nid>/citation/'], 'get', citation_views.node_citation, json_renderer)], prefix='/api/v1') process_rules(app, [Rule('/forms/signin/', 'get', website_views.signin_form, json_renderer), Rule('/forms/forgot_password/', 'get', website_views.forgot_password_form, json_renderer)], prefix='/api/v1') process_rules(app, [Rule(['/activity/', '/explore/activity/', '/explore/'], 'get', discovery_views.redirect_activity_to_search, notemplate)]) process_rules(app, [Rule('/confirm/<uid>/<token>/', 'get', auth_views.confirm_email_get, notemplate), Rule('/confirm/external/<uid>/<token>/', 'get', auth_views.external_login_confirm_email_get, notemplate), Rule('/resetpassword/<uid>/<token>/', 'get', auth_views.reset_password_get, OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)), Rule('/resetpassword/<uid>/<token>/', 'post', auth_views.reset_password_post, OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)), Rule('/resetpassword-institution/<uid>/<token>/', 'get', auth_views.reset_password_institution_get, OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)), Rule('/resetpassword-institution/<uid>/<token>/', 'post', auth_views.reset_password_institution_post, OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)), Rule('/resend/', 'get', auth_views.resend_confirmation_get, OsfWebRenderer('resend.mako', render_mako_string, trust=False)), Rule('/resend/', 'post', auth_views.resend_confirmation_post, 
OsfWebRenderer('resend.mako', render_mako_string, trust=False)), Rule('/external-login/email', 'get', auth_views.external_login_email_get, OsfWebRenderer('external_login_email.mako', render_mako_string, trust=False)), Rule('/external-login/email', 'post', auth_views.external_login_email_post, OsfWebRenderer('external_login_email.mako', render_mako_string, trust=False)), Rule('/register/', 'get', auth_views.auth_register, OsfWebRenderer('public/register.mako', trust=False)), Rule(['/login/', '/account/'], 'get', auth_views.auth_login, notemplate), Rule('/api/v1/register/', 'post', auth_views.register_user, json_renderer), Rule('/logout/', 'get', auth_views.auth_logout, notemplate), Rule('/forgotpassword/', 'get', auth_views.forgot_password_get, OsfWebRenderer('public/forgot_password.mako', trust=False)), Rule('/forgotpassword/', 'post', auth_views.forgot_password_post, OsfWebRenderer('public/forgot_password.mako', trust=False)), Rule('/forgotpassword-institution/', 'get', auth_views.redirect_unsupported_institution, notemplate), Rule('/forgotpassword-institution/', 'post', auth_views.forgot_password_institution_post, OsfWebRenderer('public/forgot_password.mako', trust=False)), Rule('/login/connected_tools/', 'get', landing_page_views.connected_tools, notemplate), Rule('/login/enriched_profile/', 'get', landing_page_views.enriched_profile, notemplate)]) process_rules(app, [Rule('/profile/', 'get', profile_views.profile_view, OsfWebRenderer('profile.mako', trust=False)), Rule('/profile/<uid>/', 'get', profile_views.profile_view_id, OsfWebRenderer('profile.mako', trust=False)), Rule(['/user/<uid>/<pid>/claim/'], ['get', 'post'], project_views.contributor.claim_user_form, OsfWebRenderer('claim_account.mako', trust=False)), Rule(['/user/<uid>/<pid>/claim/verify/<token>/'], ['get', 'post'], project_views.contributor.claim_user_registered, OsfWebRenderer('claim_account_registered.mako', trust=False)), Rule('/settings/', 'get', profile_views.user_profile, OsfWebRenderer('profile/settings.mako', trust=False)), Rule(['/project/<pid>/addons/', '/project/<pid>/node/<nid>/addons/'], 'get', project_views.node.node_addons, OsfWebRenderer('project/addons.mako', trust=False)), Rule('/settings/account/', 'get', profile_views.user_account, OsfWebRenderer('profile/account.mako', trust=False)), Rule('/settings/account/password', 'post', profile_views.user_account_password, OsfWebRenderer('profile/account.mako', trust=False)), Rule('/settings/addons/', 'get', profile_views.user_addons, OsfWebRenderer('profile/addons.mako', trust=False)), Rule('/settings/notifications/', 'get', profile_views.user_notifications, OsfWebRenderer('profile/notifications.mako', trust=False)), Rule('/settings/applications/', 'get', profile_views.oauth_application_list, OsfWebRenderer('profile/oauth_app_list.mako', trust=False)), Rule('/settings/applications/create/', 'get', profile_views.oauth_application_register, OsfWebRenderer('profile/oauth_app_detail.mako', trust=False)), Rule('/settings/applications/<client_id>/', 'get', profile_views.oauth_application_detail, OsfWebRenderer('profile/oauth_app_detail.mako', trust=False)), Rule('/settings/tokens/', 'get', profile_views.personal_access_token_list, OsfWebRenderer('profile/personal_tokens_list.mako', trust=False)), Rule('/settings/tokens/create/', 'get', profile_views.personal_access_token_register, OsfWebRenderer('profile/personal_tokens_detail.mako', trust=False)), Rule('/settings/tokens/<_id>/', 'get', profile_views.personal_access_token_detail, 
OsfWebRenderer('profile/personal_tokens_detail.mako', trust=False))]) process_rules(app, [Rule('/profile/', 'get', profile_views.profile_view_json, json_renderer), Rule('/profile/', 'put', profile_views.update_user, json_renderer), Rule('/resend/', 'put', profile_views.resend_confirmation, json_renderer), Rule('/profile/<uid>/', 'get', profile_views.profile_view_id_json, json_renderer), Rule('/user/<uid>/<pid>/claim/email/', 'post', project_views.contributor.claim_user_post, json_renderer), Rule('/profile/export/', 'post', profile_views.request_export, json_renderer), Rule('/profile/region/', 'put', osfstorage_views.update_region, json_renderer), Rule('/profile/deactivate/', 'post', profile_views.request_deactivation, json_renderer), Rule('/profile/cancel_request_deactivation/', 'post', profile_views.cancel_request_deactivation, json_renderer), Rule('/profile/logins/', 'patch', profile_views.delete_external_identity, json_renderer), Rule('/settings/names/', 'get', profile_views.serialize_names, json_renderer), Rule('/settings/names/', 'put', profile_views.unserialize_names, json_renderer), Rule('/settings/names/impute/', 'get', profile_views.impute_names, json_renderer), Rule(['/settings/social/', '/settings/social/<uid>/'], 'get', profile_views.serialize_social, json_renderer), Rule(['/settings/jobs/', '/settings/jobs/<uid>/'], 'get', profile_views.serialize_jobs, json_renderer), Rule(['/settings/schools/', '/settings/schools/<uid>/'], 'get', profile_views.serialize_schools, json_renderer), Rule(['/settings/social/', '/settings/social/<uid>/'], 'put', profile_views.unserialize_social, json_renderer), Rule(['/settings/jobs/', '/settings/jobs/<uid>/'], 'put', profile_views.unserialize_jobs, json_renderer), Rule(['/settings/schools/', '/settings/schools/<uid>/'], 'put', profile_views.unserialize_schools, json_renderer)], prefix='/api/v1') process_rules(app, [Rule('/search/', 'get', search_views.search_view, OsfWebRenderer('search.mako', trust=False)), Rule('/share/registration/', 'get', {'register': settings.SHARE_REGISTRATION_URL}, json_renderer), Rule('/api/v1/user/search/', 'get', search_views.search_contributor, json_renderer), Rule('/api/v1/search/node/', 'post', project_views.node.search_node, json_renderer)]) process_rules(app, [Rule(['/search/', '/search/<type>/'], ['get', 'post'], search_views.search_search, json_renderer), Rule('/search/projects/', 'get', search_views.search_projects_by_title, json_renderer), Rule('/share/search/', 'get', website_views.legacy_share_v1_search, json_renderer)], prefix='/api/v1') process_rules(app, [Rule('/institutions/<inst_id>/', 'get', institution_views.view_institution, OsfWebRenderer('institution.mako', trust=False))]) process_rules(app, [Rule(['/institutions/<inst_id>/dashboard/'], 'get', institution_views.view_institution_dashboard, notemplate)]) process_rules(app, [Rule('/', 'get', website_views.index, OsfWebRenderer('institution.mako', trust=False)), Rule('/goodbye/', 'get', goodbye, notemplate), Rule(['/project/<pid>/', '/project/<pid>/node/<nid>/'], 'get', project_views.node.view_project, OsfWebRenderer('project/project.mako', trust=False)), Rule(['/token_action/<pid>/'], 'get', project_views.node.token_action, notemplate), Rule('/project/<pid>/newnode/', 'post', project_views.node.project_new_node, notemplate), Rule('/project/new/<pid>/beforeTemplate/', 'get', project_views.node.project_before_template, json_renderer), Rule(['/project/<pid>/contributors/', '/project/<pid>/node/<nid>/contributors/'], 'get', 
project_views.node.node_contributors, OsfWebRenderer('project/contributors.mako', trust=False)), Rule(['/project/<pid>/settings/', '/project/<pid>/node/<nid>/settings/'], 'get', project_views.node.node_setting, OsfWebRenderer('project/settings.mako', trust=False)), Rule(['/project/<pid>/permissions/<permissions>/', '/project/<pid>/node/<nid>/permissions/<permissions>/'], 'post', project_views.node.project_set_privacy, OsfWebRenderer('project/project.mako', trust=False)), Rule(['/project/<pid>/forks/', '/project/<pid>/node/<nid>/forks/'], 'get', project_views.node.node_forks, notemplate), Rule(['/project/<pid>/register/', '/project/<pid>/node/<nid>/register/'], 'get', project_views.register.node_register_page, OsfWebRenderer('project/register.mako', trust=False)), Rule(['/project/<pid>/register/<metaschema_id>/', '/project/<pid>/node/<nid>/register/<metaschema_id>/'], 'get', project_views.register.node_register_template_page, OsfWebRenderer('project/register.mako', trust=False)), Rule(['/project/<pid>/registrations/', '/project/<pid>/node/<nid>/registrations/'], 'get', project_views.node.node_registrations, notemplate), Rule(['/project/<pid>/registrations/', '/project/<pid>/node/<nid>/registrations/'], 'post', project_views.drafts.new_draft_registration, OsfWebRenderer('project/edit_draft_registration.mako', trust=False)), Rule(['/project/<pid>/drafts/<draft_id>/', '/project/<pid>/node/<nid>/drafts/<draft_id>/'], 'get', project_views.drafts.edit_draft_registration_page, OsfWebRenderer('project/edit_draft_registration.mako', trust=False)), Rule(['/project/<pid>/drafts/<draft_id>/register/', '/project/<pid>/node/<nid>/drafts/<draft_id>/register/'], 'get', project_views.drafts.draft_before_register_page, OsfWebRenderer('project/register_draft.mako', trust=False)), Rule(['/project/<pid>/retraction/', '/project/<pid>/node/<nid>/retraction/'], 'get', project_views.register.node_registration_retraction_redirect, notemplate), Rule(['/project/<pid>/withdraw/', '/project/<pid>/node/<nid>/withdraw/'], 'get', project_views.register.node_registration_retraction_get, OsfWebRenderer('project/retract_registration.mako', trust=False)), Rule('/ids/<category>/<path:value>/', 'get', project_views.register.get_referent_by_identifier, notemplate), Rule(['/project/<pid>/analytics/', '/project/<pid>/node/<nid>/analytics/'], 'get', project_views.node.project_statistics, notemplate), Rule(['/project/<pid>/files/', '/project/<pid>/node/<nid>/files/'], 'get', project_views.file.collect_file_trees, OsfWebRenderer('project/files.mako', trust=False), view_kwargs={'mode': 'page'}), Rule(['/<guid>/files/<provider>/<path:path>/', '/project/<pid>/files/<provider>/<path:path>/', '/project/<pid>/node/<nid>/files/<provider>/<path:path>/'], 'get', addon_views.addon_view_or_download_file, OsfWebRenderer('project/view_file.mako', trust=False)), Rule('/download/<fid_or_guid>/', 'get', addon_views.persistent_file_download, json_renderer), Rule(['/api/v1/<guid>/files/<provider>/<path:path>/', '/api/v1/project/<pid>/files/<provider>/<path:path>/', '/api/v1/project/<pid>/node/<nid>/files/<provider>/<path:path>/'], 'get', addon_views.addon_view_or_download_file, json_renderer), Rule(['/project/<pid>/files/deleted/<trashed_id>/', '/project/<pid>/node/<nid>/files/deleted/<trashed_id>/'], 'get', addon_views.addon_deleted_file, OsfWebRenderer('project/view_file.mako', trust=False)), Rule(['/project/<pid>/<provider>/files/<path:path>/', '/project/<pid>/node/<nid>/<provider>/files/<path:path>/', 
'/project/<pid>/<provider>/files/<path:path>/download/', '/project/<pid>/node/<nid>/<provider>/files/<path:path>/download/', '/project/<pid>/osffiles/<fid>/download/', '/project/<pid>/node/<nid>/osffiles/<fid>/download/', '/project/<pid>/osffiles/<fid>/', '/project/<pid>/node/<nid>/osffiles/<fid>/', '/project/<pid>/osffiles/download/<fid>/', '/project/<pid>/node/<nid>/osffiles/download/<fid>/', '/project/<pid>/files/<fid>/', '/project/<pid>/node/<nid>/files/<fid>/', '/project/<pid>/files/download/<fid>/', '/project/<pid>/node/<nid>/files/download/<fid>/', '/project/<pid>/osffiles/<fid>/version/<vid>/download/', '/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/download/', '/project/<pid>/osffiles/<fid>/version/<vid>/', '/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/', '/project/<pid>/osffiles/download/<fid>/version/<vid>/', '/project/<pid>/node/<nid>/osffiles/download/<fid>/version/<vid>/', '/project/<pid>/files/<fid>/version/<vid>/', '/project/<pid>/node/<nid>/files/<fid>/version/<vid>/', '/project/<pid>/files/download/<fid>/version/<vid>/', '/project/<pid>/node/<nid>/files/download/<fid>/version/<vid>/'], 'get', addon_views.addon_view_or_download_file_legacy, OsfWebRenderer('project/view_file.mako', trust=False)), Rule(['/api/v1/project/<pid>/osffiles/<fid>/', '/api/v1/project/<pid>/node/<nid>/osffiles/<fid>/', '/api/v1/project/<pid>/files/download/<fid>/', '/api/v1/project/<pid>/node/<nid>/files/download/<fid>/', '/api/v1/project/<pid>/osffiles/<fid>/version/<vid>/', '/api/v1/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/', '/api/v1/project/<pid>/files/download/<fid>/version/<vid>/', '/api/v1/project/<pid>/node/<nid>/files/download/<fid>/version/<vid>/'], 'get', addon_views.addon_view_or_download_file_legacy, json_renderer), Rule(['/quickfiles/<fid>/'], 'get', addon_views.addon_view_or_download_quickfile, json_renderer)]) process_rules(app, [Rule('/email/meeting/', 'post', conference_views.meeting_hook, json_renderer), Rule('/mailchimp/hooks/', 'get', profile_views.mailchimp_get_endpoint, json_renderer), Rule('/mailchimp/hooks/', 'post', profile_views.sync_data_from_mailchimp, json_renderer), Rule('/project/new/', 'post', project_views.node.project_new_post, json_renderer), Rule(['/project/<pid>/contributors_abbrev/', '/project/<pid>/node/<nid>/contributors_abbrev/'], 'get', project_views.contributor.get_node_contributors_abbrev, json_renderer), Rule('/tags/<tag>/', 'get', project_views.tag.project_tag, json_renderer), Rule(['/project/<pid>/', '/project/<pid>/node/<nid>/'], 'get', project_views.node.view_project, json_renderer), Rule(['/project/<pid>/pointer/', '/project/<pid>/node/<nid>/pointer/'], 'get', project_views.node.get_pointed, json_renderer), Rule(['/project/<pid>/pointer/', '/project/<pid>/node/<nid>/pointer/'], 'post', project_views.node.add_pointers, json_renderer), Rule(['/pointer/'], 'post', project_views.node.add_pointer, json_renderer), Rule(['/project/<pid>/pointer/', '/project/<pid>/node/<nid>pointer/'], 'delete', project_views.node.remove_pointer, json_renderer), Rule(['/project/<pid>/drafts/'], 'get', project_views.drafts.get_draft_registrations, json_renderer), Rule(['/project/<pid>/drafts/<draft_id>/'], 'get', project_views.drafts.get_draft_registration, json_renderer), Rule(['/project/<pid>/drafts/<draft_id>/'], 'put', project_views.drafts.update_draft_registration, json_renderer), Rule(['/project/<pid>/drafts/<draft_id>/'], 'delete', project_views.drafts.delete_draft_registration, json_renderer), Rule(['/project/drafts/schemas/'], 'get', 
project_views.drafts.get_metaschemas, json_renderer), Rule(['/project/<pid>/get_contributors/', '/project/<pid>/node/<nid>/get_contributors/'], 'get', project_views.contributor.get_contributors, json_renderer), Rule(['/project/<pid>/get_contributors_from_parent/', '/project/<pid>/node/<nid>/get_contributors_from_parent/'], 'get', project_views.contributor.get_contributors_from_parent, json_renderer), Rule(['/project/<pid>/contributors/manage/', '/project/<pid>/node/<nid>/contributors/manage/'], 'POST', project_views.contributor.project_manage_contributors, json_renderer), Rule(['/project/<pid>/contributor/remove/', '/project/<pid>/node/<nid>/contributor/remove/'], 'POST', project_views.contributor.project_remove_contributor, json_renderer), Rule(['/project/<pid>/get_editable_children/', '/project/<pid>/node/<nid>/get_editable_children/'], 'get', project_views.node.get_editable_children, json_renderer), Rule(['/project/<pid>/private_link/', '/project/<pid>/node/<nid>/private_link/'], 'post', project_views.node.project_generate_private_link_post, json_renderer), Rule(['/project/<pid>/private_link/edit/', '/project/<pid>/node/<nid>/private_link/edit/'], 'put', project_views.node.project_private_link_edit, json_renderer), Rule(['/project/<pid>/private_link/', '/project/<pid>/node/<nid>/private_link/'], 'delete', project_views.node.remove_private_link, json_renderer), Rule(['/project/<pid>/private_link/', '/project/<pid>/node/<nid>/private_link/'], 'get', project_views.node.private_link_table, json_renderer), Rule(['/project/new/<nid>/'], 'post', project_views.node.project_new_from_template, json_renderer), Rule(['/project/<pid>/', '/project/<pid>/node/<nid>/'], 'put', project_views.node.update_node, json_renderer), Rule(['/project/<pid>/', '/project/<pid>/node/<nid>/'], 'delete', project_views.node.component_remove, json_renderer), Rule('/project/<pid>/reorder_components/', 'post', project_views.node.project_reorder_components, json_renderer), Rule(['/project/<pid>/edit/', '/project/<pid>/node/<nid>/edit/'], 'post', project_views.node.edit_node, json_renderer), Rule(['/project/<pid>/tags/', '/project/<pid>/node/<nid>/tags/', '/project/<pid>/tags/<tag>/', '/project/<pid>/node/<nid>/tags/<tag>/'], 'post', project_views.tag.project_add_tag, json_renderer), Rule(['/project/<pid>/tags/', '/project/<pid>/node/<nid>/tags/', '/project/<pid>/tags/<tag>/', '/project/<pid>/node/<nid>/tags/<tag>/'], 'delete', project_views.tag.project_remove_tag, json_renderer), Rule(['/project/<pid>/contributors/', '/project/<pid>/node/<nid>/contributors/'], 'post', project_views.contributor.project_contributors_post, json_renderer), Rule(['/project/<pid>/fork/before/', '/project/<pid>/node/<nid>/fork/before/'], 'get', project_views.node.project_before_fork, json_renderer), Rule(['/project/<pid>/pointer/fork/', '/project/<pid>/node/<nid>/pointer/fork/'], 'post', project_views.node.fork_pointer, json_renderer), Rule(['/project/<pid>/beforeregister/', '/project/<pid>/node/<nid>/beforeregister'], 'get', project_views.register.project_before_register, json_renderer), Rule(['/project/<pid>/withdraw/', '/project/<pid>/node/<nid>/withdraw/'], 'post', project_views.register.node_registration_retraction_post, json_renderer), Rule(['/project/<pid>/identifiers/', '/project/<pid>/node/<nid>/identifiers/'], 'post', identifier_views.node_identifiers_post, json_renderer), Rule(['/project/<pid>/files/grid/', '/project/<pid>/node/<nid>/files/grid/'], 'get', project_views.file.grid_data, json_renderer), Rule('/files/auth/', 'get', 
addon_views.get_auth, json_renderer), Rule(['/project/<pid>/waterbutler/logs/', '/project/<pid>/node/<nid>/waterbutler/logs/'], 'put', addon_views.create_waterbutler_log, json_renderer), Rule(['/registration/<pid>/callbacks/'], 'put', project_views.register.registration_callbacks, json_renderer), Rule('/settings/addons/', 'post', profile_views.user_choose_addons, json_renderer), Rule('/settings/notifications/', 'get', profile_views.user_notifications, json_renderer), Rule('/settings/notifications/', 'post', profile_views.user_choose_mailing_lists, json_renderer), Rule('/subscriptions/', 'get', notification_views.get_subscriptions, json_renderer), Rule(['/project/<pid>/subscriptions/', '/project/<pid>/node/<nid>/subscriptions/'], 'get', notification_views.get_node_subscriptions, json_renderer), Rule(['/project/<pid>/tree/', '/project/<pid>/node/<nid>/tree/'], 'get', project_views.node.get_node_tree, json_renderer), Rule('/subscriptions/', 'post', notification_views.configure_subscription, json_renderer), Rule(['/project/<pid>/settings/addons/', '/project/<pid>/node/<nid>/settings/addons/'], 'post', project_views.node.node_choose_addons, json_renderer), Rule(['/project/<pid>/settings/comments/', '/project/<pid>/node/<nid>/settings/comments/'], 'post', project_views.node.configure_comments, json_renderer), Rule(['/project/<pid>/settings/requests/', '/project/<pid>/node/<nid>/settings/requests/'], 'post', project_views.node.configure_requests, json_renderer), Rule(['/project/<pid>/invite_contributor/', '/project/<pid>/node/<nid>/invite_contributor/'], 'post', project_views.contributor.invite_contributor_post, json_renderer)], prefix='/api/v1') addon_base_path = os.path.abspath('addons') provider_static_path = os.path.abspath('assets') if settings.DEV_MODE: @app.route('/static/addons/<addon>/<path:filename>') def addon_static(addon, filename): addon_path = os.path.join(addon_base_path, addon, 'static') return send_from_directory(addon_path, filename) @app.route('/assets/<filename>') def provider_static(filename): return send_from_directory(provider_static_path, filename) @app.route('/ember-cli-live-reload.js') def ember_cli_live_reload(): req = requests.get('{}/ember-cli-live-reload.js'.format(settings.LIVE_RELOAD_DOMAIN), stream=True) return Response(stream_with_context(req.iter_content()), content_type=req.headers['content-type'])
Set up all the routes for the OSF app. :param app: A Flask/Werkzeug app to bind the rules to.
website/routes.py
make_url_map
felliott/osf.io
628
python
def make_url_map(app): 'Set up all the routes for the OSF app.\n\n :param app: A Flask/Werkzeug app to bind the rules to.\n ' process_rules(app, [Rule('/<path:_>', ['get', 'post'], HTTPError(http_status.HTTP_404_NOT_FOUND), OsfWebRenderer('', render_mako_string, trust=False)), Rule('/api/v1/<path:_>', ['get', 'post'], HTTPError(http_status.HTTP_404_NOT_FOUND), json_renderer)]) process_rules(app, [Rule(['/<guid>/', '/<guid>/<path:suffix>'], ['get', 'post', 'put', 'patch', 'delete'], website_views.resolve_guid, notemplate), Rule(['/api/v1/<guid>/', '/api/v1/<guid>/<path:suffix>'], ['get', 'post', 'put', 'patch', 'delete'], website_views.resolve_guid, json_renderer)]) process_rules(app, [Rule('/favicon.ico', 'get', favicon, json_renderer), Rule('/robots.txt', 'get', robots, json_renderer), Rule('/sitemaps/<path>', 'get', sitemap_file, json_renderer)]) if settings.USE_EXTERNAL_EMBER: for prefix in EXTERNAL_EMBER_APPS.keys(): process_rules(app, [Rule(['/<provider>/<guid>/download', '/<provider>/<guid>/download/'], ['get', 'post', 'put', 'patch', 'delete'], website_views.resolve_guid_download, notemplate, endpoint_suffix=('__' + prefix))], prefix=('/' + prefix)) process_rules(app, [Rule(['/', '/<path:path>'], 'get', ember_app, json_renderer, endpoint_suffix=('__' + prefix))], prefix=('/' + prefix)) if EXTERNAL_EMBER_APPS.get('ember_osf_web'): process_rules(app, [Rule(ember_osf_web_views.routes, 'get', ember_osf_web_views.use_ember_app, notemplate)]) if ('routes' in EXTERNAL_EMBER_APPS['ember_osf_web']): for route in EXTERNAL_EMBER_APPS['ember_osf_web']['routes']: process_rules(app, [Rule(['/', '/<path:path>'], 'get', ember_osf_web_views.use_ember_app, notemplate, endpoint_suffix=('__' + route))], prefix=('/' + route)) process_rules(app, [Rule('/dashboard/', 'get', website_views.dashboard, notemplate), Rule('/myprojects/', 'get', website_views.my_projects, OsfWebRenderer('my_projects.mako', trust=False)), Rule('/reproducibility/', 'get', website_views.reproducibility, notemplate), Rule('/about/', 'get', website_views.redirect_about, notemplate), Rule('/help/', 'get', website_views.redirect_help, notemplate), Rule('/faq/', 'get', website_views.redirect_faq, notemplate), Rule(['/getting-started/', '/getting-started/email/', '/howosfworks/'], 'get', website_views.redirect_getting_started, notemplate), Rule(['/messages/'], 'get', {}, OsfWebRenderer('public/comingsoon.mako', trust=False)), Rule('/meetings/<meeting>/', 'get', conference_views.conference_results, OsfWebRenderer('public/pages/meeting.mako', trust=False)), Rule('/view/<meeting>/', 'get', conference_views.redirect_to_conference_results, notemplate), Rule('/view/<meeting>/plain/', 'get', conference_views.conference_results, OsfWebRenderer('public/pages/meeting_plain.mako', trust=False), endpoint_suffix='__plain'), Rule('/api/v1/view/<meeting>/', 'get', conference_views.conference_data, json_renderer), Rule('/meetings/', 'get', conference_views.conference_view, OsfWebRenderer('public/pages/meeting_landing.mako', trust=False)), Rule('/api/v1/meetings/submissions/', 'get', conference_views.conference_submissions, json_renderer), Rule('/presentations/', 'get', conference_views.redirect_to_meetings, json_renderer), Rule('/news/', 'get', website_views.redirect_to_cos_news, notemplate), Rule(['/rr/', '/registeredreports/', '/registeredreport/'], 'get', registries_views.registered_reports_landing, OsfWebRenderer('registered_reports_landing.mako', trust=False)), Rule('/erpc/', 'get', closed_challenges_views.erpc_landing_page, 
OsfWebRenderer('erpc_landing_page.mako', trust=False)), Rule('/prereg/', 'get', prereg.prereg_landing_page, OsfWebRenderer('prereg_landing_page.mako', trust=False)), Rule('/preprints/', 'get', preprint_views.preprint_landing_page, OsfWebRenderer('public/pages/preprint_landing.mako', trust=False)), Rule('/registries/', 'get', registries_views.registries_landing_page, OsfWebRenderer('public/pages/registries_landing.mako', trust=False)), Rule('/reviews/', 'get', reviews_views.reviews_landing_page, OsfWebRenderer('public/pages/reviews_landing.mako', trust=False)), Rule('/preprint/', 'get', preprint_views.preprint_redirect, notemplate), Rule(['/api/v1/<campaign>/draft_registrations/', '/api/v1/draft_registrations/'], 'get', registries_views.draft_registrations, json_renderer)]) process_rules(app, [Rule('/citations/styles/', 'get', citation_views.list_citation_styles, json_renderer)], prefix='/api/v1') process_rules(app, [Rule(['/project/<pid>/<addon>/settings/disable/', '/project/<pid>/node/<nid>/<addon>/settings/disable/'], 'post', addon_views.disable_addon, json_renderer), Rule('/profile/<uid>/<addon>/settings/', 'get', addon_views.get_addon_user_config, json_renderer)], prefix='/api/v1') process_rules(app, [Rule('/oauth/connect/<service_name>/', 'get', oauth_views.oauth_connect, json_renderer), Rule('/oauth/callback/<service_name>/', 'get', oauth_views.oauth_callback, OsfWebRenderer('util/oauth_complete.mako', trust=False))]) process_rules(app, [Rule(['/oauth/accounts/<external_account_id>/'], 'delete', oauth_views.oauth_disconnect, json_renderer)], prefix='/api/v1') process_rules(app, [Rule('/confirmed_emails/', 'put', auth_views.unconfirmed_email_add, json_renderer), Rule('/confirmed_emails/', 'delete', auth_views.unconfirmed_email_remove, json_renderer)], prefix='/api/v1') process_rules(app, [Rule(['/project/<pid>/comments/timestamps/', '/project/<pid>/node/<nid>/comments/timestamps/'], 'put', project_views.comment.update_comments_timestamp, json_renderer), Rule(['/project/<pid>/citation/', '/project/<pid>/node/<nid>/citation/'], 'get', citation_views.node_citation, json_renderer)], prefix='/api/v1') process_rules(app, [Rule('/forms/signin/', 'get', website_views.signin_form, json_renderer), Rule('/forms/forgot_password/', 'get', website_views.forgot_password_form, json_renderer)], prefix='/api/v1') process_rules(app, [Rule(['/activity/', '/explore/activity/', '/explore/'], 'get', discovery_views.redirect_activity_to_search, notemplate)]) process_rules(app, [Rule('/confirm/<uid>/<token>/', 'get', auth_views.confirm_email_get, notemplate), Rule('/confirm/external/<uid>/<token>/', 'get', auth_views.external_login_confirm_email_get, notemplate), Rule('/resetpassword/<uid>/<token>/', 'get', auth_views.reset_password_get, OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)), Rule('/resetpassword/<uid>/<token>/', 'post', auth_views.reset_password_post, OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)), Rule('/resetpassword-institution/<uid>/<token>/', 'get', auth_views.reset_password_institution_get, OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)), Rule('/resetpassword-institution/<uid>/<token>/', 'post', auth_views.reset_password_institution_post, OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)), Rule('/resend/', 'get', auth_views.resend_confirmation_get, OsfWebRenderer('resend.mako', render_mako_string, trust=False)), Rule('/resend/', 'post', auth_views.resend_confirmation_post, 
OsfWebRenderer('resend.mako', render_mako_string, trust=False)), Rule('/external-login/email', 'get', auth_views.external_login_email_get, OsfWebRenderer('external_login_email.mako', render_mako_string, trust=False)), Rule('/external-login/email', 'post', auth_views.external_login_email_post, OsfWebRenderer('external_login_email.mako', render_mako_string, trust=False)), Rule('/register/', 'get', auth_views.auth_register, OsfWebRenderer('public/register.mako', trust=False)), Rule(['/login/', '/account/'], 'get', auth_views.auth_login, notemplate), Rule('/api/v1/register/', 'post', auth_views.register_user, json_renderer), Rule('/logout/', 'get', auth_views.auth_logout, notemplate), Rule('/forgotpassword/', 'get', auth_views.forgot_password_get, OsfWebRenderer('public/forgot_password.mako', trust=False)), Rule('/forgotpassword/', 'post', auth_views.forgot_password_post, OsfWebRenderer('public/forgot_password.mako', trust=False)), Rule('/forgotpassword-institution/', 'get', auth_views.redirect_unsupported_institution, notemplate), Rule('/forgotpassword-institution/', 'post', auth_views.forgot_password_institution_post, OsfWebRenderer('public/forgot_password.mako', trust=False)), Rule('/login/connected_tools/', 'get', landing_page_views.connected_tools, notemplate), Rule('/login/enriched_profile/', 'get', landing_page_views.enriched_profile, notemplate)]) process_rules(app, [Rule('/profile/', 'get', profile_views.profile_view, OsfWebRenderer('profile.mako', trust=False)), Rule('/profile/<uid>/', 'get', profile_views.profile_view_id, OsfWebRenderer('profile.mako', trust=False)), Rule(['/user/<uid>/<pid>/claim/'], ['get', 'post'], project_views.contributor.claim_user_form, OsfWebRenderer('claim_account.mako', trust=False)), Rule(['/user/<uid>/<pid>/claim/verify/<token>/'], ['get', 'post'], project_views.contributor.claim_user_registered, OsfWebRenderer('claim_account_registered.mako', trust=False)), Rule('/settings/', 'get', profile_views.user_profile, OsfWebRenderer('profile/settings.mako', trust=False)), Rule(['/project/<pid>/addons/', '/project/<pid>/node/<nid>/addons/'], 'get', project_views.node.node_addons, OsfWebRenderer('project/addons.mako', trust=False)), Rule('/settings/account/', 'get', profile_views.user_account, OsfWebRenderer('profile/account.mako', trust=False)), Rule('/settings/account/password', 'post', profile_views.user_account_password, OsfWebRenderer('profile/account.mako', trust=False)), Rule('/settings/addons/', 'get', profile_views.user_addons, OsfWebRenderer('profile/addons.mako', trust=False)), Rule('/settings/notifications/', 'get', profile_views.user_notifications, OsfWebRenderer('profile/notifications.mako', trust=False)), Rule('/settings/applications/', 'get', profile_views.oauth_application_list, OsfWebRenderer('profile/oauth_app_list.mako', trust=False)), Rule('/settings/applications/create/', 'get', profile_views.oauth_application_register, OsfWebRenderer('profile/oauth_app_detail.mako', trust=False)), Rule('/settings/applications/<client_id>/', 'get', profile_views.oauth_application_detail, OsfWebRenderer('profile/oauth_app_detail.mako', trust=False)), Rule('/settings/tokens/', 'get', profile_views.personal_access_token_list, OsfWebRenderer('profile/personal_tokens_list.mako', trust=False)), Rule('/settings/tokens/create/', 'get', profile_views.personal_access_token_register, OsfWebRenderer('profile/personal_tokens_detail.mako', trust=False)), Rule('/settings/tokens/<_id>/', 'get', profile_views.personal_access_token_detail, 
OsfWebRenderer('profile/personal_tokens_detail.mako', trust=False))]) process_rules(app, [Rule('/profile/', 'get', profile_views.profile_view_json, json_renderer), Rule('/profile/', 'put', profile_views.update_user, json_renderer), Rule('/resend/', 'put', profile_views.resend_confirmation, json_renderer), Rule('/profile/<uid>/', 'get', profile_views.profile_view_id_json, json_renderer), Rule('/user/<uid>/<pid>/claim/email/', 'post', project_views.contributor.claim_user_post, json_renderer), Rule('/profile/export/', 'post', profile_views.request_export, json_renderer), Rule('/profile/region/', 'put', osfstorage_views.update_region, json_renderer), Rule('/profile/deactivate/', 'post', profile_views.request_deactivation, json_renderer), Rule('/profile/cancel_request_deactivation/', 'post', profile_views.cancel_request_deactivation, json_renderer), Rule('/profile/logins/', 'patch', profile_views.delete_external_identity, json_renderer), Rule('/settings/names/', 'get', profile_views.serialize_names, json_renderer), Rule('/settings/names/', 'put', profile_views.unserialize_names, json_renderer), Rule('/settings/names/impute/', 'get', profile_views.impute_names, json_renderer), Rule(['/settings/social/', '/settings/social/<uid>/'], 'get', profile_views.serialize_social, json_renderer), Rule(['/settings/jobs/', '/settings/jobs/<uid>/'], 'get', profile_views.serialize_jobs, json_renderer), Rule(['/settings/schools/', '/settings/schools/<uid>/'], 'get', profile_views.serialize_schools, json_renderer), Rule(['/settings/social/', '/settings/social/<uid>/'], 'put', profile_views.unserialize_social, json_renderer), Rule(['/settings/jobs/', '/settings/jobs/<uid>/'], 'put', profile_views.unserialize_jobs, json_renderer), Rule(['/settings/schools/', '/settings/schools/<uid>/'], 'put', profile_views.unserialize_schools, json_renderer)], prefix='/api/v1') process_rules(app, [Rule('/search/', 'get', search_views.search_view, OsfWebRenderer('search.mako', trust=False)), Rule('/share/registration/', 'get', {'register': settings.SHARE_REGISTRATION_URL}, json_renderer), Rule('/api/v1/user/search/', 'get', search_views.search_contributor, json_renderer), Rule('/api/v1/search/node/', 'post', project_views.node.search_node, json_renderer)]) process_rules(app, [Rule(['/search/', '/search/<type>/'], ['get', 'post'], search_views.search_search, json_renderer), Rule('/search/projects/', 'get', search_views.search_projects_by_title, json_renderer), Rule('/share/search/', 'get', website_views.legacy_share_v1_search, json_renderer)], prefix='/api/v1') process_rules(app, [Rule('/institutions/<inst_id>/', 'get', institution_views.view_institution, OsfWebRenderer('institution.mako', trust=False))]) process_rules(app, [Rule(['/institutions/<inst_id>/dashboard/'], 'get', institution_views.view_institution_dashboard, notemplate)]) process_rules(app, [Rule('/', 'get', website_views.index, OsfWebRenderer('institution.mako', trust=False)), Rule('/goodbye/', 'get', goodbye, notemplate), Rule(['/project/<pid>/', '/project/<pid>/node/<nid>/'], 'get', project_views.node.view_project, OsfWebRenderer('project/project.mako', trust=False)), Rule(['/token_action/<pid>/'], 'get', project_views.node.token_action, notemplate), Rule('/project/<pid>/newnode/', 'post', project_views.node.project_new_node, notemplate), Rule('/project/new/<pid>/beforeTemplate/', 'get', project_views.node.project_before_template, json_renderer), Rule(['/project/<pid>/contributors/', '/project/<pid>/node/<nid>/contributors/'], 'get', 
project_views.node.node_contributors, OsfWebRenderer('project/contributors.mako', trust=False)), Rule(['/project/<pid>/settings/', '/project/<pid>/node/<nid>/settings/'], 'get', project_views.node.node_setting, OsfWebRenderer('project/settings.mako', trust=False)), Rule(['/project/<pid>/permissions/<permissions>/', '/project/<pid>/node/<nid>/permissions/<permissions>/'], 'post', project_views.node.project_set_privacy, OsfWebRenderer('project/project.mako', trust=False)), Rule(['/project/<pid>/forks/', '/project/<pid>/node/<nid>/forks/'], 'get', project_views.node.node_forks, notemplate), Rule(['/project/<pid>/register/', '/project/<pid>/node/<nid>/register/'], 'get', project_views.register.node_register_page, OsfWebRenderer('project/register.mako', trust=False)), Rule(['/project/<pid>/register/<metaschema_id>/', '/project/<pid>/node/<nid>/register/<metaschema_id>/'], 'get', project_views.register.node_register_template_page, OsfWebRenderer('project/register.mako', trust=False)), Rule(['/project/<pid>/registrations/', '/project/<pid>/node/<nid>/registrations/'], 'get', project_views.node.node_registrations, notemplate), Rule(['/project/<pid>/registrations/', '/project/<pid>/node/<nid>/registrations/'], 'post', project_views.drafts.new_draft_registration, OsfWebRenderer('project/edit_draft_registration.mako', trust=False)), Rule(['/project/<pid>/drafts/<draft_id>/', '/project/<pid>/node/<nid>/drafts/<draft_id>/'], 'get', project_views.drafts.edit_draft_registration_page, OsfWebRenderer('project/edit_draft_registration.mako', trust=False)), Rule(['/project/<pid>/drafts/<draft_id>/register/', '/project/<pid>/node/<nid>/drafts/<draft_id>/register/'], 'get', project_views.drafts.draft_before_register_page, OsfWebRenderer('project/register_draft.mako', trust=False)), Rule(['/project/<pid>/retraction/', '/project/<pid>/node/<nid>/retraction/'], 'get', project_views.register.node_registration_retraction_redirect, notemplate), Rule(['/project/<pid>/withdraw/', '/project/<pid>/node/<nid>/withdraw/'], 'get', project_views.register.node_registration_retraction_get, OsfWebRenderer('project/retract_registration.mako', trust=False)), Rule('/ids/<category>/<path:value>/', 'get', project_views.register.get_referent_by_identifier, notemplate), Rule(['/project/<pid>/analytics/', '/project/<pid>/node/<nid>/analytics/'], 'get', project_views.node.project_statistics, notemplate), Rule(['/project/<pid>/files/', '/project/<pid>/node/<nid>/files/'], 'get', project_views.file.collect_file_trees, OsfWebRenderer('project/files.mako', trust=False), view_kwargs={'mode': 'page'}), Rule(['/<guid>/files/<provider>/<path:path>/', '/project/<pid>/files/<provider>/<path:path>/', '/project/<pid>/node/<nid>/files/<provider>/<path:path>/'], 'get', addon_views.addon_view_or_download_file, OsfWebRenderer('project/view_file.mako', trust=False)), Rule('/download/<fid_or_guid>/', 'get', addon_views.persistent_file_download, json_renderer), Rule(['/api/v1/<guid>/files/<provider>/<path:path>/', '/api/v1/project/<pid>/files/<provider>/<path:path>/', '/api/v1/project/<pid>/node/<nid>/files/<provider>/<path:path>/'], 'get', addon_views.addon_view_or_download_file, json_renderer), Rule(['/project/<pid>/files/deleted/<trashed_id>/', '/project/<pid>/node/<nid>/files/deleted/<trashed_id>/'], 'get', addon_views.addon_deleted_file, OsfWebRenderer('project/view_file.mako', trust=False)), Rule(['/project/<pid>/<provider>/files/<path:path>/', '/project/<pid>/node/<nid>/<provider>/files/<path:path>/', 
'/project/<pid>/<provider>/files/<path:path>/download/', '/project/<pid>/node/<nid>/<provider>/files/<path:path>/download/', '/project/<pid>/osffiles/<fid>/download/', '/project/<pid>/node/<nid>/osffiles/<fid>/download/', '/project/<pid>/osffiles/<fid>/', '/project/<pid>/node/<nid>/osffiles/<fid>/', '/project/<pid>/osffiles/download/<fid>/', '/project/<pid>/node/<nid>/osffiles/download/<fid>/', '/project/<pid>/files/<fid>/', '/project/<pid>/node/<nid>/files/<fid>/', '/project/<pid>/files/download/<fid>/', '/project/<pid>/node/<nid>/files/download/<fid>/', '/project/<pid>/osffiles/<fid>/version/<vid>/download/', '/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/download/', '/project/<pid>/osffiles/<fid>/version/<vid>/', '/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/', '/project/<pid>/osffiles/download/<fid>/version/<vid>/', '/project/<pid>/node/<nid>/osffiles/download/<fid>/version/<vid>/', '/project/<pid>/files/<fid>/version/<vid>/', '/project/<pid>/node/<nid>/files/<fid>/version/<vid>/', '/project/<pid>/files/download/<fid>/version/<vid>/', '/project/<pid>/node/<nid>/files/download/<fid>/version/<vid>/'], 'get', addon_views.addon_view_or_download_file_legacy, OsfWebRenderer('project/view_file.mako', trust=False)), Rule(['/api/v1/project/<pid>/osffiles/<fid>/', '/api/v1/project/<pid>/node/<nid>/osffiles/<fid>/', '/api/v1/project/<pid>/files/download/<fid>/', '/api/v1/project/<pid>/node/<nid>/files/download/<fid>/', '/api/v1/project/<pid>/osffiles/<fid>/version/<vid>/', '/api/v1/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/', '/api/v1/project/<pid>/files/download/<fid>/version/<vid>/', '/api/v1/project/<pid>/node/<nid>/files/download/<fid>/version/<vid>/'], 'get', addon_views.addon_view_or_download_file_legacy, json_renderer), Rule(['/quickfiles/<fid>/'], 'get', addon_views.addon_view_or_download_quickfile, json_renderer)]) process_rules(app, [Rule('/email/meeting/', 'post', conference_views.meeting_hook, json_renderer), Rule('/mailchimp/hooks/', 'get', profile_views.mailchimp_get_endpoint, json_renderer), Rule('/mailchimp/hooks/', 'post', profile_views.sync_data_from_mailchimp, json_renderer), Rule('/project/new/', 'post', project_views.node.project_new_post, json_renderer), Rule(['/project/<pid>/contributors_abbrev/', '/project/<pid>/node/<nid>/contributors_abbrev/'], 'get', project_views.contributor.get_node_contributors_abbrev, json_renderer), Rule('/tags/<tag>/', 'get', project_views.tag.project_tag, json_renderer), Rule(['/project/<pid>/', '/project/<pid>/node/<nid>/'], 'get', project_views.node.view_project, json_renderer), Rule(['/project/<pid>/pointer/', '/project/<pid>/node/<nid>/pointer/'], 'get', project_views.node.get_pointed, json_renderer), Rule(['/project/<pid>/pointer/', '/project/<pid>/node/<nid>/pointer/'], 'post', project_views.node.add_pointers, json_renderer), Rule(['/pointer/'], 'post', project_views.node.add_pointer, json_renderer), Rule(['/project/<pid>/pointer/', '/project/<pid>/node/<nid>pointer/'], 'delete', project_views.node.remove_pointer, json_renderer), Rule(['/project/<pid>/drafts/'], 'get', project_views.drafts.get_draft_registrations, json_renderer), Rule(['/project/<pid>/drafts/<draft_id>/'], 'get', project_views.drafts.get_draft_registration, json_renderer), Rule(['/project/<pid>/drafts/<draft_id>/'], 'put', project_views.drafts.update_draft_registration, json_renderer), Rule(['/project/<pid>/drafts/<draft_id>/'], 'delete', project_views.drafts.delete_draft_registration, json_renderer), Rule(['/project/drafts/schemas/'], 'get', 
project_views.drafts.get_metaschemas, json_renderer), Rule(['/project/<pid>/get_contributors/', '/project/<pid>/node/<nid>/get_contributors/'], 'get', project_views.contributor.get_contributors, json_renderer), Rule(['/project/<pid>/get_contributors_from_parent/', '/project/<pid>/node/<nid>/get_contributors_from_parent/'], 'get', project_views.contributor.get_contributors_from_parent, json_renderer), Rule(['/project/<pid>/contributors/manage/', '/project/<pid>/node/<nid>/contributors/manage/'], 'POST', project_views.contributor.project_manage_contributors, json_renderer), Rule(['/project/<pid>/contributor/remove/', '/project/<pid>/node/<nid>/contributor/remove/'], 'POST', project_views.contributor.project_remove_contributor, json_renderer), Rule(['/project/<pid>/get_editable_children/', '/project/<pid>/node/<nid>/get_editable_children/'], 'get', project_views.node.get_editable_children, json_renderer), Rule(['/project/<pid>/private_link/', '/project/<pid>/node/<nid>/private_link/'], 'post', project_views.node.project_generate_private_link_post, json_renderer), Rule(['/project/<pid>/private_link/edit/', '/project/<pid>/node/<nid>/private_link/edit/'], 'put', project_views.node.project_private_link_edit, json_renderer), Rule(['/project/<pid>/private_link/', '/project/<pid>/node/<nid>/private_link/'], 'delete', project_views.node.remove_private_link, json_renderer), Rule(['/project/<pid>/private_link/', '/project/<pid>/node/<nid>/private_link/'], 'get', project_views.node.private_link_table, json_renderer), Rule(['/project/new/<nid>/'], 'post', project_views.node.project_new_from_template, json_renderer), Rule(['/project/<pid>/', '/project/<pid>/node/<nid>/'], 'put', project_views.node.update_node, json_renderer), Rule(['/project/<pid>/', '/project/<pid>/node/<nid>/'], 'delete', project_views.node.component_remove, json_renderer), Rule('/project/<pid>/reorder_components/', 'post', project_views.node.project_reorder_components, json_renderer), Rule(['/project/<pid>/edit/', '/project/<pid>/node/<nid>/edit/'], 'post', project_views.node.edit_node, json_renderer), Rule(['/project/<pid>/tags/', '/project/<pid>/node/<nid>/tags/', '/project/<pid>/tags/<tag>/', '/project/<pid>/node/<nid>/tags/<tag>/'], 'post', project_views.tag.project_add_tag, json_renderer), Rule(['/project/<pid>/tags/', '/project/<pid>/node/<nid>/tags/', '/project/<pid>/tags/<tag>/', '/project/<pid>/node/<nid>/tags/<tag>/'], 'delete', project_views.tag.project_remove_tag, json_renderer), Rule(['/project/<pid>/contributors/', '/project/<pid>/node/<nid>/contributors/'], 'post', project_views.contributor.project_contributors_post, json_renderer), Rule(['/project/<pid>/fork/before/', '/project/<pid>/node/<nid>/fork/before/'], 'get', project_views.node.project_before_fork, json_renderer), Rule(['/project/<pid>/pointer/fork/', '/project/<pid>/node/<nid>/pointer/fork/'], 'post', project_views.node.fork_pointer, json_renderer), Rule(['/project/<pid>/beforeregister/', '/project/<pid>/node/<nid>/beforeregister'], 'get', project_views.register.project_before_register, json_renderer), Rule(['/project/<pid>/withdraw/', '/project/<pid>/node/<nid>/withdraw/'], 'post', project_views.register.node_registration_retraction_post, json_renderer), Rule(['/project/<pid>/identifiers/', '/project/<pid>/node/<nid>/identifiers/'], 'post', identifier_views.node_identifiers_post, json_renderer), Rule(['/project/<pid>/files/grid/', '/project/<pid>/node/<nid>/files/grid/'], 'get', project_views.file.grid_data, json_renderer), Rule('/files/auth/', 'get', 
addon_views.get_auth, json_renderer), Rule(['/project/<pid>/waterbutler/logs/', '/project/<pid>/node/<nid>/waterbutler/logs/'], 'put', addon_views.create_waterbutler_log, json_renderer), Rule(['/registration/<pid>/callbacks/'], 'put', project_views.register.registration_callbacks, json_renderer), Rule('/settings/addons/', 'post', profile_views.user_choose_addons, json_renderer), Rule('/settings/notifications/', 'get', profile_views.user_notifications, json_renderer), Rule('/settings/notifications/', 'post', profile_views.user_choose_mailing_lists, json_renderer), Rule('/subscriptions/', 'get', notification_views.get_subscriptions, json_renderer), Rule(['/project/<pid>/subscriptions/', '/project/<pid>/node/<nid>/subscriptions/'], 'get', notification_views.get_node_subscriptions, json_renderer), Rule(['/project/<pid>/tree/', '/project/<pid>/node/<nid>/tree/'], 'get', project_views.node.get_node_tree, json_renderer), Rule('/subscriptions/', 'post', notification_views.configure_subscription, json_renderer), Rule(['/project/<pid>/settings/addons/', '/project/<pid>/node/<nid>/settings/addons/'], 'post', project_views.node.node_choose_addons, json_renderer), Rule(['/project/<pid>/settings/comments/', '/project/<pid>/node/<nid>/settings/comments/'], 'post', project_views.node.configure_comments, json_renderer), Rule(['/project/<pid>/settings/requests/', '/project/<pid>/node/<nid>/settings/requests/'], 'post', project_views.node.configure_requests, json_renderer), Rule(['/project/<pid>/invite_contributor/', '/project/<pid>/node/<nid>/invite_contributor/'], 'post', project_views.contributor.invite_contributor_post, json_renderer)], prefix='/api/v1') addon_base_path = os.path.abspath('addons') provider_static_path = os.path.abspath('assets') if settings.DEV_MODE: @app.route('/static/addons/<addon>/<path:filename>') def addon_static(addon, filename): addon_path = os.path.join(addon_base_path, addon, 'static') return send_from_directory(addon_path, filename) @app.route('/assets/<filename>') def provider_static(filename): return send_from_directory(provider_static_path, filename) @app.route('/ember-cli-live-reload.js') def ember_cli_live_reload(): req = requests.get('{}/ember-cli-live-reload.js'.format(settings.LIVE_RELOAD_DOMAIN), stream=True) return Response(stream_with_context(req.iter_content()), content_type=req.headers['content-type'])
def make_url_map(app): 'Set up all the routes for the OSF app.\n\n    :param app: A Flask/Werkzeug app to bind the rules to.\n    ' process_rules(app, [Rule('/<path:_>', ['get', 'post'], HTTPError(http_status.HTTP_404_NOT_FOUND), OsfWebRenderer('', render_mako_string, trust=False)), Rule('/api/v1/<path:_>', ['get', 'post'], HTTPError(http_status.HTTP_404_NOT_FOUND), json_renderer)]) process_rules(app, [Rule(['/<guid>/', '/<guid>/<path:suffix>'], ['get', 'post', 'put', 'patch', 'delete'], website_views.resolve_guid, notemplate), Rule(['/api/v1/<guid>/', '/api/v1/<guid>/<path:suffix>'], ['get', 'post', 'put', 'patch', 'delete'], website_views.resolve_guid, json_renderer)]) process_rules(app, [Rule('/favicon.ico', 'get', favicon, json_renderer), Rule('/robots.txt', 'get', robots, json_renderer), Rule('/sitemaps/<path>', 'get', sitemap_file, json_renderer)]) if settings.USE_EXTERNAL_EMBER: for prefix in EXTERNAL_EMBER_APPS.keys(): process_rules(app, [Rule(['/<provider>/<guid>/download', '/<provider>/<guid>/download/'], ['get', 'post', 'put', 'patch', 'delete'], website_views.resolve_guid_download, notemplate, endpoint_suffix=('__' + prefix))], prefix=('/' + prefix)) process_rules(app, [Rule(['/', '/<path:path>'], 'get', ember_app, json_renderer, endpoint_suffix=('__' + prefix))], prefix=('/' + prefix)) if EXTERNAL_EMBER_APPS.get('ember_osf_web'): process_rules(app, [Rule(ember_osf_web_views.routes, 'get', ember_osf_web_views.use_ember_app, notemplate)]) if ('routes' in EXTERNAL_EMBER_APPS['ember_osf_web']): for route in EXTERNAL_EMBER_APPS['ember_osf_web']['routes']: process_rules(app, [Rule(['/', '/<path:path>'], 'get', ember_osf_web_views.use_ember_app, notemplate, endpoint_suffix=('__' + route))], prefix=('/' + route)) process_rules(app, [Rule('/dashboard/', 'get', website_views.dashboard, notemplate), Rule('/myprojects/', 'get', website_views.my_projects, OsfWebRenderer('my_projects.mako', trust=False)), Rule('/reproducibility/', 'get', website_views.reproducibility, notemplate), Rule('/about/', 'get', website_views.redirect_about, notemplate), Rule('/help/', 'get', website_views.redirect_help, notemplate), Rule('/faq/', 'get', website_views.redirect_faq, notemplate), Rule(['/getting-started/', '/getting-started/email/', '/howosfworks/'], 'get', website_views.redirect_getting_started, notemplate), Rule(['/messages/'], 'get', {}, OsfWebRenderer('public/comingsoon.mako', trust=False)), Rule('/meetings/<meeting>/', 'get', conference_views.conference_results, OsfWebRenderer('public/pages/meeting.mako', trust=False)), Rule('/view/<meeting>/', 'get', conference_views.redirect_to_conference_results, notemplate), Rule('/view/<meeting>/plain/', 'get', conference_views.conference_results, OsfWebRenderer('public/pages/meeting_plain.mako', trust=False), endpoint_suffix='__plain'), Rule('/api/v1/view/<meeting>/', 'get', conference_views.conference_data, json_renderer), Rule('/meetings/', 'get', conference_views.conference_view, OsfWebRenderer('public/pages/meeting_landing.mako', trust=False)), Rule('/api/v1/meetings/submissions/', 'get', conference_views.conference_submissions, json_renderer), Rule('/presentations/', 'get', conference_views.redirect_to_meetings, json_renderer), Rule('/news/', 'get', website_views.redirect_to_cos_news, notemplate), Rule(['/rr/', '/registeredreports/', '/registeredreport/'], 'get', registries_views.registered_reports_landing, OsfWebRenderer('registered_reports_landing.mako', trust=False)), Rule('/erpc/', 'get', closed_challenges_views.erpc_landing_page,
OsfWebRenderer('erpc_landing_page.mako', trust=False)), Rule('/prereg/', 'get', prereg.prereg_landing_page, OsfWebRenderer('prereg_landing_page.mako', trust=False)), Rule('/preprints/', 'get', preprint_views.preprint_landing_page, OsfWebRenderer('public/pages/preprint_landing.mako', trust=False)), Rule('/registries/', 'get', registries_views.registries_landing_page, OsfWebRenderer('public/pages/registries_landing.mako', trust=False)), Rule('/reviews/', 'get', reviews_views.reviews_landing_page, OsfWebRenderer('public/pages/reviews_landing.mako', trust=False)), Rule('/preprint/', 'get', preprint_views.preprint_redirect, notemplate), Rule(['/api/v1/<campaign>/draft_registrations/', '/api/v1/draft_registrations/'], 'get', registries_views.draft_registrations, json_renderer)]) process_rules(app, [Rule('/citations/styles/', 'get', citation_views.list_citation_styles, json_renderer)], prefix='/api/v1') process_rules(app, [Rule(['/project/<pid>/<addon>/settings/disable/', '/project/<pid>/node/<nid>/<addon>/settings/disable/'], 'post', addon_views.disable_addon, json_renderer), Rule('/profile/<uid>/<addon>/settings/', 'get', addon_views.get_addon_user_config, json_renderer)], prefix='/api/v1') process_rules(app, [Rule('/oauth/connect/<service_name>/', 'get', oauth_views.oauth_connect, json_renderer), Rule('/oauth/callback/<service_name>/', 'get', oauth_views.oauth_callback, OsfWebRenderer('util/oauth_complete.mako', trust=False))]) process_rules(app, [Rule(['/oauth/accounts/<external_account_id>/'], 'delete', oauth_views.oauth_disconnect, json_renderer)], prefix='/api/v1') process_rules(app, [Rule('/confirmed_emails/', 'put', auth_views.unconfirmed_email_add, json_renderer), Rule('/confirmed_emails/', 'delete', auth_views.unconfirmed_email_remove, json_renderer)], prefix='/api/v1') process_rules(app, [Rule(['/project/<pid>/comments/timestamps/', '/project/<pid>/node/<nid>/comments/timestamps/'], 'put', project_views.comment.update_comments_timestamp, json_renderer), Rule(['/project/<pid>/citation/', '/project/<pid>/node/<nid>/citation/'], 'get', citation_views.node_citation, json_renderer)], prefix='/api/v1') process_rules(app, [Rule('/forms/signin/', 'get', website_views.signin_form, json_renderer), Rule('/forms/forgot_password/', 'get', website_views.forgot_password_form, json_renderer)], prefix='/api/v1') process_rules(app, [Rule(['/activity/', '/explore/activity/', '/explore/'], 'get', discovery_views.redirect_activity_to_search, notemplate)]) process_rules(app, [Rule('/confirm/<uid>/<token>/', 'get', auth_views.confirm_email_get, notemplate), Rule('/confirm/external/<uid>/<token>/', 'get', auth_views.external_login_confirm_email_get, notemplate), Rule('/resetpassword/<uid>/<token>/', 'get', auth_views.reset_password_get, OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)), Rule('/resetpassword/<uid>/<token>/', 'post', auth_views.reset_password_post, OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)), Rule('/resetpassword-institution/<uid>/<token>/', 'get', auth_views.reset_password_institution_get, OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)), Rule('/resetpassword-institution/<uid>/<token>/', 'post', auth_views.reset_password_institution_post, OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)), Rule('/resend/', 'get', auth_views.resend_confirmation_get, OsfWebRenderer('resend.mako', render_mako_string, trust=False)), Rule('/resend/', 'post', auth_views.resend_confirmation_post, 
OsfWebRenderer('resend.mako', render_mako_string, trust=False)), Rule('/external-login/email', 'get', auth_views.external_login_email_get, OsfWebRenderer('external_login_email.mako', render_mako_string, trust=False)), Rule('/external-login/email', 'post', auth_views.external_login_email_post, OsfWebRenderer('external_login_email.mako', render_mako_string, trust=False)), Rule('/register/', 'get', auth_views.auth_register, OsfWebRenderer('public/register.mako', trust=False)), Rule(['/login/', '/account/'], 'get', auth_views.auth_login, notemplate), Rule('/api/v1/register/', 'post', auth_views.register_user, json_renderer), Rule('/logout/', 'get', auth_views.auth_logout, notemplate), Rule('/forgotpassword/', 'get', auth_views.forgot_password_get, OsfWebRenderer('public/forgot_password.mako', trust=False)), Rule('/forgotpassword/', 'post', auth_views.forgot_password_post, OsfWebRenderer('public/forgot_password.mako', trust=False)), Rule('/forgotpassword-institution/', 'get', auth_views.redirect_unsupported_institution, notemplate), Rule('/forgotpassword-institution/', 'post', auth_views.forgot_password_institution_post, OsfWebRenderer('public/forgot_password.mako', trust=False)), Rule('/login/connected_tools/', 'get', landing_page_views.connected_tools, notemplate), Rule('/login/enriched_profile/', 'get', landing_page_views.enriched_profile, notemplate)]) process_rules(app, [Rule('/profile/', 'get', profile_views.profile_view, OsfWebRenderer('profile.mako', trust=False)), Rule('/profile/<uid>/', 'get', profile_views.profile_view_id, OsfWebRenderer('profile.mako', trust=False)), Rule(['/user/<uid>/<pid>/claim/'], ['get', 'post'], project_views.contributor.claim_user_form, OsfWebRenderer('claim_account.mako', trust=False)), Rule(['/user/<uid>/<pid>/claim/verify/<token>/'], ['get', 'post'], project_views.contributor.claim_user_registered, OsfWebRenderer('claim_account_registered.mako', trust=False)), Rule('/settings/', 'get', profile_views.user_profile, OsfWebRenderer('profile/settings.mako', trust=False)), Rule(['/project/<pid>/addons/', '/project/<pid>/node/<nid>/addons/'], 'get', project_views.node.node_addons, OsfWebRenderer('project/addons.mako', trust=False)), Rule('/settings/account/', 'get', profile_views.user_account, OsfWebRenderer('profile/account.mako', trust=False)), Rule('/settings/account/password', 'post', profile_views.user_account_password, OsfWebRenderer('profile/account.mako', trust=False)), Rule('/settings/addons/', 'get', profile_views.user_addons, OsfWebRenderer('profile/addons.mako', trust=False)), Rule('/settings/notifications/', 'get', profile_views.user_notifications, OsfWebRenderer('profile/notifications.mako', trust=False)), Rule('/settings/applications/', 'get', profile_views.oauth_application_list, OsfWebRenderer('profile/oauth_app_list.mako', trust=False)), Rule('/settings/applications/create/', 'get', profile_views.oauth_application_register, OsfWebRenderer('profile/oauth_app_detail.mako', trust=False)), Rule('/settings/applications/<client_id>/', 'get', profile_views.oauth_application_detail, OsfWebRenderer('profile/oauth_app_detail.mako', trust=False)), Rule('/settings/tokens/', 'get', profile_views.personal_access_token_list, OsfWebRenderer('profile/personal_tokens_list.mako', trust=False)), Rule('/settings/tokens/create/', 'get', profile_views.personal_access_token_register, OsfWebRenderer('profile/personal_tokens_detail.mako', trust=False)), Rule('/settings/tokens/<_id>/', 'get', profile_views.personal_access_token_detail, 
OsfWebRenderer('profile/personal_tokens_detail.mako', trust=False))]) process_rules(app, [Rule('/profile/', 'get', profile_views.profile_view_json, json_renderer), Rule('/profile/', 'put', profile_views.update_user, json_renderer), Rule('/resend/', 'put', profile_views.resend_confirmation, json_renderer), Rule('/profile/<uid>/', 'get', profile_views.profile_view_id_json, json_renderer), Rule('/user/<uid>/<pid>/claim/email/', 'post', project_views.contributor.claim_user_post, json_renderer), Rule('/profile/export/', 'post', profile_views.request_export, json_renderer), Rule('/profile/region/', 'put', osfstorage_views.update_region, json_renderer), Rule('/profile/deactivate/', 'post', profile_views.request_deactivation, json_renderer), Rule('/profile/cancel_request_deactivation/', 'post', profile_views.cancel_request_deactivation, json_renderer), Rule('/profile/logins/', 'patch', profile_views.delete_external_identity, json_renderer), Rule('/settings/names/', 'get', profile_views.serialize_names, json_renderer), Rule('/settings/names/', 'put', profile_views.unserialize_names, json_renderer), Rule('/settings/names/impute/', 'get', profile_views.impute_names, json_renderer), Rule(['/settings/social/', '/settings/social/<uid>/'], 'get', profile_views.serialize_social, json_renderer), Rule(['/settings/jobs/', '/settings/jobs/<uid>/'], 'get', profile_views.serialize_jobs, json_renderer), Rule(['/settings/schools/', '/settings/schools/<uid>/'], 'get', profile_views.serialize_schools, json_renderer), Rule(['/settings/social/', '/settings/social/<uid>/'], 'put', profile_views.unserialize_social, json_renderer), Rule(['/settings/jobs/', '/settings/jobs/<uid>/'], 'put', profile_views.unserialize_jobs, json_renderer), Rule(['/settings/schools/', '/settings/schools/<uid>/'], 'put', profile_views.unserialize_schools, json_renderer)], prefix='/api/v1') process_rules(app, [Rule('/search/', 'get', search_views.search_view, OsfWebRenderer('search.mako', trust=False)), Rule('/share/registration/', 'get', {'register': settings.SHARE_REGISTRATION_URL}, json_renderer), Rule('/api/v1/user/search/', 'get', search_views.search_contributor, json_renderer), Rule('/api/v1/search/node/', 'post', project_views.node.search_node, json_renderer)]) process_rules(app, [Rule(['/search/', '/search/<type>/'], ['get', 'post'], search_views.search_search, json_renderer), Rule('/search/projects/', 'get', search_views.search_projects_by_title, json_renderer), Rule('/share/search/', 'get', website_views.legacy_share_v1_search, json_renderer)], prefix='/api/v1') process_rules(app, [Rule('/institutions/<inst_id>/', 'get', institution_views.view_institution, OsfWebRenderer('institution.mako', trust=False))]) process_rules(app, [Rule(['/institutions/<inst_id>/dashboard/'], 'get', institution_views.view_institution_dashboard, notemplate)]) process_rules(app, [Rule('/', 'get', website_views.index, OsfWebRenderer('institution.mako', trust=False)), Rule('/goodbye/', 'get', goodbye, notemplate), Rule(['/project/<pid>/', '/project/<pid>/node/<nid>/'], 'get', project_views.node.view_project, OsfWebRenderer('project/project.mako', trust=False)), Rule(['/token_action/<pid>/'], 'get', project_views.node.token_action, notemplate), Rule('/project/<pid>/newnode/', 'post', project_views.node.project_new_node, notemplate), Rule('/project/new/<pid>/beforeTemplate/', 'get', project_views.node.project_before_template, json_renderer), Rule(['/project/<pid>/contributors/', '/project/<pid>/node/<nid>/contributors/'], 'get', 
project_views.node.node_contributors, OsfWebRenderer('project/contributors.mako', trust=False)), Rule(['/project/<pid>/settings/', '/project/<pid>/node/<nid>/settings/'], 'get', project_views.node.node_setting, OsfWebRenderer('project/settings.mako', trust=False)), Rule(['/project/<pid>/permissions/<permissions>/', '/project/<pid>/node/<nid>/permissions/<permissions>/'], 'post', project_views.node.project_set_privacy, OsfWebRenderer('project/project.mako', trust=False)), Rule(['/project/<pid>/forks/', '/project/<pid>/node/<nid>/forks/'], 'get', project_views.node.node_forks, notemplate), Rule(['/project/<pid>/register/', '/project/<pid>/node/<nid>/register/'], 'get', project_views.register.node_register_page, OsfWebRenderer('project/register.mako', trust=False)), Rule(['/project/<pid>/register/<metaschema_id>/', '/project/<pid>/node/<nid>/register/<metaschema_id>/'], 'get', project_views.register.node_register_template_page, OsfWebRenderer('project/register.mako', trust=False)), Rule(['/project/<pid>/registrations/', '/project/<pid>/node/<nid>/registrations/'], 'get', project_views.node.node_registrations, notemplate), Rule(['/project/<pid>/registrations/', '/project/<pid>/node/<nid>/registrations/'], 'post', project_views.drafts.new_draft_registration, OsfWebRenderer('project/edit_draft_registration.mako', trust=False)), Rule(['/project/<pid>/drafts/<draft_id>/', '/project/<pid>/node/<nid>/drafts/<draft_id>/'], 'get', project_views.drafts.edit_draft_registration_page, OsfWebRenderer('project/edit_draft_registration.mako', trust=False)), Rule(['/project/<pid>/drafts/<draft_id>/register/', '/project/<pid>/node/<nid>/drafts/<draft_id>/register/'], 'get', project_views.drafts.draft_before_register_page, OsfWebRenderer('project/register_draft.mako', trust=False)), Rule(['/project/<pid>/retraction/', '/project/<pid>/node/<nid>/retraction/'], 'get', project_views.register.node_registration_retraction_redirect, notemplate), Rule(['/project/<pid>/withdraw/', '/project/<pid>/node/<nid>/withdraw/'], 'get', project_views.register.node_registration_retraction_get, OsfWebRenderer('project/retract_registration.mako', trust=False)), Rule('/ids/<category>/<path:value>/', 'get', project_views.register.get_referent_by_identifier, notemplate), Rule(['/project/<pid>/analytics/', '/project/<pid>/node/<nid>/analytics/'], 'get', project_views.node.project_statistics, notemplate), Rule(['/project/<pid>/files/', '/project/<pid>/node/<nid>/files/'], 'get', project_views.file.collect_file_trees, OsfWebRenderer('project/files.mako', trust=False), view_kwargs={'mode': 'page'}), Rule(['/<guid>/files/<provider>/<path:path>/', '/project/<pid>/files/<provider>/<path:path>/', '/project/<pid>/node/<nid>/files/<provider>/<path:path>/'], 'get', addon_views.addon_view_or_download_file, OsfWebRenderer('project/view_file.mako', trust=False)), Rule('/download/<fid_or_guid>/', 'get', addon_views.persistent_file_download, json_renderer), Rule(['/api/v1/<guid>/files/<provider>/<path:path>/', '/api/v1/project/<pid>/files/<provider>/<path:path>/', '/api/v1/project/<pid>/node/<nid>/files/<provider>/<path:path>/'], 'get', addon_views.addon_view_or_download_file, json_renderer), Rule(['/project/<pid>/files/deleted/<trashed_id>/', '/project/<pid>/node/<nid>/files/deleted/<trashed_id>/'], 'get', addon_views.addon_deleted_file, OsfWebRenderer('project/view_file.mako', trust=False)), Rule(['/project/<pid>/<provider>/files/<path:path>/', '/project/<pid>/node/<nid>/<provider>/files/<path:path>/', 
'/project/<pid>/<provider>/files/<path:path>/download/', '/project/<pid>/node/<nid>/<provider>/files/<path:path>/download/', '/project/<pid>/osffiles/<fid>/download/', '/project/<pid>/node/<nid>/osffiles/<fid>/download/', '/project/<pid>/osffiles/<fid>/', '/project/<pid>/node/<nid>/osffiles/<fid>/', '/project/<pid>/osffiles/download/<fid>/', '/project/<pid>/node/<nid>/osffiles/download/<fid>/', '/project/<pid>/files/<fid>/', '/project/<pid>/node/<nid>/files/<fid>/', '/project/<pid>/files/download/<fid>/', '/project/<pid>/node/<nid>/files/download/<fid>/', '/project/<pid>/osffiles/<fid>/version/<vid>/download/', '/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/download/', '/project/<pid>/osffiles/<fid>/version/<vid>/', '/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/', '/project/<pid>/osffiles/download/<fid>/version/<vid>/', '/project/<pid>/node/<nid>/osffiles/download/<fid>/version/<vid>/', '/project/<pid>/files/<fid>/version/<vid>/', '/project/<pid>/node/<nid>/files/<fid>/version/<vid>/', '/project/<pid>/files/download/<fid>/version/<vid>/', '/project/<pid>/node/<nid>/files/download/<fid>/version/<vid>/'], 'get', addon_views.addon_view_or_download_file_legacy, OsfWebRenderer('project/view_file.mako', trust=False)), Rule(['/api/v1/project/<pid>/osffiles/<fid>/', '/api/v1/project/<pid>/node/<nid>/osffiles/<fid>/', '/api/v1/project/<pid>/files/download/<fid>/', '/api/v1/project/<pid>/node/<nid>/files/download/<fid>/', '/api/v1/project/<pid>/osffiles/<fid>/version/<vid>/', '/api/v1/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/', '/api/v1/project/<pid>/files/download/<fid>/version/<vid>/', '/api/v1/project/<pid>/node/<nid>/files/download/<fid>/version/<vid>/'], 'get', addon_views.addon_view_or_download_file_legacy, json_renderer), Rule(['/quickfiles/<fid>/'], 'get', addon_views.addon_view_or_download_quickfile, json_renderer)]) process_rules(app, [Rule('/email/meeting/', 'post', conference_views.meeting_hook, json_renderer), Rule('/mailchimp/hooks/', 'get', profile_views.mailchimp_get_endpoint, json_renderer), Rule('/mailchimp/hooks/', 'post', profile_views.sync_data_from_mailchimp, json_renderer), Rule('/project/new/', 'post', project_views.node.project_new_post, json_renderer), Rule(['/project/<pid>/contributors_abbrev/', '/project/<pid>/node/<nid>/contributors_abbrev/'], 'get', project_views.contributor.get_node_contributors_abbrev, json_renderer), Rule('/tags/<tag>/', 'get', project_views.tag.project_tag, json_renderer), Rule(['/project/<pid>/', '/project/<pid>/node/<nid>/'], 'get', project_views.node.view_project, json_renderer), Rule(['/project/<pid>/pointer/', '/project/<pid>/node/<nid>/pointer/'], 'get', project_views.node.get_pointed, json_renderer), Rule(['/project/<pid>/pointer/', '/project/<pid>/node/<nid>/pointer/'], 'post', project_views.node.add_pointers, json_renderer), Rule(['/pointer/'], 'post', project_views.node.add_pointer, json_renderer), Rule(['/project/<pid>/pointer/', '/project/<pid>/node/<nid>pointer/'], 'delete', project_views.node.remove_pointer, json_renderer), Rule(['/project/<pid>/drafts/'], 'get', project_views.drafts.get_draft_registrations, json_renderer), Rule(['/project/<pid>/drafts/<draft_id>/'], 'get', project_views.drafts.get_draft_registration, json_renderer), Rule(['/project/<pid>/drafts/<draft_id>/'], 'put', project_views.drafts.update_draft_registration, json_renderer), Rule(['/project/<pid>/drafts/<draft_id>/'], 'delete', project_views.drafts.delete_draft_registration, json_renderer), Rule(['/project/drafts/schemas/'], 'get', 
project_views.drafts.get_metaschemas, json_renderer), Rule(['/project/<pid>/get_contributors/', '/project/<pid>/node/<nid>/get_contributors/'], 'get', project_views.contributor.get_contributors, json_renderer), Rule(['/project/<pid>/get_contributors_from_parent/', '/project/<pid>/node/<nid>/get_contributors_from_parent/'], 'get', project_views.contributor.get_contributors_from_parent, json_renderer), Rule(['/project/<pid>/contributors/manage/', '/project/<pid>/node/<nid>/contributors/manage/'], 'POST', project_views.contributor.project_manage_contributors, json_renderer), Rule(['/project/<pid>/contributor/remove/', '/project/<pid>/node/<nid>/contributor/remove/'], 'POST', project_views.contributor.project_remove_contributor, json_renderer), Rule(['/project/<pid>/get_editable_children/', '/project/<pid>/node/<nid>/get_editable_children/'], 'get', project_views.node.get_editable_children, json_renderer), Rule(['/project/<pid>/private_link/', '/project/<pid>/node/<nid>/private_link/'], 'post', project_views.node.project_generate_private_link_post, json_renderer), Rule(['/project/<pid>/private_link/edit/', '/project/<pid>/node/<nid>/private_link/edit/'], 'put', project_views.node.project_private_link_edit, json_renderer), Rule(['/project/<pid>/private_link/', '/project/<pid>/node/<nid>/private_link/'], 'delete', project_views.node.remove_private_link, json_renderer), Rule(['/project/<pid>/private_link/', '/project/<pid>/node/<nid>/private_link/'], 'get', project_views.node.private_link_table, json_renderer), Rule(['/project/new/<nid>/'], 'post', project_views.node.project_new_from_template, json_renderer), Rule(['/project/<pid>/', '/project/<pid>/node/<nid>/'], 'put', project_views.node.update_node, json_renderer), Rule(['/project/<pid>/', '/project/<pid>/node/<nid>/'], 'delete', project_views.node.component_remove, json_renderer), Rule('/project/<pid>/reorder_components/', 'post', project_views.node.project_reorder_components, json_renderer), Rule(['/project/<pid>/edit/', '/project/<pid>/node/<nid>/edit/'], 'post', project_views.node.edit_node, json_renderer), Rule(['/project/<pid>/tags/', '/project/<pid>/node/<nid>/tags/', '/project/<pid>/tags/<tag>/', '/project/<pid>/node/<nid>/tags/<tag>/'], 'post', project_views.tag.project_add_tag, json_renderer), Rule(['/project/<pid>/tags/', '/project/<pid>/node/<nid>/tags/', '/project/<pid>/tags/<tag>/', '/project/<pid>/node/<nid>/tags/<tag>/'], 'delete', project_views.tag.project_remove_tag, json_renderer), Rule(['/project/<pid>/contributors/', '/project/<pid>/node/<nid>/contributors/'], 'post', project_views.contributor.project_contributors_post, json_renderer), Rule(['/project/<pid>/fork/before/', '/project/<pid>/node/<nid>/fork/before/'], 'get', project_views.node.project_before_fork, json_renderer), Rule(['/project/<pid>/pointer/fork/', '/project/<pid>/node/<nid>/pointer/fork/'], 'post', project_views.node.fork_pointer, json_renderer), Rule(['/project/<pid>/beforeregister/', '/project/<pid>/node/<nid>/beforeregister'], 'get', project_views.register.project_before_register, json_renderer), Rule(['/project/<pid>/withdraw/', '/project/<pid>/node/<nid>/withdraw/'], 'post', project_views.register.node_registration_retraction_post, json_renderer), Rule(['/project/<pid>/identifiers/', '/project/<pid>/node/<nid>/identifiers/'], 'post', identifier_views.node_identifiers_post, json_renderer), Rule(['/project/<pid>/files/grid/', '/project/<pid>/node/<nid>/files/grid/'], 'get', project_views.file.grid_data, json_renderer), Rule('/files/auth/', 'get', 
addon_views.get_auth, json_renderer), Rule(['/project/<pid>/waterbutler/logs/', '/project/<pid>/node/<nid>/waterbutler/logs/'], 'put', addon_views.create_waterbutler_log, json_renderer), Rule(['/registration/<pid>/callbacks/'], 'put', project_views.register.registration_callbacks, json_renderer), Rule('/settings/addons/', 'post', profile_views.user_choose_addons, json_renderer), Rule('/settings/notifications/', 'get', profile_views.user_notifications, json_renderer), Rule('/settings/notifications/', 'post', profile_views.user_choose_mailing_lists, json_renderer), Rule('/subscriptions/', 'get', notification_views.get_subscriptions, json_renderer), Rule(['/project/<pid>/subscriptions/', '/project/<pid>/node/<nid>/subscriptions/'], 'get', notification_views.get_node_subscriptions, json_renderer), Rule(['/project/<pid>/tree/', '/project/<pid>/node/<nid>/tree/'], 'get', project_views.node.get_node_tree, json_renderer), Rule('/subscriptions/', 'post', notification_views.configure_subscription, json_renderer), Rule(['/project/<pid>/settings/addons/', '/project/<pid>/node/<nid>/settings/addons/'], 'post', project_views.node.node_choose_addons, json_renderer), Rule(['/project/<pid>/settings/comments/', '/project/<pid>/node/<nid>/settings/comments/'], 'post', project_views.node.configure_comments, json_renderer), Rule(['/project/<pid>/settings/requests/', '/project/<pid>/node/<nid>/settings/requests/'], 'post', project_views.node.configure_requests, json_renderer), Rule(['/project/<pid>/invite_contributor/', '/project/<pid>/node/<nid>/invite_contributor/'], 'post', project_views.contributor.invite_contributor_post, json_renderer)], prefix='/api/v1') addon_base_path = os.path.abspath('addons') provider_static_path = os.path.abspath('assets') if settings.DEV_MODE: @app.route('/static/addons/<addon>/<path:filename>') def addon_static(addon, filename): addon_path = os.path.join(addon_base_path, addon, 'static') return send_from_directory(addon_path, filename) @app.route('/assets/<filename>') def provider_static(filename): return send_from_directory(provider_static_path, filename) @app.route('/ember-cli-live-reload.js') def ember_cli_live_reload(): req = requests.get('{}/ember-cli-live-reload.js'.format(settings.LIVE_RELOAD_DOMAIN), stream=True) return Response(stream_with_context(req.iter_content()), content_type=req.headers['content-type'])<|docstring|>Set up all the routes for the OSF app. :param app: A Flask/Werkzeug app to bind the rules to.<|endoftext|>
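The make_url_map record above is driven entirely by the Rule/process_rules pair. As a rough orientation, here is a minimal sketch of how such a pair can sit on top of Flask's add_url_rule; the container shape, the endpoint naming, and the decision to ignore the renderer are illustration-only assumptions, not the OSF originals.

from flask import Flask

class Rule:
    # Assumed container: routes and methods may each be a single string or a
    # list, mirroring how the rules above are declared.
    def __init__(self, routes, methods, view_func, renderer, endpoint_suffix=''):
        self.routes = routes if isinstance(routes, list) else [routes]
        methods = methods if isinstance(methods, list) else [methods]
        self.methods = [m.upper() for m in methods]
        self.view_func = view_func
        self.renderer = renderer  # e.g. json_renderer or an OsfWebRenderer instance
        self.endpoint_suffix = endpoint_suffix

def process_rules(app, rules, prefix=''):
    # Assumes view_func is a plain callable; the real helper must also wrap
    # it in the renderer and cope with the HTTPError/dict pseudo-views above.
    for rule in rules:
        endpoint = rule.view_func.__name__ + rule.endpoint_suffix
        for route in rule.routes:
            app.add_url_rule(prefix + route, endpoint=endpoint,
                             view_func=rule.view_func, methods=rule.methods)

app = Flask(__name__)

def dashboard():
    return 'ok'

process_rules(app, [Rule('/dashboard/', 'get', dashboard, None)])

The endpoint_suffix argument explains the ('__' + prefix) strings above: the same view mounted under several URL prefixes needs distinct Flask endpoint names.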
e1f1885d50d0fb7dcee2381642c41a56265dc0faf5a674328070bcc48612b5db
def __call__(self, data, *args, **kwargs): "\n This function has been added to keep our Flask requests compatible with django-waffle, it's been adapted from\n waffle's own middleware code at https://github.com/django-waffle/django-waffle/blob/master/waffle/middleware.py\n " resp = super(OsfWebRenderer, self).__call__(data, *args, **kwargs) max_age = get_setting('MAX_AGE') if hasattr(request, 'waffles'): for k in request.waffles: name = smart_str((get_setting('COOKIE') % k)) (active, rollout) = request.waffles[k] if (rollout and (not active)): age = None else: age = max_age resp.headers.add('Set-Cookie', dump_cookie(name.encode(), str(active), max_age=age, expires='True')) return resp
This function has been added to keep our Flask requests compatible with django-waffle; it has been adapted from waffle's own middleware code at https://github.com/django-waffle/django-waffle/blob/master/waffle/middleware.py
website/routes.py
__call__
felliott/osf.io
628
python
def __call__(self, data, *args, **kwargs): "\n This function has been added to keep our Flask requests compatible with django-waffle, it's been adapted from\n waffle's own middleware code at https://github.com/django-waffle/django-waffle/blob/master/waffle/middleware.py\n " resp = super(OsfWebRenderer, self).__call__(data, *args, **kwargs) max_age = get_setting('MAX_AGE') if hasattr(request, 'waffles'): for k in request.waffles: name = smart_str((get_setting('COOKIE') % k)) (active, rollout) = request.waffles[k] if (rollout and (not active)): age = None else: age = max_age resp.headers.add('Set-Cookie', dump_cookie(name.encode(), str(active), max_age=age, expires='True')) return resp
def __call__(self, data, *args, **kwargs): "\n This function has been added to keep our Flask requests compatible with django-waffle, it's been adapted from\n waffle's own middleware code at https://github.com/django-waffle/django-waffle/blob/master/waffle/middleware.py\n " resp = super(OsfWebRenderer, self).__call__(data, *args, **kwargs) max_age = get_setting('MAX_AGE') if hasattr(request, 'waffles'): for k in request.waffles: name = smart_str((get_setting('COOKIE') % k)) (active, rollout) = request.waffles[k] if (rollout and (not active)): age = None else: age = max_age resp.headers.add('Set-Cookie', dump_cookie(name.encode(), str(active), max_age=age, expires='True')) return resp<|docstring|>This function has been added to keep our Flask requests compatible with django-waffle, it's been adapted from waffle's own middleware code at https://github.com/django-waffle/django-waffle/blob/master/waffle/middleware.py<|endoftext|>
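The cookie logic in the __call__ record above can be exercised in isolation with werkzeug's dump_cookie, which renders a Set-Cookie header value. In this sketch the 'dwf_%s' name pattern and the 30-day max age follow django-waffle's documented defaults; the flag name and values are invented for illustration.

from werkzeug.http import dump_cookie

def waffle_cookie_header(flag_name, active, rollout, max_age=2592000):
    # A flag in partial rollout that is inactive for this user gets no
    # max_age, so the cookie expires with the session and the user can be
    # re-sampled next time; otherwise the decision is pinned for max_age seconds.
    age = None if (rollout and not active) else max_age
    name = 'dwf_%s' % flag_name  # waffle's default COOKIE pattern
    return dump_cookie(name, str(active), max_age=age)

print(waffle_cookie_header('new_dashboard', active=True, rollout=False))
# e.g. 'dwf_new_dashboard=True; Expires=...; Max-Age=2592000; Path=/'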
de68bea1f47838a47bf6a4485710d7780b1dcfb2bf956470ad2d9b521f106966
def discover_single_file_for_basin(data_dir: str, basin: str) -> str: "\n Discovers a single dataset file for the specified basin. Discovery will be performed using the pattern\n '{data_dir}/*{basin}*', i.e. the basin ID has to be present in any file name within the directory. Note, that\n basin id '123' e.g. will match the following file names: 123_streamflow.txt, 123.nc, 00123456_daymet_v4_daily_na.nc4,\n streamflow_123.csv. Be sure, that your file names are unique, otherwise only the first occurence will be returned.\n\n Parameters\n ----------\n data_dir: str\n The data directory used for discovering a dataset file related to the specified basin\n basin: str\n ID of the basin\n\n Returns\n -------\n str\n Path of the file, which is related to the specified basin\n\n " files = glob.glob(f'{data_dir}/**/*{basin}*', recursive=True) if (len(files) == 0): raise FileNotFoundError(f"Can't find file for basin {basin} within directory {data_dir}.") if (len(files) > 1): logger.warning(f'Found multiple files for basin {basin} within directory {data_dir}. First one found will be returned.') return files[0]
Discovers a single dataset file for the specified basin. Discovery will be performed using the pattern '{data_dir}/*{basin}*', i.e. the basin ID has to be present in a file name within the directory. Note that a basin id such as '123' will match the following file names: 123_streamflow.txt, 123.nc, 00123456_daymet_v4_daily_na.nc4, streamflow_123.csv. Be sure that your file names are unique; otherwise only the first occurrence will be returned. Parameters ---------- data_dir: str The data directory used for discovering a dataset file related to the specified basin basin: str ID of the basin Returns ------- str Path of the file related to the specified basin
libs/ioutils.py
discover_single_file_for_basin
SebaDro/st-deep-hydro
0
python
def discover_single_file_for_basin(data_dir: str, basin: str) -> str: "\n Discovers a single dataset file for the specified basin. Discovery will be performed using the pattern\n '{data_dir}/*{basin}*', i.e. the basin ID has to be present in any file name within the directory. Note, that\n basin id '123' e.g. will match the following file names: 123_streamflow.txt, 123.nc, 00123456_daymet_v4_daily_na.nc4,\n streamflow_123.csv. Be sure, that your file names are unique, otherwise only the first occurence will be returned.\n\n Parameters\n ----------\n data_dir: str\n The data directory used for discovering a dataset file related to the specified basin\n basin: str\n ID of the basin\n\n Returns\n -------\n str\n Path of the file, which is related to the specified basin\n\n " files = glob.glob(f'{data_dir}/**/*{basin}*', recursive=True) if (len(files) == 0): raise FileNotFoundError(f"Can't find file for basin {basin} within directory {data_dir}.") if (len(files) > 1): logger.warning(f'Found multiple files for basin {basin} within directory {data_dir}. First one found will be returned.') return files[0]
def discover_single_file_for_basin(data_dir: str, basin: str) -> str: "\n Discovers a single dataset file for the specified basin. Discovery will be performed using the pattern\n '{data_dir}/*{basin}*', i.e. the basin ID has to be present in any file name within the directory. Note, that\n basin id '123' e.g. will match the following file names: 123_streamflow.txt, 123.nc, 00123456_daymet_v4_daily_na.nc4,\n streamflow_123.csv. Be sure, that your file names are unique, otherwise only the first occurence will be returned.\n\n Parameters\n ----------\n data_dir: str\n The data directory used for discovering a dataset file related to the specified basin\n basin: str\n ID of the basin\n\n Returns\n -------\n str\n Path of the file, which is related to the specified basin\n\n " files = glob.glob(f'{data_dir}/**/*{basin}*', recursive=True) if (len(files) == 0): raise FileNotFoundError(f"Can't find file for basin {basin} within directory {data_dir}.") if (len(files) > 1): logger.warning(f'Found multiple files for basin {basin} within directory {data_dir}. First one found will be returned.') return files[0]<|docstring|>Discovers a single dataset file for the specified basin. Discovery will be performed using the pattern '{data_dir}/*{basin}*', i.e. the basin ID has to be present in any file name within the directory. Note, that basin id '123' e.g. will match the following file names: 123_streamflow.txt, 123.nc, 00123456_daymet_v4_daily_na.nc4, streamflow_123.csv. Be sure, that your file names are unique, otherwise only the first occurence will be returned. Parameters ---------- data_dir: str The data directory used for discovering a dataset file related to the specified basin basin: str ID of the basin Returns ------- str Path of the file, which is related to the specified basin<|endoftext|>
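A hypothetical call to the discovery helper above (directory layout and basin id are invented): the recursive glob means the file may sit anywhere below data_dir, zero matches raise FileNotFoundError, and multiple matches log a warning and return the first hit.

# Given e.g. /data/camels/streamflow/usgs/01013500_streamflow_qc.txt on disk:
path = discover_single_file_for_basin('/data/camels/streamflow', '01013500')
print(path)  # '/data/camels/streamflow/usgs/01013500_streamflow_qc.txt'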
5bae12ef5bde30ebf9f8b6ddeb34522423a6cebe5b6876c594b4f55353f38801
def discover_files_for_basins(data_dir: str, basins: list) -> dict: '\n\n Parameters\n ----------\n data_dir: str\n The data directory used for discovering dataset files related to the specified basins\n basins: list\n List of basin IDs\n\n Returns\n -------\n dict\n Dict that holds the dataset path to each basin\n\n ' file_dict = {} for basin in basins: file = discover_single_file_for_basin(data_dir, basin) file_dict[basin] = file return file_dict
Parameters ---------- data_dir: str The data directory used for discovering dataset files related to the specified basins basins: list List of basin IDs Returns ------- dict Dict that holds the dataset path to each basin
libs/ioutils.py
discover_files_for_basins
SebaDro/st-deep-hydro
0
python
def discover_files_for_basins(data_dir: str, basins: list) -> dict: '\n\n Parameters\n ----------\n data_dir: str\n The data directory used for discovering dataset files related to the specified basins\n basins: list\n List of basin IDs\n\n Returns\n -------\n dict\n Dict that holds the dataset path to each basin\n\n ' file_dict = {} for basin in basins: file = discover_single_file_for_basin(data_dir, basin) file_dict[basin] = file return file_dict
def discover_files_for_basins(data_dir: str, basins: list) -> dict: '\n\n Parameters\n ----------\n data_dir: str\n The data directory used for discovering dataset files related to the specified basins\n basins: list\n List of basin IDs\n\n Returns\n -------\n dict\n Dict that holds the dataset path to each basin\n\n ' file_dict = {} for basin in basins: file = discover_single_file_for_basin(data_dir, basin) file_dict[basin] = file return file_dict<|docstring|>Parameters ---------- data_dir: str The data directory used for discovering dataset files related to the specified basins basins: list List of basin IDs Returns ------- dict Dict that holds the dataset path to each basin<|endoftext|>
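Usage sketch for the batch variant above (paths invented): it simply delegates per basin, so a single basin without a matching file aborts the whole mapping with FileNotFoundError.

basins = ['01013500', '01022500']
file_dict = discover_files_for_basins('/data/camels/forcings', basins)
for basin, path in file_dict.items():
    print(basin, '->', path)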
fe44185e1716e43cd797df396b29e645476c38b01746a0a5e3979455ccf0deb3
def discover_single_camels_us_forcings_file(data_dir: str, forcings_type: str, basin: str): "\n Discovers a single CAMELS-US forcing file by using the pattern '{data_dir}/**/{basin}_lump_{forcings_type}_forcing_leap.txt'.\n\n Parameters\n ----------\n data_dir: str\n Path to the CAMELS-US data directory for forcings.\n forcings_type: str\n Type of the forcings timeseries, i.e. one of 'daymet', 'maurer', or 'nldas'\n basin: str\n ID of the basin, the forcings file will be discovered for.\n\n Returns\n -------\n str\n Path to the discovered forcings file\n\n " type_dict = {'daymet': 'cida', 'maurer': 'maurer', 'nldas': 'nldas'} if (forcings_type in type_dict): files = glob.glob(f'{data_dir}/**/{basin}_lump_{type_dict[forcings_type]}_forcing_leap.txt', recursive=True) if (len(files) == 0): raise FileNotFoundError(f"Can't find file for basin {basin} within directory {data_dir}.") if (len(files) > 1): logger.warning(f'Found multiple files for basin {basin} within directory {data_dir}. First one found will be returned.') else: raise ValueError(f'Invalid forcings type `{forcings_type}` specified.') return files[0]
Discovers a single CAMELS-US forcing file by using the pattern '{data_dir}/**/{basin}_lump_{forcings_type}_forcing_leap.txt'. Parameters ---------- data_dir: str Path to the CAMELS-US data directory for forcings. forcings_type: str Type of the forcings timeseries, i.e. one of 'daymet', 'maurer', or 'nldas' basin: str ID of the basin, the forcings file will be discovered for. Returns ------- str Path to the discovered forcings file
libs/ioutils.py
discover_single_camels_us_forcings_file
SebaDro/st-deep-hydro
0
python
def discover_single_camels_us_forcings_file(data_dir: str, forcings_type: str, basin: str): "\n Discovers a single CAMELS-US forcing file by using the pattern '{data_dir}/**/{basin}_lump_{forcings_type}_forcing_leap.txt'.\n\n Parameters\n ----------\n data_dir: str\n Path to the CAMELS-US data directory for forcings.\n forcings_type: str\n Type of the forcings timeseries, i.e. one of 'daymet', 'maurer', or 'nldas'\n basin: str\n ID of the basin, the forcings file will be discovered for.\n\n Returns\n -------\n str\n Path to the discovered forcings file\n\n " type_dict = {'daymet': 'cida', 'maurer': 'maurer', 'nldas': 'nldas'} if (forcings_type in type_dict): files = glob.glob(f'{data_dir}/**/{basin}_lump_{type_dict[forcings_type]}_forcing_leap.txt', recursive=True) if (len(files) == 0): raise FileNotFoundError(f"Can't find file for basin {basin} within directory {data_dir}.") if (len(files) > 1): logger.warning(f'Found multiple files for basin {basin} within directory {data_dir}. First one found will be returned.') else: raise ValueError(f'Invalid forcings type `{forcings_type}` specified.') return files[0]
def discover_single_camels_us_forcings_file(data_dir: str, forcings_type: str, basin: str): "\n Discovers a single CAMELS-US forcing file by using the pattern '{data_dir}/**/{basin}_lump_{forcings_type}_forcing_leap.txt'.\n\n Parameters\n ----------\n data_dir: str\n Path to the CAMELS-US data directory for forcings.\n forcings_type: str\n Type of the forcings timeseries, i.e. one of 'daymet', 'maurer', or 'nldas'\n basin: str\n ID of the basin, the forcings file will be discovered for.\n\n Returns\n -------\n str\n Path to the discovered forcings file\n\n " type_dict = {'daymet': 'cida', 'maurer': 'maurer', 'nldas': 'nldas'} if (forcings_type in type_dict): files = glob.glob(f'{data_dir}/**/{basin}_lump_{type_dict[forcings_type]}_forcing_leap.txt', recursive=True) if (len(files) == 0): raise FileNotFoundError(f"Can't find file for basin {basin} within directory {data_dir}.") if (len(files) > 1): logger.warning(f'Found multiple files for basin {basin} within directory {data_dir}. First one found will be returned.') else: raise ValueError(f'Invalid forcings type `{forcings_type}` specified.') return files[0]<|docstring|>Discovers a single CAMELS-US forcing file by using the pattern '{data_dir}/**/{basin}_lump_{forcings_type}_forcing_leap.txt'. Parameters ---------- data_dir: str Path to the CAMELS-US data directory for forcings. forcings_type: str Type of the forcings timeseries, i.e. one of 'daymet', 'maurer', or 'nldas' basin: str ID of the basin, the forcings file will be discovered for. Returns ------- str Path to the discovered forcings file<|endoftext|>
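Note the design choice above: type_dict maps the public name 'daymet' to the 'cida' token that actually appears in CAMELS-US daymet file names. Usage sketch (directory and gauge ID are illustrative):

    path = discover_single_camels_us_forcings_file('camels/basin_mean_forcing', 'daymet', '01013500')
    # matches e.g. .../01013500_lump_cida_forcing_leap.txt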
d487b295edf02afa2676fdb526f1439ec750266cfaa74598b28ad7bdd3e9305a
def discover_single_camels_us_streamflow_file(data_dir: str, basin: str): "\n Discovers a single CAMELS-US streamflow file by using the pattern '{data_dir}/**/{basin}_streamflow_qc.txt'.\n\n Parameters\n ----------\n data_dir: str\n Path to the CAMELS-US data directory for streamflow.\n basin: str\n ID of the basin, the streamflow file will be discovered for.\n\n Returns\n -------\n str\n Path to the discovered streamflow file\n\n " files = glob.glob(f'{data_dir}/**/{basin}_streamflow_qc.txt', recursive=True) if (len(files) == 0): raise FileNotFoundError(f"Can't find file for basin {basin} within directory {data_dir}.") if (len(files) > 1): logger.warning(f'Found multiple files for basin {basin} within directory {data_dir}. First one found will be returned.') return files[0]
Discovers a single CAMELS-US streamflow file by using the pattern '{data_dir}/**/{basin}_streamflow_qc.txt'. Parameters ---------- data_dir: str Path to the CAMELS-US data directory for streamflow. basin: str ID of the basin, the streamflow file will be discovered for. Returns ------- str Path to the discovered streamflow file
libs/ioutils.py
discover_single_camels_us_streamflow_file
SebaDro/st-deep-hydro
0
python
def discover_single_camels_us_streamflow_file(data_dir: str, basin: str): "\n Discovers a single CAMELS-US streamflow file by using the pattern '{data_dir}/**/{basin}_streamflow_qc.txt'.\n\n Parameters\n ----------\n data_dir: str\n Path to the CAMELS-US data directory for streamflow.\n basin: str\n ID of the basin, the streamflow file will be discovered for.\n\n Returns\n -------\n str\n Path to the discovered streamflow file\n\n " files = glob.glob(f'{data_dir}/**/{basin}_streamflow_qc.txt', recursive=True) if (len(files) == 0): raise FileNotFoundError(f"Can't find file for basin {basin} within directory {data_dir}.") if (len(files) > 1): logger.warning(f'Found multiple files for basin {basin} within directory {data_dir}. First one found will be returned.') return files[0]
def discover_single_camels_us_streamflow_file(data_dir: str, basin: str): "\n Discovers a single CAMELS-US streamflow file by using the pattern '{data_dir}/**/{basin}_streamflow_qc.txt'.\n\n Parameters\n ----------\n data_dir: str\n Path to the CAMELS-US data directory for streamflow.\n basin: str\n ID of the basin, the streamflow file will be discovered for.\n\n Returns\n -------\n str\n Path to the discovered streamflow file\n\n " files = glob.glob(f'{data_dir}/**/{basin}_streamflow_qc.txt', recursive=True) if (len(files) == 0): raise FileNotFoundError(f"Can't find file for basin {basin} within directory {data_dir}.") if (len(files) > 1): logger.warning(f'Found multiple files for basin {basin} within directory {data_dir}. First one found will be returned.') return files[0]<|docstring|>Discovers a single CAMELS-US streamflow file by using the pattern '{data_dir}/**/{basin}_streamflow_qc.txt'. Parameters ---------- data_dir: str Path to the CAMELS-US data directory for streamflow. basin: str ID of the basin, the streamflow file will be discovered for. Returns ------- str Path to the discovered streamflow file<|endoftext|>
52e7966a21901e369812d9f83fde628d59cd80a416afecb782e20c5231b17f83
def discover_multiple_camels_us_forcings_files(data_dir: str, forcings_type: str, basins: list=None): "\n Discovers multiple CAMELS-US forcing files. All files will be considered that follow the pattern\n '{data_dir}/**/*_lump_{forcings_type}_forcing_leap.txt'.\n\n Parameters\n ----------\n data_dir: str\n Path to the CAMELS-US data directory for forcings.\n forcings_type: str\n Type of the forcing timeseries, i.e. one of 'daymet', 'maurer', or 'nldas'\n basins: list\n List of basins, the forcings files will be discovered for. If 'None', all present files will be considered\n\n Returns\n -------\n list\n List of forcing file paths for the specified basins.\n\n " type_dict = {'daymet': 'cida', 'maurer': 'maurer', 'nldas': 'nldas'} if (forcings_type in type_dict): files = glob.glob(f'{data_dir}/**/*_lump_{type_dict[forcings_type]}_forcing_leap.txt', recursive=True) if (basins is not None): files = [f for f in files if any(((basin == os.path.basename(f)[0:8]) for basin in basins))] else: raise ValueError(f'Invalid forcings type `{forcings_type}` specified.') return files
Discovers multiple CAMELS-US forcing files. All files will be considered that follow the pattern '{data_dir}/**/*_lump_{forcings_type}_forcing_leap.txt'. Parameters ---------- data_dir: str Path to the CAMELS-US data directory for forcings. forcings_type: str Type of the forcing timeseries, i.e. one of 'daymet', 'maurer', or 'nldas' basins: list List of basins, the forcings files will be discovered for. If 'None', all present files will be considered Returns ------- list List of forcing file paths for the specified basins.
libs/ioutils.py
discover_multiple_camels_us_forcings_files
SebaDro/st-deep-hydro
0
python
def discover_multiple_camels_us_forcings_files(data_dir: str, forcings_type: str, basins: list=None): "\n Discovers multiple CAMELS-US forcing files. All files will be considered that follow the pattern\n '{data_dir}/**/*_lump_{forcings_type}_forcing_leap.txt'.\n\n Parameters\n ----------\n data_dir: str\n Path to the CAMELS-US data directory for forcings.\n forcings_type: str\n Type of the forcing timeseries, i.e. one of 'daymet', 'maurer', or 'nldas'\n basins: list\n List of basins, the forcings files will be discovered for. If 'None', all present files will be considered\n\n Returns\n -------\n list\n List of forcing file paths for the specified basins.\n\n " type_dict = {'daymet': 'cida', 'maurer': 'maurer', 'nldas': 'nldas'} if (forcings_type in type_dict): files = glob.glob(f'{data_dir}/**/*_lump_{type_dict[forcings_type]}_forcing_leap.txt', recursive=True) if (basins is not None): files = [f for f in files if any(((basin == os.path.basename(f)[0:8]) for basin in basins))] else: raise ValueError(f'Invalid forcings type `{forcings_type}` specified.') return files
def discover_multiple_camels_us_forcings_files(data_dir: str, forcings_type: str, basins: list=None): "\n Discovers multiple CAMELS-US forcing files. All files will be considered that follow the pattern\n '{data_dir}/**/*_lump_{forcings_type}_forcing_leap.txt'.\n\n Parameters\n ----------\n data_dir: str\n Path to the CAMELS-US data directory for forcings.\n forcings_type: str\n Type of the forcing timeseries, i.e. one of 'daymet', 'maurer', or 'nldas'\n basins: list\n List of basins, the forcings files will be discovered for. If 'None', all present files will be considered\n\n Returns\n -------\n list\n List of forcing file paths for the specified basins.\n\n " type_dict = {'daymet': 'cida', 'maurer': 'maurer', 'nldas': 'nldas'} if (forcings_type in type_dict): files = glob.glob(f'{data_dir}/**/*_lump_{type_dict[forcings_type]}_forcing_leap.txt', recursive=True) if (basins is not None): files = [f for f in files if any(((basin == os.path.basename(f)[0:8]) for basin in basins))] else: raise ValueError(f'Invalid forcings type `{forcings_type}` specified.') return files<|docstring|>Discovers multiple CAMELS-US forcing files. All files will be considered that follow the pattern '{data_dir}/**/*_lump_{forcings_type}_forcing_leap.txt'. Parameters ---------- data_dir: str Path to the CAMELS-US data directory for forcings. forcings_type: str Type of the forcing timeseries, i.e. one of 'daymet', 'maurer', or 'nldas' basins: list List of basins, the forcings files will be discovered for. If 'None', all present files will be considered Returns ------- list List of forcing file paths for the specified basins.<|endoftext|>
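The basins filter above compares each file name's first eight characters to the basin IDs, matching the 8-digit CAMELS-US gauge ID convention. Sketch (values illustrative):

    files = discover_multiple_camels_us_forcings_files('camels/basin_mean_forcing', 'nldas', basins=['01013500'])
    # keeps only files whose base names start with '01013500'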
17e6dc814e1201cfb5e6d72cf36f4c1b61db2d191f4cb2a1a580934a104b2ea6
def discover_multiple_camels_us_streamflow_files(data_dir: str, basins: list=None): "\n Discovers multiple CAMELS-US streamflow files. All files will be considered that follow the pattern\n '{data_dir}/**/*_streamflow_qc.txt'.\n\n Parameters\n ----------\n data_dir: str\n Path to the CAMELS-US data directory for streamflow\n basins: list\n List of basins, the streamflow files will be discovered for. If 'None', all present files will be considered.\n\n Returns\n -------\n list\n List of streamflow file paths for the specified basins.\n\n " files = glob.glob(f'{data_dir}/**/*_streamflow_qc.txt', recursive=True) if (basins is not None): files = [f for f in files if any(((basin == os.path.basename(f)[0:8]) for basin in basins))] return files
Discovers multiple CAMELS-US streamflow files. All files will be considered that follow the pattern '{data_dir}/**/*_streamflow_qc.txt'. Parameters ---------- data_dir: str Path to the CAMELS-US data directory for streamflow basins: list List of basins, the streamflow files will be discovered for. If 'None', all present files will be considered. Returns ------- list List of streamflow file paths for the specified basins.
libs/ioutils.py
discover_multiple_camels_us_streamflow_files
SebaDro/st-deep-hydro
0
python
def discover_multiple_camels_us_streamflow_files(data_dir: str, basins: list=None): "\n Discovers multiple CAMELS-US streamflow files. All files will be considered that follow the pattern\n '{data_dir}/**/*_streamflow_qc.txt'.\n\n Parameters\n ----------\n data_dir: str\n Path to the CAMELS-US data directory for streamflow\n basins: list\n List of basins, the streamflow files will be discovered for. If 'None', all present files will be considered.\n\n Returns\n -------\n list\n List of streamflow file paths for the specified basins.\n\n " files = glob.glob(f'{data_dir}/**/*_streamflow_qc.txt', recursive=True) if (basins is not None): files = [f for f in files if any(((basin == os.path.basename(f)[0:8]) for basin in basins))] return files
def discover_multiple_camels_us_streamflow_files(data_dir: str, basins: list=None): "\n Discovers multiple CAMELS-US streamflow files. All files will be considered that follow the pattern\n '{data_dir}/**/*_streamflow_qc.txt'.\n\n Parameters\n ----------\n data_dir: str\n Path to the CAMELS-US data directory for streamflow\n basins: list\n List of basins, the streamflow files will be discovered for. If 'None', all present files will be considered.\n\n Returns\n -------\n list\n List of streamflow file paths for the specified basins.\n\n " files = glob.glob(f'{data_dir}/**/*_streamflow_qc.txt', recursive=True) if (basins is not None): files = [f for f in files if any(((basin == os.path.basename(f)[0:8]) for basin in basins))] return files<|docstring|>Discovers multiple CAMELS-US streamflow files. All files will be considered that follow the pattern '{data_dir}/**/*_streamflow_qc.txt'. Parameters ---------- data_dir: str Path to the CAMELS-US data directory for streamflow basins: list List of basins, the streamflow files will be discovered for. If 'None', all present files will be considered. Returns ------- list List of streamflow file paths for the specified basins.<|endoftext|>
caba2bd0b51382239cbbad1f6edb401c25d3d42f90d654f95ba9553e51eeaab1
def load_forcings(path: str, ds_type: str): '\n Load a dataset that contains forcing data\n\n Parameters\n ----------\n path: str\n Path to the forcings dataset\n ds_type: str\n Type of dataset. One of {camels-us, daymet-2d}\n\n Returns\n -------\n Dataset containing forcings timeseries data\n\n ' if (ds_type == 'camels-us'): return load_forcings_camels_us(path) if (ds_type == 'daymet-2d'): return load_forcings_daymet_2d(path) raise ValueError("Unsupported forcings dataset type '{}'".format(ds_type))
Load a dataset that contains forcing data Parameters ---------- path: str Path to the forcings dataset ds_type: str Type of dataset. One of {camels-us, daymet-2d} Returns ------- Dataset containing forcings timeseries data
libs/ioutils.py
load_forcings
SebaDro/st-deep-hydro
0
python
def load_forcings(path: str, ds_type: str): '\n Load a dataset that contains forcing data\n\n Parameters\n ----------\n path: str\n Path to the forcings dataset\n ds_type: str\n Type of dataset. One of {camels-us, daymet-2d}\n\n Returns\n -------\n Dataset containing forcings timeseries data\n\n ' if (ds_type == 'camels-us'): return load_forcings_camels_us(path) if (ds_type == 'daymet-2d'): return load_forcings_daymet_2d(path) raise ValueError("Unsupported forcings dataset type '{}'".format(ds_type))
def load_forcings(path: str, ds_type: str): '\n Load a dataset that contains forcing data\n\n Parameters\n ----------\n path: str\n Path to the forcings dataset\n ds_type: str\n Type of dataset. One of {camels-us, daymet-2d}\n\n Returns\n -------\n Dataset containing forcings timeseries data\n\n ' if (ds_type == 'camels-us'): return load_forcings_camels_us(path) if (ds_type == 'daymet-2d'): return load_forcings_daymet_2d(path) raise ValueError("Unsupported forcings dataset type '{}'".format(ds_type))<|docstring|>Load a dataset that contains forcing data Parameters ---------- path: str Path to the forcings dataset ds_type: str Type of dataset. One of {camels-us, daymet-2d} Returns ------- Dataset containing forcings timeseries data<|endoftext|>
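The dispatcher above keys on plain strings; a sketch of both branches (paths illustrative):

    df = load_forcings('01013500_lump_cida_forcing_leap.txt', 'camels-us')  # pandas.DataFrame
    ds = load_forcings('daymet_v4_daily_na.nc4', 'daymet-2d')               # xarray.Dataset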
c36278516b842111d233eebd68b4c1aee665d1f75bf8690cdac1d5df3f52b27f
def load_forcings_camels_us(path: str) -> pd.DataFrame: '\n Loads CAMELS forcing data from raw text files\n\n Parameters\n ----------\n path: str\n Path to the raw text file containing forcing data for a certain basin\n\n Returns\n -------\n pd.DataFrame\n DataFrame containing DateTime indexed forcing data for a basin\n\n ' colnames = pd.read_csv(path, sep=' ', skiprows=3, nrows=1, header=None) df = pd.read_csv(path, sep='\t', skiprows=4, header=None, decimal='.', names=colnames.iloc[0, 3:]) dates = df.iloc[:, 0] df = df.drop(columns=df.columns[0]) df['date'] = pd.to_datetime(dates.str.split(expand=True).drop([3], axis=1).rename(columns={0: 'year', 1: 'month', 2: 'day'})) df = df.set_index('date') return df
Loads CAMELS forcing data from raw text files Parameters ---------- path: str Path to the raw text file containing forcing data for a certain basin Returns ------- pd.DataFrame DataFrame containing DateTime indexed forcing data for a basin
libs/ioutils.py
load_forcings_camels_us
SebaDro/st-deep-hydro
0
python
def load_forcings_camels_us(path: str) -> pd.DataFrame: '\n Loads CAMELS forcing data from raw text files\n\n Parameters\n ----------\n path: str\n Path to the raw text file containing forcing data for a certain basin\n\n Returns\n -------\n pd.DataFrame\n DataFrame containing DateTime indexed forcing data for a basin\n\n ' colnames = pd.read_csv(path, sep=' ', skiprows=3, nrows=1, header=None) df = pd.read_csv(path, sep='\t', skiprows=4, header=None, decimal='.', names=colnames.iloc[0, 3:]) dates = df.iloc[:, 0] df = df.drop(columns=df.columns[0]) df['date'] = pd.to_datetime(dates.str.split(expand=True).drop([3], axis=1).rename(columns={0: 'year', 1: 'month', 2: 'day'})) df = df.set_index('date') return df
def load_forcings_camels_us(path: str) -> pd.DataFrame: '\n Loads CAMELS forcing data from raw text files\n\n Parameters\n ----------\n path: str\n Path to the raw text file containing forcing data for a certain basin\n\n Returns\n -------\n pd.DataFrame\n DataFrame containing DateTime indexed forcing data for a basin\n\n ' colnames = pd.read_csv(path, sep=' ', skiprows=3, nrows=1, header=None) df = pd.read_csv(path, sep='\t', skiprows=4, header=None, decimal='.', names=colnames.iloc[0, 3:]) dates = df.iloc[:, 0] df = df.drop(columns=df.columns[0]) df['date'] = pd.to_datetime(dates.str.split(expand=True).drop([3], axis=1).rename(columns={0: 'year', 1: 'month', 2: 'day'})) df = df.set_index('date') return df<|docstring|>Loads CAMELS forcing data from raw text files Parameters ---------- path: str Path to the raw text file containing forcing data for a certain basin Returns ------- pd.DataFrame DataFrame containing DateTime indexed forcing data for a basin<|endoftext|>
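The parser above encodes the CAMELS-US forcings layout: three metadata header lines (latitude, elevation, basin area), one column-name line, then daily rows whose first tab-separated field is a space-separated 'year month day hour' stamp. An illustrative file head, assuming the standard Daymet variable names (values are made up, and the data fields are tab-separated):

    45.3500
    250.0
    2303000000.0
    Year Mnth Day Hr dayl(s) prcp(mm/day) srad(W/m2) swe(mm) tmax(C) tmin(C) vp(Pa)
    1980 01 01 12    41875.60    0.00    163.18    0.00    -6.32    -14.24    180.38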
32a6e32d457fabfa804972fd983df3f4c090ad47b34561359cb4d00acc88fe15
def load_forcings_gauge_metadata(path: str) -> Tuple[(float, float, float)]: '\n Loads gauge metadata from the header of a CAMELS-US forcings file.\n\n Parameters\n ----------\n path: str\n Path to the forcings file.\n\n Returns\n -------\n tuple\n (gauge latitude, gauge elevation, basin area [m²])\n\n ' with open(path, 'r') as file: latitude = float(file.readline()) elevation = float(file.readline()) area = float(file.readline()) return (latitude, elevation, area)
Loads gauge metadata from the header of a CAMELS-US forcings file. Parameters ---------- path: str Path to the forcings file. Returns ------- tuple (gauge latitude, gauge elevation, basin area [m²])
libs/ioutils.py
load_forcings_gauge_metadata
SebaDro/st-deep-hydro
0
python
def load_forcings_gauge_metadata(path: str) -> Tuple[(float, float, float)]: '\n Loads gauge metadata from the header of a CAMELS-US forcings file.\n\n Parameters\n ----------\n path: str\n Path to the forcings file.\n\n Returns\n -------\n tuple\n (gauge latitude, gauge elevation, basin area [m²])\n\n ' with open(path, 'r') as file: latitude = float(file.readline()) elevation = float(file.readline()) area = float(file.readline()) return (latitude, elevation, area)
def load_forcings_gauge_metadata(path: str) -> Tuple[(float, float, float)]: '\n Loads gauge metadata from the header of a CAMELS-US forcings file.\n\n Parameters\n ----------\n path: str\n Path to the forcings file.\n\n Returns\n -------\n tuple\n (gauge latitude, gauge elevation, basin area [m²])\n\n ' with open(path, 'r') as file: latitude = float(file.readline()) elevation = float(file.readline()) area = float(file.readline()) return (latitude, elevation, area)<|docstring|>Loads gauge metadata from the header of a CAMELS-US forcings file. Parameters ---------- path: str Path to the forcings file. Returns ------- tuple (gauge latitude, gauge elevation, basin area [m²])<|endoftext|>
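The three values read here are exactly the metadata header lines that load_forcings_camels_us skips. Sketch (path illustrative):

    latitude, elevation, area_m2 = load_forcings_gauge_metadata('01013500_lump_cida_forcing_leap.txt')
    # latitude in degrees, elevation in metres, basin area in square metres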
c3c268efdeb85d65c008e45582ad9f6e4bbfe2333a298ef06368fb29bcf1e264
def load_forcings_daymet_2d(path: str) -> xr.Dataset: '\n\n Parameters\n ----------\n path: str\n Path to a Daymet NetCDF dataset\n\n Returns\n -------\n xarray.Dataset\n Dataset that contains two dimensional Daymet forcings data\n\n ' with xr.open_dataset(path) as ds: return ds
Parameters ---------- path: str Path to a Daymet NetCDF dataset Returns ------- xarray.Dataset Dataset that contains two dimensional Daymet forcings data
libs/ioutils.py
load_forcings_daymet_2d
SebaDro/st-deep-hydro
0
python
def load_forcings_daymet_2d(path: str) -> xr.Dataset: '\n\n Parameters\n ----------\n path: str\n Path to a Daymet NetCDF dataset\n\n Returns\n -------\n xarray.Dataset\n Dataset that contains two dimensional Daymet forcings data\n\n ' with xr.open_dataset(path) as ds: return ds
def load_forcings_daymet_2d(path: str) -> xr.Dataset: '\n\n Parameters\n ----------\n path: str\n Path to a Daymet NetCDF dataset\n\n Returns\n -------\n xarray.Dataset\n Dataset that contains two dimensional Daymet forcings data\n\n ' with xr.open_dataset(path) as ds: return ds<|docstring|>Parameters ---------- path: str Path to a Daymet NetCDF dataset Returns ------- xarray.Dataset Dataset that contains two dimensional Daymet forcings data<|endoftext|>
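One caveat worth flagging for the record above: xarray loads variables lazily, and returning from inside the with block closes the underlying NetCDF file, so later reads from the returned Dataset can fail. A hedged eager-loading variant (an assumption, not the repository's code):

    import xarray as xr

    def load_forcings_daymet_2d_eager(path: str) -> xr.Dataset:
        with xr.open_dataset(path) as ds:
            return ds.load()  # pull all variables into memory before the handle closes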
0b114c455bd7f3aa13bd449ef7c14a79e70f676913e887deebec28e0d90b6d71
def load_streamflow(path: str, ds_type: str): '\n Load streamflow data\n\n Parameters\n ----------\n path: str\n Path to a streamflow dataset\n ds_type: str\n Type of the streamflow dataset. One of {camels-us}\n\n Returns\n -------\n Dataset containing streamflow timeseries data\n\n ' if (ds_type == 'camels-us'): return load_streamflow_camels_us(path) raise ValueError("Unsupported streamflow dataset type '{}'".format(ds_type))
Load streamflow data Parameters ---------- path: str Path to a streamflow dataset ds_type: str Type of the streamflow dataset. One of {camels-us} Returns ------- Dataset containing streamflow timeseries data
libs/ioutils.py
load_streamflow
SebaDro/st-deep-hydro
0
python
def load_streamflow(path: str, ds_type: str): '\n Load streamflow data\n\n Parameters\n ----------\n path: str\n Path to a streamflow dataset\n ds_type: str\n Type of the streamflow dataset. One of {camels-us}\n\n Returns\n -------\n Dataset containing streamflow timeseries data\n\n ' if (ds_type == 'camels-us'): return load_streamflow_camels_us(path) raise ValueError("Unsupported streamflow dataset type '{}'".format(ds_type))
def load_streamflow(path: str, ds_type: str): '\n Load streamflow data\n\n Parameters\n ----------\n path: str\n Path to a streamflow dataset\n ds_type: str\n Type of the streamflow dataset. One of {camels-us}\n\n Returns\n -------\n Dataset containing streamflow timeseries data\n\n ' if (ds_type == 'camels-us'): return load_streamflow_camels_us(path) raise ValueError("Unsupported streamflow dataset type '{}'".format(ds_type))<|docstring|>Load streamflow data Parameters ---------- path: str Path to a streamflow dataset ds_type: str Type of the streamflow dataset. One of {camels-us} Returns ------- Dataset containing streamflow timeseries data<|endoftext|>
5fb146d712298dc8b92d7af1798fa3f33097a59d0b06ec9fc6157556ed75957b
def load_streamflow_camels_us(path: str) -> pd.DataFrame: '\n Loads CAMELS streamflow data from raw text files\n\n Parameters\n ----------\n path: str\n Path to the raw text file containing streamflow data for a certain basin\n\n Returns\n -------\n pd.DataFrame\n DataFrame containing DateTime indexed streamflow data for a basin\n\n ' df = pd.read_csv(path, delim_whitespace=True, header=None, decimal='.', na_values=['-999.00'], names=['gauge_id', 'year', 'month', 'day', 'streamflow', 'qc_flag'], dtype={'gauge_id': str}) df['date'] = pd.to_datetime(df[['year', 'month', 'day']]) df = df.drop(columns=['year', 'month', 'day']).set_index('date') return df
Loads CAMELS streamflow data from raw text files Parameters ---------- path: str Path to the raw text file containing streamflow data for a certain basin Returns ------- pd.DataFrame DataFrame containing DateTime indexed streamflow data for a basin
libs/ioutils.py
load_streamflow_camels_us
SebaDro/st-deep-hydro
0
python
def load_streamflow_camels_us(path: str) -> pd.DataFrame: '\n Loads CAMELS streamflow data from raw text files\n\n Parameters\n ----------\n path: str\n Path to the raw text file containing streamflow data for a certain basin\n\n Returns\n -------\n pd.DataFrame\n DataFrame containing DateTime indexed streamflow data for a basin\n\n ' df = pd.read_csv(path, delim_whitespace=True, header=None, decimal='.', na_values=['-999.00'], names=['gauge_id', 'year', 'month', 'day', 'streamflow', 'qc_flag'], dtype={'gauge_id': str}) df['date'] = pd.to_datetime(df[['year', 'month', 'day']]) df = df.drop(columns=['year', 'month', 'day']).set_index('date') return df
def load_streamflow_camels_us(path: str) -> pd.DataFrame: '\n Loads CAMELS streamflow data from raw text files\n\n Parameters\n ----------\n path: str\n Path to the raw text file containing streamflow data for a certain basin\n\n Returns\n -------\n pd.DataFrame\n DataFrame containing DateTime indexed streamflow data for a basin\n\n ' df = pd.read_csv(path, delim_whitespace=True, header=None, decimal='.', na_values=['-999.00'], names=['gauge_id', 'year', 'month', 'day', 'streamflow', 'qc_flag'], dtype={'gauge_id': str}) df['date'] = pd.to_datetime(df[['year', 'month', 'day']]) df = df.drop(columns=['year', 'month', 'day']).set_index('date') return df<|docstring|>Loads CAMELS streamflow data from raw text files Parameters ---------- path: str Path to the raw text file containing streamflow data for a certain basin Returns ------- pd.DataFrame DataFrame containing DateTime indexed streamflow data for a basin<|endoftext|>
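Usage sketch (path illustrative). In CAMELS-US the streamflow column is typically daily discharge in cubic feet per second, and the -999.00 sentinel arrives as NaN thanks to na_values above:

    df = load_streamflow_camels_us('01013500_streamflow_qc.txt')
    daily_q = df['streamflow']  # NaN where the gauge reported -999.00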
e8feb66cabfa5bda2d54bbdb3c2b6087609aca61ca51f2def5dc050ad840f770
def load_camels_us_basin_physical_characteristics(path: str) -> pd.DataFrame: '\n Loads physical characteristics for CAMELS-US basins\n\n Parameters\n ----------\n path: str\n Path to the metadata file\n\n Returns\n -------\n pd.DataFrame\n DataFrame containing physical characteristics for CAMELS-US basins\n\n ' return pd.read_csv(path, delim_whitespace=True, decimal='.', dtype={'BASIN_ID': str})
Loads physical characteristics for CAMELS-US basins Parameters ---------- path: str Path to the metadata file Returns ------- pd.DataFrame DataFrame containing physical characteristics for CAMELS-US basins
libs/ioutils.py
load_camels_us_basin_physical_characteristics
SebaDro/st-deep-hydro
0
python
def load_camels_us_basin_physical_characteristics(path: str) -> pd.DataFrame: '\n Loads physical characteristics for CAMELS-US basins\n\n Parameters\n ----------\n path: str\n Path to the metadata file\n\n Returns\n -------\n pd.DataFrame\n DataFrame containing physical characteristics for CAMELS-US basins\n\n ' return pd.read_csv(path, delim_whitespace=True, decimal='.', dtype={'BASIN_ID': str})
def load_camels_us_basin_physical_characteristics(path: str) -> pd.DataFrame: '\n Loads physical characteristics for CAMELS-US basins\n\n Parameters\n ----------\n path: str\n Path to the metadata file\n\n Returns\n -------\n pd.DataFrame\n DataFrame containing physical characteristics for CAMELS-US basins\n\n ' return pd.read_csv(path, delim_whitespace=True, decimal='.', dtype={'BASIN_ID': str})<|docstring|>Loads physical characteristics for CAMELS-US basins Parameters ---------- path: str Path to the metadata file Returns ------- pd.DataFrame DataFrame containing physical characteristics for CAMELS-US basins<|endoftext|>
50a67429d39e26fd9b9059d3b51236a9b603f5d1c3290aeda8032f48ccc1f941
def load_camels_us_gauge_information(path: str) -> pd.DataFrame: '\n Loads gauge information metadata for CAMELS-US basins\n\n Parameters\n ----------\n path: str\n Path to the metadata file\n\n Returns\n -------\n pd.DataFrame\n DataFrame containing gauge information for CAMELS-US basins\n\n ' return pd.read_csv(path, delim_whitespace=True, decimal='.', dtype={'HUC_02': str, 'GAGE_ID': str})
Loads gauge information metadata for CAMELS-US basins Parameters ---------- path: str Path to the metadata file Returns ------- pd.DataFrame DataFrame containing gauge information for CAMELS-US basins
libs/ioutils.py
load_camels_us_gauge_information
SebaDro/st-deep-hydro
0
python
def load_camels_us_gauge_information(path: str) -> pd.DataFrame: '\n Loads gauge information metadata for CAMELS-US basins\n\n Parameters\n ----------\n path: str\n Path to the metadata file\n\n Returns\n -------\n pd.DataFrame\n DataFrame containing gauge information for CAMELS-US basins\n\n ' return pd.read_csv(path, delim_whitespace=True, decimal='.', dtype={'HUC_02': str, 'GAGE_ID': str})
def load_camels_us_gauge_information(path: str) -> pd.DataFrame: '\n Loads gauge information metadata for CAMELS-US basins\n\n Parameters\n ----------\n path: str\n Path to the metadata file\n\n Returns\n -------\n pd.DataFrame\n DataFrame containing gauge information for CAMELS-US basins\n\n ' return pd.read_csv(path, delim_whitespace=True, decimal='.', dtype={'HUC_02': str, 'GAGE_ID': str})<|docstring|>Loads gauge information metadata for CAMELS-US basins Parameters ---------- path: str Path to the metadata file Returns ------- pd.DataFrame DataFrame containing gauge information for CAMELS-US basins<|endoftext|>
2f69fb6a275a63089a9d5a9e70d5a73c4738095b09be17b2ddaf664c4736a3db
def create_out_dir(output: str, name: str) -> str: '\n Creates a directory in the given output folder for a given name and the current timestamp that can be used for\n storing outputs such as logs, monitoring metrics or saved models\n\n Parameters\n ----------\n output: str\n Output directory\n name: str\n Name of the current run\n\n Returns\n -------\n str\n Path of the created directory\n\n ' timestamp = dt.datetime.now().strftime('%Y%m%d%H%M%S%z') out_dir = os.path.join(output, f'{timestamp}_{name}') if (not os.path.exists(out_dir)): os.mkdir(out_dir) logger.info(f'Created directory {out_dir} for storing outputs.') else: logger.warning(f'Directory {out_dir} already exists.') return out_dir
Creates a directory in the given output folder for a given name and the current timestamp that can be used for storing outputs such as logs, monitoring metrics or saved models Parameters ---------- output: str Output directory name: str Name of the current run Returns ------- str Path of the created directory
libs/ioutils.py
create_out_dir
SebaDro/st-deep-hydro
0
python
def create_out_dir(output: str, name: str) -> str: '\n Creates a directory in the given output folder for a given name and the current timestamp that can be used for\n storing outputs such as logs, monitoring metrics or saved models\n\n Parameters\n ----------\n output: str\n Output directory\n name: str\n Name of the current run\n\n Returns\n -------\n str\n Path of the created directory\n\n ' timestamp = dt.datetime.now().strftime('%Y%m%d%H%M%S%z') out_dir = os.path.join(output, f'{timestamp}_{name}') if (not os.path.exists(out_dir)): os.mkdir(out_dir) logger.info(f'Created directory {out_dir} for storing outputs.') else: logger.warning(f'Directory {out_dir} already exists.') return out_dir
def create_out_dir(output: str, name: str) -> str: '\n Creates a directory in the given output folder for a given name and the current timestamp that can be used for\n storing outputs such as logs, monitoring metrics or saved models\n\n Parameters\n ----------\n output: str\n Output directory\n name: str\n Name of the current run\n\n Returns\n -------\n str\n Path of the created directory\n\n ' timestamp = dt.datetime.now().strftime('%Y%m%d%H%M%S%z') out_dir = os.path.join(output, f'{timestamp}_{name}') if (not os.path.exists(out_dir)): os.mkdir(out_dir) logger.info(f'Created directory {out_dir} for storing outputs.') else: logger.warning(f'Directory {out_dir} already exists.') return out_dir<|docstring|>Creates a directory in the given output folder for a given name and the current timestamp that can be used for storing outputs such as logs, monitoring metrics or saved models Parameters ---------- output: str Output directory name: str Name of the current run Returns ------- str Path of the created directory<|endoftext|>
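Two behaviors worth noting in the record above: os.mkdir requires the parent output directory to exist already, and because dt.datetime.now() is timezone-naive the '%z' directive renders as an empty string, so the folder is named '<timestamp>_<name>'. Sketch (names illustrative):

    out_dir = create_out_dir('runs', 'lstm-camels')
    # -> e.g. 'runs/20240101123000_lstm-camels'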
3da4ec8a0121e6263396b68c2474dc10a9abe0e35d4a03f77277147e340fbd4e
def load_tracker(self, path, tracker_names=None, store=True): '\n Args:\n path(str): path to result\n tracker_names(list): name of tracker\n ' if (not tracker_names): tracker_names = [x.split('/')[(- 1)] for x in glob(path) if os.path.isdir(x)] if isinstance(tracker_names, str): tracker_names = [tracker_names] for name in tracker_names: traj_file = os.path.join(path, name, (self.name + '.txt')) if os.path.exists(traj_file): with open(traj_file, 'r') as f: pred_traj = [list(map(float, x.strip().split(','))) for x in f.readlines()] if (len(pred_traj) != len(self.gt_traj)): print(name, len(pred_traj), len(self.gt_traj), self.name) if store: self.pred_trajs[name] = pred_traj else: return pred_traj else: print(traj_file) self.tracker_names = list(self.pred_trajs.keys())
Args: path(str): path to result tracker_names(list): name of tracker
toolkit/datasets/video.py
load_tracker
qilei123/pysot
4,318
python
def load_tracker(self, path, tracker_names=None, store=True): '\n Args:\n path(str): path to result\n tracker_names(list): name of tracker\n ' if (not tracker_names): tracker_names = [x.split('/')[(- 1)] for x in glob(path) if os.path.isdir(x)] if isinstance(tracker_names, str): tracker_names = [tracker_names] for name in tracker_names: traj_file = os.path.join(path, name, (self.name + '.txt')) if os.path.exists(traj_file): with open(traj_file, 'r') as f: pred_traj = [list(map(float, x.strip().split(','))) for x in f.readlines()] if (len(pred_traj) != len(self.gt_traj)): print(name, len(pred_traj), len(self.gt_traj), self.name) if store: self.pred_trajs[name] = pred_traj else: return pred_traj else: print(traj_file) self.tracker_names = list(self.pred_trajs.keys())
def load_tracker(self, path, tracker_names=None, store=True): '\n Args:\n path(str): path to result\n tracker_names(list): name of tracker\n ' if (not tracker_names): tracker_names = [x.split('/')[(- 1)] for x in glob(path) if os.path.isdir(x)] if isinstance(tracker_names, str): tracker_names = [tracker_names] for name in tracker_names: traj_file = os.path.join(path, name, (self.name + '.txt')) if os.path.exists(traj_file): with open(traj_file, 'r') as f: pred_traj = [list(map(float, x.strip().split(','))) for x in f.readlines()] if (len(pred_traj) != len(self.gt_traj)): print(name, len(pred_traj), len(self.gt_traj), self.name) if store: self.pred_trajs[name] = pred_traj else: return pred_traj else: print(traj_file) self.tracker_names = list(self.pred_trajs.keys())<|docstring|>Args: path(str): path to result tracker_names(list): name of tracker<|endoftext|>
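Usage sketch for the record above (load_tracker is a method of the toolkit's video class; the result directory and tracker name are assumptions). Results are expected under '<path>/<tracker name>/<video name>.txt':

    # video is an instance of the toolkit's Video class (assumption)
    traj = video.load_tracker('results/OTB100', tracker_names='SiamRPN_alex', store=False)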
a744034928daf69d239c8e00e6d7e633ba193205555dccd927e4b6d7a7c52304
def draw_box(self, roi, img, linewidth, color, name=None): '\n roi: rectangle or polygon\n img: numpy array img\n linewidth: line width of the bbox\n ' if ((len(roi) > 6) and ((len(roi) % 2) == 0)): pts = np.array(roi, np.int32).reshape((- 1), 1, 2) color = tuple(map(int, color)) img = cv2.polylines(img, [pts], True, color, linewidth) pt = (pts[(0, 0, 0)], (pts[(0, 0, 1)] - 5)) if name: img = cv2.putText(img, name, pt, cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, color, 1) elif (len(roi) == 4): if (not np.isnan(roi[0])): roi = list(map(int, roi)) color = tuple(map(int, color)) img = cv2.rectangle(img, (roi[0], roi[1]), ((roi[0] + roi[2]), (roi[1] + roi[3])), color, linewidth) if name: img = cv2.putText(img, name, (roi[0], (roi[1] - 5)), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, color, 1) return img
roi: rectangle or polygon img: numpy array img linewidth: line width of the bbox
toolkit/datasets/video.py
draw_box
qilei123/pysot
4,318
python
def draw_box(self, roi, img, linewidth, color, name=None): '\n roi: rectangle or polygon\n img: numpy array img\n linewidth: line width of the bbox\n ' if ((len(roi) > 6) and ((len(roi) % 2) == 0)): pts = np.array(roi, np.int32).reshape((- 1), 1, 2) color = tuple(map(int, color)) img = cv2.polylines(img, [pts], True, color, linewidth) pt = (pts[(0, 0, 0)], (pts[(0, 0, 1)] - 5)) if name: img = cv2.putText(img, name, pt, cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, color, 1) elif (len(roi) == 4): if (not np.isnan(roi[0])): roi = list(map(int, roi)) color = tuple(map(int, color)) img = cv2.rectangle(img, (roi[0], roi[1]), ((roi[0] + roi[2]), (roi[1] + roi[3])), color, linewidth) if name: img = cv2.putText(img, name, (roi[0], (roi[1] - 5)), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, color, 1) return img
def draw_box(self, roi, img, linewidth, color, name=None): '\n roi: rectangle or polygon\n img: numpy array img\n linewidth: line width of the bbox\n ' if ((len(roi) > 6) and ((len(roi) % 2) == 0)): pts = np.array(roi, np.int32).reshape((- 1), 1, 2) color = tuple(map(int, color)) img = cv2.polylines(img, [pts], True, color, linewidth) pt = (pts[(0, 0, 0)], (pts[(0, 0, 1)] - 5)) if name: img = cv2.putText(img, name, pt, cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, color, 1) elif (len(roi) == 4): if (not np.isnan(roi[0])): roi = list(map(int, roi)) color = tuple(map(int, color)) img = cv2.rectangle(img, (roi[0], roi[1]), ((roi[0] + roi[2]), (roi[1] + roi[3])), color, linewidth) if name: img = cv2.putText(img, name, (roi[0], (roi[1] - 5)), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, color, 1) return img<|docstring|>roi: rectangle or polygon img: numpy array img linewidth: line width of the bbox<|endoftext|>
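A sketch of the rectangle branch above (the video instance and values are illustrative; roi is (x, y, width, height) and color is a BGR tuple):

    import cv2
    import numpy as np

    canvas = np.zeros((240, 320, 3), np.uint8)
    # video is an instance of the toolkit's Video class (assumption)
    canvas = video.draw_box([10, 20, 60, 40], canvas, linewidth=2, color=(0, 255, 0), name='gt')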
2c272c49d0722d3e64eb4059d615ade9c1665b5cd21c61815224c1ee006e2834
def show(self, pred_trajs={}, linewidth=2, show_name=False): "\n pred_trajs: dict of pred_traj, {'tracker_name': list of traj}\n pred_traj should contain polygon or rectangle(x, y, width, height)\n linewidth: line width of the bbox\n " assert (self.imgs is not None) video = [] cv2.namedWindow(self.name, cv2.WINDOW_NORMAL) colors = {} if ((len(pred_trajs) == 0) and (len(self.pred_trajs) > 0)): pred_trajs = self.pred_trajs for (i, (roi, img)) in enumerate(zip(self.gt_traj, self.imgs[self.start_frame:(self.end_frame + 1)])): img = img.copy() if (len(img.shape) == 2): img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR) else: img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR) img = self.draw_box(roi, img, linewidth, (0, 255, 0), ('gt' if show_name else None)) for (name, trajs) in pred_trajs.items(): if (name not in colors): color = tuple(np.random.randint(0, 256, 3)) colors[name] = color else: color = colors[name] img = self.draw_box(trajs[0][i], img, linewidth, color, (name if show_name else None)) cv2.putText(img, str((i + self.start_frame)), (5, 20), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, (255, 255, 0), 2) cv2.imshow(self.name, img) cv2.waitKey(40) video.append(img.copy()) return video
pred_trajs: dict of pred_traj, {'tracker_name': list of traj} pred_traj should contain polygon or rectangle(x, y, width, height) linewidth: line width of the bbox
toolkit/datasets/video.py
show
qilei123/pysot
4,318
python
def show(self, pred_trajs={}, linewidth=2, show_name=False): "\n pred_trajs: dict of pred_traj, {'tracker_name': list of traj}\n pred_traj should contain polygon or rectangle(x, y, width, height)\n linewidth: line width of the bbox\n " assert (self.imgs is not None) video = [] cv2.namedWindow(self.name, cv2.WINDOW_NORMAL) colors = {} if ((len(pred_trajs) == 0) and (len(self.pred_trajs) > 0)): pred_trajs = self.pred_trajs for (i, (roi, img)) in enumerate(zip(self.gt_traj, self.imgs[self.start_frame:(self.end_frame + 1)])): img = img.copy() if (len(img.shape) == 2): img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR) else: img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR) img = self.draw_box(roi, img, linewidth, (0, 255, 0), ('gt' if show_name else None)) for (name, trajs) in pred_trajs.items(): if (name not in colors): color = tuple(np.random.randint(0, 256, 3)) colors[name] = color else: color = colors[name] img = self.draw_box(trajs[0][i], img, linewidth, color, (name if show_name else None)) cv2.putText(img, str((i + self.start_frame)), (5, 20), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, (255, 255, 0), 2) cv2.imshow(self.name, img) cv2.waitKey(40) video.append(img.copy()) return video
def show(self, pred_trajs={}, linewidth=2, show_name=False): "\n pred_trajs: dict of pred_traj, {'tracker_name': list of traj}\n pred_traj should contain polygon or rectangle(x, y, width, height)\n linewidth: line width of the bbox\n " assert (self.imgs is not None) video = [] cv2.namedWindow(self.name, cv2.WINDOW_NORMAL) colors = {} if ((len(pred_trajs) == 0) and (len(self.pred_trajs) > 0)): pred_trajs = self.pred_trajs for (i, (roi, img)) in enumerate(zip(self.gt_traj, self.imgs[self.start_frame:(self.end_frame + 1)])): img = img.copy() if (len(img.shape) == 2): img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR) else: img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR) img = self.draw_box(roi, img, linewidth, (0, 255, 0), ('gt' if show_name else None)) for (name, trajs) in pred_trajs.items(): if (name not in colors): color = tuple(np.random.randint(0, 256, 3)) colors[name] = color else: color = colors[name] img = self.draw_box(trajs[0][i], img, linewidth, color, (name if show_name else None)) cv2.putText(img, str((i + self.start_frame)), (5, 20), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, (255, 255, 0), 2) cv2.imshow(self.name, img) cv2.waitKey(40) video.append(img.copy()) return video<|docstring|>pred_trajs: dict of pred_traj, {'tracker_name': list of traj} pred_traj should contain polygon or rectangle(x, y, width, height) linewidth: line width of the bbox<|endoftext|>
6e0d2a9ed76393fe36605f14f250a7f09d0a54956242491556f10d468c7717b0
def read_tooltips(gui_name): 'Read and format tooltips, return a dict.' dirname = os.path.dirname(__file__) help_path = os.path.join(dirname, 'help', (gui_name + '.json')) with open(help_path) as fid: raw_tooltips = json.load(fid) format_ = TextWrapper(width=60, fix_sentence_endings=True).fill return {key: format_(text) for (key, text) in raw_tooltips.items()}
Read and format tooltips, return a dict.
mne/gui/_help.py
read_tooltips
hofaflo/mne-python
1,953
python
def read_tooltips(gui_name): dirname = os.path.dirname(__file__) help_path = os.path.join(dirname, 'help', (gui_name + '.json')) with open(help_path) as fid: raw_tooltips = json.load(fid) format_ = TextWrapper(width=60, fix_sentence_endings=True).fill return {key: format_(text) for (key, text) in raw_tooltips.items()}
def read_tooltips(gui_name): dirname = os.path.dirname(__file__) help_path = os.path.join(dirname, 'help', (gui_name + '.json')) with open(help_path) as fid: raw_tooltips = json.load(fid) format_ = TextWrapper(width=60, fix_sentence_endings=True).fill return {key: format_(text) for (key, text) in raw_tooltips.items()}<|docstring|>Read and format tooltips, return a dict.<|endoftext|>
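read_tooltips expects help/<gui_name>.json next to the module, holding a flat key-to-text JSON mapping; each value is re-wrapped to 60 columns. Sketch (the gui name and file contents are illustrative):

    # help/coreg.json (illustrative): {"fit_fiducials": "Fit the digitized fiducials to the head surface."}
    tooltips = read_tooltips('coreg')
    print(tooltips['fit_fiducials'])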
864c9cdd85f94e511ff2cf9ddbba0f537b9127abeef30813f495531b6ba38035
def test_no_args(capsys, monkeypatch): 'With no arguments, awsudo exits with usage.' monkeypatch.setattr(sys, 'argv', ['awsudo']) with pytest.raises(SystemExit): main.main() (out, err) = capsys.readouterr() assert ('Usage:' in err)
With no arguments, awsudo exits with usage.
awsudo/test/test_main.py
test_no_args
outersystems/awsudo
1
python
def test_no_args(capsys, monkeypatch): monkeypatch.setattr(sys, 'argv', ['awsudo']) with pytest.raises(SystemExit): main.main() (out, err) = capsys.readouterr() assert ('Usage:' in err)
def test_no_args(capsys, monkeypatch): monkeypatch.setattr(sys, 'argv', ['awsudo']) with pytest.raises(SystemExit): main.main() (out, err) = capsys.readouterr() assert ('Usage:' in err)<|docstring|>With no arguments, awsudo exits with usage.<|endoftext|>
9e7a4ad343238d597582c3bbf5f1ff0e8ee279548e98578bb921479e17d2df8a
def test_only_option(capsys, monkeypatch): 'With only options, awsudo exits with usage.' monkeypatch.setattr(sys, 'argv', ['awsudo', '-u', 'default']) with pytest.raises(SystemExit): main.main() (out, err) = capsys.readouterr() assert ('Usage:' in err)
With only options, awsudo exits with usage.
awsudo/test/test_main.py
test_only_option
outersystems/awsudo
1
python
def test_only_option(capsys, monkeypatch): monkeypatch.setattr(sys, 'argv', ['awsudo', '-u', 'default']) with pytest.raises(SystemExit): main.main() (out, err) = capsys.readouterr() assert ('Usage:' in err)
def test_only_option(capsys, monkeypatch): monkeypatch.setattr(sys, 'argv', ['awsudo', '-u', 'default']) with pytest.raises(SystemExit): main.main() (out, err) = capsys.readouterr() assert ('Usage:' in err)<|docstring|>With only options, awsudo exits with usage.<|endoftext|>
3b9be94fb3264a4b1dcbb2ec8d7cdc4cc157678ddf7a2dd3c18787784067823a
def test_parseArgs_env_profile(monkeypatch): 'Env vars is taken if no option are given.' environ = {'AWS_PROFILE': 'profile'} monkeypatch.setattr(os, 'environ', environ) monkeypatch.setattr(sys, 'argv', ['awsudo', 'command']) (profile, args) = main.parseArgs() assert (profile == 'profile') assert (args == ['command'])
Env vars is taken if no option are given.
awsudo/test/test_main.py
test_parseArgs_env_profile
outersystems/awsudo
1
python
def test_parseArgs_env_profile(monkeypatch): environ = {'AWS_PROFILE': 'profile'} monkeypatch.setattr(os, 'environ', environ) monkeypatch.setattr(sys, 'argv', ['awsudo', 'command']) (profile, args) = main.parseArgs() assert (profile == 'profile') assert (args == ['command'])
def test_parseArgs_env_profile(monkeypatch): environ = {'AWS_PROFILE': 'profile'} monkeypatch.setattr(os, 'environ', environ) monkeypatch.setattr(sys, 'argv', ['awsudo', 'command']) (profile, args) = main.parseArgs() assert (profile == 'profile') assert (args == ['command'])<|docstring|>Env vars is taken if no option are given.<|endoftext|>
99a619f87c8a2aacbd0f27129685a50cdc57b8511e96da6e0d412bc37764f38e
def test_parseArgs_option_over_environ(monkeypatch): 'Options values are taken even if environment variables are set.' environ = {'AWS_PROFILE': 'profile-environ'} monkeypatch.setattr(os, 'environ', environ) monkeypatch.setattr(sys, 'argv', ['awsudo', '-u', 'profile-option', 'command']) (profile, args) = main.parseArgs() assert (profile == 'profile-option') assert (args == ['command'])
Options values are taken even if environment variables are set.
awsudo/test/test_main.py
test_parseArgs_option_over_environ
outersystems/awsudo
1
python
def test_parseArgs_option_over_environ(monkeypatch): environ = {'AWS_PROFILE': 'profile-environ'} monkeypatch.setattr(os, 'environ', environ) monkeypatch.setattr(sys, 'argv', ['awsudo', '-u', 'profile-option', 'command']) (profile, args) = main.parseArgs() assert (profile == 'profile-option') assert (args == ['command'])
def test_parseArgs_option_over_environ(monkeypatch): environ = {'AWS_PROFILE': 'profile-environ'} monkeypatch.setattr(os, 'environ', environ) monkeypatch.setattr(sys, 'argv', ['awsudo', '-u', 'profile-option', 'command']) (profile, args) = main.parseArgs() assert (profile == 'profile-option') assert (args == ['command'])<|docstring|>Options values are taken even if environment variables are set.<|endoftext|>
4f70c342c2e1a765df11b8bc76432cd0c2498cd9dfbef0f80fe235dcbbdff5cd
def test_cleanEnvironment(monkeypatch): 'cleanEnvironment strips AWS and boto configuration.' environ = {'AWS_SECRET': 'password1', 'BOTO_CONFIG': 'please work', 'HOME': 'ward bound'} monkeypatch.setattr(os, 'environ', environ) main.cleanEnvironment() assert ('AWS_SECRET' not in environ) assert ('BOTO_CONFIG' not in environ) assert (environ['HOME'] == 'ward bound')
cleanEnvironment strips AWS and boto configuration.
awsudo/test/test_main.py
test_cleanEnvironment
outersystems/awsudo
1
python
def test_cleanEnvironment(monkeypatch): environ = {'AWS_SECRET': 'password1', 'BOTO_CONFIG': 'please work', 'HOME': 'ward bound'} monkeypatch.setattr(os, 'environ', environ) main.cleanEnvironment() assert ('AWS_SECRET' not in environ) assert ('BOTO_CONFIG' not in environ) assert (environ['HOME'] == 'ward bound')
def test_cleanEnvironment(monkeypatch): environ = {'AWS_SECRET': 'password1', 'BOTO_CONFIG': 'please work', 'HOME': 'ward bound'} monkeypatch.setattr(os, 'environ', environ) main.cleanEnvironment() assert ('AWS_SECRET' not in environ) assert ('BOTO_CONFIG' not in environ) assert (environ['HOME'] == 'ward bound')<|docstring|>cleanEnvironment strips AWS and boto configuration.<|endoftext|>
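The four awsudo tests above share one pattern: monkeypatch swaps sys.argv or os.environ for the duration of a single test and restores the originals automatically afterwards, keeping tests isolated. A minimal standalone illustration (not from the repository):

    import os

    def test_isolated_environ(monkeypatch):
        monkeypatch.setattr(os, 'environ', {'AWS_PROFILE': 'demo'})
        assert os.environ['AWS_PROFILE'] == 'demo'
    # after the test returns, pytest restores the original os.environ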
f00f0a7b22ed1aa4dadea8c6f218da3c53add0716bf454ebb45dd06a17143331
def format_date(data_json) -> List[Dict[(str, Union[(str, int, datetime.datetime)])]]: ' Formats the date from a json object. ' for row in data_json: row_date = row['product_url__created_at'] row_date_formatted = eval(row_date) row['product_url__created_at'] = row_date_formatted return data_json
Formats the date from a json object.
utils/utils.py
format_date
yagomichalak/django-test
0
python
def format_date(data_json) -> List[Dict[(str, Union[(str, int, datetime.datetime)])]]: ' ' for row in data_json: row_date = row['product_url__created_at'] row_date_formatted = eval(row_date) row['product_url__created_at'] = row_date_formatted return data_json
def format_date(data_json) -> List[Dict[(str, Union[(str, int, datetime.datetime)])]]: ' ' for row in data_json: row_date = row['product_url__created_at'] row_date_formatted = eval(row_date) row['product_url__created_at'] = row_date_formatted return data_json<|docstring|>Formats the date from a json object.<|endoftext|>
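format_date above relies on eval to turn a serialized repr such as "datetime.datetime(2021, 5, 1, 0, 0)" back into a datetime, which executes arbitrary expressions from the data. A narrower alternative, assuming the timestamps were instead serialized as ISO 8601 strings (an assumption, not the repository's actual format):

    import datetime

    row_date = '2021-05-01T00:00:00'  # illustrative ISO 8601 value
    parsed = datetime.datetime.fromisoformat(row_date)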
f5efd6170ad467276d974dfca159c444a3db6c84c559240b1bf6bf0d91ca5866
def group_products(data_json) -> Dict[(str, Union[(str, int, datetime.datetime)])]: ' Group duplicate products. ' new_data_list: List[List[Dict[(str, Union[(str, int, datetime.datetime)])]]] = [] products: Dict[(str, List[Any])] = {} for row in data_json: if (row['product_url'] in products): products[row['product_url']].append(row) else: products[row['product_url']] = [row] return products
Group duplicate products.
utils/utils.py
group_products
yagomichalak/django-test
0
python
def group_products(data_json) -> Dict[(str, Union[(str, int, datetime.datetime)])]: ' ' new_data_list: List[List[Dict[(str, Union[(str, int, datetime.datetime)])]]] = [] products: Dict[(str, List[Any])] = {} for row in data_json: if (row['product_url'] in products): products[row['product_url']].append(row) else: products[row['product_url']] = [row] return products
def group_products(data_json) -> Dict[(str, Union[(str, int, datetime.datetime)])]: ' ' new_data_list: List[List[Dict[(str, Union[(str, int, datetime.datetime)])]]] = [] products: Dict[(str, List[Any])] = {} for row in data_json: if (row['product_url'] in products): products[row['product_url']].append(row) else: products[row['product_url']] = [row] return products<|docstring|>Group duplicate products.<|endoftext|>
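The loop above is hand-rolled grouping (note that new_data_list is assigned but never used); collections.defaultdict expresses the same idea more compactly. An equivalent sketch with illustrative rows:

    from collections import defaultdict

    data_json = [{'product_url': 'a', 'price': 1}, {'product_url': 'a', 'price': 2}]
    products = defaultdict(list)
    for row in data_json:
        products[row['product_url']].append(row)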
5080d6357d423828944d682d71977d4ad03d0df35f12c76cea91011b8b68b968
def setup(bot: commands.Bot): 'Setup the discipline cog' bot.add_cog(Discipline(bot))
Setup the discipline cog
bot/cogs/discipline.py
setup
Things-N-Stuff/Turt
0
python
def setup(bot: commands.Bot): bot.add_cog(Discipline(bot))
def setup(bot: commands.Bot): bot.add_cog(Discipline(bot))<|docstring|>Setup the discipline cog<|endoftext|>
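This module-level setup hook is what discord.py's extension loader calls; a bot would pull the cog in roughly like this (pre-2.0 synchronous style, matching the record's synchronous setup):

    bot.load_extension('bot.cogs.discipline')  # imports the module and calls setup(bot)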
32f29117ad1b32aac7db7dc59c7209521b07808f51e5dd1339d7b31fbac5eddf
@commands.Command @server_only() @whitelist_only() async def warn(self, ctx, user: discord.User, severity: int, reason: str): "\n Permissions Requirement: \n Warning Whitelisted Users: Server Owner\n Warning Non-Whitelisted Users: Server Whitelisted\n Parameters:\n user - An @ mention or the userid of the user to be warned.\n severity - How severe the offense was. This number is added to the user's account for this server.\n reason - The reason for warning the user. It is recommended to issue warnings to whitelisted users who warn without adequate reason, or even for whitelist status to be revoked in extreme cases.\n Description:\n Warn a user for something they did and add <severity> severity points to their account for this server.\n Once a punishable number of severity points has been reached, then a punishment is automatically issued [2][3].\n If multiple punishable severity levels are surpassed, then the longest ban is issued.\n Punishments: [1]\n 10 severity points - banned for 1 hour\n 20 severity points - banned for 1 day\n 30 severity points - banned for 1 week\n Every 10 severity points afterwards will result in a 1 month ban (30 days)\n When banning users, Turt bot will send a DM including a server invite [4].\n After the ban has expired, Turt bot will unban the user [3].\n\n Notes:\n [1] Ban punishments round up to the next hour.\n [2] Turt bot cannot ban users with roles higher in the role hierarchy. If a user should be banned, consult someone who is higher in the hierarchy.\n [3] It is recommended to turn on the `ban members` permission for Turt, or Turt will be unable to ban or automatically unban users when their ban has expired.\n [4] It is recommended to turn on the `manage server` permission for Turt, or Turt will be unable to send server invites to banned users.\n " bans_in_hours = [1, 24, 168, 720] bans_strings = ['1 hour', '1 day', '1 week', '30 days'] cursor = self.bot.sql.cursor conn = self.bot.sql.conn user_id = user.id if (user_id == ctx.author.id): (await ctx.channel.send('You cannot warn yourself.')) return if (user_id != ctx.guild.owner_id): cursor.execute('SELECT * FROM whitelist WHERE serverid=? AND userid=?', (ctx.guild.id, user_id)) result = cursor.fetchone() if (result is not None): (await ctx.channel.send('Only the server owner can warn whitelisted users.')) return if (user is None): (await ctx.channel.send('That user does not exist on this server')) return if (user.id == config.bot_user_id): (await ctx.channel.send('You cannot warn me.')) return if user.bot: (await ctx.channel.send('You cannot warn bots.')) return if (severity < 0): (await ctx.channel.send('You cannot give users negative severity points.')) return if (len(reason.strip()) == 0): (await ctx.channel.send('Warn reason cannot be whitespace.')) return cursor.execute('SELECT severitypoints FROM warnings WHERE userid=? AND serverid=?', (user_id, ctx.guild.id)) severity_points = cursor.fetchone() if (severity_points is None): cursor.execute('INSERT INTO warnings VALUES (?,?,?,?)', (user_id, ctx.guild.id, 0, (- 1))) conn.commit() severity_points = 0 else: severity_points = severity_points[0] total_severity_points = (severity_points + severity) cursor.execute('UPDATE warnings SET severitypoints=? WHERE userid=? AND serverid=?', (total_severity_points, user_id, ctx.guild.id)) conn.commit() punished = False current_time_in_hours = int(math.ceil((time.time() / 3600))) end_hour = current_time_in_hours ban_level = 0 if ((math.ceil((severity_points / 10)) <= math.floor((total_severity_points / 10))) and ((severity_points % 10) != 0)): index = (math.floor((total_severity_points / 10)) - 1) if (index > 3): index = 3 punished = True end_hour += bans_in_hours[index] ban_level = index if (severity == 0): punished = False if punished: server = (await self.bot.fetch_guild(ctx.guild.id)) bot_user = (await server.fetch_member(config.bot_user_id)) member = (await server.fetch_member(user_id)) if (bot_user.guild_permissions.ban_members and (bot_user.top_role.position > member.top_role.position)): ban_embed = discord.Embed() ban_embed.color = discord.Colour.red() ban_embed.set_author(name=f'Last warned by {ctx.author.display_name}', icon_url=ctx.author.avatar_url) ban_embed.title = f'You have been banned from {ctx.guild.name} for {bans_strings[ban_level]}.' ban_embed.description = f'''The last straw: {reason}''' ban_embed.add_field(name='Severity Points Given', value=severity, inline=True) ban_embed.add_field(name='Total Severity Points', value=total_severity_points, inline=True) ban_embed.set_footer(text='Note: This server invite may not be active when your ban has expired') if bot_user.guild_permissions.manage_guild: invites = (await server.invites()) if (len(invites) == 0): ban_embed.add_field(name=f'Invite (For when ban expires)', value=f'Sorry! {ctx.guild.name} does not have any active invites!', inline=False) else: got_indefinite_invite = False longest_invite = invites[0] for invite in invites: if (invite.max_age == 0): ban_embed.add_field(name=f'Invite (For when ban expires)', value=str(invite), inline=False) got_indefinite_invite = True break if (invite.max_age > longest_invite.max_age): longest_invite = invite if (not got_indefinite_invite): ban_embed.add_field(name=f'Invite (For when ban expires)', value=str(longest_invite), inline=False) else: ban_embed.add_field(name=f'Invite (For when ban expires)', value=f'Sorry! Turt bot does not have permission to give out invites!', inline=False) if (user.dm_channel is None): (await user.create_dm()) (await user.dm_channel.send(embed=ban_embed)) ban_embed.title = f'{user.display_name} has been banned from the server for {bans_strings[ban_level]}.' ban_embed.set_thumbnail(url=member.avatar_url) ban_embed.remove_field(2) ban_embed.set_footer(text='') (await ctx.channel.send(embed=ban_embed)) (await ctx.guild.ban(user, reason=reason, delete_message_days=0)) self.bot.sql.cursor.execute('UPDATE warnings SET EndTime=? WHERE userid=? AND serverid=?', (end_hour, user_id, ctx.guild.id)) self.bot.sql.cursor.execute('UPDATE warnings SET severitypoints=? WHERE userid=? AND serverid=?', (total_severity_points, user_id, ctx.guild.id)) self.bot.sql.conn.commit() else: (await ctx.channel.send((((f'Turt bot is unable to ban {user.mention} due to insufficient role status or' + f''' Turt is unable to ban users on this server. {user.name} has accumulated ''') + f'{total_severity_points} severity points, so it is recommended that {user.name} be banned ') + f'for {bans_strings[ban_level]}.'))) else: ban_embed = discord.Embed() ban_embed.color = discord.Colour.orange() ban_embed.set_author(name=f'Warned by {ctx.author.display_name}', icon_url=ctx.author.avatar_url) ban_embed.title = f'You have been warned in {ctx.guild.name}.' ban_embed.description = f'Reason: {reason}' ban_embed.add_field(name='Severity Points Given', value=severity, inline=True) ban_embed.add_field(name='Total Severity Points', value=total_severity_points, inline=True) ban_embed.add_field(name='Ban Punishments', value=((('10 severity points: 1 Hour\n' + '20 severity points: 1 Day\n') + '30 severity points: 1 Week\n') + 'Every 10 severity points afterwards: 1 Month (30 days)'), inline=False) if (user.dm_channel is None): (await user.create_dm()) (await user.dm_channel.send(embed=ban_embed)) ban_embed.title = f'{user.display_name} has been warned.' ban_embed.set_thumbnail(url=user.avatar_url) (await ctx.channel.send(embed=ban_embed))
Permissions Requirement: Warning Whitelisted Users: Server Owner Warning Non-Whitelisted Users: Server Whitelisted Parameters: user - An @ mention or the userid of the user to be warned. severity - How severe the offense was. This number is added to the user's account for this server. reason - The reason for warning the user. It is recommended to issue warnings to whitelisted users who warn without adequate reason, or even for whitelist status to be revoked in extreme cases. Description: Warn a user for something they did and add <severity> severity points to their account for this server. Once a punishable number of severity points has been reached, then a punishment is automatically issued [2][3]. If multiple punishable severity levels are surpassed, then the longest ban is issued. Punishments: [1] 10 severity points - banned for 1 hour 20 severity points - banned for 1 day 30 severity points - banned for 1 week Every 10 severity points afterwards will result in a 1 month ban (30 days) When banning users, Turt bot will send a DM including a server invite [4]. After the ban has expired, Turt bot will unban the user [3]. Notes: [1] Ban punishments round up to the next hour. [2] Turt bot cannot ban users with roles higher in the role hierarchy. If a user should be banned, consult someone who is higher in the hierarchy. [3] It is recommended to turn on the `ban members` permission for Turt, or Turt will be unable to ban or automatically unban users when their ban has expired. [4] It is recommended to turn on the `manage server` permission for Turt, or Turt will be unable to send server invites to banned users.
bot/cogs/discipline.py
warn
Things-N-Stuff/Turt
0
python
@commands.Command @server_only() @whitelist_only() async def warn(self, ctx, user: discord.User, severity: int, reason: str): bans_in_hours = [1, 24, 168, 720] bans_strings = ['1 hour', '1 day', '1 week', '30 days'] cursor = self.bot.sql.cursor conn = self.bot.sql.conn user_id = user.id if (user_id == ctx.author.id): (await ctx.channel.send('You cannot warn yourself.')) return if (user_id != ctx.guild.owner_id): cursor.execute('SELECT * FROM whitelist WHERE serverid=? AND userid=?', (ctx.guild.id, user_id)) result = cursor.fetchone() if (result is not None): (await ctx.channel.send('Only the server owner can warn whitelisted users.')) return if (user is None): (await ctx.channel.send('That user does not exist on this server')) return if (user.id == config.bot_user_id): (await ctx.channel.send('You cannot warn me.')) return if user.bot: (await ctx.channel.send('You cannot warn bots.')) return if (severity < 0): (await ctx.channel.send('You cannot give users negative severity points.')) return if (len(reason.strip()) == 0): (await ctx.channel.send('Warn reason cannot be whitespace.')) return cursor.execute('SELECT severitypoints FROM warnings WHERE userid=? AND serverid=?', (user_id, ctx.guild.id)) severity_points = cursor.fetchone() if (severity_points is None): cursor.execute('INSERT INTO warnings VALUES (?,?,?,?)', (user_id, ctx.guild.id, 0, (- 1))) conn.commit() severity_points = 0 else: severity_points = severity_points[0] total_severity_points = (severity_points + severity) cursor.execute('UPDATE warnings SET severitypoints=? WHERE userid=? 
AND serverid=?', (total_severity_points, user_id, ctx.guild.id)) conn.commit() punished = False current_time_in_hours = int(math.ceil((time.time() / 3600))) end_hour = current_time_in_hours ban_level = 0 if ((math.ceil((severity_points / 10)) <= math.floor((total_severity_points / 10))) and ((severity_points % 10) != 0)): index = (math.floor((total_severity_points / 10)) - 1) if (index > 3): index = 3 punished = True end_hour += bans_in_hours[index] ban_level = index if (severity == 0): punished = False if punished: server = (await self.bot.fetch_guild(ctx.guild.id)) bot_user = (await server.fetch_member(config.bot_user_id)) member = (await server.fetch_member(user_id)) if (bot_user.guild_permissions.ban_members and (bot_user.top_role.position > member.top_role.position)): ban_embed = discord.Embed() ban_embed.color = discord.Colour.red() ban_embed.set_author(name=f'Last warned by {ctx.author.display_name}', icon_url=ctx.author.avatar_url) ban_embed.title = f'You have been banned from {ctx.guild.name} for {bans_strings[ban_level]}.' ban_embed.description = f'The last straw: {reason}' ban_embed.add_field(name='Severity Points Given', value=severity, inline=True) ban_embed.add_field(name='Total Severity Points', value=total_severity_points, inline=True) ban_embed.set_footer(text='Note: This server invite may not be active when your ban has expired') if bot_user.guild_permissions.manage_guild: invites = (await server.invites()) if (len(invites) == 0): ban_embed.add_field(name=f'Invite (For when ban expires)', value=f'Sorry! {ctx.guild.name} does not have any active invites!', inline=False) else: got_indefinite_invite = False longest_invite = invites[0] for invite in invites: if (invite.max_age == 0): ban_embed.add_field(name=f'Invite (For when ban expires)', value=str(invite), inline=False) got_indefinite_invite = True break if (invite.max_age > longest_invite.max_age): longest_invite = invite if (not got_indefinite_invite): ban_embed.add_field(name=f'Invite (For when ban expires)', value=str(longest_invite), inline=False) else: ban_embed.add_field(name=f'Invite (For when ban expires)', value=f'Sorry! Turt bot does not have permission to give out invites!', inline=False) if (user.dm_channel is None): (await user.create_dm()) (await user.dm_channel.send(embed=ban_embed)) ban_embed.title = f'{user.display_name} has been banned from the server for {bans_strings[ban_level]}.' ban_embed.set_thumbnail(url=member.avatar_url) ban_embed.remove_field(2) ban_embed.set_footer(text='') (await ctx.channel.send(embed=ban_embed)) (await ctx.guild.ban(user, reason=reason, delete_message_days=0)) self.bot.sql.cursor.execute('UPDATE warnings SET EndTime=? WHERE userid=? AND serverid=?', (end_hour, user_id, ctx.guild.id)) self.bot.sql.cursor.execute('UPDATE warnings SET severitypoints=? WHERE userid=? AND serverid=?', (total_severity_points, user_id, ctx.guild.id)) self.bot.sql.conn.commit() else: (await ctx.channel.send((((f'Turt bot is unable to ban {user.mention} due to insufficient role status or' + f' Turt is unable to ban users on this server. {user.name} has accumulated ') + f'{total_severity_points} severity points, so it is recommended that {user.name} be banned ') + f'for {bans_strings[ban_level]}.'))) else: ban_embed = discord.Embed() ban_embed.color = discord.Colour.orange() ban_embed.set_author(name=f'Warned by {ctx.author.display_name}', icon_url=ctx.author.avatar_url) ban_embed.title = f'You have been warned in {ctx.guild.name}.' 
ban_embed.description = f'Reason: {reason}' ban_embed.add_field(name='Severity Points Given', value=severity, inline=True) ban_embed.add_field(name='Total Severity Points', value=total_severity_points, inline=True) ban_embed.add_field(name='Ban Punishments', value=((('10 severity points: 1 Hour\n' + '20 severity points: 1 Day\n') + '30 severity points: 1 Week\n') + 'Every 10 severity points afterwards: 1 Month (30 days)'), inline=False) if (user.dm_channel is None): (await user.create_dm()) (await user.dm_channel.send(embed=ban_embed)) ban_embed.title = f'{user.display_name} has been warned.' ban_embed.set_thumbnail(url=user.avatar_url) (await ctx.channel.send(embed=ban_embed))
@commands.Command @server_only() @whitelist_only() async def warn(self, ctx, user: discord.User, severity: int, reason: str): bans_in_hours = [1, 24, 168, 720] bans_strings = ['1 hour', '1 day', '1 week', '30 days'] cursor = self.bot.sql.cursor conn = self.bot.sql.conn user_id = user.id if (user_id == ctx.author.id): (await ctx.channel.send('You cannot warn yourself.')) return if (user_id != ctx.guild.owner_id): cursor.execute('SELECT * FROM whitelist WHERE serverid=? AND userid=?', (ctx.guild.id, user_id)) result = cursor.fetchone() if (result is not None): (await ctx.channel.send('Only the server owner can warn whitelisted users.')) return if (user is None): (await ctx.channel.send('That user does not exist on this server')) return if (user.id == config.bot_user_id): (await ctx.channel.send('You cannot warn me.')) return if user.bot: (await ctx.channel.send('You cannot warn bots.')) return if (severity < 0): (await ctx.channel.send('You cannot give users negative severity points.')) return if (len(reason.strip()) == 0): (await ctx.channel.send('Warn reason cannot be whitespace.')) return cursor.execute('SELECT severitypoints FROM warnings WHERE userid=? AND serverid=?', (user_id, ctx.guild.id)) severity_points = cursor.fetchone() if (severity_points is None): cursor.execute('INSERT INTO warnings VALUES (?,?,?,?)', (user_id, ctx.guild.id, 0, (- 1))) conn.commit() severity_points = 0 else: severity_points = severity_points[0] total_severity_points = (severity_points + severity) cursor.execute('UPDATE warnings SET severitypoints=? WHERE userid=? 
AND serverid=?', (total_severity_points, user_id, ctx.guild.id)) conn.commit() punished = False current_time_in_hours = int(math.ceil((time.time() / 3600))) end_hour = current_time_in_hours ban_level = 0 if ((math.ceil((severity_points / 10)) <= math.floor((total_severity_points / 10))) and ((severity_points % 10) != 0)): index = (math.floor((total_severity_points / 10)) - 1) if (index > 3): index = 3 punished = True end_hour += bans_in_hours[index] ban_level = index if (severity == 0): punished = False if punished: server = (await self.bot.fetch_guild(ctx.guild.id)) bot_user = (await server.fetch_member(config.bot_user_id)) member = (await server.fetch_member(user_id)) if (bot_user.guild_permissions.ban_members and (bot_user.top_role.position > member.top_role.position)): ban_embed = discord.Embed() ban_embed.color = discord.Colour.red() ban_embed.set_author(name=f'Last warned by {ctx.author.display_name}', icon_url=ctx.author.avatar_url) ban_embed.title = f'You have been banned from {ctx.guild.name} for {bans_strings[ban_level]}.' ban_embed.description = f'The last straw: {reason}' ban_embed.add_field(name='Severity Points Given', value=severity, inline=True) ban_embed.add_field(name='Total Severity Points', value=total_severity_points, inline=True) ban_embed.set_footer(text='Note: This server invite may not be active when your ban has expired') if bot_user.guild_permissions.manage_guild: invites = (await server.invites()) if (len(invites) == 0): ban_embed.add_field(name=f'Invite (For when ban expires)', value=f'Sorry! {ctx.guild.name} does not have any active invites!', inline=False) else: got_indefinite_invite = False longest_invite = invites[0] for invite in invites: if (invite.max_age == 0): ban_embed.add_field(name=f'Invite (For when ban expires)', value=str(invite), inline=False) got_indefinite_invite = True break if (invite.max_age > longest_invite.max_age): longest_invite = invite if (not got_indefinite_invite): ban_embed.add_field(name=f'Invite (For when ban expires)', value=str(longest_invite), inline=False) else: ban_embed.add_field(name=f'Invite (For when ban expires)', value=f'Sorry! Turt bot does not have permission to give out invites!', inline=False) if (user.dm_channel is None): (await user.create_dm()) (await user.dm_channel.send(embed=ban_embed)) ban_embed.title = f'{user.display_name} has been banned from the server for {bans_strings[ban_level]}.' ban_embed.set_thumbnail(url=member.avatar_url) ban_embed.remove_field(2) ban_embed.set_footer(text='') (await ctx.channel.send(embed=ban_embed)) (await ctx.guild.ban(user, reason=reason, delete_message_days=0)) self.bot.sql.cursor.execute('UPDATE warnings SET EndTime=? WHERE userid=? AND serverid=?', (end_hour, user_id, ctx.guild.id)) self.bot.sql.cursor.execute('UPDATE warnings SET severitypoints=? WHERE userid=? AND serverid=?', (total_severity_points, user_id, ctx.guild.id)) self.bot.sql.conn.commit() else: (await ctx.channel.send((((f'Turt bot is unable to ban {user.mention} due to insufficient role status or' + f' Turt is unable to ban users on this server. {user.name} has accumulated ') + f'{total_severity_points} severity points, so it is recommended that {user.name} be banned ') + f'for {bans_strings[ban_level]}.'))) else: ban_embed = discord.Embed() ban_embed.color = discord.Colour.orange() ban_embed.set_author(name=f'Warned by {ctx.author.display_name}', icon_url=ctx.author.avatar_url) ban_embed.title = f'You have been warned in {ctx.guild.name}.' 
ban_embed.description = f'Reason: {reason}' ban_embed.add_field(name='Severity Points Given', value=severity, inline=True) ban_embed.add_field(name='Total Severity Points', value=total_severity_points, inline=True) ban_embed.add_field(name='Ban Punishments', value=((('10 severity points: 1 Hour\n' + '20 severity points: 1 Day\n') + '30 severity points: 1 Week\n') + 'Every 10 severity points afterwards: 1 Month (30 days)'), inline=False) if (user.dm_channel is None): (await user.create_dm()) (await user.dm_channel.send(embed=ban_embed)) ban_embed.title = f'{user.display_name} has been warned.' ban_embed.set_thumbnail(url=user.avatar_url) (await ctx.channel.send(embed=ban_embed))<|docstring|>Permissions Requirement: Warning Whitelisted Users: Server Owner Warning Non-Whitelisted Users: Server Whitelisted Parameters: user - An @ mention or the userid of the user to be warned. severity - How severe the offense was. This number is added to the user's account for this server. reason - The reason for warning the user. It is recommended to issue warnings to whitelisted users who warn without adequate reason, or even for whitelist status to be revoked in extreme cases. Description: Warn a user for something they did and add <severity> severity points to their account for this server. Once a punishable number of severity points has been reached, then a punishment is automatically issued [2][3]. If multiple punishable severity levels are surpassed, then the longest ban is issued. Punishments: [1] 10 severity points - banned for 1 hour 20 severity points - banned for 1 day 30 severity points - banned for 1 week Every 10 severity points afterwards will result in a 1 month ban (30 days) When banning users, Turt bot will send a DM including a server invite [4]. After the ban has expired, Turt bot will unban the user [3]. Notes: [1] Ban punishments round up to the next hour. [2] Turt bot cannot ban users with roles higher in the role hierarchy. If a user should be banned, consult someone who is higher in the hierarchy. [3] It is recommended to turn on the `ban members` permission for Turt, or Turt will be unable to ban or automatically unban users when their ban has expired. [4] It is recommended to turn on the `manage server` permission for Turt, or Turt will be unable to send server invites to banned users.<|endoftext|>
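The punishment math in the warn record above is easy to lose inside the single long handler. The following is a minimal sketch that distills just the severity-point threshold logic described in its docstring; the names (compute_ban, BAN_HOURS, BAN_LABELS) are hypothetical and do not exist in the Turt repository, and the threshold test deliberately mirrors the record's condition, including its quirk that a prior total sitting on an exact multiple of 10 (such as 0) does not trigger a ban.

import math
import time

# Hypothetical distillation of the warn command's punishment logic above;
# these names are illustrative, not taken from Things-N-Stuff/Turt.
BAN_HOURS = [1, 24, 168, 720]                 # 1 hour, 1 day, 1 week, 30 days
BAN_LABELS = ['1 hour', '1 day', '1 week', '30 days']

def compute_ban(old_points, added):
    """Return (label, unban_hour) if this warning crosses a threshold, else None."""
    total = old_points + added
    # Mirrors the record's condition: a ban fires only when a new multiple of
    # 10 is reached or passed, and is skipped when the prior total already sat
    # on an exact multiple of 10 (including 0) -- a quirk of the original code.
    crossed = (math.ceil(old_points / 10) <= math.floor(total / 10)
               and old_points % 10 != 0)
    if added == 0 or not crossed:
        return None
    level = min(math.floor(total / 10) - 1, 3)      # capped at the 30-day tier
    now_hours = int(math.ceil(time.time() / 3600))  # note [1]: rounds up to the next hour
    return BAN_LABELS[level], now_hours + BAN_HOURS[level]

print(compute_ban(8, 5))    # 8 + 5 = 13 crosses 10 -> ('1 hour', <unban hour>)
print(compute_ban(0, 15))   # prior total of 0 -> no ban under this condition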
999a79186a76f31002c39fcd72a38d04f3c35c37087b04580df77b6c8a799422
def test_valid_http(): 'Test building an UnstructuredData.' validator = RecordValidator(True) ud = UnstructuredData('http://example.com', FileType.TEXT) assert (ud.record is None) validator.validate_unstructured_data(ud) assert (ud.accessible == 'OK')
Test building an UnstructuredData.
tests/zeffTestSuite/record/test_unstructureddata.py
test_valid_http
ziff/ZeffClient
1
python
def test_valid_http(): validator = RecordValidator(True) ud = UnstructuredData('http://example.com', FileType.TEXT) assert (ud.record is None) validator.validate_unstructured_data(ud) assert (ud.accessible == 'OK')
def test_valid_http(): validator = RecordValidator(True) ud = UnstructuredData('http://example.com', FileType.TEXT) assert (ud.record is None) validator.validate_unstructured_data(ud) assert (ud.accessible == 'OK')<|docstring|>Test building an UnstructuredData.<|endoftext|>
f93155fe10d4af289a2afc49cf9a006d6bf45e356098856046ade6c07d98a866
def test_valid_file(): 'Test building an UnstructuredData with a file.' validator = RecordValidator(True) ud = UnstructuredData(f'file://{__file__}', FileType.TEXT) assert (ud.record is None) validator.validate_unstructured_data(ud) assert (ud.accessible == 'OK')
Test building an UnstructuredData with a file.
tests/zeffTestSuite/record/test_unstructureddata.py
test_valid_file
ziff/ZeffClient
1
python
def test_valid_file(): validator = RecordValidator(True) ud = UnstructuredData(f'file://{__file__}', FileType.TEXT) assert (ud.record is None) validator.validate_unstructured_data(ud) assert (ud.accessible == 'OK')
def test_valid_file(): validator = RecordValidator(True) ud = UnstructuredData(f'file://{__file__}', FileType.TEXT) assert (ud.record is None) validator.validate_unstructured_data(ud) assert (ud.accessible == 'OK')<|docstring|>Test building an UnstructuredData with a file.<|endoftext|>
202cbedc1382a3ce183d1a6c4ed5045a5af7e9f11ac50dc669774a5428ce0a13
def test_missing_file(): 'Test building an UnstructuredData with a missing file.' validator = RecordValidator(True) ud = UnstructuredData('file:///spam', FileType.TEXT) validator.validate_unstructured_data(ud) assert (ud.accessible == 'file missing')
Test building an UnstructuredData with a missing file.
tests/zeffTestSuite/record/test_unstructureddata.py
test_missing_file
ziff/ZeffClient
1
python
def test_missing_file(): validator = RecordValidator(True) ud = UnstructuredData('file:///spam', FileType.TEXT) validator.validate_unstructured_data(ud) assert (ud.accessible == 'file missing')
def test_missing_file(): validator = RecordValidator(True) ud = UnstructuredData('file:///spam', FileType.TEXT) validator.validate_unstructured_data(ud) assert (ud.accessible == 'file missing')<|docstring|>Test building an UnstructuredData with a missing file.<|endoftext|>
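The three ZeffClient records above repeat one accessibility-checking pattern with different URLs. As a sketch, they could be collapsed into a single parametrized pytest case; the import paths below are assumptions rather than verified locations in the ZeffClient package layout.

import pytest

# Assumed module paths -- adjust to wherever ZeffClient actually defines these.
from zeff.record import FileType, UnstructuredData
from zeff.validator import RecordValidator

@pytest.mark.parametrize(
    'url, expected',
    [
        ('http://example.com', 'OK'),      # reachable HTTP resource
        (f'file://{__file__}', 'OK'),      # this test file itself exists
        ('file:///spam', 'file missing'),  # a path that should not exist
    ],
)
def test_unstructured_data_accessibility(url, expected):
    validator = RecordValidator(True)
    ud = UnstructuredData(url, FileType.TEXT)
    assert ud.record is None               # not yet attached to a record
    validator.validate_unstructured_data(ud)
    assert ud.accessible == expected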