hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
01960b8fa7f7373860466315b126d7283e568ad3 | 1,908 | =begin
#Phone.com API
#This is a Phone.com api PhoneCom definition
OpenAPI spec version: 1.0.0
Contact: apisupport@phone.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
PhoneCom Codegen version: 2.4.4
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for PhoneComClient::ListSchedules
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
# Unit tests for the auto-generated PhoneComClient::ListSchedules model.
# The attribute examples are swagger-codegen stubs: they only verify that
# the spec file loads; add real assertions as the model stabilises.
describe 'ListSchedules' do
  before do
    # run before each test
    @instance = PhoneComClient::ListSchedules.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of ListSchedules' do
    it 'should create an instance of ListSchedules' do
      expect(@instance).to be_instance_of(PhoneComClient::ListSchedules)
    end
  end

  describe 'test attribute "filters"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "sort"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "total"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "offset"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "limit"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "items"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
| 26.5 | 102 | 0.719078 |
#!/usr/bin/env ruby
# Advent of Code 2020, day 2 (part 2): count the passwords that satisfy
# the "exactly one of the two given positions holds the letter" policy.

# Parses one input line of the form "1-3 a: abcde" and returns true when
# exactly one of the two (1-based) positions in the password contains the
# policy letter. XOR gives the "exactly one" semantics.
def valid_password?(line)
  pos1, pos2, letter, password = line.match(/(\d+)-(\d+)\s(\w):\s(\w+)/i).captures
  (password[pos1.to_i - 1] == letter) ^ (password[pos2.to_i - 1] == letter)
end

file_path = File.expand_path("../day-02-input.txt", __FILE__)
input = File.read(file_path)
puts input.split("\n").count { |line| valid_password?(line) }
| 25.666667 | 70 | 0.597403 |
acaa12fedc7656fbb880f89cb5652761c24316b0 | 238 | require 'support/helpers/bootstrap_modal_helpers'
require 'support/helpers/session_helpers'
# Wire shared helper modules into feature (Capybara) specs only.
RSpec.configure do |config|
  # Helpers for driving Bootstrap modal dialogs in feature specs.
  config.include BootstrapModalHelpers, type: :feature
  # Sign-in / sign-out convenience helpers for feature specs.
  config.include Features::SessionHelpers, type: :feature
end
| 29.75 | 57 | 0.823529 |
# Returns the largest difference between successive elements of +nums+
# when sorted, in linear time via pigeonhole bucketing.
#
# Bucket width is floor((max - min) / n), so any gap inside a single
# bucket is strictly smaller than the answer; the maximum gap must
# therefore lie between the max of one non-empty bucket and the min of
# the next. Returns 0 for fewer than two elements or all-equal input.
def maximum_gap(nums)
  return 0 if nums.size < 2 || nums.max == nums.min

  lo = nums.min
  hi = nums.max
  width = [1, (hi - lo) / nums.size].max
  bucket_count = (hi - lo + 1) / width + 1

  # Track only per-bucket extremes; the bucket contents are never needed.
  mins = Array.new(bucket_count)
  maxs = Array.new(bucket_count)

  nums.each do |value|
    idx = (value - lo) / width
    mins[idx] = value if mins[idx].nil? || value < mins[idx]
    maxs[idx] = value if maxs[idx].nil? || value > maxs[idx]
  end

  best = 0
  previous_max = maxs[0] # bucket 0 always holds the minimum element
  (1...bucket_count).each do |idx|
    next if mins[idx].nil? # skip empty buckets

    best = [mins[idx] - previous_max, best].max
    previous_max = maxs[idx]
  end
  best
end
| 27.222222 | 102 | 0.629932 |
ed897ff8a68a8005086c480ac66cd238387df22b | 31 | require "reflexive/application" | 31 | 31 | 0.870968 |
216c32aff32df6ae01cecb38db6905a00e481ffe | 131 | require 'test_helper'
# Placeholder test case for the TechnologyCountry model; no behavior is
# exercised yet — keep the generated skeleton until real tests are added.
class TechnologyCountryTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end
| 16.375 | 53 | 0.725191 |
# Homebrew cask for PlistEdit Pro, a macOS property-list editor.
cask "plistedit-pro" do
  version "1.9.2"
  sha256 "5db33711689cc911a33af8f5140e27436df392e13f0fece3de0ba11ac4e0f839"

  url "https://www.fatcatsoftware.com/plisteditpro/PlistEditPro.zip"
  appcast "https://www.fatcatsoftware.com/plisteditpro/plisteditpro_appcast.xml"
  name "PlistEdit Pro"
  homepage "https://www.fatcatsoftware.com/plisteditpro/"

  # The app updates itself via Sparkle, so brew upgrade is optional.
  auto_updates true
  depends_on macos: ">= :high_sierra"

  app "PlistEdit Pro.app"
  # Expose the bundled command-line helper on PATH.
  binary "#{appdir}/PlistEdit Pro.app/Contents/MacOS/pledit"

  # Support files removed by `brew uninstall --zap`.
  zap trash: [
    "~/Library/Application Support/PlistEdit Pro",
    "~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.fatcatsoftware.pledpro.sfl2",
    "~/Library/Application Support/com.fatcatsoftware.pledpro",
    "~/Library/Caches/com.apple.helpd/Generated/com.fatcatsoftware.pledpro.help*",
    "~/Library/Caches/com.fatcatsoftware.pledpro",
    "~/Library/Preferences/com.fatcatsoftware.pledpro.plist",
  ]
end
| 39.2 | 147 | 0.771429 |
e20352684b271f6553435e6d109513113805e6f0 | 886 | Pod::Spec.new do |s|
s.name = 'CDYelpFusionKit'
s.version = '3.0.1'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.summary = 'An extensive Swift wrapper for the Yelp Fusion API.'
s.description = <<-DESC
This Swift wrapper covers all possible network endpoints and responses for the Yelp Fusion API.
DESC
s.homepage = 'https://github.com/chrisdhaan/CDYelpFusionKit'
s.author = { 'Christopher de Haan' => 'contact@christopherdehaan.me' }
s.source = { :git => 'https://github.com/chrisdhaan/CDYelpFusionKit.git', :tag => s.version.to_s }
s.ios.deployment_target = '10.0'
s.osx.deployment_target = '10.12'
s.tvos.deployment_target = '10.0'
s.watchos.deployment_target = '3.0'
s.swift_versions = ['5.1', '5.2']
s.source_files = 'Source/*.swift'
s.resources = ['Resources/*.xcassets']
s.dependency 'Alamofire', '5.4.3'
end
| 35.44 | 100 | 0.656885 |
# Adds editions.primary_locale (NOT NULL, default "en") and backfills it
# from each row's existing locale column.
class AddPrimaryLocaleToEditions < ActiveRecord::Migration
  def up
    add_column :editions, :primary_locale, :string, default: "en", null: false
    # Refresh the cached column list so the SQL backfill below can see
    # the column added in this same migration.
    Edition.reset_column_information
    # Raw SQL fragment: copies locale into primary_locale in one UPDATE.
    Edition.update_all("primary_locale = locale")
  end

  def down
    remove_column :editions, :primary_locale
  end
end
| 23.692308 | 78 | 0.753247 |
28bf5b7fa9d69e791fcec7c408c74863bf25358d | 2,771 | require 'test_helper'
# Validation tests for CollectionPoint: presence of name/state/city/address,
# character whitelists, and length limits on phone, name and city.
class CollectionPointTest < ActiveSupport::TestCase
  # tests for validation
  test "should not create new local without name" do
    local = CollectionPoint.new
    local.state = "State"
    local.city = "City"
    local.address = "Address"
    assert_not local.save, "Tried to save local without name."
  end

  test "should not create new local without state" do
    local = CollectionPoint.new
    local.name = "Name"
    local.city = "City"
    local.address = "Address"
    assert_not local.save, "Tried to save local without state."
  end

  test "should not create new local without city" do
    local = CollectionPoint.new
    local.name = "Name"
    local.state = "State"
    local.address = "Address"
    assert_not local.save, "Tried to save local without city."
  end

  test "should not create new local without address" do
    local = CollectionPoint.new
    local.name = "Name"
    local.state = "State"
    local.city = "City"
    assert_not local.save, "Tried to save local without address."
  end

  test "should not create new local with invalid characters in name" do
    local = CollectionPoint.new
    local.name = "Name["
    assert_not local.save, "Tried to save local with invalid characteres in name."
  end

  test "should not create new local with invalid characters in city" do
    local = CollectionPoint.new
    local.city = "City["
    assert_not local.save, "Tried to save local with invalid characteres in city."
  end

  test "should not create new local with invalid characters in address" do
    local = CollectionPoint.new
    local.address = "Address, 10["
    assert_not local.save, "Tried to save local with invalid characteres in address"
  end

  test "should not create new local with invalid characters in phone" do
    local = CollectionPoint.new
    local.phone = "xxxxx"
    assert_not local.save, "Tried to save local with invalid characteres in address"
  end

  test "should not create new local with invalid phone size - bigger" do
    local = CollectionPoint.new
    local.phone = "1234567890123456"
    assert_not local.save, "Tried to save local with invalid phone size"
  end

  test "should not create new local with invalid phone size - smaller" do
    local = CollectionPoint.new
    local.phone = "12"
    assert_not local.save, "Tried to save local with invalid phone size"
  end

  test "should not create new local with invalid name size - smaller" do
    local = CollectionPoint.new
    local.name = "A"
    assert_not local.save, "Tried to save local with invalid name size"
  end

  test "should not create new local with invalid city size - smaller" do
    local = CollectionPoint.new
    # Fix: this test previously assigned +name+ instead of +city+, so it
    # never exercised the city length validation it claims to cover.
    # NOTE(review): assumes city's minimum length is > 2 — confirm against
    # the CollectionPoint model's validations.
    local.city = "AE"
    assert_not local.save, "Tried to save local with invalid city size"
  end
end
| 36.460526 | 84 | 0.712378 |
# Background job that ingests monitoring-agent payloads and persists them.
# The payload +body+ is Base64-encoded, zlib-compressed MessagePack; its
# "data" array holds tagged rows ("error" / "metric" / span tuples) which
# are routed to the matching process_* method.
class AppPerfAgentWorker < ActiveJob::Base
  queue_as :app_perf

  attr_accessor :license_key,
                :name,
                :host,
                :hostname,
                :data,
                :user,
                :application,
                :protocol_version

  # Entry point. +params+ carries license_key / protocol_version; +body+
  # is the compressed payload. Returns silently (dropping the payload)
  # when identification is missing.
  def perform(params, body)
    #AppPerfRpm.without_tracing do
    json = decompress_params(body)

    self.license_key = params.fetch("license_key") { nil }
    self.protocol_version = params.fetch("protocol_version") { nil }
    self.hostname = json.fetch("host")
    self.name = json.fetch("name") { nil }

    if self.license_key.nil? ||
       self.protocol_version.nil?
      return
    end

    self.data = Array(json.fetch("data"))
    self.user = User.where(license_key: license_key).first
    # Upsert the application and host this payload belongs to.
    self.application = Application.where(:license_key => license_key).first_or_initialize
    self.application.user = user
    self.application.name = name
    self.application.save
    self.host = Host.where(:name => hostname).first_or_create

    # Only protocol v2 payloads are understood; others are dropped.
    if protocol_version.to_i.eql?(2)
      # Rows are tagged tuples; first element identifies the row type.
      errors, remaining_data = data.partition {|d| d[0] == "error" }
      metrics, spans = Array(remaining_data).partition {|d| d[0] == "metric" }

      if metrics.present?
        process_metric_data(metrics)
      end
      if errors.present? && application.present?
        process_error_data(errors)
      end
      if spans.present? && application.present?
        process_version_2(spans)
      end
    end
    #end
  end

  private

  # Decodes the wire format: Base64 -> zlib inflate -> MessagePack.
  def decompress_params(body)
    compressed_body = Base64.decode64(body)
    data = Zlib::Inflate.inflate(compressed_body)
    MessagePack.unpack(data)
  end

  # Normalizes raw span tuples to [layer, trace_key, start(float),
  # duration(float), opts]. Rows whose opts cannot be read fall back to
  # an empty opts hash; rows that still fail to normalize re-raise.
  def load_data(data)
    data
      .map {|datum|
        begin
          _layer, _trace_key, _start, _duration, _serialized_opts = datum
          _opts = _serialized_opts
        rescue => ex
          Rails.logger.error "SERIALIZATION ERROR"
          Rails.logger.error ex.message.to_s
          Rails.logger.error _serialized_opts.inspect
          _opts = {}
        end
        # Hash the agent-supplied key so stored trace keys are uniform.
        trace_key = generate_trace_key(_trace_key)
        begin
          [_layer, trace_key, _start.to_f, _duration.to_f, _opts]
        rescue => ex
          Rails.logger.error "LOAD DATA ERROR"
          Rails.logger.error "DATA: #{datum.inspect}"
          Rails.logger.error "PARSED DATA: #{[_layer, _trace_key, _start, _duration, _serialized_opts].inspect}"
          raise
        end
      }
  end

  # Ensures a Layer record exists for every layer name in the payload;
  # returns the union of new and existing layers (unique by name).
  def load_layers(data)
    existing_layers = application.layers.all
    layer_names = data.map {|d| d[0] }.compact.uniq
    new_layers = (layer_names - existing_layers.map(&:name)).map {|l|
      layer = application.layers.where(:name => l).first_or_initialize
      layer.save
      layer
    }
    (new_layers + existing_layers).uniq {|l| l.name }
  end

  # Same pattern as load_layers, keyed on the opts "adapter" field
  # (e.g. the database driver reported by the agent).
  def load_database_types(data)
    existing_database_types = application.database_types.all
    database_type_names = data
      .map {|d| d[4]["adapter"] }
      .compact
      .uniq
    new_database_types = (database_type_names - existing_database_types.map(&:name)).map {|adapter|
      database_type = application.database_types.where(
        :name => adapter
      ).first_or_initialize
      database_type.save
      database_type
    }
    (new_database_types + existing_database_types).uniq {|l| l.name }
  end

  # Upserts a Trace per trace key, widening each trace's timestamp to the
  # earliest seen and its duration to the largest seen. New traces are
  # bulk-imported; existing ones are saved individually.
  def load_traces(data)
    traces = []
    # Per-trace maxima of start time (index 2) and duration (index 3).
    timestamps = data
      .group_by {|datum| datum[1] }
      .flat_map {|trace_key, events| { trace_key => events.map {|e| e[2] }.max } }
      .reduce({}) { |h, v| h.merge v }
    durations = data
      .group_by {|datum| datum[1] }
      .flat_map {|trace_key, events| { trace_key => events.map {|e| e[3] }.max } }
      .reduce({}) { |h, v| h.merge v }

    trace_keys = data.map {|d| d[1] }.compact.uniq
    existing_traces = application.traces.where(:trace_key => trace_keys)

    trace_keys.each {|trace_key|
      timestamp = Time.at(timestamps[trace_key])
      duration = durations[trace_key]

      trace = existing_traces.find {|t| t.trace_key == trace_key }
      if trace.nil?
        trace = application.traces.new(:trace_key => trace_key)
      end
      trace.host = host
      trace.trace_key = trace_key

      # Set timestamp if never set, or incoming timestamp is earlier than
      # the oldest recorded already.
      if trace.timestamp.nil? || trace.timestamp > timestamp
        trace.timestamp = timestamp
      end

      # Set the duration if never set, or the incoming duration is slower
      # than the previous.
      if trace.duration.nil? || trace.duration < duration
        trace.duration = duration
      end

      if trace.new_record?
        traces << trace
      else
        trace.save
      end
    }
    ids = Trace.import(traces).ids
    # Re-query so the return value includes freshly imported rows.
    application.traces.where(:trace_key => trace_keys).all
  end

  # Protocol-v2 span ingestion: builds Span, DatabaseCall and Backtrace
  # rows from the normalized tuples, propagates request metadata (url /
  # domain / controller / action) across each trace's spans, computes
  # exclusive durations, and bulk-imports everything.
  def process_version_2(data)
    events = []
    spans = []
    database_calls = []
    backtraces = []

    data = load_data(data)
    layers = load_layers(data)
    database_types = load_database_types(data)
    traces = load_traces(data)

    data.each do |_layer, _trace_key, _start, _duration, _opts|
      hash = {}
      layer = layers.find {|l| l.name == _layer }
      endpoint = nil
      database_call = nil

      url = _opts.fetch("url") { nil }
      domain = _opts.fetch("domain") { nil }
      controller = _opts.fetch("controller") { nil }
      action = _opts.fetch("action") { nil }
      query = _opts.fetch("query") { nil }
      adapter = _opts.fetch("adapter") { nil }
      # Removed from opts so it is stored on the Backtrace row, not the span payload.
      _backtrace = _opts.delete("backtrace")

      timestamp = Time.at(_start)
      duration = _duration

      span = {}
      span[:host_id] = host.id
      span[:layer_id] = layer.id
      span[:timestamp] = timestamp
      span[:duration] = _duration
      span[:trace_key] = _trace_key
      # UUID links the span to its DatabaseCall / Backtrace rows below.
      span[:uuid] = SecureRandom.uuid.to_s
      span[:payload] = _opts

      if query
        database_type = database_types.find {|dt| dt.name == adapter }
        database_call = application.database_calls.new(
          :uuid => span[:uuid],
          :database_type_id => database_type.id,
          :host_id => host.id,
          :layer_id => layer.id,
          :statement => query,
          :timestamp => timestamp,
          :duration => _duration
        )
        database_calls << database_call
      end

      if _backtrace
        backtrace = Backtrace.new
        backtrace.backtrace = _backtrace
        backtrace.backtraceable_id = span[:uuid]
        backtrace.backtraceable_type = "Span"
        backtraces << backtrace
      end

      spans << span
    end

    all_events = []

    spans.select {|s| s[:trace_key] }.group_by {|s| s[:trace_key] }.each_pair do |trace_key, events|
      trace = traces.find {|t| t.trace_key == trace_key }
      next if trace.nil?

      timestamp = events.map {|e| e[:timestamp] }.min
      duration = events.map {|e| e[:duration] }.max

      # Take the first non-nil request attribute in the group and copy it
      # onto every span of the trace that lacks it.
      url = (events.find {|e| e[:payload]["url"] } || {}).fetch(:payload, {}).fetch("url") { nil }
      domain = (events.find {|e| e[:payload]["domain"] } || {}).fetch(:payload, {}).fetch("domain") { nil }
      controller = (events.find {|e| e[:payload]["controller"] } || {}).fetch(:payload, {}).fetch("controller") { nil }
      action = (events.find {|e| e[:payload]["action"] } || {}).fetch(:payload, {}).fetch("action") { nil }

      events.each { |e|
        e[:payload]["url"] ||= url
        e[:payload]["domain"] ||= domain
        e[:payload]["controller"] ||= controller
        e[:payload]["action"] ||= action
        e[:trace_key] = trace.trace_key
      }

      # Exclusive duration is computed over old + new spans together so
      # parent/child relationships spanning batches are accounted for.
      existing_spans = trace.spans.all
      new_spans = events.map {|s| application.spans.new(s) }
      all_spans = existing_spans + new_spans
      all_spans.each {|s| s.exclusive_duration = get_exclusive_duration(s, all_spans) }
      # Persisted spans are saved in place; new ones go through the bulk import below.
      all_spans.select {|s| s.id.present? }.each(&:save)
      all_events += new_spans
    end

    Backtrace.import(backtraces)
    Span.import(all_events)
    DatabaseCall.import(database_calls)
  end

  # Duration of a span minus time spent in its children; spans with no
  # children keep their full duration.
  def get_exclusive_duration(span, spans)
    children_data = span_children_data(span, spans)
    children_data.size > 0 ? children_duration(children_data) : span.duration
  end

  # All spans that +span+ is the parent of (parent_of? is defined on Span).
  def span_children_data(span, spans)
    spans
      .select {|s| span.parent_of?(s) }
  end

  # Sum of child durations. NOTE(review): divides by 1000, implying child
  # durations are in different units than the parent — confirm.
  def children_duration(children)
    children
      .map {|span| span.duration.to_f / 1000 }
      .inject(0) {|sum, x| sum + x }
  end

  # SHA1-based trace key; random when the agent supplied no seed.
  def generate_trace_key(seed = nil)
    if seed.nil?
      Digest::SHA1.hexdigest([Time.now, rand].join)
    else
      Digest::SHA1.hexdigest(seed)
    end
  end

  # Bulk-imports analytic events tagged with the current host.
  def process_analytic_event_data(data)
    analytic_event_data = []
    data.each do |datum|
      datum[:host_id] = host.id
      analytic_event_data << application.analytic_event_data.new(datum)
    end
    AnalyticEventDatum.import(analytic_event_data)
  end

  # Groups error rows by fingerprint into ErrorMessage records, then
  # bulk-imports the individual occurrences as ErrorDatum rows.
  def process_error_data(data)
    error_data = []
    data.select {|d| d.first.eql?("error") }.each do |datum|
      _, trace_key, timestamp, data = datum
      message, backtrace, fingerprint = generate_fingerprint(data["message"], data["backtrace"])

      error_message = application.error_messages.where(:fingerprint => fingerprint).first_or_initialize
      # ||= keeps the first-seen class/message for this fingerprint.
      error_message.error_class ||= data["error_class"]
      error_message.error_message ||= message
      error_message.last_error_at = Time.now
      error_message.save

      error_data << application.error_data.new do |error_datum|
        error_datum.host = host
        error_datum.error_message = error_message
        error_datum.transaction_id = trace_key
        error_datum.message = message
        error_datum.backtrace = backtrace
        error_datum.source = data["source"]
        error_datum.timestamp = timestamp
      end
    end
    ErrorDatum.import(error_data)
  end

  # Upserts a Metric per key (memoized for the batch) and bulk-imports the
  # timestamped values; rows missing key or value are skipped.
  def process_metric_data(data)
    metrics = {}
    metric_data = []
    data.select {|d| d.first.eql?("metric") }.each do |datum|
      _, timestamp, key, value, tags = *datum
      if key && value
        metrics[key] ||= Metric.where(name: key, application_id: application.try(:id)).first_or_create
        metric_data << metrics[key].metric_data.new do |metric_datum|
          metric_datum.host = host
          metric_datum.value = value
          metric_datum.tags = tags || {}
          metric_datum.timestamp = Time.at(timestamp)
        end
      end
    end
    MetricDatum.import(metric_data)
  end

  # Delegates to ErrorMessage.generate_fingerprint; returns the (possibly
  # normalized) message, the untouched backtrace and the fingerprint.
  def generate_fingerprint(message, backtrace)
    message, fingerprint = ErrorMessage.generate_fingerprint(message)
    return message, backtrace, fingerprint
  end
end
| 30.681034 | 119 | 0.621523 |
ff12b8f0a442490edecf4e650cb1c1a93b66c402 | 2,823 | # Encoding: utf-8
# Cloud Foundry Java Buildpack
# Copyright 2015-2016 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
require 'component_helper'
require 'java_buildpack/framework/dyna_trace_agent'
# Specs for the DynaTrace agent framework component: detection is driven
# by the presence of a bound "dynatrace" service, and release wires the
# native agent into JAVA_OPTS.
describe JavaBuildpack::Framework::DynaTraceAgent do
  include_context 'component_helper'

  let(:configuration) do
    { 'default_agent_name' => nil }
  end

  it 'does not detect without dynatrace-n/a service' do
    expect(component.detect).to be_nil
  end

  context do
    before do
      # Simulate a bound dynatrace service exposing only a server credential.
      allow(services).to receive(:one_service?).with(/dynatrace/, 'server').and_return(true)
      allow(services).to receive(:find_service).and_return('credentials' => { 'server' => 'test-host-name' })
    end

    it 'detects with dynatrace-n/a service' do
      expect(component.detect).to eq("dyna-trace-agent=#{version}")
    end

    it 'expands DynaTrace agent zip',
       cache_fixture: 'stub-dyna-trace-agent.jar' do
      component.compile

      expect(sandbox + 'agent/lib64/libdtagent.so').to exist
    end

    it 'updates JAVA_OPTS' do
      component.release

      expect(java_opts).to include('-agentpath:$PWD/.java-buildpack/dyna_trace_agent/agent/lib64/'\
                                   'libdtagent.so=name=test-application-name_Monitoring,server=test-host-name')
    end

    context do
      let(:configuration) { { 'default_agent_name' => 'different-agent-name' } }

      it 'updates JAVA_OPTS with configured agent name' do
        component.release

        expect(java_opts).to include('-agentpath:$PWD/.java-buildpack/dyna_trace_agent/agent/lib64/'\
                                     'libdtagent.so=name=different-agent-name,server=test-host-name')
      end
    end
  end

  context do
    before do
      # Credentials with an explicit profile override the default suffix.
      allow(services).to receive(:one_service?).with(/dynatrace/, 'server').and_return(true)
      allow(services).to receive(:find_service).and_return('credentials' => { 'server' => 'test-host-name',
                                                                              'profile' => 'test-profile' })
    end

    it 'updates JAVA_OPTS with custom profile' do
      component.release

      expect(java_opts).to include('-agentpath:$PWD/.java-buildpack/dyna_trace_agent/agent/lib64/'\
                                   'libdtagent.so=name=test-application-name_test-profile,server=test-host-name')
    end
  end
end
| 33.607143 | 109 | 0.691817 |
# Renames the legacy +users+ table to +players+.
class ChangeUsersToPlayers < ActiveRecord::Migration[5.2]
  # Reversible change: Rails derives the inverse rename automatically.
  def change
    rename_table(:users, :players)
  end
end
| 19.166667 | 57 | 0.756522 |
ac728443964aed850ebb80da00de003c805934f7 | 2,572 | # This file is copied to spec/ when you run 'rails generate rspec:install'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../dummy/config/environment', __FILE__)
# Prevent database truncation if the environment is production
abort("The Rails environment is running in production mode!") if Rails.env.production?
require 'spec_helper'
require 'rspec/rails'
require "capybara/rspec"
# Add additional requires below this line. Rails is not loaded until this point!
# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
#
# The following line is provided for convenience purposes. It has the downside
# of increasing the boot-up time by auto-requiring all files in the support
# directory. Alternatively, in the individual `*_spec.rb` files, manually
# require only the support files necessary.
#
#Dir[Rails.root.join('spec/support/**/*.rb')].each { |f| require f }
Dir[File.dirname(__FILE__) + "/support/**/*.rb"].each {|f| require f}
# Checks for pending migrations before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.maintain_test_schema!
# Shared RSpec configuration for specs running against the dummy Rails app.
RSpec.configure do |config|
  # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
  config.fixture_path = "#{::Rails.root}/spec/fixtures"

  # If you're not using ActiveRecord, or you'd prefer not to run each of your
  # examples within a transaction, remove the following line or assign false
  # instead of true.
  config.use_transactional_fixtures = true

  # RSpec Rails can automatically mix in different behaviours to your tests
  # based on their file location, for example enabling you to call `get` and
  # `post` in specs under `spec/controllers`.
  #
  # You can disable this behaviour by removing the line below, and instead
  # explicitly tag your specs with their type, e.g.:
  #
  #     RSpec.describe UsersController, :type => :controller do
  #       # ...
  #     end
  #
  # The different available types are documented in the features, such as in
  # https://relishapp.com/rspec/rspec-rails/docs
  config.infer_spec_type_from_file_location!
end
| 47.62963 | 86 | 0.747278 |
f82283397338ff62311641c2df35871317f08f60 | 2,353 | require 'spec_helper'
# Specs for the collectd::plugin::filter::target define: verifies that
# targets are rendered into the filter-chain concat fragment with the
# right order, and that builtin targets skip plugin loading.
describe 'collectd::plugin::filter::target', type: :define do
  let :facts do
    {
      osfamily: 'Debian',
      concat_basedir: tmpfilename('collectd-filter'),
      id: 'root',
      kernel: 'Linux',
      path: '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
      collectd_version: '5.0'
    }
  end

  let(:title) { 'MyTarget' }
  let(:default_params) { { chain: 'MyChain' } }
  let(:concat_fragment_target) { '/etc/collectd/conf.d/filter-chain-MyChain.conf' }

  context 'Add target set to rule with options' do
    # Fragment order encodes rule position (10_MyRule) and target position (30).
    let(:concat_fragment_order) { '10_MyRule_30_MyTarget' }
    let(:concat_fragment_name) { '/etc/collectd/conf.d/filter-chain-MyChain.conf_10_MyRule_30_MyTarget' }
    let(:params) do
      default_params.merge(plugin: 'set',
                           rule: 'MyRule',
                           options: {
                             'PluginInstance' => 'coretemp',
                             'TypeInstance' => 'core3'
                           })
    end

    it 'Will ensure that plugin is loaded' do
      should contain_collectd__plugin('target_set').with(order: '02')
    end

    it 'Will add target to rule' do
      should contain_concat__fragment(concat_fragment_name).with(
        order: concat_fragment_order,
        target: concat_fragment_target
      )
      should contain_concat__fragment(concat_fragment_name).with(content: /<Target "set">/)
      should contain_concat__fragment(concat_fragment_name).with(content: /PluginInstance "coretemp"/)
      should contain_concat__fragment(concat_fragment_name).with(content: /TypeInstance "core3"/)
    end
  end

  context 'Add builtin target return without rule to chain' do
    let(:concat_fragment_order) { '20_50_MyTarget' }
    let(:concat_fragment_name) { '/etc/collectd/conf.d/filter-chain-MyChain.conf_20_50_MyTarget' }
    let(:params) do
      default_params.merge(plugin: 'return')
    end

    it 'Builtin plugin should not be tried to load' do
      should_not contain_collectd__plugin('target_return')
    end

    it 'Will add target to chain' do
      should contain_concat__fragment(concat_fragment_name).with(
        order: concat_fragment_order,
        target: concat_fragment_target
      )
      should contain_concat__fragment(concat_fragment_name).with(content: /Target "return"/)
    end
  end
end
| 37.951613 | 105 | 0.658309 |
# Homebrew formula for Helm 2 (versioned, deprecated upstream).
class HelmAT2 < Formula
  desc "Kubernetes package manager"
  homepage "https://helm.sh/"
  url "https://github.com/helm/helm.git",
      tag:      "v2.17.0",
      revision: "a690bad98af45b015bd3da1a41f6218b1a451dbe"
  license "Apache-2.0"

  bottle do
    cellar :any_skip_relocation
    sha256 "432e81bffefbb026bd50058e920a424b1805b84efc634d78c93dfedb9fec3d5a" => :big_sur
    sha256 "831c4f5b7cf7fc1ab53364eeb2eeb6eff8babdbc51817b406b65a948ac6258c2" => :catalina
    sha256 "ab7ef44ce55c8b3597a2cb6dfe0ef93b74b389e6a4d6ab09c9a1ebe8dce5e594" => :mojave
    sha256 "a1c5cb86cce4fe2941c94309c8c75cd00ed9fae2e6edc6ea67aacadcf2f13c9e" => :high_sierra
    sha256 "66f0d78ff5fde1dc9d15aab9a8ca648adabf92e5f357f7fc9b515b59e92ad77f" => :x86_64_linux
  end

  keg_only :versioned_formula

  # See: https://helm.sh/blog/helm-v2-deprecation-timeline/
  deprecate! date: "2020-11-13", because: :deprecated_upstream

  depends_on "glide" => :build
  depends_on "go" => :build

  def install
    ENV["GOPATH"] = buildpath
    ENV["GLIDE_HOME"] = HOMEBREW_CACHE/"glide_home/#{name}"
    ENV.prepend_create_path "PATH", buildpath/"bin"
    ENV["TARGETS"] = "darwin/amd64"

    # Helm 2's Makefile expects the source under src/k8s.io/helm (GOPATH layout).
    dir = buildpath/"src/k8s.io/helm"
    dir.install buildpath.children - [buildpath/".brew_home"]

    cd dir do
      system "make", "bootstrap"
      system "make", "build"

      bin.install "bin/helm"
      bin.install "bin/tiller"
      man1.install Dir["docs/man/man1/*"]

      # Generate and install shell completions from the built binary.
      output = Utils.safe_popen_read({ "SHELL" => "bash" }, bin/"helm", "completion", "bash")
      (bash_completion/"helm").write output

      output = Utils.safe_popen_read({ "SHELL" => "zsh" }, bin/"helm", "completion", "zsh")
      (zsh_completion/"_helm").write output

      prefix.install_metafiles
    end
  end

  test do
    system "#{bin}/helm", "create", "foo"
    assert File.directory? "#{testpath}/foo/charts"

    version_output = shell_output("#{bin}/helm version --client 2>&1")
    assert_match "GitTreeState:\"clean\"", version_output
    if build.stable?
      assert_match stable.instance_variable_get(:@resource).instance_variable_get(:@specs)[:revision], version_output
    end
  end
end
| 34.174603 | 117 | 0.704598 |
ffb27e252abcd991a06e96c5006798e74e98da6d | 523 | # HTMLページを作成します。
# Generates static HTML pages from ERB templates.
class Page
  require 'erb'
  require 'fileutils'

  # Builds index.html under +output_path+ from the bundled ERB template.
  # The +price+ and +programs+ arguments are consumed by the template via
  # the +binding+ passed to ERB below. Returns the generated file's path.
  def self.create_index(output_path, price, programs)
    # Load the ERB template shipped alongside this library.
    erb = ERB.new(File.read(File.expand_path("../../template/index.html.erb", __FILE__)))
    File.write("#{output_path}/index.html", erb.result(binding))
    # Copy Bootstrap next to the page for styling.
    FileUtils.cp(File.expand_path("../../template/bootstrap.min.css", __FILE__), "#{output_path}/bootstrap.min.css")
    # Return the path of the generated page.
    "#{output_path}/index.html"
  end
end
399ef3765aa854c9f8b848591d7935869675cb46 | 1,496 | require File.expand_path("../base", __FILE__)
require "acceptance/support/matchers/have_color"
# Acceptance tests for vagrant's color output: color is emitted only when
# stdout is a TTY (simulated via `expect`) and never with --no-color.
describe "vagrant and color output" do
  include_context "acceptance"

  # This is a check to see if the `expect` program is installed on this
  # computer. Some tests require this and if this doesn't exist then the
  # test itself will be skipped.
  def self.has_expect?
    `which expect`
    $?.success?
  end

  it "outputs color if there is a TTY", :if => has_expect? do
    # Run vagrant under `expect` so it sees a pseudo-TTY.
    environment.workdir.join("color.exp").open("w+") do |f|
      f.puts(<<-SCRIPT)
spawn #{environment.replace_command("vagrant")} status
expect default {}
SCRIPT
    end

    result = execute("expect", "color.exp")
    result.stdout.should have_color
  end

  it "doesn't output color if there is a TTY but --no-color is present", :if => has_expect? do
    environment.workdir.join("color.exp").open("w+") do |f|
      f.puts(<<-SCRIPT)
spawn #{environment.replace_command("vagrant")} status --no-color
expect default {}
SCRIPT
    end

    result = execute("expect", "color.exp")
    result.stdout.should_not have_color
  end

  it "doesn't output color in the absense of a TTY" do
    # This should always output an error, which on a TTY would
    # output color. We check that this doesn't output color.
    # If `vagrant status` itself is broken, another acceptance test
    # should catch that. We just assume it works here.
    result = execute("vagrant", "status")
    result.stdout.should_not have_color
  end
end
| 31.166667 | 94 | 0.696524 |
1ccc569eba51bc41bd0242f6b04d8f582eb72d03 | 1,147 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'rspec/its/version'
# Gem specification for rspec-its (the `its` attribute-matching extension
# extracted from rspec-core).
Gem::Specification.new do |spec|
  spec.name          = "rspec-its"
  spec.version       = RSpec::Its::VERSION
  spec.authors       = ["Peter Alfvin"]
  spec.email         = ["palfvin@gmail.com"]
  spec.description   = %q{RSpec extension gem for attribute matching}
  spec.summary       = %q{Provides "its" method formerly part of rspec-core}
  spec.homepage      = "https://github.com/rspec/rspec-its"
  spec.license       = "MIT"

  # Package everything tracked by git; expose bin/ scripts as executables.
  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_runtime_dependency 'rspec-core', '>= 3.0.0'
  spec.add_runtime_dependency 'rspec-expectations', '>= 3.0.0'
  spec.add_development_dependency 'bundler', '~> 1.3'
  spec.add_development_dependency 'rake', '~> 10.1.0'
  spec.add_development_dependency 'cucumber', '~> 1.3.8'
  spec.add_development_dependency 'aruba', '~> 0.5'
end
| 39.551724 | 76 | 0.655623 |
require "simplecov"
require "coveralls"

# Report coverage both as local HTML and to Coveralls; exclude the specs
# themselves from the coverage measurement.
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter.new([
  SimpleCov::Formatter::HTMLFormatter,
  Coveralls::SimpleCov::Formatter
])
SimpleCov.start { add_filter "/spec/" }

require "lita-doubler"
require "lita/rspec"

# A compatibility mode is provided for older plugins upgrading from Lita 3. Since this plugin
# was generated with Lita 4, the compatibility mode should be left disabled.
Lita.version_3_compatibility_mode = false
| 36.2 | 93 | 0.697974 |
91d1c0b85079a25833904ebd5c0d4290e3c0199a | 4,101 | # encoding: utf-8
# Mixin providing named strftime formats (Date.formats / #formatted) for
# date/time classes; also pulls in OrdinalizedFormatting on include.
module DateAndTimeFormatting
  def self.included(base)
    base.class_eval do
      include DateAndTimeFormatting::InstanceMethods
      include OrdinalizedFormatting
      extend DateAndTimeFormatting::ClassMethods
    end
  end

  module InstanceMethods

    # Formats a date/time instance using a defined format
    #
    # ==== Parameters
    # format<Symbol>:: of the format key from Date.date_formats
    #
    # ==== Returns
    # String:: formatted string
    #
    # ==== Example
    #   Time.now.formatted(:rfc822) # => "Sun, 16 Nov 2007 00:21:16 -0800"
    #   Time.now.formatted(:db) # => "2008-11-16 00:22:09"
    #
    # You can also add your own formats using +Date.add_format+ when your app loads.
    #
    #   # Add the following to your init.rb
    #   Rango::BootLoader.before_app_loads do
    #     Date.add_format(:matt, "%H:%M:%S %Y-%m-%d")
    #   end
    #
    #   # Format a Time instance with the format you just specified
    #   Time.now.formatted(:matt) # => "00:00:00 2007-11-02"
    #
    #--
    # @public
    def formatted(format = :default)
      self.strftime(Date.formats[format])
    end

  end

  module ClassMethods

    # Class-variable registry shared by every class that extends this module.
    @@formats = {
      :db => "%Y-%m-%d %H:%M:%S",
      :time => "%H:%M", # 21:12
      :date => "%Y-%m-%d", # 2008-12-04
      :short => "%d %b %H:%M", # 01 Sep 21:12
      :long => "%B %d, %Y %H:%M",
      :rfc822 => "%a, %d %b %Y %H:%M:%S %z"
    }

    # Lists the date and time formats
    #
    # ==== Returns
    # Hash:: a hash with all formats available
    # --
    # @public
    def formats
      @@formats
    end

    # Adds a date and time format
    #
    # Because this operation is not thread safe, you should define
    # custom formats when you load you application. The recommended way
    # to do that, is to use the before_app_loads bootloader.
    #
    # If you want to add a format at runtime, you will need to use a mutex
    # and synchronize it yourself.
    #
    # ==== Parameters
    # key<Symbol>:: name of the format
    # format<Hash>:: time format to use
    #
    # ==== Returns
    # Hash:: a hash with all formats available
    #
    # ==== Example
    #
    #   Rango::BootLoader.before_app_loads do
    #     Date.add_format(:matt, "%H:%M:%S %Y-%m-%d")
    #   end
    #
    # --
    # @public
    def add_format(key, format)
      formats.merge!({key => format})
    end

    # Resets the date and time formats
    # NOTE(review): :long_ordinal appears in this whitelist but is never
    # defined in @@formats above — harmless, but confirm it is intended.
    # --
    # @private
    def reset_formats
      original_formats = [:db, :time, :short, :date, :long, :long_ordinal, :rfc822]
      formats = @@formats.delete_if{|format, v| !original_formats.include?(format)}
    end

  end

end
module Ordinalize
  # Ordinalize turns a number into an ordinal string used to denote the
  # position in an ordered sequence such as 1st, 2nd, 3rd, 4th.
  #
  # ==== Examples
  #   1.ordinalize    # => "1st"
  #   2.ordinalize    # => "2nd"
  #   1002.ordinalize # => "1002nd"
  #   1003.ordinalize # => "1003rd"
  def ordinalize
    # The teens (11th..13th) are irregular, so the last two digits are
    # checked before the final digit.
    suffix =
      case self % 100
      when 11, 12, 13 then "th"
      else
        case self % 10
        when 1 then "st"
        when 2 then "nd"
        when 3 then "rd"
        else "th"
        end
      end
    "#{self}#{suffix}"
  end
end
# Make #ordinalize available on every Integer (e.g. 4.ordinalize => "4th").
Integer.send :include, Ordinalize
# Time.now.to_ordinalized_s :long
# => "February 28th, 2006 21:10"
module OrdinalizedFormatting
def to_ordinalized_s(format = :default)
format = Date.formats[format]
return self.to_s if format.nil?
strftime_ordinalized(format)
end
# Gives you a relative date in an attractive format
#
# ==== Parameters
# format<String>:: strftime string used to format a time/date object
# locale<String, Symbol>:: An optional value which can be used by localization plugins
#
# ==== Returns
# String:: Ordinalized time/date object
#
# ==== Examples
# 5.days.ago.strftime_ordinalized('%b %d, %Y') # =>
def strftime_ordinalized(fmt, format=nil)
strftime(fmt.gsub(/(^|[^-])%d/, '\1_%d_')).gsub(/_(\d+)_/) { |s| s.to_i.ordinalize }
end
end
| 25.792453 | 88 | 0.582297 |
ed9a227d7c344163139f18e34bc8a1d086eb49e7 | 790 | require 'kubeclient'
describe NOVAHawk::Providers::Kubernetes::ContainerManager::RefresherMixin do
let(:client) { double("client") }
# Anonymous host class so the mixin can be exercised in isolation.
let(:dummy) { (Class.new { include NOVAHawk::Providers::Kubernetes::ContainerManager::RefresherMixin }).new }
context 'when an exception is thrown' do
# Every get_pods call fails with a KubeException (an HTTP-level API error).
before { allow(client).to receive(:get_pods) { raise KubeException.new(0, 'oh-no', nil) } }
context 'and there is no default value' do
# Without a :default, fetch_entities must propagate the API error.
it 'should raise' do
expect { dummy.fetch_entities(client, [{:name => 'pods'}]) }.to raise_error(KubeException)
end
end
context 'and there is a default value' do
# With :default, the error is swallowed and the default is returned —
# keyed under the singular form 'pod' (per this expectation; confirm in mixin).
it 'should be returned' do
expect(dummy.fetch_entities(client, [{:name => 'pods', :default => []}])).to eq('pod' => [])
end
end
end
end
| 34.347826 | 111 | 0.656962 |
bb6ee4b63b39b5fbc5acabbef260a03afbd992ae | 117 | class AddSshLoginToUsers < ActiveRecord::Migration
# Adds users.ssh_login (string). Reversible: Rails infers remove_column
# on rollback.
def change
add_column :users, :ssh_login, :string
end
end
| 19.5 | 50 | 0.760684 |
1c27d817fc4b83e34f6d80fe2035384fec3fe216 | 1,113 | require 'optparse'
require "vagrant"
require Vagrant.source_root.join("plugins/commands/up/start_mixins")
module VagrantPlugins
  module CommandReload
    # `vagrant reload`: halts and restarts machines so a changed
    # Vagrantfile takes effect, optionally re-running provisioners.
    class Command < Vagrant.plugin("2", :command)
      # We assume that the `up` plugin exists and that we'll have access
      # to this.
      include VagrantPlugins::CommandUp::StartMixins

      def self.synopsis
        "restarts vagrant machine, loads new Vagrantfile configuration"
      end

      # Parses CLI options, validates provisioner flags and triggers the
      # :reload action on every targeted machine. Returns 0 on success.
      def execute
        options = { provision_ignore_sentinel: false }

        parser = OptionParser.new do |o|
          o.banner = "Usage: vagrant reload [vm-name]"
          o.separator ""
          build_start_options(o, options)
        end

        argv = parse_options(parser)
        return unless argv

        validate_provisioner_flags!(options)

        @logger.debug("'reload' each target VM...")
        with_target_vms(argv) { |machine| machine.action(:reload, options) }

        # Success, exit status 0
        0
      end
    end
  end
end
| 24.195652 | 72 | 0.625337 |
33e6a0911b46442be8eb5de9adfecbb6a8ee19c1 | 1,463 | module Spree
module Marketing
class List
# Marketing list built around the shop's best-selling products.
class FavourableProducts < Spree::Marketing::List
include Spree::Marketing::ActsAsMultiList
# Constants
NAME_TEXT = 'Most Selling Products'
ENTITY_KEY = 'entity_id'
ENTITY_TYPE = 'Spree::Product'
# Look-back window; presumably consumed by computed_time in
# ActsAsMultiList — TODO confirm.
TIME_FRAME = 1.month
FAVOURABLE_PRODUCT_COUNT = 10
AVAILABLE_REPORTS = [:cart_additions_by, :purchases_by, :product_views_by]
# IDs of registered users who bought this list's product (entity_id)
# within the time window.
def user_ids
# FIXME: There might be a case where a guest user have placed an order
# And we also have his email but we are leaving those emails for now.
Spree::Order.joins(line_items: { variant: :product })
.of_registered_users
.where('spree_orders.completed_at >= :time_frame', time_frame: computed_time)
.where('spree_products.id = ?', entity_id)
.group(:user_id)
.pluck(:user_id)
end
# The top FAVOURABLE_PRODUCT_COUNT product ids by number of completed
# orders in the window; one list is created per id via ActsAsMultiList.
def self.data
Spree::Order.joins(line_items: { variant: :product })
.where('spree_orders.completed_at >= :time_frame', time_frame: computed_time)
.group("spree_products.id")
.order("COUNT(spree_orders.id) DESC")
.limit(FAVOURABLE_PRODUCT_COUNT)
.pluck("spree_products.id")
end
private_class_method :data
end
end
end
end
| 36.575 | 99 | 0.574163 |
1c9291e384384cc997c7634ccdb2e19f0db6dc7e | 5,617 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
# NOTE(review): schema.rb is regenerated by Rails on every migration run,
# so the annotations below will not survive a `db:migrate`.
ActiveRecord::Schema.define(version: 2019_02_21_000514) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
# Active Storage: polymorphic link between records and uploaded blobs.
create_table "active_storage_attachments", force: :cascade do |t|
t.string "name", null: false
t.string "record_type", null: false
t.bigint "record_id", null: false
t.bigint "blob_id", null: false
t.datetime "created_at", null: false
t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
end
# Active Storage: metadata for each uploaded file.
create_table "active_storage_blobs", force: :cascade do |t|
t.string "key", null: false
t.string "filename", null: false
t.string "content_type"
t.text "metadata"
t.bigint "byte_size", null: false
t.string "checksum", null: false
t.datetime "created_at", null: false
t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true
end
# Third-party (provider/uid) identities linked to users.
create_table "authentications", force: :cascade do |t|
t.integer "user_id", null: false
t.string "provider", null: false
t.string "uid", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["provider", "uid"], name: "index_authentications_on_provider_and_uid"
end
# Directed friend requests; friend_request_status is an app-defined enum
# integer (default 0). The (sender, receiver) pair is unique.
create_table "friend_requests", force: :cascade do |t|
t.integer "sender_id"
t.integer "receiver_id"
t.integer "friend_request_status", default: 0
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["receiver_id"], name: "index_friend_requests_on_receiver_id"
t.index ["sender_id", "receiver_id"], name: "index_friend_requests_on_sender_id_and_receiver_id", unique: true
t.index ["sender_id"], name: "index_friend_requests_on_sender_id"
end
# Site announcements; "order" controls display ordering — note the
# SQL-reserved column name.
create_table "information", force: :cascade do |t|
t.text "title", null: false
t.text "content"
t.datetime "display_time", null: false
t.integer "order", default: 0
t.integer "information_type", default: 0
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
# Per-room container for canned reply buttons.
create_table "message_button_lists", force: :cascade do |t|
t.bigint "room_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["room_id"], name: "index_message_button_lists_on_room_id"
end
# A canned reply button; message_no/message_type semantics are
# app-defined — TODO confirm against the model.
create_table "message_buttons", force: :cascade do |t|
t.string "content", limit: 20
t.integer "message_no"
t.integer "message_type", default: 0
t.bigint "message_button_list_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["message_button_list_id"], name: "index_message_buttons_on_message_button_list_id"
end
# Chat messages; belong to a user (author) and a room.
create_table "messages", force: :cascade do |t|
t.text "content"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.bigint "user_id"
t.bigint "room_id"
t.index ["room_id"], name: "index_messages_on_room_id"
t.index ["user_id"], name: "index_messages_on_user_id"
end
# Chat rooms; updated_at_message presumably tracks the latest message
# timestamp — verify against the Room model.
create_table "rooms", force: :cascade do |t|
t.string "name"
t.text "description"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.bigint "owner_id"
t.datetime "updated_at_message"
t.index ["owner_id"], name: "index_rooms_on_owner_id"
end
# Per-user input mode and notification preferences.
create_table "use_type_settings", force: :cascade do |t|
t.integer "use_type", default: 0
t.boolean "use_text_input", default: false, null: false
t.boolean "use_button_input", default: false, null: false
t.bigint "user_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.boolean "use_mail_notification", default: false
t.index ["user_id"], name: "index_use_type_settings_on_user_id"
end
# Accounts; crypted_password/salt and the remember/reset token columns
# follow the Sorcery authentication column naming.
create_table "users", force: :cascade do |t|
t.string "email", null: false
t.string "crypted_password"
t.string "salt"
t.string "name", limit: 20, null: false
t.integer "role", default: 0
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.text "introduction"
t.text "uuid"
t.string "remember_me_token"
t.datetime "remember_me_token_expires_at"
t.string "reset_password_token"
t.datetime "reset_password_token_expires_at"
t.datetime "reset_password_email_sent_at"
t.integer "access_count_to_reset_password_page", default: 0
t.string "image"
t.datetime "post_to_operation_sent_at"
t.string "screen_name", default: ""
t.index ["email"], name: "index_users_on_email", unique: true
t.index ["remember_me_token"], name: "index_users_on_remember_me_token"
t.index ["reset_password_token"], name: "index_users_on_reset_password_token"
t.index ["uuid"], name: "index_users_on_uuid", unique: true
end
end
| 39.27972 | 126 | 0.716753 |
874d866cff13e7f246bbc3321130147567e18069 | 485 | class UsersController < ApplicationController
# GET /signup — renders the signup form with a blank user for the form
# builder.
def new
@user = User.new
end
# POST /users — registers the user, signs them in by storing the id in
# the session, and redirects home; re-routes to the signup page on
# validation failure.
def create
  @user = User.new(user_params)

  unless @user.save
    flash[:alert] = "There was a problem signing up."
    return redirect_to '/signup'
  end

  flash[:notice] = "You've successfully signed up!"
  session[:user_id] = @user.id
  redirect_to "/"
end
private
# Strong parameters: only the signup fields may be mass-assigned.
def user_params
params.require(:user).permit(:email, :password, :password_confirmation)
end
end | 20.208333 | 75 | 0.649485 |
d5cb85896ee7b4464a16bb14543c46b18943010f | 7,325 | require File.expand_path("spec_helper", File.dirname(__FILE__))
describe RandomWord, "enumerator" do
subject {RandomWord.enumerator(["aaa", "bbb", "ccc"])}
it "can get you the next word in its list" do
expect(subject.next).to be_one_of(["aaa", "bbb", "ccc"])
end
it "raises error when it runs out of words" do
3.times{subject.next}
expect{subject.next}.to raise_error(StopIteration)
end
it "make sure each word is only returned once" do
already_received = []
3.times do
expect(new_word = subject.next).not_to be_one_of(already_received)
already_received << new_word
end
end
end
describe RandomWord do
after(:all) do
# The enumerators are memoized in module-level ivars; clear them so other
# spec files start fresh.
RandomWord.instance_eval{ @nouns, @adjs, @words = nil, nil, nil } # reset rspec effects
end
it "can return a random noun enumerator" do
expect(RandomWord.nouns).to respond_to(:next)
end
it "can return a random adj enumerator" do
expect(RandomWord.adjs).to respond_to(:next)
end
it "can return a random word enumerator" do
expect(RandomWord.words).to respond_to(:next)
end
# Unlike the others, phrases yields Strings directly rather than an
# enumerator object.
it "can return a random phrase enumerator" do
expect(RandomWord.phrases.next).to be_a(String)
end
end
describe RandomWord, "#exclude" do
let(:word_list) { ["aaa","ccc","c", "cab", "abc", "ace", "dad"] }
[
{:name => "normal words", :exclude => "ccc", :expected => Set.new(["aaa","c", "cab", "abc", "ace", "dad"])},
{:name => "regex", :exclude => /c/, :expected => Set.new(["aaa", "dad"])},
{:name => "list", :exclude => [/c/,/d/], :expected => Set.new(["aaa"])},
].each do |rec|
it "will not return an excluded #{rec[:name]}" do
subject = RandomWord.enumerator(word_list, rec[:exclude])
received_words = []
loop do
received_words << subject.next
end rescue StopIteration
expect(Set.new(received_words)).to eq(rec[:expected])
end
end
end
describe "RandomWord#nouns", "with exclusions" do
subject{ RandomWord.nouns }
before(:each) do
expect(RandomWord).to receive(:load_word_list).and_return(["aaa","bbb", "ccc"])
end
after(:each) do
RandomWord.exclude_list.clear
RandomWord.instance_eval{ @nouns, @adjs, @words = nil, nil, nil } # reset rspec effects
end
it "will not return an excluded word" do
RandomWord.exclude_list << "ccc"
received_words = []
loop do
received_words << subject.next
end
expect(received_words).not_to include "ccc"
expect(received_words).to include "aaa"
expect(received_words).to include "bbb"
end
end
describe "RandomWord#words", "with exclusions" do
subject{ RandomWord.words }
before(:each) do
expect(RandomWord).to receive(:load_word_list).twice.and_return(["aaa","bbb", "ccc"])
end
after(:each) do
RandomWord.exclude_list.clear
RandomWord.instance_eval{ @nouns, @adjs, @words = nil, nil, nil } # reset rspec effects
end
it "will not return an excluded word" do
RandomWord.exclude_list << "ccc"
received_words = []
loop do
received_words << subject.next
end
expect(received_words).not_to include "ccc"
expect(received_words).to include "aaa"
expect(received_words).to include "bbb"
end
end
# Shared behaviour: the enumerator-returning method named by +method+
# (:nouns or :adjs) must honor :not_longer_than / :not_shorter_than.
shared_examples 'allows constraints on word length' do |method|
context 'when constraining' do
# One word of each length 2..5, so every bound excludes a known subset.
let(:word_list) { %w(aa bbb cccc ddddd) }
before(:each) do
expect(RandomWord).to receive(:load_word_list).and_return(word_list)
end
after(:each) do
# Clear memoized module-level enumerators between examples.
RandomWord.instance_eval{ @nouns, @adjs, @words = nil, nil, nil }
end
# Drains the enumerator completely, collecting every word that satisfies
# the example's length_constraints.
let(:next_words) do
[].tap do |next_words|
loop do
begin
next_words << RandomWord.send(method, length_constraints).next
rescue StopIteration
# We've tried all the words in the short test list we're using.
break
end
end
end
end
context 'by maximum length' do
let(:length_constraints) { {not_longer_than: 2} }
it 'excludes the correct words' do
expect(next_words).to match_array %w(aa)
end
end
context 'by minimum length' do
let(:length_constraints) { {not_shorter_than: 4} }
it 'excludes the correct words' do
expect(next_words).to match_array %w(cccc ddddd)
end
end
context 'by both minimum and maximum length' do
let(:length_constraints) { {not_shorter_than: 3, not_longer_than: 4} }
it 'excludes the correct words' do
expect(next_words).to match_array %w(bbb cccc)
end
end
# Degenerate bounds must not crash: an impossible minimum is a no-op ...
context 'by a perverse minimum length' do
let(:length_constraints) { {not_shorter_than: -1234} }
it 'includes all words' do
expect(next_words).to match_array word_list
end
end
# ... and an impossible maximum excludes everything.
context 'by a perverse maximum length' do
let(:length_constraints) { {not_longer_than: -34234} }
it 'excludes all words' do
expect(next_words).to be_empty
end
end
context 'and all words are within the constraints' do
let(:length_constraints) { {not_shorter_than: 2, not_longer_than: 5} }
it 'includes all words' do
expect(next_words).to match_array word_list
end
end
end
end
# Shared behaviour: passing different constraints on later calls must be
# applied to the same underlying enumerator.
shared_examples 'changing constraints in subsequent calls' do |method|
context 'when changing constraints in subsequent calls' do
let(:word_list) { %w(defenestrate as can jubilant orangutan hat) }
before(:each) do
expect(RandomWord).to receive(:load_word_list).and_return(word_list)
end
after(:each) do
RandomWord.instance_eval{ @nouns, @adjs, @words = nil, nil, nil }
end
it 'applies the new constraints' do
# Drain the three short words first, then the three long ones; once all
# six are consumed the enumerator must be exhausted.
short_words = %w(as can hat)
long_words = %w(defenestrate jubilant orangutan)
3.times { expect(short_words).to include RandomWord.send(method, not_longer_than: 3).next }
3.times { expect(long_words).to include RandomWord.send(method, not_longer_than: 150).next }
expect { RandomWord.send(method).next }.to raise_exception StopIteration
end
end
end
# Shared behaviour: constraints still work after installing a seeded RNG
# via RandomWord.rng=.
shared_examples 'with a seed specified' do |method|
context 'when setting seed' do
let(:word_list) { %w(defenestrate as can jubilant orangutan hat) }
before(:each) do
expect(RandomWord).to receive(:load_word_list).and_return(word_list)
end
after(:each) do
# NOTE(review): unlike the other shared examples, this hook resets only
# @nouns/@adjs (not @words) and leaves RandomWord.rng seeded — confirm
# whether that is intentional.
RandomWord.instance_eval{ @nouns, @adjs = nil, nil }
end
# NOTE(review): example name appears copy-pasted from the constraints
# shared example; this one is really about seeding.
it 'applies the new constraints' do
RandomWord.rng = Random.new 1234
short_words = %w(as can hat)
long_words = %w(defenestrate jubilant orangutan)
3.times { expect(short_words).to include RandomWord.send(method, not_longer_than: 3).next }
3.times { expect(long_words).to include RandomWord.send(method, not_longer_than: 150).next }
expect { RandomWord.send(method).next }.to raise_exception StopIteration
end
end
end
# Run every shared-behaviour group against both enumerator factories.
describe RandomWord do
  %i[nouns adjs].each do |method|
    context "##{method}" do
      include_examples 'allows constraints on word length', method
      include_examples 'changing constraints in subsequent calls', method
      include_examples 'with a seed specified', method
    end
  end
end
| 28.173077 | 112 | 0.668942 |
f7b3758b376f3a60295b02ea993d11c2f48eb3c1 | 5,015 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress CSS using a preprocessor.
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :local
# Mount Action Cable outside main process or domain.
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "financetracker_production"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Inserts middleware to perform automatic connection switching.
# The `database_selector` hash is used to pass options to the DatabaseSelector
# middleware. The `delay` is used to determine how long to wait after a write
# to send a subsequent read to the primary.
#
# The `database_resolver` class is used by the middleware to determine which
# database is appropriate to use based on the time delay.
#
# The `database_resolver_context` class is used by the middleware to set
# timestamps for the last write to the primary. The resolver uses the context
# class timestamps to determine how long to wait before reading from the
# replica.
#
# By default Rails will store a last write timestamp in the session. The
# DatabaseSelector middleware is designed as such you can define your own
# strategy for connection switching and pass that into the middleware through
# these configuration options.
# config.active_record.database_selector = { delay: 2.seconds }
# config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
# config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
| 44.380531 | 114 | 0.762512 |
d500329c8e15d2f8c8bd62b1dd4b0742635f8cc9 | 101 | class Review < ApplicationRecord
# The reviewing user; the association is aliased, the rows live in users.
belongs_to :reviewer, class_name: 'User'
belongs_to :talk
end
| 14.428571 | 42 | 0.762376 |
337a1e169fed070e0f128b2f6c2abcbe0ca7395b | 1,595 | module Xcodeproj
class Command
# `project-diff`: prints a UUID-agnostic, colorized YAML diff of two
# Xcode project files.
class ProjectDiff < Command
  def self.banner
    %{Shows the difference between two projects:
$ project-diff PROJECT_1 PROJECT_2
It shows the difference in a UUID agnostic fashion.
To reduce the noise (and to simplify implementation) differences in the
order of arrays are ignored.}
  end

  def self.options
    own = [["--ignore KEY", "A key to ignore in the comparison. Can be specified multiple times."]]
    own.concat(super)
  end

  def initialize(argv)
    @path_project1 = argv.shift_argument
    @path_project2 = argv.shift_argument
    unless @path_project1 && @path_project2
      raise Informative, "Two project paths are required."
    end

    # Collect every `--ignore KEY` pair, removing both tokens from argv.
    @keys_to_ignore = []
    while (idx = argv.index('--ignore'))
      @keys_to_ignore << argv.delete_at(idx + 1)
      argv.delete_at(idx)
    end

    super unless argv.empty?
  end

  def run
    tree1 = Project.new(@path_project1).to_tree_hash.dup
    tree2 = Project.new(@path_project2).to_tree_hash.dup
    @keys_to_ignore.each do |key|
      [tree1, tree2].each { |tree| Differ.clean_hash!(tree, key) }
    end

    diff = Differ.project_diff(tree1, tree2, @path_project1, @path_project2)

    require 'yaml'
    yaml = diff.to_yaml
    yaml = yaml
      .gsub(@path_project1, @path_project1.cyan)
      .gsub(@path_project2, @path_project2.magenta)
      .gsub(":diff:", "diff:".yellow)
    puts yaml
  end
end
end
| 28.482143 | 113 | 0.627586 |
1d0ebc9cdddb0ffd0d7ba37da0082ddaa59bf85e | 2,689 | require "test_helper"
module AppUp
module Hooks
# Test double for the shell: records every #enqueue / #notify call so
# tests can inspect what the hooks asked for.
class DummyShell
  attr_reader :history

  def initialize
    @history = { enqueue: [], notify: [] }
  end

  def enqueue(*args)
    @history[:enqueue] << args
  end

  def notify(*args)
    @history[:notify] << args
  end
end
# Unit tests for the RailsUp hook: bundle runs and db migrations should be
# enqueued once per affected application directory.
class RailsUpTest < Minitest::Test
  def setup
    super
    @shell = DummyShell.new
  end

  # Gemfile / Gemfile.lock / *.gemspec changes trigger one bundle command
  # per containing directory; unrelated files are ignored.
  def test_bundle_directories
    files = [
      "folder/sub_folder/Gemfile",
      "other_folder/other_subfolder/more/Gemfile.lock",
      "some_gem/thing/thing.gemspec",
      "unused/Gemthingspec",
    ]
    hook = RailsUp.new(@shell, files, {})
    hook.run
    assert_equal 3, @shell.history[:enqueue].select { |c| c[0] == :run }.size
    assert @shell.history[:enqueue].include?([:run, RailsUp::BUNDLE_COMMAND, dir: [".", "folder", "sub_folder"]]), @shell.history
    assert @shell.history[:enqueue].include?([:run, RailsUp::BUNDLE_COMMAND, dir: [".", "other_folder", "other_subfolder", "more"]]), @shell.history
    assert @shell.history[:enqueue].include?([:run, RailsUp::BUNDLE_COMMAND, dir: [".", "some_gem", "thing"]]), @shell.history
  end

  # db/migrate changes enqueue a migration for both the test and the
  # development environment of each app directory.
  def test_migrate_directories
    files = [
      "folder/sub_folder/db/migrate/migration.rb",
      "other_folder/other_subfolder/more/db/migrate/migration.rb",
      "unused/db/config.rb",
    ]
    hook = RailsUp.new(@shell, files, {})
    hook.stubs(:migrate).with("test").returns("migrate_test")
    hook.stubs(:migrate).with("development").returns("migrate_development")
    hook.run
    assert_equal 4, @shell.history[:enqueue].select { |c| c[0] == :run }.size
    assert @shell.history[:enqueue].include?([:run, "migrate_test", dir: [".", "folder", "sub_folder"]]), @shell.history
    assert @shell.history[:enqueue].include?([:run, "migrate_development", dir: [".", "folder", "sub_folder"]]), @shell.history
    assert @shell.history[:enqueue].include?([:run, "migrate_test", dir: [".", "other_folder", "other_subfolder", "more"]]), @shell.history
    assert @shell.history[:enqueue].include?([:run, "migrate_development", dir: [".", "other_folder", "other_subfolder", "more"]]), @shell.history
  end

  # The generated migrate command drops the database only when :db_reset
  # is passed.
  def test_migrate__drops_db
    no_drop_hook = RailsUp.new("stub", "stub", {})
    # assert_nil instead of assert_equal(nil, ...): Minitest deprecates the
    # latter and treats it as an error in newer releases.
    assert_nil no_drop_hook.send(:migrate, "test").match(/db:drop/)

    # Previously this local was also (misleadingly) named no_drop_hook.
    drop_hook = RailsUp.new("stub", "stub", {db_reset: true})
    assert drop_hook.send(:migrate, "test").match(/db:drop/)
  end
end
end
end
| 34.474359 | 152 | 0.600595 |
6a9e1e33fe5f16a944fff2cb9612299c9b3be43b | 882 | require 'spec_helper'
# Integration spec: every example shells out to the installed `hss`
# executable via backticks.
describe 'HSS script' do
before :all do
# Point the CLI at the fixture config for all examples.
ENV['HSS_CONFIG'] = './spec/test/config.yml'
end
it 'prints the version' do
expect(`hss version`.strip).to match(/\d+\.\d+\.\d+/)
end
it 'prints a list of examples' do
expect(`hss`.split("\n").first).to eql 'How to use:'
end
it 'supports a debug parameter' do
# With HSS_DEBUG set, `hss g` prints the resolved command rather than
# connecting (per the expectation below).
ENV['HSS_DEBUG'] = 'yes'
expect(`hss g`.strip).to eql 'ssh git@github.com'
ENV.delete 'HSS_DEBUG'
end
it 'support overriding the base command' do
# HSS_COMMAND substitutes the program used to run the resolved target.
ENV['HSS_COMMAND'] = 'echo'
expect(`hss g`.strip).to eql 'git@github.com'
ENV.delete 'HSS_COMMAND'
end
it 'connects via SSH' do
expect(`hss l echo 'hello \\"world\\"'`.strip).to eql 'hello "world"'
end
it 'connects via SCP' do
# Uses hss as scp's ssh program (-S), then checks the file landed.
`scp -S hss spec/test/config.yml l:#{Dir.pwd}/scp_test`
expect(File.exist?('scp_test')).to be_truthy
end
end
| 27.5625 | 73 | 0.639456 |
bf9ec39a3ede758ab4a74b04b506295064e35fb3 | 3,369 | # -*- coding: binary -*-
# Copyright (c) 2010, patrickHVE@googlemail.com
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * The names of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL patrickHVE@googlemail.com BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
require 'rex/post/meterpreter/packet'
module Rex
module Post
module Meterpreter
module Extensions
module Stdapi
module Railgun
# Base offset for Railgun's TLV types; each type below is defined relative
# to TLV_EXTENSIONS plus this offset, OR'd with its meta type.
TLV_TYPE_EXTENSION_RAILGUN = 0
# Request TLVs: requested output-buffer size plus the raw stack/buffer
# blobs shipped to the target for the foreign function call.
TLV_TYPE_RAILGUN_SIZE_OUT = TLV_META_TYPE_UINT | (TLV_TYPE_EXTENSION_RAILGUN + TLV_EXTENSIONS + 1)
TLV_TYPE_RAILGUN_STACKBLOB = TLV_META_TYPE_RAW | (TLV_TYPE_EXTENSION_RAILGUN + TLV_EXTENSIONS + 2)
TLV_TYPE_RAILGUN_BUFFERBLOB_IN = TLV_META_TYPE_RAW | (TLV_TYPE_EXTENSION_RAILGUN + TLV_EXTENSIONS + 3)
TLV_TYPE_RAILGUN_BUFFERBLOB_INOUT = TLV_META_TYPE_RAW | (TLV_TYPE_EXTENSION_RAILGUN + TLV_EXTENSIONS + 4)
# Response ("BACK_") TLVs: returned buffers, the raw return value and the
# error code reported back from the call.
TLV_TYPE_RAILGUN_BACK_BUFFERBLOB_OUT = TLV_META_TYPE_RAW | (TLV_TYPE_EXTENSION_RAILGUN + TLV_EXTENSIONS + 5)
TLV_TYPE_RAILGUN_BACK_BUFFERBLOB_INOUT = TLV_META_TYPE_RAW | (TLV_TYPE_EXTENSION_RAILGUN + TLV_EXTENSIONS + 6)
TLV_TYPE_RAILGUN_BACK_RET = TLV_META_TYPE_QWORD | (TLV_TYPE_EXTENSION_RAILGUN + TLV_EXTENSIONS + 7)
TLV_TYPE_RAILGUN_BACK_ERR = TLV_META_TYPE_UINT | (TLV_TYPE_EXTENSION_RAILGUN + TLV_EXTENSIONS + 8)
# Call metadata: target DLL, exported function name, batched ("multi")
# call group and calling convention.
TLV_TYPE_RAILGUN_DLLNAME = TLV_META_TYPE_STRING | (TLV_TYPE_EXTENSION_RAILGUN + TLV_EXTENSIONS + 9)
TLV_TYPE_RAILGUN_FUNCNAME = TLV_META_TYPE_STRING | (TLV_TYPE_EXTENSION_RAILGUN + TLV_EXTENSIONS + 10)
TLV_TYPE_RAILGUN_MULTI_GROUP = TLV_META_TYPE_GROUP | (TLV_TYPE_EXTENSION_RAILGUN + TLV_EXTENSIONS + 11)
# Remote memory read/write parameters: address, payload and length.
TLV_TYPE_RAILGUN_MEM_ADDRESS = TLV_META_TYPE_QWORD | (TLV_TYPE_EXTENSION_RAILGUN + TLV_EXTENSIONS + 12 )
TLV_TYPE_RAILGUN_MEM_DATA = TLV_META_TYPE_RAW | (TLV_TYPE_EXTENSION_RAILGUN + TLV_EXTENSIONS + 13 )
TLV_TYPE_RAILGUN_MEM_LENGTH = TLV_META_TYPE_UINT | (TLV_TYPE_EXTENSION_RAILGUN + TLV_EXTENSIONS + 14 )
TLV_TYPE_RAILGUN_CALLCONV = TLV_META_TYPE_STRING | (TLV_TYPE_EXTENSION_RAILGUN + TLV_EXTENSIONS + 15)
end; end; end; end; end; end
| 60.160714 | 113 | 0.78688 |
edeafe259dba50a51f5dc5779483c31e7bfd99c1 | 2,750 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe UpdateDailyEmailActiveUserRollups, type: :model, freeze_time: true do
  before do
    # Because EmailActiveUserRollup are not in primary db they are not rolled
    # back; so we do it manually here instead.
    EmailActiveUserRollup.destroy_all
  end

  # Seeds the five impressions shared verbatim by both examples (previously
  # duplicated inline): three on Jan 1 (two from viewer 1, one anonymous)
  # and two on Jan 2 (viewers 1 and 2). Only the Jan 2 ones fall inside the
  # rolled-up day.
  def create_email_impressions
    Impression.create(
      created_at: DateTime.new(2017, 1, 1, 12, 15),
      stream_kind: 'email',
      stream_id: 1,
      author_id: 0,
      post_id: 0,
      viewer_id: 1
    )
    Impression.create(
      created_at: DateTime.new(2017, 1, 1, 12, 45),
      stream_kind: 'email',
      author_id: 0,
      post_id: 1,
      viewer_id: 1
    )
    Impression.create(
      created_at: DateTime.new(2017, 1, 1, 12, 45),
      stream_kind: 'email',
      author_id: 0,
      post_id: 2,
      viewer_id: nil
    )
    Impression.create(
      created_at: DateTime.new(2017, 1, 2, 12, 15),
      stream_kind: 'email',
      author_id: 0,
      post_id: 3,
      viewer_id: 1
    )
    Impression.create(
      created_at: DateTime.new(2017, 1, 2, 12, 45),
      stream_kind: 'email',
      author_id: 0,
      post_id: 1,
      viewer_id: 2
    )
  end

  it 'creates new hourly aggregations by stream kind for non category streams' do
    create_email_impressions

    described_class.call(date: Date.new(2017, 1, 2))

    expect(EmailActiveUserRollup.count).to eq(1)
    email_mau = EmailActiveUserRollup.first
    expect(email_mau.day_total).to eq(2)
    expect(email_mau.thirty_day_total).to eq(2)
  end

  it 'updates existing records' do
    create_email_impressions
    # A pre-existing rollup row for the target day must be updated in
    # place, not duplicated.
    EmailActiveUserRollup.create(
      day: Date.new(2017, 1, 2),
      day_total: 0,
      thirty_day_total: 0
    )

    described_class.call(date: Date.new(2017, 1, 2))

    expect(EmailActiveUserRollup.count).to eq(1)
    email_mau = EmailActiveUserRollup.first
    expect(email_mau.day_total).to eq(2)
    expect(email_mau.thirty_day_total).to eq(2)
  end
end
| 25.943396 | 84 | 0.618545 |
ab548ae972abd77f124deaed40aacfcad84d4e48 | 586 | require 'spec_helper'
# #output should hand back the evaluation key unchanged and be idempotent.
describe Ducktrap::Node::Key::Evaluator, '#output' do
  subject { object.key }

  # Subclass only to make the otherwise-private #key readable.
  let(:class_under_test) do
    Class.new(described_class) { public :key }
  end

  let(:object)  { class_under_test.new(context, input) }
  let(:key)     { double('Key') }
  let(:value)   { double('Value') }
  let(:operand) { Ducktrap::Node::Static.new(:forward, :inverse) }
  let(:context) { double('Context', :key => key, :operand => operand) }
  let(:input)   { { key => value } }

  it { should be(key) }

  it_should_behave_like 'an idempotent method'
end
d5a356246b9165e1131a9a42d84a784388fe64ca | 8,824 | require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
# Specs for the Ruby helper methods whitelisted inside Delorean scripts
# (max/min/round, string/array/hash helpers, date and time parts, ERR).
#
# NOTE(review): the file previously mixed the deprecated RSpec `should`
# syntax with the `expect` syntax; everything is now `expect` for
# consistency (the `should` syntax is disabled by default in RSpec 3).
# The Delorean DSL source strings passed to `defn` are unchanged.
describe "Delorean" do
  let(:engine) do
    Delorean::Engine.new("ZZZ")
  end

  it "should handle MAX as a node name" do
    engine.parse defn("MAX:",
                      " a = [1, 2, 3, 0, -10].max()",
                     )
    expect(engine.evaluate("MAX", "a")).to eq(3)
  end

  it "should handle COMPACT" do
    engine.parse defn("A:",
                      " a = [1, 2, nil, -3, 4].compact",
                      " b = {'a': 1, 'b': nil, 'c': nil}.compact()",
                     )
    expect(engine.evaluate("A", "a")).to eq([1, 2, -3, 4])
    expect(engine.evaluate("A", "b")).to eq({"a" => 1})
  end

  it "should handle MIN" do
    engine.parse defn("A:",
                      " a = [1, 2, -3, 4].min()",
                     )
    expect(engine.evaluate("A", "a")).to eq(-3)
  end

  it "should handle ROUND" do
    engine.parse defn("A:",
                      " a = 12.3456.round(2)",
                      " b = 12.3456.round(1)",
                      " c = 12.3456.round()",
                     )
    expect(engine.evaluate("A", ["a", "b", "c"])).to eq([12.35, 12.3, 12])
  end

  it "should handle TRUNCATE" do
    engine.parse defn("A:",
                      " a = 12.3456.truncate(2)",
                      " b = 12.3456.truncate(1)",
                      " c = 12.3456.truncate()",
                     )
    expect(engine.evaluate("A", ["a", "b", "c"])).to eq([12.34, 12.3, 12])
  end

  it "should handle FLOOR" do
    engine.parse defn("A:",
                      " a = [12.3456.floor(), 13.7890.floor()]",
                     )
    expect(engine.evaluate("A", "a")).to eq([12, 13])
  end

  it "should handle TO_F" do
    engine.parse defn("A:",
                      " a = 12.3456.to_f()",
                      " b = '12.3456'.to_f()",
                      " c = '12'.to_f()",
                      " d = '2018-05-04 10:56:27 -0700'.to_time.to_f",
                     )
    expect(engine.evaluate("A", ["a", "b", "c", "d"])).
      to eq([12.3456, 12.3456, 12, 1525456587.0])
  end

  it "should handle ABS" do
    engine.parse defn("A:",
                      " a = (-123).abs()",
                      " b = (-1.1).abs()",
                      " c = 2.3.abs()",
                      " d = 0.abs()",
                     )
    expect(engine.evaluate("A", ["a", "b", "c", "d"])).
      to eq([123, 1.1, 2.3, 0])
  end

  it "should handle STRING" do
    engine.parse defn("A:",
                      " a = 'hello'.to_s()",
                      " b = 12.3456.to_s()",
                      " c = [1,2,3].to_s()",
                     )
    expect(engine.evaluate("A", ["a", "b", "c"])).
      to eq(["hello", '12.3456', [1,2,3].to_s])
  end

  it "should handle FETCH" do
    engine.parse defn("A:",
                      " h = {'a':123, 1:111}",
                      " a = h.fetch('a')",
                      " b = h.fetch(1)",
                      " c = h.fetch('xxx', 456)",
                     )
    expect(engine.evaluate("A", ["a", "b", "c"])).to eq([123, 111, 456])
  end

  it "should handle TIMEPART" do
    engine.parse defn("A:",
                      " p =?",
                      " h = p.hour()",
                      " m = p.min()",
                      " s = p.sec()",
                      " d = p.to_date()",
                      " e = p.to_date.to_s.to_date",
                     )
    p = Time.now
    params = {"p" => p}
    r = engine.evaluate("A", %w{h m s d e}, params)
    expect(r).to eq([p.hour, p.min, p.sec, p.to_date, p.to_date])

    # Non time argument should raise an error
    expect { engine.evaluate("A", ["m"], {"p" => 123}) }.to raise_error
  end

  it "should handle DATEPART" do
    engine.parse defn("A:",
                      " p =?",
                      " y = p.year()",
                      " d = p.day()",
                      " m = p.month()",
                     )
    p = Date.today
    expect(engine.evaluate("A", ["y", "d", "m"], {"p" => p})).
      to eq([p.year, p.day, p.month])

    # Non date argument should raise an error
    expect {
      engine.evaluate("A", ["y", "d", "m"], {"p" => 123})
    }.to raise_error
  end

  it "should handle FLATTEN" do
    x = [[1,2,[3]], 4, 5, [6]]
    engine.parse defn("A:",
                      " a = #{x}",
                      " b = a.flatten() + a.flatten(1)"
                     )
    expect(engine.evaluate("A", "b")).to eq(x.flatten + x.flatten(1))
  end

  it "should handle ZIP" do
    a = [1, 2]
    b = [4, 5, 6]
    c = [7, 8]
    engine.parse defn("A:",
                      " a = #{a}",
                      " b = #{b}",
                      " c = #{c}",
                      " d = a.zip(b) + a.zip(b, c)",
                     )
    expect(engine.evaluate("A", "d")).to eq a.zip(b) + a.zip(b, c)
  end

  it "should handle ERR" do
    engine.parse defn("A:",
                      " a = ERR('hello')",
                      " b = ERR('xx', 1, 2, 3)",
                     )
    expect { engine.evaluate("A", "a") }.to raise_error('hello')
    expect { engine.evaluate("A", "b") }.to raise_error("xx, 1, 2, 3")
  end

  it "should handle RUBY" do
    x = [[1, 2, [-3]], 4, 5, [6], -3, 4, 5, 0]
    engine.parse defn("A:",
                      " a = #{x}",
                      " b = a.flatten()",
                      " c = a.flatten(1)",
                      " d = b+c",
                      " dd = d.flatten()",
                      " e = dd.sort()",
                      " f = e.uniq()",
                      " g = e.length",
                      " gg = a.length()",
                      " l = a.member(5)",
                      " m = [a.member(5), a.member(55)]",
                      " n = {'a':1, 'b':2, 'c':3}.length()",
                      " o = 'hello'.length",
                     )
    expect(engine.evaluate("A", "c")).to eq(x.flatten(1))
    expect(engine.evaluate("A", "d")).to eq(x.flatten + x.flatten(1))
    dd = engine.evaluate("A", "dd")
    expect(engine.evaluate("A", "e")).to eq(dd.sort)
    expect(engine.evaluate("A", "f")).to eq(dd.sort.uniq)
    expect(engine.evaluate("A", "g")).to eq(dd.length)
    expect(engine.evaluate("A", "gg")).to eq(x.length)
    expect(engine.evaluate("A", "m")).to eq([x.member?(5), x.member?(55)])
    expect(engine.evaluate("A", "n")).to eq(3)
    expect(engine.evaluate("A", "o")).to eq(5)
  end

  it "should be able to call function on hash" do
    # FIXME: this is actually a Delorean design issue. How do
    # whitelisted functions interact with attrs? In this case, we
    # return nil since there is no Delorean 'length' attr in the hash.
    skip 'Delorean design issue to be resolved'

    engine.parse defn("A:",
                      " n = {}.length",
                      " m = {'length':100}.length",
                     )
    expect(engine.evaluate("A", "n")).to eq(0)
    expect(engine.evaluate("A", "m")).to eq(100)
  end

  it "should be able to call hash except" do
    engine.parse defn("A:",
                      " h = {'a': 1, 'b':2, 'c': 3}",
                      " e = h.except('a', 'c')",
                     )
    expect(engine.evaluate("A", "e")).to eq({"b"=>2})
  end

  it "should handle RUBY slice function" do
    x = [[1, 2, [-3]], 4, [5, 6], -3, 4, 5, 0]
    engine.parse defn("A:",
                      " a = #{x}",
                      " b = a.slice(0, 4)",
                     )
    expect(engine.evaluate("A", "b")).to eq(x.slice(0, 4))
  end

  it "should handle RUBY empty? function" do
    engine.parse defn("A:",
                      " a0 = []",
                      " b0 = {}",
                      " c0 = {-}",
                      " a1 = [1,2,3]",
                      " b1 = {'a': 1, 'b':2}",
                      " c1 = {1,2,3}",
                      " res = [a0.empty, b0.empty(), c0.empty, a1.empty, b1.empty(), c1.empty]",
                     )
    expect(engine.evaluate("A", "res")).
      to eq([true, true, true, false, false, false])
  end

  it "should handle BETWEEN" do
    engine.parse defn("A:",
                      " a = 1.23",
                      " b = [a.between(10,20), a.between(1,3)]",
                     )
    expect(engine.evaluate("A", "b")).to eq([false, true])
  end

  it "should handle MATCH" do
    engine.parse defn("A:",
                      " a = 'this is a test'.match('(.*)( is )(.*)')",
                      " b = [a[0], a[1], a[2], a[3], a[4]]",
                     )
    expect(engine.evaluate("A", "b")).
      to eq(["this is a test", "this", " is ", "a test", nil])
  end
end
| 30.745645 | 99 | 0.3784 |
# Version constant for the Sedatabi gem.
module Sedatabi
  # Current gem release, per Semantic Versioning. Frozen so the shared
  # constant string cannot be mutated by callers.
  VERSION = "1.0.0".freeze
end
| 10 | 19 | 0.675 |
# Version constant for the Material Jekyll theme gem.
module Material
  module Jekyll
    # Current gem release, per Semantic Versioning. Frozen so the shared
    # constant string cannot be mutated by callers.
    VERSION = "0.1.0".freeze
  end
end
| 10.666667 | 21 | 0.65625 |