hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
f843dd11ce0db515712eab205949f9411a5dd971 | 9,253 | module Grape
module Validations
##
# All validators must inherit from this class.
#
##
# All validators must inherit from this class.
#
class Validator
  attr_reader :attrs

  # @param attrs [Symbol, Array<Symbol>] parameter name(s) this validator applies to
  # @param options [Object] validator-specific options; a non-empty Hash here means
  #   unconsumed/unknown options were passed to the DSL
  # @param required [Boolean] whether the attribute(s) must be present
  # @param scope [ParamsScope] scope used to resolve the params to validate
  def initialize(attrs, options, required, scope)
    @attrs = Array(attrs)
    @required = required
    @scope = scope

    if options.is_a?(Hash) && !options.empty?
      # BUG FIX: UnknownOptions is a class under the Grape::Exceptions
      # namespace, so it must be referenced with '::'. The original
      # `Grape::Exceptions.UnknownOptions` would call a (nonexistent)
      # method on the Exceptions module and raise NoMethodError instead.
      raise Grape::Exceptions::UnknownOptions.new(options.keys)
    end
  end

  # Validates every configured attribute against the given request params.
  # Optional attributes are only validated when the key is actually present.
  def validate!(params)
    attributes = AttributesIterator.new(self, @scope, params)
    attributes.each do |resource_params, attr_name|
      if @required || resource_params.has_key?(attr_name)
        validate_param!(attr_name, resource_params)
      end
    end
  end

  # Yields every (resource_params, attr_name) pair to validate, normalizing
  # the scoped params to an Array so Hash and Array payloads are handled
  # uniformly.
  class AttributesIterator
    include Enumerable

    def initialize(validator, scope, params)
      @attrs = validator.attrs
      @params = scope.params(params)
      @params = (@params.is_a?(Array) ? @params : [@params])
    end

    def each
      @params.each do |resource_params|
        @attrs.each do |attr_name|
          yield resource_params, attr_name
        end
      end
    end
  end

  private

  # Converts a validator class name (e.g. Grape::Validations::PresenceValidator)
  # into the snake_case short name it is registered under (e.g. "presence").
  def self.convert_to_short_name(klass)
    ret = klass.name.gsub(/::/, '/')
      .gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
      .gsub(/([a-z\d])([A-Z])/, '\1_\2')
      .tr("-", "_")
      .downcase
    File.basename(ret, '_validator')
  end
end
##
# Base class for all validators taking only one param.
class SingleOptionValidator < Validator
# Stores the single option value (e.g. a regexp for `format`) in @option,
# then delegates the rest of the setup to Validator#initialize via bare
# `super` (which forwards all four arguments).
def initialize(attrs, options, required, scope)
@option = options
super
end
end
# We define Validator::inherited here so SingleOptionValidator
# will not be considered a validator.
class Validator
# Hook: auto-registers every concrete Validator subclass under its
# snake_case short name (see convert_to_short_name), making it resolvable
# by name in ParamsScope#validate.
def self.inherited(klass)
short_name = convert_to_short_name(klass)
Validations.register_validator(short_name, klass)
end
end
# Registry of all known validator classes, keyed by short name (String).
class << self
attr_accessor :validators
end
self.validators = {}
# Adds a validator class to the registry; called from Validator.inherited.
def self.register_validator(short_name, klass)
validators[short_name] = klass
end
class ParamsScope
attr_accessor :element, :parent
# Builds a params scope from the DSL block.
#
# opts keys (all optional unless noted):
#   :api      - the API class this scope documents/validates against (required)
#   :element  - the param name this nested scope lives under, nil for the root
#   :parent   - enclosing ParamsScope, nil for the root
#   :optional - whether validation is skipped when the group is absent/blank
#   :type     - container type of the scope (Hash or Array)
# The block is instance_eval'd so `requires`/`optional`/`group` calls inside
# it run against this scope; declared params are then pushed upward.
def initialize(opts, &block)
@element = opts[:element]
@parent = opts[:parent]
@api = opts[:api]
@optional = opts[:optional] || false
@type = opts[:type]
@declared_params = []
instance_eval(&block)
configure_declared_params
end
# Whether this scope's validators should run for the given request params.
# An optional scope whose params are entirely blank is skipped; otherwise the
# decision is delegated up the parent chain (the root always validates).
#
# Improvement: the original evaluated `params(parameters)` twice (once for
# respond_to?, once for all?); the resolved params are now computed once,
# and only when the scope is optional.
def should_validate?(parameters)
  if @optional
    scoped = params(parameters)
    return false if scoped.respond_to?(:all?) && scoped.all?(&:blank?)
  end
  return true if parent.nil?
  parent.should_validate?(parameters)
end
# Declares required parameters. With `using:` the attrs name a shared param
# definition hash (see require_required_and_optional_fields); with a block a
# nested scope is opened for the group.
def requires(*attrs, &block)
orig_attrs = attrs.clone
opts = attrs.last.is_a?(Hash) ? attrs.pop : nil
if opts && opts[:using]
require_required_and_optional_fields(attrs.first, opts)
else
validate_attributes(attrs, opts, &block)
block_given? ? new_scope(orig_attrs, &block) :
push_declared_params(attrs)
end
end
# Declares optional parameters. A block opens a nested optional scope; its
# type defaults to Array unless explicitly given.
def optional(*attrs, &block)
orig_attrs = attrs
validations = {}
validations.merge!(attrs.pop) if attrs.last.is_a?(Hash)
validations[:type] ||= Array if block_given?
validates(attrs, validations)
block_given? ? new_scope(orig_attrs, true, &block) :
push_declared_params(attrs)
end
# Legacy alias: `group` is just `requires` with a block.
def group(element, &block)
requires(element, &block)
end
# Resolves the request params that belong to this scope by walking down from
# the root scope and digging into @element at each level. Arrays of nested
# hashes are flattened one level; anything that is neither Hash nor Array
# resolves to an empty Hash.
def params(params)
  params = @parent.params(params) if @parent
  return params unless @element

  case params
  when Array
    params.flat_map { |el| el[@element] || {} }
  when Hash
    params[@element] || {}
  else
    {}
  end
end
# Includes one or more named params blocks previously registered on the API
# under settings[:named_params]. Raises a RuntimeError for unknown names.
def use(*names)
  named_params = @api.settings[:named_params] || {}
  names.each do |name|
    params_block = named_params.fetch(name) do
      raise "Params :#{name} not found!"
    end
    # BUG FIX: the original read `instance_eval(¶ms_block)` — mojibake of
    # "&para" + "ms_block" for the block-pass form. The proc must be passed
    # as a block with `&` so the DSL runs against this scope.
    instance_eval(&params_block)
  end
end
alias_method :use_scope, :use
alias_method :includes, :use
# Fully-qualified bracketed name for documentation, e.g. "user[address][zip]"
# for a param nested two scopes deep; top-level params are just stringified.
def full_name(name)
  if @parent
    "#{@parent.full_name(@element)}[#{name}]"
  else
    name.to_s
  end
end
protected
# Accumulates attribute names declared in this scope; on scope completion
# they are pushed up to the parent or into the API settings
# (see configure_declared_params).
def push_declared_params(attrs)
@declared_params.concat attrs
end
private
# Expands a `using:` hash of shared param definitions into individual
# requires/optional calls. With context == :all every key is required except
# those listed in :except; otherwise (:none) every key is optional except
# those listed in :except.
def require_required_and_optional_fields(context, opts)
if context == :all
optional_fields = Array(opts[:except])
required_fields = opts[:using].keys - optional_fields
else # context == :none
required_fields = Array(opts[:except])
optional_fields = opts[:using].keys - required_fields
end
required_fields.each do |field|
requires(field, opts[:using][field])
end
optional_fields.each do |field|
optional(field, opts[:using][field])
end
end
# Builds the validations hash for `requires`: presence is implied, caller
# options may extend/override it, and a block implies an Array container
# type unless one was given explicitly.
def validate_attributes(attrs, opts, &block)
  validations = opts ? { presence: true }.merge(opts) : { presence: true }
  validations[:type] ||= Array if block
  validates(attrs, validations)
end
# Opens a child ParamsScope for a nested group declaration. Only :type is an
# allowed option on the group; anything else raises ArgumentError.
# NOTE(review): relies on Hash#keys / Array#to_set — assumes 'set' is loaded
# elsewhere in the app; confirm.
def new_scope(attrs, optional = false, &block)
opts = attrs[1] || { type: Array }
raise ArgumentError unless opts.keys.to_set.subset? [:type].to_set
ParamsScope.new(api: @api, element: attrs.first, parent: self, optional: optional, type: opts[:type], &block)
end
# Pushes declared params to parent or settings
def configure_declared_params
if @parent
@parent.push_declared_params [element => @declared_params]
else
@api.settings.peek[:declared_params] ||= []
@api.settings[:declared_params].concat @declared_params
end
end
# Central entry point for attribute validation setup. Splits the validations
# hash into documentation attributes (desc, type, default, values, …) and
# actual validators, documents the attributes on the API, then queues the
# validators — presence first, coercion second, everything else after, so
# later validators see present, correctly-typed values.
def validates(attrs, validations)
doc_attrs = { required: validations.keys.include?(:presence) }
# special case (type = coerce)
validations[:coerce] = validations.delete(:type) if validations.key?(:type)
coerce_type = validations[:coerce]
doc_attrs[:type] = coerce_type.to_s if coerce_type
desc = validations.delete(:desc)
doc_attrs[:desc] = desc if desc
default = validations[:default]
doc_attrs[:default] = default if default
values = validations[:values]
doc_attrs[:values] = values if values
# :values may be a Proc; resolve it once for the consistency checks below.
values = (values.is_a?(Proc) ? values.call : values)
# default value should be present in values array, if both exist
if default && values && !values.include?(default)
raise Grape::Exceptions::IncompatibleOptionValues.new(:default, default, :values, values)
end
# type should be compatible with values array, if both exist
if coerce_type && values && values.any? { |v| !v.kind_of?(coerce_type) }
raise Grape::Exceptions::IncompatibleOptionValues.new(:type, coerce_type, :values, values)
end
doc_attrs[:documentation] = validations.delete(:documentation) if validations.key?(:documentation)
full_attrs = attrs.collect { |name| { name: name, full_name: full_name(name) } }
@api.document_attribute(full_attrs, doc_attrs)
# Validate for presence before any other validators
if validations.has_key?(:presence) && validations[:presence]
validate('presence', validations[:presence], attrs, doc_attrs)
validations.delete(:presence)
end
# Before we run the rest of the validators, lets handle
# whatever coercion so that we are working with correctly
# type casted values
if validations.has_key? :coerce
validate('coerce', validations[:coerce], attrs, doc_attrs)
validations.delete(:coerce)
end
validations.each do |type, options|
validate(type, options, attrs, doc_attrs)
end
end
# Instantiates the validator registered under `type` (by short name) and
# queues it on the API's current settings frame. Unknown validator names
# raise Grape::Exceptions::UnknownValidator.
def validate(type, options, attrs, doc_attrs)
  validator_class = Validations.validators[type.to_s]
  raise Grape::Exceptions::UnknownValidator.new(type) unless validator_class

  queue = (@api.settings.peek[:validations] ||= [])
  queue << validator_class.new(attrs, options, doc_attrs[:required], self)
end
end
# This module is mixed into the API Class.
module ClassMethods
# Clears any declared params and queued validations on the current settings
# frame (used when an endpoint's param definitions are rebuilt).
def reset_validations!
settings.peek[:declared_params] = []
settings.peek[:validations] = []
end
# DSL entry point: opens the root ParamsScope (a Hash container) for the API
# class and evaluates the params block inside it.
def params(&block)
ParamsScope.new(api: self, type: Hash, &block)
end
# Merges parameter documentation (`opts`) into @last_description under each
# attribute's full name, creating the nested hashes on first use so repeated
# calls accumulate documentation for the same param.
def document_attribute(names, opts)
  @last_description ||= {}
  params_doc = (@last_description[:params] ||= {})
  Array(names).each do |name|
    key = name[:full_name].to_s
    (params_doc[key] ||= {}).merge!(opts)
  end
end
end
end
end
# Load every validator implementation shipped alongside this file
# (validations/*.rb relative to this file's directory).
Dir.glob(File.expand_path('../validations/*.rb', __FILE__)) do |path|
  require path
end
| 29.944984 | 118 | 0.602399 |
612701344d26dd7faadd85879ece12ee72fe54c5 | 350 | FactoryBot.define do
factory :link_tracking do
# Random IPv4 source address for each created record.
ip_address { Faker::Internet.ip_v4_address }
# Variant associated with a database built from default values.
factory :link_tracking_database_association do
association :database, factory: :database_default_values
end
# Variant with a basic database and status 1 — presumably used by request
# specs (name suggests rspec); confirm with the spec suite.
factory :link_tracking_rspec do
association :database, factory: :database_basic, status: 1
end
end
end
| 25 | 65 | 0.728571 |
21cde1cfd7ceb7af18301ccc600dd6411c348488 | 1,201 | rbx = defined?(RUBY_ENGINE) && 'rbx' == RUBY_ENGINE
# True when `dep` (a Gem::Dependency) is already satisfiable by a locally
# installed gem, checking release gems first and then prereleases.
def already_installed(dep)
  [{ :domain => :local }, { :domain => :local, :prerelease => true }].any? do |opts|
    !Gem::DependencyInstaller.new(opts).find_gems_with_sources(dep).empty?
  end
end
# On Rubinius only: install the stdlib shims (rubysl-*) and racc that MRI
# ships built-in but rbx does not, skipping anything already present.
if rbx
require 'rubygems'
require 'rubygems/command.rb'
require 'rubygems/dependency.rb'
require 'rubygems/dependency_installer.rb'
begin
Gem::Command.build_args = ARGV
rescue NoMethodError
end
dep = [
Gem::Dependency.new("rubysl-bigdecimal", '~> 2.0'),
Gem::Dependency.new("rubysl-singleton", '~> 2.0'),
Gem::Dependency.new("racc", '~> 1.4')
].reject{|d| already_installed(d) }
begin
puts "Installing base gem"
inst = Gem::DependencyInstaller.new
dep.each {|d| inst.install d }
rescue
# Release install failed — retry once allowing prerelease versions; any
# failure there aborts the gem install with a non-zero exit.
inst = Gem::DependencyInstaller.new(:prerelease => true)
begin
dep.each {|d| inst.install d }
rescue Exception => e
puts e
puts e.backtrace.join "\n "
exit(1)
end
end unless dep.size == 0
end
# create dummy rakefile to indicate success
# Idiom fix: File.write opens, writes and closes in one call; the previous
# manual open/write/close leaked the handle if the write raised.
File.write(File.join(File.dirname(__FILE__), "Rakefile"), "task :default\n")
086d92b8851acbf11b8c330894dfc139833896b8 | 1,426 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "gapic/config"
require "gapic/config/method"
require "google/cloud/compute/v1/version"
require "google/cloud/compute/v1/service_attachments/credentials"
require "google/cloud/compute/v1/service_attachments/rest"
module Google
module Cloud
module Compute
module V1
# To load this service and instantiate a REST client:
#
# require "google/cloud/compute/v1/service_attachments"
# client = ::Google::Cloud::Compute::V1::ServiceAttachments::Rest::Client.new
#
# Namespace-only module; the REST client, credentials and helpers are
# pulled in by the requires above. Generated code — do not edit by hand.
module ServiceAttachments
end
end
end
end
end
helper_path = ::File.join __dir__, "service_attachments", "helpers.rb"
require "google/cloud/compute/v1/service_attachments/helpers" if ::File.file? helper_path
| 31.688889 | 89 | 0.732819 |
d5c5458ad9ce3116fec8e7ddfb11a3eb97a1bac6 | 6,361 | # typed: false
# frozen_string_literal: true
require "formula"
require "keg"
require "cli/parser"
require "cask/cmd"
require "cask/caskroom"
module Homebrew
module_function
# CLI option parser for `brew outdated`: output filters (--formula/--cask),
# verbosity switches and the JSON output format flag.
def outdated_args
Homebrew::CLI::Parser.new do
usage_banner <<~EOS
`outdated` [<options>] [<formula>|<cask>]

List installed casks and formulae that have an updated version available. By default, version
information is displayed in interactive shells, and suppressed otherwise.
EOS
switch "-q", "--quiet",
description: "List only the names of outdated kegs (takes precedence over `--verbose`)."
switch "-v", "--verbose",
description: "Include detailed version information."
switch "--formula",
description: "Only output outdated formulae."
switch "--cask",
description: "Only output outdated casks."
flag "--json",
description: "Print output in JSON format. There are two versions: v1 and v2. " \
"v1 is deprecated and is currently the default if no version is specified. " \
"v2 prints outdated formulae and casks. "
switch "--fetch-HEAD",
description: "Fetch the upstream repository to detect if the HEAD installation of the "\
"formula is outdated. Otherwise, the repository's HEAD will only be checked for "\
"updates when a new stable or development version has been released."
switch "--greedy",
description: "Print outdated casks with `auto_updates` or `version :latest`."
conflicts "--quiet", "--verbose", "--json"
conflicts "--formula", "--cask"
end
end
# Command entry point: collects outdated formulae and/or casks per the
# filter flags and prints them either as JSON (v1 deprecated, v2 current)
# or as plain text. Exits non-zero when named args were given and any of
# them is outdated.
def outdated
args = outdated_args.parse
case (j = json_version(args.json))
when :v1, :default
odeprecated "brew outdated --json#{j == :v1 ? "=v1" : ""}", "brew outdated --json=v2"
outdated = if args.formula? || !args.cask?
outdated_formulae args: args
else
outdated_casks args: args
end
puts JSON.generate(json_info(outdated, args: args))
when :v2
formulae, casks = if args.formula?
[outdated_formulae(args: args), []]
elsif args.cask?
[[], outdated_casks(args: args)]
else
outdated_formulae_casks args: args
end
json = {
"formulae" => json_info(formulae, args: args),
"casks" => json_info(casks, args: args),
}
puts JSON.generate(json)
outdated = formulae + casks
else
# No --json: plain-text output path.
outdated = if args.formula?
outdated_formulae args: args
elsif args.cask?
outdated_casks args: args
else
outdated_formulae_casks(args: args).flatten
end
print_outdated(outdated, args: args)
end
Homebrew.failed = args.named.present? && outdated.present?
end
# Prints one line per outdated item. For formulae in verbose mode this shows
# installed version(s) < current version (plus pin info); casks delegate to
# Cask#outdated_info.
def print_outdated(formulae_or_casks, args:)
formulae_or_casks.each do |formula_or_cask|
if formula_or_cask.is_a?(Formula)
f = formula_or_cask
if verbose?
outdated_kegs = f.outdated_kegs(fetch_head: args.fetch_HEAD?)
current_version = if f.alias_changed?
latest = f.latest_formula
"#{latest.name} (#{latest.pkg_version})"
elsif f.head? && outdated_kegs.any? { |k| k.version.to_s == f.pkg_version.to_s }
# There is a newer HEAD but the version number has not changed.
"latest HEAD"
else
f.pkg_version.to_s
end
outdated_versions = outdated_kegs.group_by { |keg| Formulary.from_keg(keg).full_name }
.sort_by { |full_name, _kegs| full_name }
.map do |full_name, kegs|
"#{full_name} (#{kegs.map(&:version).join(", ")})"
end.join(", ")
pinned_version = " [pinned at #{f.pinned_version}]" if f.pinned?
puts "#{outdated_versions} < #{current_version}#{pinned_version}"
else
puts f.full_installed_specified_name
end
else
c = formula_or_cask
puts c.outdated_info(args.greedy?, verbose?, false)
end
end
end
# Builds the JSON-serializable representation of each outdated item:
# formulae become a hash of name/installed/current/pin data, casks delegate
# to Cask#outdated_info with json=true.
def json_info(formulae_or_casks, args:)
formulae_or_casks.map do |formula_or_cask|
if formula_or_cask.is_a?(Formula)
f = formula_or_cask
outdated_versions = f.outdated_kegs(fetch_head: args.fetch_HEAD?).map(&:version)
current_version = if f.head? && outdated_versions.any? { |v| v.to_s == f.pkg_version.to_s }
"HEAD"
else
f.pkg_version.to_s
end
{ name: f.full_name,
installed_versions: outdated_versions.map(&:to_s),
current_version: current_version,
pinned: f.pinned?,
pinned_version: f.pinned_version }
else
c = formula_or_cask
c.outdated_info(args.greedy?, verbose?, true)
end
end
end
# Verbose output is on in interactive shells (or with --verbose via super),
# but --quiet always wins.
def verbose?
($stdout.tty? || super) && !quiet?
end
# Maps the --json flag value to an internal version symbol:
# nil -> nil (no JSON), true (bare --json) -> :default, "v1"/"v2" -> :v1/:v2.
# Any other value raises a UsageError.
def json_version(version)
  allowed = {
    nil => nil,
    true => :default,
    "v1" => :v1,
    "v2" => :v2,
  }
  raise UsageError, "invalid JSON version: #{version}" unless allowed.key?(version)

  allowed[version]
end
# Outdated formulae among the named args (or all installed), sorted.
def outdated_formulae(args:)
select_outdated((args.named.to_resolved_formulae.presence || Formula.installed), args: args).sort
end
# Outdated casks among the named args (or everything in the Caskroom).
def outdated_casks(args:)
if args.named.present?
select_outdated(args.named.to_casks, args: args)
else
select_outdated(Cask::Caskroom.casks(config: Cask::Config.from_args(args)), args: args)
end
end
# Resolves named args into [formulae, casks]; with no named args, checks all
# installed formulae and casks. Returns [sorted outdated formulae, outdated casks].
def outdated_formulae_casks(args:)
formulae, casks = args.named.to_resolved_formulae_to_casks
if formulae.blank? && casks.blank?
formulae = Formula.installed
casks = Cask::Caskroom.casks(config: Cask::Config.from_args(args))
end
[select_outdated(formulae, args: args).sort, select_outdated(casks, args: args)]
end
# Filters a mixed list down to outdated entries: formulae honour
# --fetch-HEAD, casks honour --greedy.
def select_outdated(formulae_or_casks, args:)
  formulae_or_casks.select do |item|
    case item
    when Formula
      item.outdated?(fetch_head: args.fetch_HEAD?)
    else
      item.outdated?(greedy: args.greedy?)
    end
  end
end
end
| 31.029268 | 108 | 0.612482 |
337b3444243cb819f59cfcc88f323a2cb05321f7 | 1,261 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sdk-core'
require 'aws-sigv4'
require_relative 'aws-sdk-directoryservice/types'
require_relative 'aws-sdk-directoryservice/client_api'
require_relative 'aws-sdk-directoryservice/client'
require_relative 'aws-sdk-directoryservice/errors'
require_relative 'aws-sdk-directoryservice/resource'
require_relative 'aws-sdk-directoryservice/customizations'
# This module provides support for AWS Directory Service. This module is available in the
# `aws-sdk-directoryservice` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from AWS Directory Service all
# extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::DirectoryService::Errors::ServiceError
# # rescues all service API errors
# end
#
# See {Errors} for more information.
#
# @service
module Aws::DirectoryService
# Version of the aws-sdk-directoryservice gem this code ships in.
GEM_VERSION = '1.12.0'
end
| 26.270833 | 89 | 0.760508 |
7957571fa39dfaf8cfb09248e5353aea2265ac01 | 1,046 | # frozen_string_literal: true
module Jekyll
module Diagrams
# Base class for diagram renderers: resolves per-diagram config, delegates
# SVG generation to a subclass hook, and wraps output (or an error report)
# in the standard container markup.
class BasicRenderer
  include Rendering

  class << self
    # Convenience: build a renderer and render in one call.
    def render(context, content, diagram = nil)
      new(context, content, diagram).render
    end
  end

  # @param context [Object] template context, used for config/error lookup
  # @param content [String] the diagram source code
  # @param diagram [String, nil] block name override; defaults to the class
  #   name with the "Renderer" suffix stripped and downcased
  #   (e.g. GraphvizRenderer -> "graphviz")
  def initialize(context, content, diagram = nil)
    @context = context
    @content = content
    @block_name = diagram || self.class.name.split('::').last
      .sub(/Renderer$/, '').downcase
  end

  # Renders the diagram to SVG wrapped in the container div; any rendering
  # error is reported inline via Utils.handle_error instead of raising.
  def render
    config = Utils.config_for(@context, @block_name)
    output = render_svg(@content, config)
    wrap_class(output)
  rescue StandardError => e
    wrap_class(Utils.handle_error(@context, e))
  end

  private

  # Subclasses must implement the actual SVG generation.
  def render_svg(_code, _config)
    raise NotImplementedError
  end

  # Wraps rendered output in the jekyll-diagrams container div.
  # BUG FIX: the heredoc previously contained `</div>"` — the stray double
  # quote was emitted verbatim into the generated HTML after the closing tag.
  def wrap_class(content)
    <<~CONTENT
      <div class='jekyll-diagrams diagrams #{@block_name}'>
        #{content}
      </div>
    CONTENT
  end
end
end
end
| 22.73913 | 67 | 0.567878 |
7ad1b8c827fcd6f54ce4a2813545fdf333f201c0 | 3,071 | #
# Copyright 2012-2014 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
name "chefdk"
default_version "local_source"
# For the specific super-special version "local_source", build the source from
# the local git checkout. This is what you'd want to occur by default if you
# just ran omnibus build locally.
version("local_source") do
source path: "#{project.files_path}/../..",
# Since we are using the local repo, we try to not copy any files
# that are generated in the process of bundle installing omnibus.
# If the install steps are well-behaved, this should not matter
# since we only perform bundle and gem installs from the
# omnibus cache source directory, but we do this regardless
# to maintain consistency between what a local build sees and
# what a github based build will see.
options: { exclude: [ "omnibus/vendor" ] }
end
# For any version other than "local_source", fetch from github.
if version != "local_source"
source git: "git://github.com/chef/chef-dk.git"
end
relative_path "chef-dk"
if windows?
dependency "ruby-windows"
dependency "ruby-windows-devkit"
else
dependency "libffi" if debian?
dependency "ruby"
end
dependency "rubygems"
dependency "bundler"
dependency "appbundler"
dependency "chef"
dependency "test-kitchen"
dependency "inspec"
dependency "kitchen-inspec"
dependency "kitchen-vagrant"
dependency "berkshelf"
dependency "chef-vault"
dependency "foodcritic"
dependency "ohai"
dependency "rubocop"
# This is a TK dependency but isn't declared in that software definition
# because it is an optional dependency but we want to give it to ChefDK users
dependency "winrm-transport"
dependency "openssl-customization"
dependency "knife-windows"
dependency "knife-spork"
dependency "fauxhai"
dependency "chefspec"
dependency "chef-provisioning"
dependency "chefdk-env-customization" if windows?
build do
env = with_standard_compiler_flags(with_embedded_path).merge(
# Rubocop pulls in nokogiri 1.5.11, so needs PKG_CONFIG_PATH and
# NOKOGIRI_USE_SYSTEM_LIBRARIES until rubocop stops doing that
"PKG_CONFIG_PATH" => "#{install_dir}/embedded/lib/pkgconfig",
"NOKOGIRI_USE_SYSTEM_LIBRARIES" => "true",
)
# Install gem dependencies, build the chef-dk gem from the local checkout,
# then install it into the embedded Ruby.
bundle "install", env: env
gem "build chef-dk.gemspec", env: env
gem "install chef-dk*.gem" \
" --no-ri --no-rdoc" \
" --verbose", env: env
# appbundle pins each binstub to the exact gem versions resolved above.
appbundle 'berkshelf'
appbundle 'chefdk'
appbundle 'chef-vault'
appbundle 'foodcritic'
appbundle 'rubocop'
appbundle 'test-kitchen'
end
| 31.989583 | 78 | 0.736894 |
2800cf41c91baacb036c693470516ea64a762183 | 1,734 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
# Specs for the non-destructive Array#reverse.
describe "Array#reverse" do
it "returns a new array with the elements in reverse order" do
[].reverse.should == []
[1, 3, 5, 2].reverse.should == [2, 5, 3, 1]
end
it "properly handles recursive arrays" do
empty = ArraySpecs.empty_recursive_array
empty.reverse.should == empty
array = ArraySpecs.recursive_array
array.reverse.should == [array, array, array, array, array, 3.0, 'two', 1]
end
# Behavior changed in 1.9.3: reverse on a subclass stopped returning a
# subclass instance, hence the two version-guarded expectations.
ruby_version_is "" ... "1.9.3" do
it "returns subclass instance on Array subclasses" do
ArraySpecs::MyArray[1, 2, 3].reverse.should be_an_instance_of(ArraySpecs::MyArray)
end
end
ruby_version_is "1.9.3" do
it "does not return subclass instance on Array subclasses" do
ArraySpecs::MyArray[1, 2, 3].reverse.should be_an_instance_of(Array)
end
end
end
# Specs for the destructive Array#reverse!.
describe "Array#reverse!" do
it "reverses the elements in place" do
a = [6, 3, 4, 2, 1]
a.reverse!.should equal(a)
a.should == [1, 2, 4, 3, 6]
[].reverse!.should == []
end
it "properly handles recursive arrays" do
empty = ArraySpecs.empty_recursive_array
empty.reverse!.should == [empty]
array = ArraySpecs.recursive_array
array.reverse!.should == [array, array, array, array, array, 3.0, 'two', 1]
end
# The frozen-array error class changed from TypeError (1.8) to
# RuntimeError (1.9).
ruby_version_is "" ... "1.9" do
it "raises a TypeError on a frozen array" do
lambda { ArraySpecs.frozen_array.reverse! }.should raise_error(TypeError)
end
end
ruby_version_is "1.9" do
it "raises a RuntimeError on a frozen array" do
lambda { ArraySpecs.frozen_array.reverse! }.should raise_error(RuntimeError)
end
end
end
| 29.389831 | 88 | 0.67474 |
8707308ea2aa349c2ce0799c35c89d21be8dcadd | 1,722 | =begin
#Selling Partner API for Retail Procurement Orders
#The Selling Partner API for Retail Procurement Orders provides programmatic access to vendor orders data.
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.26
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for AmzSpApi::VendorOrdersApiModel::Order
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
# Generated smoke specs for the Order model: instantiation plus one
# placeholder example per attribute (assertions left to be filled in).
describe 'Order' do
before do
# run before each test
@instance = AmzSpApi::VendorOrdersApiModel::Order.new
end
after do
# run after each test
end
describe 'test an instance of Order' do
it 'should create an instance of Order' do
expect(@instance).to be_instance_of(AmzSpApi::VendorOrdersApiModel::Order)
end
end
describe 'test attribute "purchase_order_number"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "purchase_order_state"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
# validator = Petstore::EnumTest::EnumAttributeValidator.new('String', ["New", "Acknowledged", "Closed"])
# validator.allowable_values.each do |value|
# expect { @instance.purchase_order_state = value }.not_to raise_error
# end
end
end
describe 'test attribute "order_details"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 29.689655 | 111 | 0.731707 |
03dcaa372fd3929148a4d8f88a9ed75d855f77a4 | 263 | # encoding: utf-8
class IntegrationWithFormsController < ApplicationController
# Lists tasks filtered by the ?archived=1 query param via a wice_grid-style
# grid named 'g'.
def index
  # Idiom fix: `== '1'` already yields a boolean; the former
  # `? true : false` ternary was redundant.
  @archived = params[:archived] == '1'
  @tasks_grid = initialize_grid(Task,
    conditions: { archived: @archived },
    name: 'g'
  )
end
end
| 21.916667 | 60 | 0.661597 |
acb112ba85c99c2901d7f21f452f90298913a545 | 117 | class User < ApplicationRecord
# Polymorphic notifications addressed to this user.
has_many :notifications, as: :recipient
# Hard-coded phone number — presumably a stub/demo value; confirm before
# relying on it in production code.
def phone_number
"8675309"
end
end
| 14.625 | 41 | 0.735043 |
036163bb475a243da26a42f1171c8225a52cde6a | 757 | module Aws
module Stubbing
# Builds stubbed response data for a client operation (used when a client is
# constructed with stubbed responses enabled).
class StubData
  # @param operation [Object] operation whose output shape drives the stub;
  #   assumed to respond to #output and #[] — confirm with callers.
  def initialize(operation)
    @rules = operation.output
    @pager = operation[:pager]
  end

  # Returns a stub of the operation's output: empty defaults, with paging
  # tokens removed and caller-supplied `data` merged over the top.
  def stub(data = {})
    stub = EmptyStub.new(@rules).stub
    remove_paging_tokens(stub)
    apply_data(data, stub)
    stub
  end

  private

  # Nils out the top-level members that act as pagination tokens so stubbed
  # responses never appear to have further pages.
  # Idiom fix: guard clause + each_key instead of nested `if` and `.keys.each`
  # (avoids materializing the key array).
  def remove_paging_tokens(stub)
    return unless @pager

    @pager.instance_variable_get("@tokens").each_key do |path|
      # Token paths look like "Member.sub"; split(/\b/) keeps the leading
      # identifier, which names the top-level member to clear.
      key = path.split(/\b/)[0]
      stub[key] = nil
    end
  end

  # Validates `data` against the output rules (required members not
  # enforced) and deep-applies it onto the stub.
  def apply_data(data, stub)
    ParamValidator.new(@rules, validate_required: false).validate!(data)
    DataApplicator.new(@rules).apply_data(data, stub)
  end
end
end
| 21.628571 | 75 | 0.573316 |
b953e31a4f6a457e18e82683dc96a2c48b0059f3 | 1,691 | require 'test_helper'
# Integration tests for the user profile edit flow: invalid submissions
# re-render the form, valid ones update the record, and unauthenticated
# visitors are friendly-forwarded back to the edit page after login.
class UsersEditTest < ActionDispatch::IntegrationTest
def setup
@user = users(:michael)
end
# Invalid name/email/password combination should re-render the edit form.
test "unsuccessful edit" do
log_in_as(@user)
get edit_user_path(@user)
assert_template 'users/edit'
patch user_path(@user),params: { user: {name: "",
email: "foo@imvalid",
password: "foo",
password_confirmation: "bar"}}
assert_template 'users/edit'
end
# Blank password fields mean "keep current password"; name/email update.
test "successful edit" do
log_in_as(@user)
get edit_user_path(@user)
assert_template 'users/edit'
name = "Foo Bar"
email = "foo@bar.com"
patch user_path(@user), params: { user: { name: name,
email: email,
password: "",
password_confirmation: "" } }
assert_not flash.empty?
assert_redirected_to @user
@user.reload
assert_equal name, @user.name
assert_equal email, @user.email
end
# Visiting edit before logging in should redirect to edit after login
# (friendly forwarding), then behave like a normal successful edit.
test "successful edit with friendly forwarding" do
get edit_user_path(@user)
log_in_as(@user)
assert_redirected_to edit_user_url(@user)
name = "Foo Bar"
email = "foo@bar.com"
patch user_path(@user), params: { user: { name: name,
email: email,
password: "",
password_confirmation: "" } }
assert_not flash.empty?
assert_redirected_to @user
@user.reload
assert_equal name, @user.name
assert_equal email, @user.email
end
end
| 31.90566 | 75 | 0.524542 |
87373ea64aed81b619c9179b4adbf51a6831cf00 | 75 | module CircuitBreaker
class CircuitBrokenError < StandardError ; end
end
| 18.75 | 48 | 0.826667 |
5d6043e03fc7deedc0e181eb46654f9536328592 | 374 | class Deck
VALUES = %w(2⃣ 3⃣ 4⃣ 5⃣ 6⃣ 7⃣ 8⃣ 9⃣ 🔟 👨 👰 🤴 🅰).freeze
SUITS = %w(♠ ♥ ♦ ♣).freeze
attr_reader :cards
# Builds a fresh 54-card deck: every VALUE x SUIT combination plus two
# jokers (Card.new(nil, nil, true) — assumes the third Card argument marks
# a joker; confirm against the Card class).
def initialize
@cards = []
VALUES.each do |value|
SUITS.each do |suit|
cards << Card.new(value, suit)
end
end
2.times { cards << Card.new(nil, nil, true) }
end
# Shuffles the deck in place and returns self so calls can be chained.
def shuffle
  tap { cards.shuffle! }
end
end
| 15.583333 | 55 | 0.529412 |
01995e28043a0afb2c232986d03fce127b5c240a | 61 | module PG
module Pglogical
VERSION = "0.1.0"
end
end
| 10.166667 | 21 | 0.639344 |
4afdf04016e8aeb50d15a6445ae7e75b538bad62 | 547 | module WhiteTail
module DSL
module Commands
# Scrapes a single HTML attribute off a located element and wraps the value
# in the configured node class.
class Attribute < Base
# :node_class and :attribute must be provided; :locator/:required are optional.
REQUIRED_OPTIONS = [:node_class, :attribute]
ALLOWED_OPTIONS = [:locator, :required]
# Locates the element in the execution context, reads options[:attribute]
# from it, and returns options[:node_class].new(value). Raises
# ValidationError when the attribute is missing but marked required.
def execute(execution_context)
Helpers.with_element(execution_context, options) do |element|
value = element[options[:attribute]]
raise ValidationError, "Attribute #{node_name} not found" if value.nil? && options[:required]
options[:node_class].new(value)
end
end
end
end
end
| 26.047619 | 105 | 0.625229 |
1dcd61de16aeb345f5c2f2f4ea289d39bb7f2a3d | 2,667 | require 'optparse'
module VagrantPlugins
module CommandBox
module Command
class Outdated < Vagrant.plugin("2", :command)
# Entry point for `vagrant box outdated`: parses options, then either checks
# every installed box (--global) or the box of each targeted machine.
def execute
options = {}
opts = OptionParser.new do |o|
o.banner = "Usage: vagrant box outdated [options]"
o.separator ""
o.separator "Checks if there is a new version available for the box"
o.separator "that are you are using. If you pass in the --global flag,"
o.separator "all boxes will be checked for updates."
o.separator ""
o.separator "Options:"
o.separator ""
o.on("--global", "Check all boxes installed.") do |g|
options[:global] = g
end
end
argv = parse_options(opts)
return if !argv
# If we're checking the boxes globally, then do that.
if options[:global]
outdated_global
return 0
end
# Per-machine path: run the box_outdated action middleware for each VM,
# forcing a refresh and printing the result.
with_target_vms(argv) do |machine|
@env.action_runner.run(Vagrant::Action.action_box_outdated, {
box_outdated_force: true,
box_outdated_refresh: true,
box_outdated_success_ui: true,
machine: machine,
})
end
end
# Checks every installed box (newest installed version of each name) against
# its metadata URL and reports up-to-date / outdated / unknown status.
def outdated_global
boxes = {}
# @env.boxes.all is ordered; reversing and keeping the first occurrence
# selects one (the newest seen) version per box name.
@env.boxes.all.reverse.each do |name, version, provider|
next if boxes[name]
boxes[name] = @env.boxes.find(name, provider, version)
end
boxes.values.each do |box|
# Boxes added without a metadata URL can't be checked for updates.
if !box.metadata_url
@env.ui.output(I18n.t(
"vagrant.box_outdated_no_metadata",
name: box.name))
next
end
md = nil
begin
md = box.load_metadata
rescue Vagrant::Errors::DownloaderError => e
@env.ui.error(I18n.t(
"vagrant.box_outdated_metadata_error",
name: box.name,
message: e.extra_data[:message]))
next
end
# Compare installed version to the newest version in the metadata.
current = Gem::Version.new(box.version)
latest = Gem::Version.new(md.versions.last)
if latest <= current
@env.ui.success(I18n.t(
"vagrant.box_up_to_date",
name: box.name,
version: box.version))
else
@env.ui.warn(I18n.t(
"vagrant.box_outdated",
name: box.name,
current: box.version,
latest: latest.to_s,))
end
end
end
end
end
end
| 29.633333 | 83 | 0.500187 |
26631482e900e0cc74c01ff6719a630be12fc9d2 | 233 | module HighLineTestHelpers
# Wires Imap::Backup::Setup's HighLine instance to a scripted IO pair:
# input always answers "q\n" (quit), output is captured in a StringIO.
# Returns [input, output] for assertions in specs.
def prepare_highline
@input = instance_double(IO, eof?: false, gets: "q\n")
@output = StringIO.new
Imap::Backup::Setup.highline = HighLine.new(@input, @output)
[@input, @output]
end
end
| 25.888889 | 64 | 0.682403 |
5dc454122a505bd74099c85e3068b19ff6eab7c2 | 1,776 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_04_01
module Models
#
# IPv6 Circuit Connection properties for global reach.
#
class Ipv6CircuitConnectionConfig
include MsRestAzure
# @return [String] /125 IP address space to carve out customer addresses
# for global reach.
attr_accessor :address_prefix
# @return [CircuitConnectionStatus] Express Route Circuit connection
# state. Possible values include: 'Connected', 'Connecting',
# 'Disconnected'
attr_accessor :circuit_connection_status
#
# Mapper for Ipv6CircuitConnectionConfig class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'Ipv6CircuitConnectionConfig',
type: {
name: 'Composite',
class_name: 'Ipv6CircuitConnectionConfig',
model_properties: {
address_prefix: {
client_side_validation: true,
required: false,
serialized_name: 'addressPrefix',
type: {
name: 'String'
}
},
circuit_connection_status: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'circuitConnectionStatus',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 28.645161 | 78 | 0.569257 |
1c4775b703f5b59bbe884702fe267e18f1ae9e78 | 993 | class Buildapp < Formula
desc "Creates executables with SBCL"
homepage "http://www.xach.com/lisp/buildapp/"
url "https://github.com/xach/buildapp/archive/release-1.5.4.tar.gz"
sha256 "8a3918d740f21fd46c18b08e066fec7525dad790b1355a1e3e5950f2d3ca4291"
head "https://github.com/xach/buildapp.git"
bottle do
sha256 "f854e3f08c1b6e361df0466ad13e4653a1630c367a8357bab3f1095915c28e58" => :yosemite
sha256 "11fbf1a1358580ce6558e5d3f5944b9e32af62d7338f806f350eda779d8715ee" => :mavericks
sha256 "e64ae1125b020eeede5ac83103cfcb30eaec8e960d103f6c3392a31465b590a4" => :mountain_lion
end
depends_on "sbcl"
def install
bin.mkpath
system "make", "install", "DESTDIR=#{prefix}"
end
test do
code = "(defun f (a) (declare (ignore a)) (write-line \"Hello, homebrew\"))"
system "#{bin}/buildapp", "--eval", code,
"--entry", "f",
"--output", "t"
assert_equal `./t`, "Hello, homebrew\n"
end
end
| 34.241379 | 95 | 0.683787 |
3983b5f2f485ef45a68fd7e0cae3af47bebb7a5e | 648 | Pod::Spec.new do |s|
s.name = "GVMusicPlayerController"
s.version = "0.2.0"
s.summary = "The power of AVPlayer with the simplicity of MPMusicPlayerController."
s.homepage = "https://github.com/gangverk/GVMusicPlayerController"
s.license = 'MIT'
s.author = { "Kevin Renskers" => "info@mixedcase.nl" }
s.source = { :git => "https://github.com/gangverk/GVMusicPlayerController.git", :tag => s.version.to_s }
s.platform = :ios, '4.0'
s.source_files = 'GVMusicPlayerController/*.{h,m}'
s.requires_arc = true
s.frameworks = 'CoreMedia', 'AudioToolbox', 'AVFoundation', 'MediaPlayer'
end
| 46.285714 | 112 | 0.648148 |
2800670782a6ed96e9cbc7540ea0cd6c87591b84 | 187 | require 'minitest/autorun'
# Minitest-chef-handler spec for template-cookbook's default recipe.
describe_recipe 'template-cookbook::default' do
  # Pull in the Chef-aware assertions and helpers (node, run_status, resources).
  include MiniTest::Chef::Assertions
  include MiniTest::Chef::Context
  include MiniTest::Chef::Resources
end
| 23.375 | 47 | 0.791444 |
1c4b079ad1cb05b1df58558945092aa883849f63 | 1,302 | require "resolv"
require File.expand_path( '../item', File.dirname(__FILE__) )
require File.expand_path( '../utils', File.dirname(__FILE__) )
module Simp; end
class Simp::Cli; end
module Simp::Cli::Config
  # Action item that strips ldap-related class entries from the host's
  # hieradata YAML file (hieradata/hosts/<fqdn>.yaml).
  class Item::RemoveLdapFromHiera < ActionItem
    attr_accessor :dir

    def initialize
      super
      @key = 'puppet::remove_ldap_from_hiera'
      @description = %Q{Removes any ldap classes from hieradata/hosts/puppet.your.domain.yaml (apply-only; noop).}
      @dir = "/etc/puppet/environments/simp/hieradata/hosts"
      @file = nil
    end

    # Rewrites <fqdn>.yaml in place, dropping any ldap class entries.
    # Returns true on success, false when the file does not exist.
    def apply
      success = true
      fqdn = @config_items.fetch( 'hostname' ).value
      file = File.join( @dir, "#{fqdn}.yaml")

      say_green 'Removing ldap classes from the <domain>.yaml file' if !@silent

      # File.exist? replaces File.exists?, which is deprecated and was
      # removed in Ruby 3.2.
      if File.exist?(file)
        # File.readlines closes the handle; the previous
        # File.open(file,'r').readlines leaked the file descriptor.
        lines = File.readlines(file)
        File.open(file, 'w') do |f|
          lines.each do |line|
            line.chomp!
            f.puts line if !strip_line?(line)
          end
        end
      else
        success = false
        say_yellow "WARNING: file not found: #{file}"
      end
      success
    end

    # true when the line is a hiera class-list entry ("- <name>") that
    # references an ldap/openldap class.
    def strip_line?( line )
      (line =~ /^\s*-\s+(([a-z_:'"]*::)*(open)*ldap|(open)*ldap[a-z_:'"]*)/m) ? true : false
    end
  end
end
| 27.125 | 114 | 0.586022 |
1173ee534357df9bd5c8386f07da1291bd2bf7f2 | 1,287 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20150515172507) do
  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"
  # Musical acts that can play at venues.
  create_table "bands", force: :cascade do |t|
    t.string "name"
    t.datetime "created_at"
    t.datetime "updated_at"
  end
  # Join table backing the many-to-many association between bands and venues.
  create_table "bands_venues", force: :cascade do |t|
    t.integer "band_id"
    t.integer "venue_id"
  end
  # Locations where bands perform.
  create_table "venues", force: :cascade do |t|
    t.string "name"
    t.datetime "created_at"
    t.datetime "updated_at"
  end
end
| 34.783784 | 86 | 0.748252 |
6a0ab1873f8cb2ed0043958673e3d5a539b6ab45 | 273 | module ActionClient
class ParseError < ActionClient::Error
attr_reader :body, :cause, :content_type
def initialize(cause, body, content_type)
super(cause.message)
@cause = cause
@content_type = content_type
@body = body
end
end
end
| 21 | 45 | 0.673993 |
1c804c8d5e32763db5055a54e9c983288d39c0b9 | 1,623 | class Libproxy < Formula
desc "Library that provides automatic proxy configuration management"
homepage "https://libproxy.github.io/libproxy/"
url "https://github.com/libproxy/libproxy/archive/0.4.15.tar.gz"
sha256 "18f58b0a0043b6881774187427ead158d310127fc46a1c668ad6d207fb28b4e0"
head "https://github.com/libproxy/libproxy.git"
bottle do
sha256 "e2ca77c5398273eb7fd3645eed6f2f393bb78d3cb8f1cbbe66530be6fdc2d92d" => :high_sierra
sha256 "2da6c1c16c4d821a03f3af0095e8c083650d8236b2a9a08cb5af1b2b235658a7" => :sierra
sha256 "2afb8712e1a562617d7ab8fcd1436290e83f65dd636e1927761d2e9e914879cc" => :el_capitan
sha256 "af63072e26e2dd635ff04988d1dbb68e4f83d966aad935a6071072fe22508f15" => :yosemite
sha256 "a44083432b4f382fb5d394e41b2d7ca8e7fff595b96a1c0d7a56194e1f99c864" => :x86_64_linux # glibc 2.19
end
depends_on "cmake" => :build
# Non-fatally fails to build against system Perl, so stick to Homebrew's here.
depends_on "perl" => :optional
depends_on "python" if MacOS.version <= :snow_leopard
# tries to install to system perl location
depends_on "perl" unless OS.mac?
def install
args = std_cmake_args + %W[
..
-DPYTHON2_SITEPKG_DIR=#{lib}/python2.7/site-packages
-DWITH_PYTHON3=OFF
]
if build.with? "perl"
args << "-DPX_PERL_ARCH=#{lib}/perl5/site_perl"
args << "-DPERL_LINK_LIBPERL=YES"
else
args << "-DWITH_PERL=OFF"
end
mkdir "build" do
system "cmake", *args
system "make", "install"
end
end
test do
assert_equal "direct://", pipe_output("#{bin}/proxy 127.0.0.1").chomp
end
end
| 33.8125 | 107 | 0.732594 |
5d957fb3d47c059a2e7025180d2189dae05f732c | 3,029 | #!/opt/puppetlabs/puppet/bin/ruby
require_relative '../lib/puppet/util/task_helper'
require 'json'
require 'puppet'
require 'openssl'
# require 'pry-remote'; binding.remote_pry
# class PowerstoreX509CertificateInstanceQueryTask
# Puppet task that queries a single PowerStore x509_certificate instance
# over the transport's REST API and returns the parsed response.
class PowerstoreX509CertificateInstanceQueryTask < TaskHelper
  # Entry point invoked by the task runner with the task arguments.
  #
  # Returns the parsed JSON body (re-keyed by id when the API returns an
  # Array), the raw body for document/text responses, or nil for an empty
  # body. Raises the response body string on any non-2xx HTTP status.
  def task(arg_hash)
    header_params = {}
    # Remove task name from arguments - should contain all necessary parameters for URI
    arg_hash.delete('_task')
    namevar = ''
    namevar = 'id' if namevar.empty?
    operation_verb = 'Get'
    operation_path = '/x509_certificate/%{id}'
    parent_consumes = 'application/json'
    # parent_produces = 'application/json'

    query_params, body_params, path_params = format_params(arg_hash)

    result = transport.call_op(path_params, query_params, header_params, body_params, operation_path, operation_verb, parent_consumes)

    raise result.body unless result.is_a? Net::HTTPSuccess
    return nil if result.body.nil?
    return result.body if result.to_hash['content-type'].include? 'document/text'
    body = JSON.parse(result.body)
    # Index array responses by the name variable for easier lookup.
    return body.map { |i| [i[namevar], i] }.to_h if body.is_a?(Array)
    body
  end

  # Builds a parameter descriptor: where the parameter lives
  # (path/query/body) and how it is named on each side.
  def op_param(name, location, paramalias, namesnake)
    { name: name, location: location, paramalias: paramalias, namesnake: namesnake }
  end

  # Splits the task arguments into [query_params, body_params, path_params].
  def format_params(key_values)
    query_params = {}
    body_params = {}
    path_params = {}

    # Convert stringified Ruby hashes ("{'a' => 'b'}") back into real hashes.
    key_values.each do |key, value|
      # BUG FIX: the original tested respond_to?(:include) — a method String
      # does not have — so this branch never ran; :include? is what's meant.
      next unless value.respond_to?(:include?) && value.include?('=>')
      Puppet.debug("Running hash from string on #{value}")
      # BUG FIX: tr! maps each character individually ("=>" became "::"),
      # which is not valid JSON; gsub! replaces the whole "=>" token.
      value.gsub!('=>', ':')
      value.gsub!("'", '"')
      key_values[key] = JSON.parse(value)
      Puppet.debug("Obtained hash #{key_values[key].inspect}")
    end

    # When a 'body' argument names a file on disk, load its content
    # (JSON verbatim, YAML converted to pretty-printed JSON).
    if key_values.key?('body')
      if File.file?(key_values['body'])
        body_params['file_content'] = if key_values['body'].include?('json')
                                        File.read(key_values['body'])
                                      else
                                        JSON.pretty_generate(YAML.load_file(key_values['body']))
                                      end
      end
    end

    op_params = [
      op_param('id', 'path', 'id', 'id'),
      op_param('query_string', 'query', 'query_string', 'query_string'),
    ]
    op_params.each do |i|
      location = i[:location]
      name = i[:name]
      # paramalias = i[:paramalias]
      name_snake = i[:namesnake]
      if location == 'query'
        query_params[name] = key_values[name_snake.to_sym] unless key_values[name_snake.to_sym].nil?
      elsif location == 'body'
        body_params[name] = key_values[name_snake.to_sym] unless key_values[name_snake.to_sym].nil?
      else
        path_params[name_snake.to_sym] = key_values[name_snake.to_sym] unless key_values[name_snake.to_sym].nil?
      end
    end
    [query_params, body_params, path_params]
  end

  if $PROGRAM_NAME == __FILE__
    PowerstoreX509CertificateInstanceQueryTask.run
  end
end
| 34.816092 | 134 | 0.648399 |
1a4013b7a55386b276dee208cf807eede6127524 | 1,442 | #-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2017 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
require 'scm/authorization_policy'
# Authorization policy for Subversion repositories served over HTTP(S).
# NOTE(review): the parent constant 'AuthoriziationPolicy' looks misspelled;
# confirm it matches the class defined in scm/authorization_policy.
class Scm::SubversionAuthorizationPolicy < Scm::AuthoriziationPolicy
  private

  # Subversion/WebDAV verbs that only read from the repository.
  READ_ONLY_METHODS = %w(GET PROPFIND REPORT OPTIONS).freeze

  # true when the request described by +params+ cannot modify the repository.
  def readonly_request?(params)
    READ_ONLY_METHODS.include?(params[:method])
  end
end
| 37.947368 | 91 | 0.763523 |
7920dc699686650ae8a2a52f139182fae84941d8 | 233 | class AddLocationToTours < ActiveRecord::Migration[4.2]
def change
add_column :tours, :latitude, :float, null: true
add_column :tours, :longitude, :float, null: true
add_index :tours, [:latitude, :longitude]
end
end
| 29.125 | 55 | 0.708155 |
261be9780f944cb3b9bd728d88304d1b207a5795 | 596 | module Erp::Products
class ProductsPart < ApplicationRecord
belongs_to :product
belongs_to :part, class_name: 'Erp::Products::Product'#, optional: true
def self.search(params)
query = self.all
query = query.where(product_id: params[:product_id])
return query
end
def part_name
part.nil? ? '' : part.name
end
def part_cost
part.nil? ? '' : part.cost
end
def part_code
part.nil? ? '' : part.code
end
def part_on_hand
part.nil? ? '' : part.on_hand
end
end
end | 22.074074 | 76 | 0.57047 |
38f105e88b2719d8a789b4746914e4df50cffb5c | 1,201 | module TestGame
module Jumpable
include TestGame::Fallable
def jump
@jump_held = true
if can_jump?
@jumping = true
@falling = false
@jumping_countdown = 0
@velocity_y = -320
end
end
def release_jump
@jump_held = false
end
def stop_jumping
@jumping = false
@jump_held = false
end
def update_jumping
if @jumping
@velocity_y -= 50 * Application.elapsed
@velocity_y = -@max_velocity_y if @velocity_y < -@max_velocity_y
@jumping_countdown += Application.elapsed
unless @jump_held
if @velocity_y < -189
@jumping = false
end
end
if @jumping_countdown > @jumping_cooldown
@jumping = false
@falling = true
@jump_held = false
@jumping_countdown = 0
end
end
end
def can_jump?
!(@jumping || @falling) && @velocity_y >= 0
end
# Falling Overrides
def update_falling
unless @jumping
super
end
end
def stop_falling(other)
unless @jumping
super
stop_jumping
end
end
end
end
| 17.661765 | 72 | 0.55204 |
79a3552bfb6a201e4cad332e20acebc85c98cd18 | 728 | # frozen_string_literal: true
RSpec.describe Conflow::Redis::FieldBuilder, redis: true do
  # Minimal stand-in exposing only the #key the builder requires.
  let(:test_class) { Struct.new(:key) }
  let(:instance) { test_class.new("test_key") }
  # Builder under test: defines a :params accessor backed by a HashField.
  let(:builder) { described_class.new(:params, Conflow::Redis::HashField) }
  describe "#call" do
    before { builder.call(test_class) }
    describe "getter" do
      subject { instance.params }
      # The generated reader returns a HashField scoped under the object key.
      it { is_expected.to be_a_kind_of(Conflow::Redis::HashField) }
      it { expect(subject.key).to eq "test_key:params" }
    end
    describe "setter" do
      subject { instance.params = { test: :param, success: true } }
      # Assignment persists to Redis; symbol values come back as strings.
      it { expect { subject }.to change { instance.params.to_h }.to(test: "param", success: true) }
    end
  end
end
| 28 | 99 | 0.657967 |
6a840b1317ea9ebcb4ca8158b54559cf3d04b180 | 1,465 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe CoinMarketPro::Endpoint::Base do
  let(:api_key) { 'CoinMarketCap-Api-Key' }
  let(:client) { CoinMarketPro::Client::Base.new(api_key: api_key) }
  # Endpoints are built from a client and its logger.
  let(:subject) { described_class.new(client: client, logger: client.logger) }
  # Private methods are exercised via #send.
  describe '#valid_params?' do
    it 'returns true when valid' do
      args = { id: 1 }
      expect(subject.send(:valid_params?, args)).to eq(true)
    end
    it 'raises error when invalid' do
      args = { foo: 1 }
      expect { subject.send(:valid_params?, args) }
        .to raise_error(ArgumentError, 'At least one "id" or "symbol" is required.')
    end
  end
  describe '#convert_params' do
    # Arrays are joined with commas and strings are stripped; numbers pass through.
    it 'returns standardized params' do
      params = { foo: [1, 2, 3], bar: ' 4,5,6 ', baz: 13.0 }
      expect(subject.send(:convert_params, params))
        .to eq(foo: '1,2,3', bar: '4,5,6', baz: 13.0)
    end
    it 'returns empty hash if blank' do
      expect(subject.send(:convert_params)).to eq({})
    end
  end
  describe '#standardize_value' do
    it 'returns formatted value (Array)' do
      expect(subject.send(:standardize_value, ['hi', 1])).to eq('hi,1')
    end
    it 'returns formatted value (String)' do
      expect(subject.send(:standardize_value, ' i haz crypto! ')).to eq('i haz crypto!')
    end
    it 'returns passthrough value (Numeric)' do
      expect(subject.send(:standardize_value, 133.54)).to eq(133.54)
    end
  end
end
| 29.3 | 88 | 0.640956 |
91142c2de0e1ef3a4651c744a13f8124084be4fb | 1,088 | require 'test_helper'
class MicropostsInterfaceTest < ActionDispatch::IntegrationTest
def setup
@user = users(:michael)
end
test "micropost interface" do
log_in_as(@user)
get root_path
assert_select 'div.pagination'
# Invalid submission
assert_no_difference 'Micropost.count' do
post microposts_path, params: { micropost: { content: "" } }
end
assert_select 'div#error_explanation'
# Valid submission
content = "This micropost really ties the room together"
assert_difference 'Micropost.count', 1 do
post microposts_path, params: { micropost: { content: content } }
end
assert_redirected_to root_url
follow_redirect!
assert_match content, response.body
# Delete post
assert_select 'a', text: 'delete'
first_micropost = @user.microposts.paginate(page: 1).first
assert_difference 'Micropost.count', -1 do
delete micropost_path(first_micropost)
end
# Visit different user (no delete links)
get user_path(users(:archer))
assert_select 'a', text: 'delete', count: 0
end
end | 31.085714 | 71 | 0.706801 |
261e54da6a81fb789b0f02f01f86ebfdddfb8b2c | 39,100 | # frozen_string_literal: true
require 'spec_helper'
describe API::Runners do
let_it_be(:admin) { create(:user, :admin) }
let_it_be(:user) { create(:user) }
let_it_be(:user2) { create(:user) }
let_it_be(:group_guest) { create(:user) }
let_it_be(:group_reporter) { create(:user) }
let_it_be(:group_developer) { create(:user) }
let_it_be(:group_maintainer) { create(:user) }
let_it_be(:project) { create(:project, creator_id: user.id) }
let_it_be(:project2) { create(:project, creator_id: user.id) }
let_it_be(:group) { create(:group).tap { |group| group.add_owner(user) } }
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:shared_runner, reload: true) { create(:ci_runner, :instance, description: 'Shared runner') }
let_it_be(:project_runner, reload: true) { create(:ci_runner, :project, description: 'Project runner', projects: [project]) }
let_it_be(:two_projects_runner) { create(:ci_runner, :project, description: 'Two projects runner', projects: [project, project2]) }
let_it_be(:group_runner_a) { create(:ci_runner, :group, description: 'Group runner A', groups: [group]) }
let_it_be(:group_runner_b) { create(:ci_runner, :group, description: 'Group runner B', groups: [subgroup]) }
before_all do
group.add_guest(group_guest)
group.add_reporter(group_reporter)
group.add_developer(group_developer)
group.add_maintainer(group_maintainer)
project.add_maintainer(user)
project2.add_maintainer(user)
project.add_reporter(user2)
end
describe 'GET /runners' do
context 'authorized user' do
it 'returns response status and headers' do
get api('/runners', user)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
end
it 'returns user available runners' do
get api('/runners', user)
expect(json_response).to match_array [
a_hash_including('description' => 'Project runner'),
a_hash_including('description' => 'Two projects runner'),
a_hash_including('description' => 'Group runner A'),
a_hash_including('description' => 'Group runner B')
]
end
it 'filters runners by scope' do
create(:ci_runner, :project, :inactive, description: 'Inactive project runner', projects: [project])
get api('/runners?scope=paused', user)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to match_array [
a_hash_including('description' => 'Inactive project runner')
]
end
it 'avoids filtering if scope is invalid' do
get api('/runners?scope=unknown', user)
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'filters runners by type' do
get api('/runners?type=project_type', user)
expect(json_response).to match_array [
a_hash_including('description' => 'Project runner'),
a_hash_including('description' => 'Two projects runner')
]
end
it 'does not filter by invalid type' do
get api('/runners?type=bogus', user)
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'filters runners by status' do
create(:ci_runner, :project, :inactive, description: 'Inactive project runner', projects: [project])
get api('/runners?status=paused', user)
expect(json_response).to match_array [
a_hash_including('description' => 'Inactive project runner')
]
end
it 'does not filter by invalid status' do
get api('/runners?status=bogus', user)
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'filters runners by tag_list' do
create(:ci_runner, :project, description: 'Runner tagged with tag1 and tag2', projects: [project], tag_list: %w[tag1 tag2])
create(:ci_runner, :project, description: 'Runner tagged with tag2', projects: [project], tag_list: ['tag2'])
get api('/runners?tag_list=tag1,tag2', user)
expect(json_response).to match_array [
a_hash_including('description' => 'Runner tagged with tag1 and tag2')
]
end
end
context 'unauthorized user' do
it 'does not return runners' do
get api('/runners')
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
describe 'GET /runners/all' do
context 'authorized user' do
context 'with admin privileges' do
it 'returns response status and headers' do
get api('/runners/all', admin)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
end
it 'returns all runners' do
get api('/runners/all', admin)
expect(json_response).to match_array [
a_hash_including('description' => 'Project runner'),
a_hash_including('description' => 'Two projects runner'),
a_hash_including('description' => 'Group runner A'),
a_hash_including('description' => 'Group runner B'),
a_hash_including('description' => 'Shared runner')
]
end
it 'filters runners by scope' do
get api('/runners/all?scope=shared', admin)
shared = json_response.all? { |r| r['is_shared'] }
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response[0]).to have_key('ip_address')
expect(shared).to be_truthy
end
it 'filters runners by scope' do
get api('/runners/all?scope=specific', admin)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to match_array [
a_hash_including('description' => 'Project runner'),
a_hash_including('description' => 'Two projects runner'),
a_hash_including('description' => 'Group runner A'),
a_hash_including('description' => 'Group runner B')
]
end
it 'avoids filtering if scope is invalid' do
get api('/runners/all?scope=unknown', admin)
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'filters runners by project type' do
get api('/runners/all?type=project_type', admin)
expect(json_response).to match_array [
a_hash_including('description' => 'Project runner'),
a_hash_including('description' => 'Two projects runner')
]
end
it 'filters runners by group type' do
get api('/runners/all?type=group_type', admin)
expect(json_response).to match_array [
a_hash_including('description' => 'Group runner A'),
a_hash_including('description' => 'Group runner B')
]
end
it 'does not filter by invalid type' do
get api('/runners/all?type=bogus', admin)
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'filters runners by status' do
create(:ci_runner, :project, :inactive, description: 'Inactive project runner', projects: [project])
get api('/runners/all?status=paused', admin)
expect(json_response).to match_array [
a_hash_including('description' => 'Inactive project runner')
]
end
it 'does not filter by invalid status' do
get api('/runners/all?status=bogus', admin)
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'filters runners by tag_list' do
create(:ci_runner, :project, description: 'Runner tagged with tag1 and tag2', projects: [project], tag_list: %w[tag1 tag2])
create(:ci_runner, :project, description: 'Runner tagged with tag2', projects: [project], tag_list: ['tag2'])
get api('/runners/all?tag_list=tag1,tag2', admin)
expect(json_response).to match_array [
a_hash_including('description' => 'Runner tagged with tag1 and tag2')
]
end
end
context 'without admin privileges' do
it 'does not return runners list' do
get api('/runners/all', user)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
context 'unauthorized user' do
it 'does not return runners' do
get api('/runners')
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
describe 'GET /runners/:id' do
context 'admin user' do
context 'when runner is shared' do
it "returns runner's details" do
get api("/runners/#{shared_runner.id}", admin)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['description']).to eq(shared_runner.description)
expect(json_response['maximum_timeout']).to be_nil
end
end
context 'when runner is not shared' do
context 'when unused runner is present' do
let!(:unused_project_runner) { create(:ci_runner, :project, :without_projects) }
it 'deletes unused runner' do
expect do
delete api("/runners/#{unused_project_runner.id}", admin)
expect(response).to have_gitlab_http_status(:no_content)
end.to change { Ci::Runner.project_type.count }.by(-1)
end
end
it "returns runner's details" do
get api("/runners/#{project_runner.id}", admin)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['description']).to eq(project_runner.description)
end
it "returns the project's details for a project runner" do
get api("/runners/#{project_runner.id}", admin)
expect(json_response['projects'].first['id']).to eq(project.id)
end
end
it 'returns 404 if runner does not exists' do
get api('/runners/0', admin)
expect(response).to have_gitlab_http_status(:not_found)
end
end
context "runner project's administrative user" do
context 'when runner is not shared' do
it "returns runner's details" do
get api("/runners/#{project_runner.id}", user)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['description']).to eq(project_runner.description)
end
end
context 'when runner is shared' do
it "returns runner's details" do
get api("/runners/#{shared_runner.id}", user)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['description']).to eq(shared_runner.description)
end
end
end
context 'other authorized user' do
it "does not return project runner's details" do
get api("/runners/#{project_runner.id}", user2)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'unauthorized user' do
it "does not return project runner's details" do
get api("/runners/#{project_runner.id}")
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
context 'FF hide_token_from_runners_api is enabled' do
before do
stub_feature_flags(hide_token_from_runners_api: true)
end
it "does not return runner's token" do
get api("/runners/#{shared_runner.id}", admin)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).not_to have_key('token')
end
end
context 'FF hide_token_from_runners_api is disabled' do
before do
stub_feature_flags(hide_token_from_runners_api: false)
end
it "returns runner's token" do
get api("/runners/#{shared_runner.id}", admin)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to have_key('token')
end
end
end
describe 'PUT /runners/:id' do
context 'admin user' do
# see https://gitlab.com/gitlab-org/gitlab-foss/issues/48625
context 'single parameter update' do
it 'runner description' do
description = shared_runner.description
update_runner(shared_runner.id, admin, description: "#{description}_updated")
expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.reload.description).to eq("#{description}_updated")
end
it 'runner active state' do
active = shared_runner.active
update_runner(shared_runner.id, admin, active: !active)
expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.reload.active).to eq(!active)
end
it 'runner tag list' do
update_runner(shared_runner.id, admin, tag_list: ['ruby2.1', 'pgsql', 'mysql'])
expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.reload.tag_list).to include('ruby2.1', 'pgsql', 'mysql')
end
it 'runner untagged flag' do
# Ensure tag list is non-empty before setting untagged to false.
update_runner(shared_runner.id, admin, tag_list: ['ruby2.1', 'pgsql', 'mysql'])
update_runner(shared_runner.id, admin, run_untagged: 'false')
expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.reload.run_untagged?).to be(false)
end
it 'runner unlocked flag' do
update_runner(shared_runner.id, admin, locked: 'true')
expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.reload.locked?).to be(true)
end
it 'runner access level' do
update_runner(shared_runner.id, admin, access_level: 'ref_protected')
expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.reload.ref_protected?).to be_truthy
end
it 'runner maximum timeout' do
update_runner(shared_runner.id, admin, maximum_timeout: 1234)
expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.reload.maximum_timeout).to eq(1234)
end
it 'fails with no parameters' do
put api("/runners/#{shared_runner.id}", admin)
shared_runner.reload
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'when runner is shared' do
it 'updates runner' do
description = shared_runner.description
active = shared_runner.active
runner_queue_value = shared_runner.ensure_runner_queue_value
update_runner(shared_runner.id, admin, description: "#{description}_updated",
active: !active,
tag_list: ['ruby2.1', 'pgsql', 'mysql'],
run_untagged: 'false',
locked: 'true',
access_level: 'ref_protected',
maximum_timeout: 1234)
shared_runner.reload
expect(response).to have_gitlab_http_status(:ok)
expect(shared_runner.description).to eq("#{description}_updated")
expect(shared_runner.active).to eq(!active)
expect(shared_runner.tag_list).to include('ruby2.1', 'pgsql', 'mysql')
expect(shared_runner.run_untagged?).to be(false)
expect(shared_runner.locked?).to be(true)
expect(shared_runner.ref_protected?).to be_truthy
expect(shared_runner.ensure_runner_queue_value)
.not_to eq(runner_queue_value)
expect(shared_runner.maximum_timeout).to eq(1234)
end
end
context 'when runner is not shared' do
it 'updates runner' do
description = project_runner.description
runner_queue_value = project_runner.ensure_runner_queue_value
update_runner(project_runner.id, admin, description: 'test')
project_runner.reload
expect(response).to have_gitlab_http_status(:ok)
expect(project_runner.description).to eq('test')
expect(project_runner.description).not_to eq(description)
expect(project_runner.ensure_runner_queue_value)
.not_to eq(runner_queue_value)
end
end
it 'returns 404 if runner does not exists' do
update_runner(0, admin, description: 'test')
expect(response).to have_gitlab_http_status(:not_found)
end
def update_runner(id, user, args)
put api("/runners/#{id}", user), params: args
end
end
context 'authorized user' do
context 'when runner is shared' do
it 'does not update runner' do
put api("/runners/#{shared_runner.id}", user), params: { description: 'test' }
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when runner is not shared' do
it 'does not update project runner without access to it' do
put api("/runners/#{project_runner.id}", user2), params: { description: 'test' }
expect(response).to have_gitlab_http_status(:forbidden)
end
it 'updates project runner with access to it' do
description = project_runner.description
put api("/runners/#{project_runner.id}", admin), params: { description: 'test' }
project_runner.reload
expect(response).to have_gitlab_http_status(:ok)
expect(project_runner.description).to eq('test')
expect(project_runner.description).not_to eq(description)
end
end
end
context 'unauthorized user' do
it 'does not delete project runner' do
put api("/runners/#{project_runner.id}")
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
describe 'DELETE /runners/:id' do
context 'admin user' do
context 'when runner is shared' do
it 'deletes runner' do
expect do
delete api("/runners/#{shared_runner.id}", admin)
expect(response).to have_gitlab_http_status(:no_content)
end.to change { Ci::Runner.instance_type.count }.by(-1)
end
it_behaves_like '412 response' do
let(:request) { api("/runners/#{shared_runner.id}", admin) }
end
end
context 'when runner is not shared' do
it 'deletes used project runner' do
expect do
delete api("/runners/#{project_runner.id}", admin)
expect(response).to have_gitlab_http_status(:no_content)
end.to change { Ci::Runner.project_type.count }.by(-1)
end
end
it 'returns 404 if runner does not exists' do
delete api('/runners/0', admin)
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'authorized user' do
context 'when runner is shared' do
it 'does not delete runner' do
delete api("/runners/#{shared_runner.id}", user)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when runner is not shared' do
it 'does not delete runner without access to it' do
delete api("/runners/#{project_runner.id}", user2)
expect(response).to have_gitlab_http_status(:forbidden)
end
it 'does not delete project runner with more than one associated project' do
delete api("/runners/#{two_projects_runner.id}", user)
expect(response).to have_gitlab_http_status(:forbidden)
end
it 'deletes project runner for one owned project' do
expect do
delete api("/runners/#{project_runner.id}", user)
expect(response).to have_gitlab_http_status(:no_content)
end.to change { Ci::Runner.project_type.count }.by(-1)
end
it 'does not delete group runner with guest access' do
delete api("/runners/#{group_runner_a.id}", group_guest)
expect(response).to have_gitlab_http_status(:forbidden)
end
it 'does not delete group runner with reporter access' do
delete api("/runners/#{group_runner_a.id}", group_reporter)
expect(response).to have_gitlab_http_status(:forbidden)
end
it 'does not delete group runner with developer access' do
delete api("/runners/#{group_runner_a.id}", group_developer)
expect(response).to have_gitlab_http_status(:forbidden)
end
it 'does not delete group runner with maintainer access' do
delete api("/runners/#{group_runner_a.id}", group_maintainer)
expect(response).to have_gitlab_http_status(:forbidden)
end
it 'deletes owned group runner with owner access' do
expect do
delete api("/runners/#{group_runner_a.id}", user)
expect(response).to have_gitlab_http_status(:no_content)
end.to change { Ci::Runner.group_type.count }.by(-1)
end
it 'deletes inherited group runner with owner access' do
expect do
delete api("/runners/#{group_runner_b.id}", user)
expect(response).to have_gitlab_http_status(:no_content)
end.to change { Ci::Runner.group_type.count }.by(-1)
end
it_behaves_like '412 response' do
let(:request) { api("/runners/#{project_runner.id}", user) }
end
end
end
context 'unauthorized user' do
it 'does not delete project runner' do
delete api("/runners/#{project_runner.id}")
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
describe 'GET /runners/:id/jobs' do
let_it_be(:job_1) { create(:ci_build) }
let_it_be(:job_2) { create(:ci_build, :running, runner: shared_runner, project: project) }
let_it_be(:job_3) { create(:ci_build, :failed, runner: shared_runner, project: project) }
let_it_be(:job_4) { create(:ci_build, :running, runner: project_runner, project: project) }
let_it_be(:job_5) { create(:ci_build, :failed, runner: project_runner, project: project) }
context 'admin user' do
context 'when runner exists' do
context 'when runner is shared' do
it 'return jobs' do
get api("/runners/#{shared_runner.id}/jobs", admin)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
expect(json_response.length).to eq(2)
end
end
context 'when runner is specific' do
it 'return jobs' do
get api("/runners/#{project_runner.id}/jobs", admin)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
expect(json_response.length).to eq(2)
end
end
context 'when valid status is provided' do
it 'return filtered jobs' do
get api("/runners/#{project_runner.id}/jobs?status=failed", admin)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
expect(json_response.length).to eq(1)
expect(json_response.first).to include('id' => job_5.id)
end
end
context 'when valid order_by is provided' do
context 'when sort order is not specified' do
it 'return jobs in descending order' do
get api("/runners/#{project_runner.id}/jobs?order_by=id", admin)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
expect(json_response.length).to eq(2)
expect(json_response.first).to include('id' => job_5.id)
end
end
context 'when sort order is specified as asc' do
it 'return jobs sorted in ascending order' do
get api("/runners/#{project_runner.id}/jobs?order_by=id&sort=asc", admin)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
expect(json_response.length).to eq(2)
expect(json_response.first).to include('id' => job_4.id)
end
end
end
context 'when invalid status is provided' do
it 'return 400' do
get api("/runners/#{project_runner.id}/jobs?status=non-existing", admin)
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'when invalid order_by is provided' do
it 'return 400' do
get api("/runners/#{project_runner.id}/jobs?order_by=non-existing", admin)
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'when invalid sort is provided' do
it 'return 400' do
get api("/runners/#{project_runner.id}/jobs?sort=non-existing", admin)
expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
context "when runner doesn't exist" do
it 'returns 404' do
get api('/runners/0/jobs', admin)
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
context "runner project's administrative user" do
context 'when runner exists' do
context 'when runner is shared' do
it 'returns 403' do
get api("/runners/#{shared_runner.id}/jobs", user)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when runner is specific' do
it 'return jobs' do
get api("/runners/#{project_runner.id}/jobs", user)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
expect(json_response.length).to eq(2)
end
end
context 'when valid status is provided' do
it 'return filtered jobs' do
get api("/runners/#{project_runner.id}/jobs?status=failed", user)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an(Array)
expect(json_response.length).to eq(1)
expect(json_response.first).to include('id' => job_5.id)
end
end
context 'when invalid status is provided' do
it 'return 400' do
get api("/runners/#{project_runner.id}/jobs?status=non-existing", user)
expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
context "when runner doesn't exist" do
it 'returns 404' do
get api('/runners/0/jobs', user)
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
context 'other authorized user' do
it 'does not return jobs' do
get api("/runners/#{project_runner.id}/jobs", user2)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'unauthorized user' do
it 'does not return jobs' do
get api("/runners/#{project_runner.id}/jobs")
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
# Shared by the project- and group-scoped runner list endpoints.
# The including context must define:
#   entity_type - 'projects' or 'groups'
#   entity      - the project or group record whose runners are listed
shared_examples_for 'unauthorized access to runners list' do
  context 'authorized user without maintainer privileges' do
    it "does not return group's runners" do
      get api("/#{entity_type}/#{entity.id}/runners", user2)

      expect(response).to have_gitlab_http_status(:forbidden)
    end
  end

  context 'unauthorized user' do
    it "does not return project's runners" do
      get api("/#{entity_type}/#{entity.id}/runners")

      expect(response).to have_gitlab_http_status(:unauthorized)
    end
  end
end
describe 'GET /projects/:id/runners' do
  context 'authorized user with maintainer privileges' do
    it 'returns response status and headers' do
      # Exercise the endpoint this describe block covers (previously this
      # queried /runners/all, which belongs to a different endpoint).
      get api("/projects/#{project.id}/runners", user)

      expect(response).to have_gitlab_http_status(:ok)
      expect(response).to include_pagination_headers
    end

    it 'returns all runners' do
      get api("/projects/#{project.id}/runners", user)

      expect(json_response).to match_array [
        a_hash_including('description' => 'Project runner'),
        a_hash_including('description' => 'Two projects runner'),
        a_hash_including('description' => 'Shared runner')
      ]
    end

    it 'filters runners by scope' do
      get api("/projects/#{project.id}/runners?scope=specific", user)

      expect(response).to have_gitlab_http_status(:ok)
      expect(response).to include_pagination_headers
      expect(json_response).to match_array [
        a_hash_including('description' => 'Project runner'),
        a_hash_including('description' => 'Two projects runner')
      ]
    end

    it 'avoids filtering if scope is invalid' do
      get api("/projects/#{project.id}/runners?scope=unknown", user)

      expect(response).to have_gitlab_http_status(:bad_request)
    end

    it 'filters runners by type' do
      get api("/projects/#{project.id}/runners?type=project_type", user)

      expect(json_response).to match_array [
        a_hash_including('description' => 'Project runner'),
        a_hash_including('description' => 'Two projects runner')
      ]
    end

    it 'does not filter by invalid type' do
      get api("/projects/#{project.id}/runners?type=bogus", user)

      expect(response).to have_gitlab_http_status(:bad_request)
    end

    it 'filters runners by status' do
      create(:ci_runner, :project, :inactive, description: 'Inactive project runner', projects: [project])

      get api("/projects/#{project.id}/runners?status=paused", user)

      expect(json_response).to match_array [
        a_hash_including('description' => 'Inactive project runner')
      ]
    end

    it 'does not filter by invalid status' do
      get api("/projects/#{project.id}/runners?status=bogus", user)

      expect(response).to have_gitlab_http_status(:bad_request)
    end

    it 'filters runners by tag_list' do
      create(:ci_runner, :project, description: 'Runner tagged with tag1 and tag2', projects: [project], tag_list: %w[tag1 tag2])
      create(:ci_runner, :project, description: 'Runner tagged with tag2', projects: [project], tag_list: ['tag2'])

      get api("/projects/#{project.id}/runners?tag_list=tag1,tag2", user)

      # Only the runner carrying BOTH requested tags matches.
      expect(json_response).to match_array [
        a_hash_including('description' => 'Runner tagged with tag1 and tag2')
      ]
    end
  end

  it_behaves_like 'unauthorized access to runners list' do
    let(:entity_type) { 'projects' }
    let(:entity) { project }
  end
end
describe 'GET /groups/:id/runners' do
  context 'authorized user with maintainer privileges' do
    it 'returns all runners' do
      get api("/groups/#{group.id}/runners", user)

      expect(json_response).to match_array([
        a_hash_including('description' => 'Group runner A')
      ])
    end

    context 'filter by type' do
      it 'returns record when valid and present' do
        get api("/groups/#{group.id}/runners?type=group_type", user)

        expect(json_response).to match_array([
          a_hash_including('description' => 'Group runner A')
        ])
      end

      it 'returns empty result when type does not match' do
        get api("/groups/#{group.id}/runners?type=project_type", user)

        expect(json_response).to be_empty
      end

      it 'does not filter by invalid type' do
        get api("/groups/#{group.id}/runners?type=bogus", user)

        expect(response).to have_gitlab_http_status(:bad_request)
      end
    end

    context 'filter runners by status' do
      it 'returns runners by valid status' do
        create(:ci_runner, :group, :inactive, description: 'Inactive group runner', groups: [group])

        get api("/groups/#{group.id}/runners?status=paused", user)

        expect(json_response).to match_array([
          a_hash_including('description' => 'Inactive group runner')
        ])
      end

      it 'does not filter by invalid status' do
        get api("/groups/#{group.id}/runners?status=bogus", user)

        expect(response).to have_gitlab_http_status(:bad_request)
      end
    end

    it 'filters runners by tag_list' do
      create(:ci_runner, :group, description: 'Runner tagged with tag1 and tag2', groups: [group], tag_list: %w[tag1 tag2])
      # Fixture tag_list now matches its description (was %w[tag1]), mirroring
      # the equivalent project-runner spec above; only the runner with BOTH
      # requested tags must match.
      create(:ci_runner, :group, description: 'Runner tagged with tag2', groups: [group], tag_list: %w[tag2])

      get api("/groups/#{group.id}/runners?tag_list=tag1,tag2", user)

      expect(json_response).to match_array([
        a_hash_including('description' => 'Runner tagged with tag1 and tag2')
      ])
    end
  end

  it_behaves_like 'unauthorized access to runners list' do
    let(:entity_type) { 'groups' }
    let(:entity) { group }
  end
end
describe 'POST /projects/:id/runners' do
  context 'authorized user' do
    let_it_be(:project_runner2) { create(:ci_runner, :project, projects: [project2]) }

    it 'enables specific runner' do
      expect do
        post api("/projects/#{project.id}/runners", user), params: { runner_id: project_runner2.id }
      end.to change { project.runners.count }.by(+1)
      expect(response).to have_gitlab_http_status(:created)
    end

    it 'avoids changes when enabling already enabled runner' do
      expect do
        post api("/projects/#{project.id}/runners", user), params: { runner_id: project_runner.id }
      end.to change { project.runners.count }.by(0)
      expect(response).to have_gitlab_http_status(:bad_request)
    end

    it 'does not enable locked runner' do
      # update! raises if locking the fixture fails, so the example cannot
      # pass for the wrong reason (update returns false silently).
      project_runner2.update!(locked: true)

      expect do
        post api("/projects/#{project.id}/runners", user), params: { runner_id: project_runner2.id }
      end.to change { project.runners.count }.by(0)

      expect(response).to have_gitlab_http_status(:forbidden)
    end

    it 'does not enable shared runner' do
      post api("/projects/#{project.id}/runners", user), params: { runner_id: shared_runner.id }

      expect(response).to have_gitlab_http_status(:forbidden)
    end

    it 'does not enable group runner' do
      post api("/projects/#{project.id}/runners", user), params: { runner_id: group_runner_a.id }

      expect(response).to have_gitlab_http_status(:forbidden)
    end

    context 'user is admin' do
      context 'when project runner is used' do
        let!(:new_project_runner) { create(:ci_runner, :project) }

        it 'enables any specific runner' do
          expect do
            post api("/projects/#{project.id}/runners", admin), params: { runner_id: new_project_runner.id }
          end.to change { project.runners.count }.by(+1)
          expect(response).to have_gitlab_http_status(:created)
        end
      end

      it 'enables an instance type runner' do
        expect do
          post api("/projects/#{project.id}/runners", admin), params: { runner_id: shared_runner.id }
        end.to change { project.runners.count }.by(1)

        # Assigning a project converts the runner out of instance type.
        expect(shared_runner.reload).not_to be_instance_type
        expect(response).to have_gitlab_http_status(:created)
      end
    end

    it 'raises an error when no runner_id param is provided' do
      post api("/projects/#{project.id}/runners", admin)

      expect(response).to have_gitlab_http_status(:bad_request)
    end
  end

  context 'user is not admin' do
    let!(:new_project_runner) { create(:ci_runner, :project) }

    it 'does not enable runner without access to' do
      post api("/projects/#{project.id}/runners", user), params: { runner_id: new_project_runner.id }

      expect(response).to have_gitlab_http_status(:forbidden)
    end
  end

  context 'authorized user without permissions' do
    it 'does not enable runner' do
      post api("/projects/#{project.id}/runners", user2)

      expect(response).to have_gitlab_http_status(:forbidden)
    end
  end

  context 'unauthorized user' do
    it 'does not enable runner' do
      post api("/projects/#{project.id}/runners")

      expect(response).to have_gitlab_http_status(:unauthorized)
    end
  end
end
describe 'DELETE /projects/:id/runners/:runner_id' do
  context 'authorized user' do
    context 'when runner have more than one associated projects' do
      it "disables project's runner" do
        expect do
          delete api("/projects/#{project.id}/runners/#{two_projects_runner.id}", user)

          expect(response).to have_gitlab_http_status(:no_content)
        end.to change { project.runners.count }.by(-1)
      end

      it_behaves_like '412 response' do
        let(:request) { api("/projects/#{project.id}/runners/#{two_projects_runner.id}", user) }
      end
    end

    context 'when runner have one associated projects' do
      it "does not disable project's runner" do
        # A runner attached to a single project cannot be disabled via this
        # endpoint; the spec pins both the unchanged count and the 403.
        expect do
          delete api("/projects/#{project.id}/runners/#{project_runner.id}", user)
        end.to change { project.runners.count }.by(0)

        expect(response).to have_gitlab_http_status(:forbidden)
      end
    end

    # Description typo fixed: "returns 404 is runner" -> "if runner".
    it 'returns 404 if runner is not found' do
      delete api("/projects/#{project.id}/runners/0", user)

      expect(response).to have_gitlab_http_status(:not_found)
    end
  end

  context 'authorized user without permissions' do
    it "does not disable project's runner" do
      delete api("/projects/#{project.id}/runners/#{project_runner.id}", user2)

      expect(response).to have_gitlab_http_status(:forbidden)
    end
  end

  context 'unauthorized user' do
    it "does not disable project's runner" do
      delete api("/projects/#{project.id}/runners/#{project_runner.id}")

      expect(response).to have_gitlab_http_status(:unauthorized)
    end
  end
end
end
| 34.817453 | 133 | 0.631355 |
edd9bea7bdab0f805abd2aeb0604ef9c5a2ad840 | 259 | require 'test_helper'
class ApplicationHelperTest < ActionView::TestCase
test "full title helper" do
  # assert_equal takes (expected, actual); the original arguments were
  # reversed, which makes failure messages report the values backwards.
  assert_equal "Ruby on Rails Tutorial Sample App", full_title
  assert_equal "Help | Ruby on Rails Tutorial Sample App", full_title("Help")
end
end | 28.777778 | 79 | 0.760618 |
1c61f7e56c07f5b279868e416ea5d305159c402c | 2,315 | # frozen_string_literal: true
require_relative '../../spec_helper'
require 'minitest/hooks/default'
require 'message_bus'
require 'message_bus/distributed_cache'
describe MessageBus::DistributedCache do
before :all do
@bus = MessageBus::Instance.new
@bus.configure(backend: :memory)
@manager = MessageBus::DistributedCache::Manager.new(@bus)
end
after :all do
@bus.destroy
end
def cache(name)
MessageBus::DistributedCache.new(name, manager: @manager)
end
let :cache_name do
SecureRandom.hex
end
before do
@cache1 = cache(cache_name)
@cache2 = cache(cache_name)
end
it 'supports arrays with hashes' do
c1 = cache("test1")
c2 = cache("test1")
c1["test"] = [{ test: :test }]
wait_for do
c2["test"] == [{ test: :test }]
end
expect(c2[:test]).must_equal([{ test: :test }])
end
it 'allows us to store Set' do
c1 = cache("test1")
c2 = cache("test1")
set = Set.new
set << 1
set << "b"
set << 92803984
set << 93739739873973
c1["cats"] = set
wait_for do
c2["cats"] == set
end
expect(c2["cats"]).must_equal(set)
set << 5
c2["cats"] = set
wait_for do
c1["cats"] == set
end
expect(c1["cats"]).must_equal(set)
end
it 'does not leak state across caches' do
  # Two caches sharing the name "test1" must replicate to each other...
  c2 = cache("test1")
  c3 = cache("test1")

  c2["hi"] = "hi"

  wait_for do
    c3["hi"] == "hi"
  end

  Thread.pass
  # ...while @cache1, created with a different (random) cache_name, must
  # never see the write.
  assert_nil(@cache1["hi"])
end
it 'allows coerces symbol keys to strings' do
@cache1[:key] = "test"
expect(@cache1["key"]).must_equal("test")
wait_for do
@cache2[:key] == "test"
end
expect(@cache2["key"]).must_equal("test")
end
it 'sets other caches' do
@cache1["test"] = "world"
wait_for do
@cache2["test"] == "world"
end
end
it 'deletes from other caches' do
@cache1["foo"] = "bar"
wait_for do
@cache2["foo"] == "bar"
end
@cache1.delete("foo")
assert_nil(@cache1["foo"])
wait_for do
@cache2["foo"] == nil
end
end
it 'clears cache on request' do
@cache1["foo"] = "bar"
wait_for do
@cache2["foo"] == "bar"
end
@cache1.clear
assert_nil(@cache1["foo"])
wait_for do
@cache2["boom"] == nil
end
end
end
| 17.537879 | 62 | 0.580994 |
e2e9ac200122cda4ce09d4da0e034345cbb5d49f | 359 | # Foo class is very foo
#
# Attributes:
# * id [integer, primary, not null] - primary key
# * body [text] - whatever
# * created_at [foobar] - creation time
# * random_number [integer] - We still haven't found what this actually means, WTF
# * title [octopus] - yellow
# * updated_at [datetime, not null] - last update time
class Foo < ActiveRecord::Base
end
| 29.916667 | 82 | 0.696379 |
016c56987a2e5a13a3cd27efe8324249a6981aeb | 342 | cask "bloomrpc" do
version "1.5.3"
sha256 "9ddc4d9b85af745a5f5e49a55e9dd4d57e09855aee721f77e2a3151744cbc3ad"
url "https://github.com/uw-labs/bloomrpc/releases/download/#{version}/BloomRPC-#{version}.dmg"
name "BloomRPC"
desc "GUI Client for GRPC Services"
homepage "https://github.com/uw-labs/bloomrpc"
app "BloomRPC.app"
end
| 28.5 | 96 | 0.75731 |
ac06cc9aa97170add90e6270aa3f9d7c021685e3 | 365 | require 'serverspec'
# Required by serverspec
set :backend, :exec
describe command('dig +short crackhackforum.com') do
its(:stdout) { should match /10\.0\.6\.66/ }
end
describe command('dig +short ogrean.com') do
its(:stdout) { should match /10\.0\.6\.66/ }
end
# NOTE(review): the sibling examples above use `dig +short`; this one shells
# out to `host`. The regex matches either tool's output, but confirm the
# inconsistency is intentional.
describe command('host register.science') do
  its(:stdout) { should match /10\.0\.6\.66/ }
end
| 22.8125 | 52 | 0.684932 |
b90cc38aaa8f351dfa78dd7d5290790f1f7007ce | 96 | json.extract! @message_route, :id, :to_exchange_id, :from_exchange_id, :created_at, :updated_at
| 48 | 95 | 0.791667 |
3873b021ab2f4a26d3e623beb79af36cda87887a | 350 | class CreateSubmissions < ActiveRecord::Migration[4.2]
# Creates the submissions table linking an exercise to a student
# registration, with a submission timestamp.
def change
  create_table :submissions do |t|
    # index: true creates the foreign-key indexes inline, replacing the
    # separate add_index calls (same resulting schema).
    t.belongs_to :exercise, index: true
    t.belongs_to :student_registration, index: true
    t.datetime :submitted_at

    t.timestamps null: false
  end
end
end
| 25 | 54 | 0.725714 |
f7cc44316596dd38828cfef339cf3843b5be2b1f | 1,846 | Merb.logger.info("Compiling routes...")
Merb::Router.prepare do
# RESTful routes
namespace :locomotive, :path => '', :name_prefix => nil do
resources :service_types, 'Locomotive::ServiceType'
resources :dependencies, 'Locomotive::Dependency'
resources :roles, 'Locomotive::Role'
resources :systems, 'Locomotive::System' do
resources :purposes, 'Locomotive::Purpose'
resources :platform_memberships, 'Locomotive::PlatformMembership'
end
resources :platforms, 'Locomotive::Platform' do
resources :platform_memberships, 'Locomotive::PlatformMembership'
end
#map.resources :contracts
resources :deployments, 'Locomotive::Deployment'
resources :services, 'Locomotive::Service' do
resources :releases, 'Locomotive::Release'
resources :deployments, 'Locomotive::Deployment'
end
resources :clients, 'Locomotive::Client' do
resources :contracts, 'Locomotive::Contract'
resources :deployments, 'Locomotive::Deployment'
end
resources :plugin_instances, 'Locomotive::PluginInstance'
resources :plugin_properties, 'Locomotive::PluginProperty'
resources :plugins, 'Locomotive::Plugin'
end
# slice(:locomotive, :name_prefix => nil, :path_prefix => '')
# Adds the required routes for merb-auth using the password slice
slice(:merb_auth_slice_password, :name_prefix => nil, :path_prefix => "")
# This is the default route for /:controller/:action/:id
# This is fine for most cases. If you're heavily using resource-based
# routes, you may want to comment/remove this line to prevent
# clients from calling your create or destroy actions with a GET
# default_routes
# Change this for your home page to be available at /
match('/').to(:controller => 'locomotive/administration_dashboard', :action =>'index')
end
| 31.827586 | 88 | 0.715601 |
08fdf00c421b0e5b8855f56a333c3f8c49a9f3e2 | 560 | require_relative 'boot'
require 'rails/all'
Bundler.require(*Rails.groups)
require "tax_jp"
module Dummy
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
end
end
| 28 | 82 | 0.764286 |
bbe43b2dd37edfe22ab20024719782e52eea7c0a | 3,751 | #
# Codec to provide for MySQL string support
# http://mirror.yandex.ru/mirrors/ftp.mysql.com/doc/refman/5.0/en/string-syntax.html for details
module Owasp
module Esapi
module Codec
class MySQLCodec < BaseCodec
  MYSQL_MODE = 0 # MySQL standard mode: backslash escape sequences
  ANSI_MODE = 1  # ANSI mode: only the apostrophe is special, escaped by doubling

  # Create a MySQL codec.
  #
  # mode selects the escaping dialect and must be either MYSQL_MODE or
  # ANSI_MODE (defaults to MYSQL_MODE).
  #
  # Raises RangeError for any other value.
  def initialize(mode = 0)
    if mode < MYSQL_MODE || mode > ANSI_MODE
      # Include the offending value so callers can see what was passed.
      raise RangeError.new("mode must be MYSQL_MODE (#{MYSQL_MODE}) or ANSI_MODE (#{ANSI_MODE}), got #{mode.inspect}")
    end
    @mode = mode
  end

  # Returns the quote-encoded form of a single *character*.
  # Characters in +immune+ pass through unchanged, as do characters with no
  # hex representation (per BaseCodec#hex).
  def encode_char(immune, input)
    return input if immune.include?(input)
    hex = hex(input)
    return input if hex.nil?
    return to_ansi(input) if @mode == ANSI_MODE
    return to_mysql(input) if @mode == MYSQL_MODE
  end

  # Returns the decoded version of the character starting at index, or
  # nil if no decoding is possible.
  #
  # Formats all are legal (case sensitive)
  # In ANSI_MODE '' decodes to '
  # In MYSQL_MODE \x decodes to x (or a small list of specials)
  def decode_char(input)
    return from_ansi(input) if @mode == ANSI_MODE
    return from_mysql(input) if @mode == MYSQL_MODE
  end

  # Encode for ANSI mode: ' becomes '' ; everything else is unchanged.
  def to_ansi(input) #:nodoc:
    return "\'\'" if input == "\'"
    input
  end

  # Encode for MySQL standard mode (backslash escapes, mirroring the
  # NO_BACKSLASH_ESCAPES-off escape table).
  def to_mysql(input) # :nodoc:
    c = input.ord
    return "\\0" if c == 0x00
    return "\\b" if c == 0x08
    return "\\t" if c == 0x09
    return "\\n" if c == 0x0a
    return "\\r" if c == 0x0d
    return "\\Z" if c == 0x1a
    return "\\\"" if c == 0x22
    return "\\%" if c == 0x25
    return "\\'" if c == 0x27
    return "\\\\" if c == 0x5c
    return "\\_" if c == 0x5f
    "\\#{input}"
  end

  # Decode a char with ANSI-only compliance (doubled apostrophe).
  # Returns nil and resets the input unless the next two characters are
  # both apostrophes.
  def from_ansi(input) # :nodoc:
    input.mark
    first = input.next
    # check first char
    if first.nil?
      input.reset
      return nil
    end
    unless first == "\'"
      input.reset
      return nil
    end
    # check second char
    second = input.next
    if second.nil?
      input.reset
      return nil
    end
    # if second isnt an encoded char return nil
    unless second == "\'"
      input.reset
      return nil
    end
    "\'"
  end

  # Decode a char using MySQL standard-mode rules — the inverse of
  # #to_mysql. Returns nil and resets the input when the next characters do
  # not form a backslash escape sequence.
  def from_mysql(input) # :nodoc:
    input.mark
    # check first
    first = input.next
    if first.nil?
      input.reset
      return nil
    end
    # BUGFIX: only a backslash introduces an escape sequence; previously any
    # two characters were consumed and the second returned, mangling plain
    # text (cf. from_ansi, which checks its first character).
    unless first == "\\"
      input.reset
      return nil
    end
    # check second
    second = input.next
    if second.nil?
      input.reset
      return nil
    end
    return 0x00.chr if second == "0"
    return 0x08.chr if second == "b"
    # BUGFIX: \t must decode to TAB (0x09); it previously returned 0x08,
    # breaking the round trip with #to_mysql.
    return 0x09.chr if second == "t"
    return 0x0a.chr if second == "n"
    return 0x0d.chr if second == "r"
    # BUGFIX: #to_mysql emits a capital "\\Z" for 0x1a (MySQL's ctrl-Z
    # escape), so decoding must match "Z" (was lowercase "z").
    return 0x1a.chr if second == "Z"
    return 0x22.chr if second == "\""
    return 0x25.chr if second == "%"
    return 0x27.chr if second == "\'"
    return 0x5c.chr if second == "\\"
    return 0x5f.chr if second == "_"
    # not a special escape: \x decodes to x
    second
  end
end
end
end
end
| 28.416667 | 96 | 0.506798 |
79521660c522265b70451724a6da640c7e88e4d3 | 300 | class CreateAddresses < ActiveRecord::Migration[5.0]
def change
create_table :addresses do |t|
t.string :loc
t.string :label
t.text :memo
t.float :balance
t.float :total_rec
t.float :total_sent
t.float :total_fees
t.timestamps
end
end
end
| 18.75 | 52 | 0.623333 |
1dec7eff673198452447175181061d70f2939043 | 304 | # frozen_string_literal: true
module FedaPay
module APIOperations
module Delete
# Issues a DELETE against this resource's URL and refreshes the current
# object from the API response.
#
# params - request parameters forwarded as-is
# opts   - per-request options, normalized before use
def delete(params = {}, opts = {})
  normalized = Util.normalize_opts(opts)
  response, final_opts = request(:delete, resource_url, params, normalized)
  initialize_from(response.data, final_opts)
end
end
end
end
| 21.714286 | 65 | 0.644737 |
61b401350306ab71c4b98a4303c0d8c17a192140 | 155 | require 'spec_helper'
RSpec.describe Emittance::Sidekiq do
it 'has a version number' do
expect(Emittance::Sidekiq::VERSION).not_to be nil
end
end
| 19.375 | 53 | 0.748387 |
5d0eaa70d62176b5f714b791c6a00eb72ee195c6 | 1,069 | module OverrideOrder
def create_proposed_shipments
shipments = super
create_handling_charge! if needs_handling_charge?
shipments
end
end
Spree::Order.class_eval do
def display_handling_total
Spree::Money.new(handling_total, { currency: currency })
end
# Override if you want to create custom situations for applying the handling charge
def needs_handling_charge?
true
end
# Creates a adjustments representing handling charges, if applicable.
# Following 'create_tax_charge!' convention.
# Adds a "Handling" adjustment to each shipment that wants one, using the
# stock location's calculator to price it. Named with a bang to follow the
# create_tax_charge! convention (create! raises on failure).
def create_handling_charge!
  shipments.each do |shipment|
    # Only shipments that opt in AND whose stock location actually has a
    # calculator configured are charged.
    if shipment.needs_handling_charge? && shipment.stock_location.calculator
      amount = shipment.stock_location.calculator.compute_shipment(shipment)
      # Skip zero amounts so no no-op adjustments are created.
      unless amount == 0
        shipment.adjustments.create!({
          source: shipment.stock_location,
          adjustable: shipment,
          amount: amount,
          order: shipment.order,
          label: "Handling"
        })
      end
    end
  end
end
prepend OverrideOrder
end
| 25.452381 | 85 | 0.695042 |
4a3914531a7c2c64c39af11a8873ce14b71d3334 | 1,178 | platform_is_not :windows do
require File.expand_path('../../../spec_helper', __FILE__)
require 'syslog'
describe "Syslog.options" do
platform_is_not :windows do
before :each do
Syslog.opened?.should be_false
end
after :each do
Syslog.opened?.should be_false
end
it "returns the logging options" do
Syslog.open("rubyspec", Syslog::LOG_PID)
Syslog.options.should == Syslog::LOG_PID
Syslog.close
end
it "returns nil when the log is closed" do
Syslog.opened?.should be_false
Syslog.options.should == nil
end
it "defaults to LOG_PID | LOG_CONS" do
Syslog.open
Syslog.options.should == Syslog::LOG_PID | Syslog::LOG_CONS
Syslog.close
end
it "resets after each open call" do
Syslog.open
Syslog.options.should == Syslog::LOG_PID | Syslog::LOG_CONS
Syslog.open!("rubyspec", Syslog::LOG_PID)
Syslog.options.should == Syslog::LOG_PID
Syslog.close
Syslog.open
Syslog.options.should == Syslog::LOG_PID | Syslog::LOG_CONS
Syslog.close
end
end
end
end
| 24.541667 | 67 | 0.618846 |
33a59fa767435111b1191e5730c18cbe72bc87cb | 151 | class ChangePartnerIdToUserIdReservations < ActiveRecord::Migration[4.2]
def change
rename_column :reservations, :partner_id, :user_id
end
end
| 25.166667 | 72 | 0.794702 |
62d4efee27d9c7b81cd34d96eb1432fc3248df3e | 362 | require "gerencianet"
require_relative "../../credentials"
options = {
client_id: CREDENTIALS::CLIENT_ID,
client_secret: CREDENTIALS::CLIENT_SECRET,
sandbox: CREDENTIALS::SANDBOX
}
params = {
id: 1000
}
body = {
email: 'oldbuck@gerencianet.com.br'
}
gerencianet = Gerencianet.new(options)
puts gerencianet.resend_billet(params: params, body: body)
| 18.1 | 58 | 0.737569 |
bf53c61601b9c80ad4b613f276ed15cc6b813901 | 1,941 | # frozen_string_literal: true
# This returns an app descriptor for use with Jira in development mode
# For the Atlassian Marketplace, a static copy of this JSON is uploaded to the marketplace
# https://developer.atlassian.com/cloud/jira/platform/app-descriptor/
# Serves the Atlassian Connect app descriptor that Jira Cloud fetches when
# the GitLab app is installed (see the file-header comment above).
class JiraConnect::AppDescriptorController < JiraConnect::ApplicationController
  # The descriptor must be publicly fetchable by Jira, so no JWT is required.
  skip_before_action :verify_atlassian_jwt!

  # Renders the app descriptor JSON consumed by Jira Cloud.
  def show
    render json: {
      name: Atlassian::JiraConnect.app_name,
      description: 'Integrate commits, branches and merge requests from GitLab into Jira',
      key: Atlassian::JiraConnect.app_key,
      # Jira requires the app's base URL to be HTTPS.
      baseUrl: jira_connect_base_url(protocol: 'https'),
      # Webhooks Jira calls on app install/uninstall; paths must be
      # relative to baseUrl, hence relative_to_base_path.
      lifecycle: {
        installed: relative_to_base_path(jira_connect_events_installed_path),
        uninstalled: relative_to_base_path(jira_connect_events_uninstalled_path)
      },
      vendor: {
        name: 'GitLab',
        url: 'https://gitlab.com'
      },
      links: {
        documentation: help_page_url('integration/jira_development_panel', anchor: 'gitlabcom-1')
      },
      authentication: {
        type: 'jwt'
      },
      scopes: %w(READ WRITE DELETE),
      apiVersion: 1,
      modules: {
        # Registers GitLab as a development tool so branches, commits and
        # merge requests appear in Jira's development panel.
        jiraDevelopmentTool: {
          key: 'gitlab-development-tool',
          application: {
            value: 'GitLab'
          },
          name: {
            value: 'GitLab'
          },
          url: 'https://gitlab.com',
          logoUrl: view_context.image_url('gitlab_logo.png'),
          capabilities: %w(branch commit pull_request)
        },
        # Configuration page shown to the Jira admin right after install.
        postInstallPage: {
          key: 'gitlab-configuration',
          name: {
            value: 'GitLab Configuration'
          },
          url: relative_to_base_path(jira_connect_subscriptions_path)
        }
      },
      apiMigrations: {
        gdpr: true
      }
    }
  end

  private

  # Strips the JiraConnect engine's base path prefix from a generated route,
  # because Jira resolves descriptor URLs relative to baseUrl.
  def relative_to_base_path(full_path)
    full_path.sub(/^#{jira_connect_base_path}/, '')
  end
end
| 29.861538 | 97 | 0.630603 |
e21f8d0ce7fa54ff67b44f2ae6bf0d3adf455011 | 356 | cask 'freedom' do
  version '2.2.5'
  sha256 'c10d7a8a95387237a30eeba889f9ee0570efc75da783dd117704c9f1e635929e'

  # Version is interpolated into the download URL; the appcast is the
  # vendor's Sparkle feed used to detect new releases.
  url "https://cdn.freedom.to/installers/updates/mac/#{version}/Freedom.zip"
  appcast 'https://cdn.freedom.to/installers/updates/mac/Appcast.xml'
  name 'Freedom'
  homepage 'https://freedom.to/'

  # The app updates itself, so brew skips upgrade handling.
  auto_updates true

  app 'Freedom.app'
end
| 25.428571 | 76 | 0.755618 |
ab4078f5de06af4392f5945d764821bd97ecdb51 | 137 | # Be sure to restart your server when you modify this file.
# Use the cookie-based session store with a Spree-specific cookie key.
Rails.application.config.session_store :cookie_store, key: '_spree_session'
| 34.25 | 75 | 0.80292 |
013fd403292519900ccddebed9267c9d44907101 | 965 | module Gorilla
module Middleware
# Faraday middleware that signs every outgoing request with a short-lived
# JWT carried in the Authorization header:
#
#   Authorization: Signature <key> <token>
#
# The token encodes the request method and path, so a captured header
# cannot be replayed against a different endpoint, and it expires after
# the configured token duration.
class SignatureAuth < Faraday::Middleware
  SIGNATURE_METHOD = 'Signature'.freeze
  SIGNATURE_ALGO = 'HS256'.freeze

  # Default token lifetime in seconds (was an inline magic number).
  DEFAULT_TOKEN_DURATION = 5 * 60

  # app  - next Faraday middleware in the stack.
  # opts - :key (required) public identifier, :secret (required) HMAC
  #        secret, :token_duration (optional, seconds).
  #
  # Raises ArgumentError when :key or :secret is missing.
  def initialize(app, opts = {})
    [:key, :secret].each do |key|
      raise ArgumentError, "#{key.inspect} is required" if !opts[key]
    end

    super(app)

    # Work on a copy: the original code did `opts[:token_duration] ||= ...`
    # on the caller's hash and then aliased it, mutating the caller's
    # options object as a side effect.
    @opts = opts.dup
    @opts[:token_duration] ||= DEFAULT_TOKEN_DURATION
  end

  # Signs the request, then passes it down the middleware stack.
  def call(env)
    env[:request_headers]['Authorization'] = build_auth_header(env)
    @app.call(env)
  end

  private

  # Full Authorization header value for the request in +env+.
  def build_auth_header(env)
    token = build_token(env)
    "#{SIGNATURE_METHOD} #{@opts[:key]} #{token}"
  end

  # JWT scoped to this request's method and path, expiring after
  # @opts[:token_duration] seconds.
  def build_token(env)
    JWT.encode({
      exp: Time.now.utc.to_i + @opts[:token_duration].to_i,
      method: env[:method].to_s.upcase,
      path: env[:url].path.split('?').first
    }, @opts[:secret], SIGNATURE_ALGO)
  end
end
end
end
| 23.536585 | 73 | 0.572021 |
0344a7b5e60560570f00d1625cb909fab2ca8f4f | 7,720 | require "sequel"
require "pact_broker/project_root"
require "pact_broker/pacts/latest_tagged_pact_publications"
require "pact_broker/logging"
require "pact_broker/db/clean/selector"
module PactBroker
module DB
# Deletes pact publications, verifications, webhook data and orphaned
# versions/tags from the database, keeping only the data matched by the
# configured "keep" selectors. All queries are Sequel datasets on @db.
class Clean
  include PactBroker::Logging

  # Array subclass adding a #union that preserves the class, so id lists
  # built from different queries can be combined uniformly.
  class Unionable < Array
    def union(other)
      Unionable.new(self + other)
    end
  end

  # Convenience entry point: build an instance and run the clean.
  def self.call database_connection, options = {}
    new(database_connection, options).call
  end

  # database_connection - a Sequel database connection.
  # options             - :keep may hold selector-ish objects (see #keep).
  def initialize database_connection, options = {}
    @db = database_connection
    @options = options
  end

  # Selectors describing which versions (and their pacts/verifications)
  # must be kept. Defaults to latest-per-tag, latest-per-branch, overall
  # latest, deployed and released versions.
  def keep
    @keep ||= if options[:keep]
      # Could be a Matrix::UnresolvedSelector from the docker image, convert it
      options[:keep].collect { | unknown_thing | Selector.from_hash(unknown_thing.to_hash) }
    else
      [Selector.new(tag: true, latest: true), Selector.new(branch: true, latest: true), Selector.new(latest: true), Selector.new(deployed: true), Selector.new(released: true)]
    end
  end

  # Materialises a dataset of rows into a Unionable of the given column's
  # values (defaults to :id).
  def resolve_ids(query, column_name = :id)
    # query
    Unionable.new(query.collect { |h| h[column_name] })
  end

  # Union of all pact publication ids that must survive the clean.
  def pact_publication_ids_to_keep
    @pact_publication_ids_to_keep ||= pact_publication_ids_to_keep_for_version_ids_to_keep
      .union(latest_pact_publication_ids_to_keep)
      .union(latest_tagged_pact_publications_ids_to_keep)
  end

  # Pact publications whose consumer version is itself being kept.
  def pact_publication_ids_to_keep_for_version_ids_to_keep
    @pact_publication_ids_to_keep_for_version_ids_to_keep ||= resolve_ids(db[:pact_publications].select(:id).where(consumer_version_id: version_ids_to_keep))
  end

  # Latest tagged pact publications for each tag+latest selector.
  def latest_tagged_pact_publications_ids_to_keep
    @latest_tagged_pact_publications_ids_to_keep ||= resolve_ids(keep.select(&:tag).select(&:latest).collect do | selector |
      PactBroker::Pacts::LatestTaggedPactPublications.select(:id).for_selector(selector)
    end.reduce(&:union) || [])
  end

  # The overall latest pact publication per integration is always kept.
  def latest_pact_publication_ids_to_keep
    @latest_pact_publication_ids_to_keep ||= resolve_ids(db[:latest_pact_publications].select(:id))
  end

  # Everything not explicitly kept is deleted.
  def pact_publication_ids_to_delete
    @pact_publication_ids_to_delete ||= resolve_ids(db[:pact_publications].select(:id).where(id: pact_publication_ids_to_keep).invert)
  end

  # because they belong to the versions to keep
  def verification_ids_to_keep_for_version_ids_to_keep
    @verification_ids_to_keep_for_version_ids_to_keep ||= resolve_ids(db[:verifications].select(:id).where(provider_version_id: version_ids_to_keep))
  end

  # Latest verification for each pact version that is still the latest
  # publication — kept so the "latest verification" view stays populated.
  def verification_ids_to_keep_because_latest_verification_for_latest_pact
    @verification_ids_to_keep_because_latest_verification ||= resolve_ids(
      db[:latest_verification_ids_for_pact_versions]
        .select(:latest_verification_id)
        .where(pact_version_id:
          db[:latest_pact_publications].select(:pact_version_id)
        ),
      :latest_verification_id
    )
  end

  # NOTE(review): memoised but not referenced by #verification_ids_to_keep;
  # appears unused within this class — confirm before relying on it.
  def verification_ids_to_keep_for_pact_publication_ids_to_keep
    @verification_ids_to_keep_for_pact_publication_ids_to_keep ||= resolve_ids(
      db[:latest_verification_id_for_pact_version_and_provider_version]
        .select(:verification_id)
        .where(pact_version_id:
          db[:pact_publications]
            .select(:pact_version_id)
            .where(id: pact_publication_ids_to_keep_for_version_ids_to_keep)
        )
    )
  end

  def verification_ids_to_keep
    @verification_ids_to_keep ||= verification_ids_to_keep_for_version_ids_to_keep.union(verification_ids_to_keep_because_latest_verification_for_latest_pact)
  end

  # Returns a Sequel dataset (not resolved ids) of verifications to delete.
  def verification_ids_to_delete
    @verification_ids_to_delete ||= db[:verifications].select(:id).where(id: verification_ids_to_keep).invert
  end

  # Version ids matched by any keep selector.
  def version_ids_to_keep
    @version_ids_to_keep ||= keep.collect do | selector |
      PactBroker::Domain::Version.select(:id).for_selector(selector)
    end.reduce(&:union)
  end

  # Runs the clean. Deletion order matters: webhook data first (it
  # references verifications/pact publications), then the rows themselves,
  # then orphaned pact versions, tags and versions.
  # Returns { kept: {...}, deleted: {...} } counts.
  def call
    deleted_counts = {}
    kept_counts = {}
    deleted_counts[:pact_publications] = pact_publication_ids_to_delete.count
    kept_counts[:pact_publications] = pact_publication_ids_to_keep.count

    # Work out how to keep the head verifications for the provider tags.
    deleted_counts[:verification_results] = verification_ids_to_delete.count
    kept_counts[:verification_results] = verification_ids_to_keep.count

    delete_webhook_data(verification_triggered_webhook_ids_to_delete)
    delete_verifications
    delete_webhook_data(pact_publication_triggered_webhook_ids_to_delete)
    delete_pact_publications
    delete_orphan_pact_versions
    # Overwritten verifications are removed in a second pass; fold those
    # numbers into the totals reported to the caller.
    overwritten_delete_counts = delete_overwritten_verifications
    deleted_counts[:verification_results] = deleted_counts[:verification_results] + overwritten_delete_counts[:verification_results]
    kept_counts[:verification_results] = kept_counts[:verification_results] - overwritten_delete_counts[:verification_results]
    delete_orphan_tags
    delete_orphan_versions
    { kept: kept_counts, deleted: deleted_counts }
  end

  private

  attr_reader :db, :options

  # Triggered webhooks whose verification is about to be deleted.
  def verification_triggered_webhook_ids_to_delete
    db[:triggered_webhooks].select(:id).where(verification_id: verification_ids_to_delete)
  end

  # Triggered webhooks whose pact publication is about to be deleted.
  def pact_publication_triggered_webhook_ids_to_delete
    db[:triggered_webhooks].select(:id).where(pact_publication_id: pact_publication_ids_to_delete)
  end

  # Version ids still referenced as a consumer or provider version.
  def referenced_version_ids
    db[:pact_publications].select(:consumer_version_id).union(db[:verifications].select(:provider_version_id))
  end

  # NOTE(review): memoised but not called anywhere in this class — confirm
  # before removing.
  def verification_ids_for_pact_publication_ids_to_delete
    @verification_ids_for_pact_publication_ids_to_delete ||=
      db[:verifications].select(:id).where(pact_version_id: db[:pact_publications].select(:pact_version_id).where(id: pact_publication_ids_to_delete))
  end

  # Removes executions before their triggered webhooks (FK order).
  def delete_webhook_data(triggered_webhook_ids)
    db[:webhook_executions].where(triggered_webhook_id: triggered_webhook_ids).delete
    db[:triggered_webhooks].where(id: triggered_webhook_ids).delete
  end

  def delete_pact_publications
    db[:pact_publications].where(id: pact_publication_ids_to_delete).delete
  end

  def delete_verifications
    db[:verifications].where(id: verification_ids_to_delete).delete
  end

  # Pact versions no longer referenced by any publication or verification.
  def delete_orphan_pact_versions
    referenced_pact_version_ids = db[:pact_publications].select(:pact_version_id).union(db[:verifications].select(:pact_version_id))
    db[:pact_versions].where(id: referenced_pact_version_ids).invert.delete
  end

  def delete_orphan_tags
    db[:tags].where(version_id: referenced_version_ids).invert.delete
  end

  def delete_orphan_versions
    db[:versions].where(id: referenced_version_ids).invert.delete
  end

  # Deletes verifications superseded by a newer verification for the same
  # pact version + provider version, plus their webhook data.
  # Returns the count deleted, for the caller's bookkeeping.
  def delete_overwritten_verifications
    verification_ids = db[:verifications].select(:id).where(id: db[:latest_verification_id_for_pact_version_and_provider_version].select(:verification_id)).invert
    deleted_counts = { verification_results: verification_ids.count }
    delete_webhook_data(db[:triggered_webhooks].where(verification_id: verification_ids).select(:id))
    verification_ids.delete
    deleted_counts
  end
end
end
end
| 39.793814 | 189 | 0.716321 |
18b62191957c6fa6e77b5ddd6478f0387de3aa09 | 3,761 | require 'spec_helper'
require 'gocardless/paginator'
# Specs for GoCardless::Paginator, which pages through API list endpoints
# using pagination metadata from the X-Pagination response header.
describe GoCardless::Paginator do
  let(:resource_class) { GoCardless::Resource }
  let(:path) { '/test' }
  let(:query) { { :status => 'active' } }

  let(:per_page) { 10 }
  let(:page_number) { 1 }

  # Two fake pages of a 15-record collection: page 1 links forward to
  # page 2; page 2 links back to page 1.
  let(:headers_p1) {{
    'X-Pagination' => '{"records":15,"pages":2,"links":{"next":2,"last":2}}'
  }}
  let(:response_p1) { double(:headers => headers_p1, :parsed => [{:id => 'a'}]) }

  let(:headers_p2) {{
    'X-Pagination' => '{"records":15,"pages":2,"links":{"previous":1,"first":1}}'
  }}
  let(:response_p2) { double(:headers => headers_p2, :parsed => [{:id => 'b'}]) }

  let(:client) { double('client') }
  # Serve page 1, page 2, then the same pair again, so a paginator can be
  # iterated twice within a single example.
  before { allow(client).to receive(:api_request).and_return(response_p1, response_p2,
                                                             response_p1, response_p2) }

  let(:paginator) { described_class.new(client, resource_class, path, query) }
  before { paginator.per_page(per_page) }

  describe "#per_page" do
    context "given no arguments" do
      # Acts as a reader when called with no argument.
      subject { paginator.per_page }
      it { is_expected.to eq(per_page) }
    end

    context "given an argument" do
      it "is chainable" do
        expect(paginator.per_page(60)).to eq(paginator)
      end
    end

    it "resets pagination metadata" do
      expect(paginator).to receive(:load_page).exactly(2).times
      paginator.count # reset metadata, check that we have to reload it
      paginator.per_page(50)
      paginator.count
    end
  end

  describe "#load_page" do
    it "asks the client for the correct path" do
      expect(client).to receive(:api_request).
        with(:get, '/test', anything).
        and_return(response_p1)
      paginator.page(page_number)
    end

    it "passes the correct pagination parameters through" do
      pagination_params = { :page => page_number, :per_page => per_page }
      expect(client).to receive(:api_request) { |_, _, opts|
        expect(opts[:params]).to include pagination_params
      }.and_return(response_p1)
      paginator.page(page_number)
    end
  end

  describe "#each" do
    it "yields every item from each page" do
      # One resource per stubbed page (each page's parsed body has one item).
      resources = [a_kind_of(resource_class), a_kind_of(resource_class)]
      expect { |b| paginator.each(&b) }.to yield_successive_args(*resources)
    end
  end

  describe "#each_page" do
    let(:pages) { [a_kind_of(GoCardless::Page), a_kind_of(GoCardless::Page)] }

    it "yields each page until there are none left" do
      expect { |b| paginator.each_page(&b) }.to yield_successive_args(*pages)
    end

    it "can be iterated over multiple times" do
      2.times do
        expect { |b| paginator.each_page(&b) }.to yield_successive_args(*pages)
      end
    end
  end

  describe "#count" do
    subject { paginator.count }

    # Metadata ("records":15) is cached after the first page load.
    context "when metadata is loaded" do
      before { paginator.page(1) }
      it { is_expected.to eq(15) }

      it "doesn't reload metadata" do
        expect(paginator).not_to receive(:load_page)
        paginator.count
      end
    end

    context "when metadata is not loaded" do
      it { is_expected.to eq(15) }

      it "loads metadata" do
        expect(paginator).to receive(:load_page)
        paginator.count
      end
    end
  end

  describe "#page_count" do
    subject { paginator.page_count }

    context "when metadata is loaded" do
      before { paginator.page(1) }
      it { is_expected.to eq(2) }

      it "doesn't reload metadata" do
        expect(paginator).not_to receive(:load_page)
        paginator.page_count
      end
    end

    context "when metadata is not loaded" do
      it { is_expected.to eq(2) }

      it "loads metadata" do
        expect(paginator).to receive(:load_page)
        paginator.page_count
      end
    end
  end
end
| 27.859259 | 86 | 0.629354 |
1db9bc002ae91b486e6b52668d6c941d7215adef | 1,578 | # encoding: utf-8
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170324160416_migrate_user_activities_to_users_last_activity_on.rb')
describe MigrateUserActivitiesToUsersLastActivityOn, :redis do
let(:migration) { described_class.new }
let!(:user_active_1) { create(:user) }
let!(:user_active_2) { create(:user) }
def record_activity(user, time)
Gitlab::Redis.with do |redis|
redis.zadd(described_class::USER_ACTIVITY_SET_KEY, time.to_i, user.username)
end
end
around do |example|
Timecop.freeze { example.run }
end
before do
record_activity(user_active_1, described_class::TIME_WHEN_ACTIVITY_SET_WAS_INTRODUCED + 2.months)
record_activity(user_active_2, described_class::TIME_WHEN_ACTIVITY_SET_WAS_INTRODUCED + 3.months)
mute_stdout { migration.up }
end
describe '#up' do
it 'fills last_activity_on from the legacy Redis Sorted Set' do
expect(user_active_1.reload.last_activity_on).to eq((described_class::TIME_WHEN_ACTIVITY_SET_WAS_INTRODUCED + 2.months).to_date)
expect(user_active_2.reload.last_activity_on).to eq((described_class::TIME_WHEN_ACTIVITY_SET_WAS_INTRODUCED + 3.months).to_date)
end
end
describe '#down' do
it 'sets last_activity_on to NULL for all users' do
mute_stdout { migration.down }
expect(user_active_1.reload.last_activity_on).to be_nil
expect(user_active_2.reload.last_activity_on).to be_nil
end
end
def mute_stdout
orig_stdout = $stdout
$stdout = StringIO.new
yield
$stdout = orig_stdout
end
end
| 31.56 | 134 | 0.752218 |
ff4e543d941477004d5782438a8b7bba3066f2f0 | 1,100 | class Buildozer < Formula
  desc "Rewrite bazel BUILD files using standard commands"
  homepage "https://github.com/bazelbuild/buildtools"
  # Pin both tag and revision so the git checkout is reproducible.
  url "https://github.com/bazelbuild/buildtools.git",
      :tag => "3.3.0",
      :revision => "ce0cf814cb03dddf546ea92b3d6bafddb0b9eaf8"
  license "Apache-2.0"
  head "https://github.com/bazelbuild/buildtools.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "394ee9a737ae1961fc4a9b64077b34ebbf62ed61466b0445a859e3277b9d3b43" => :catalina
    sha256 "394ee9a737ae1961fc4a9b64077b34ebbf62ed61466b0445a859e3277b9d3b43" => :mojave
    sha256 "394ee9a737ae1961fc4a9b64077b34ebbf62ed61466b0445a859e3277b9d3b43" => :high_sierra
  end

  depends_on "bazelisk" => :build

  # Build the buildozer target via Bazel and install the produced binary.
  def install
    system "bazelisk", "build", "--config=release", "buildozer:buildozer"
    bin.install "bazel-bin/buildozer/darwin_amd64_stripped/buildozer"
  end

  # Smoke test: add a rule to an empty BUILD file and check the output.
  test do
    build_file = testpath/"BUILD"
    touch build_file
    system "#{bin}/buildozer", "new java_library brewed", "//:__pkg__"
    assert_equal "java_library(name = \"brewed\")\n", build_file.read
  end
end
| 33.333333 | 93 | 0.744545 |
624f255eaf73c6a40143208761b56eda7f3c58ef | 526 | class Launch4j < Formula
  desc "Cross-platform Java executable wrapper"
  homepage "https://launch4j.sourceforge.io/"
  url "https://downloads.sourceforge.net/project/launch4j/launch4j-3/3.12/launch4j-3.12-macosx-x86.tgz"
  # Version cannot be inferred from the URL basename, so set it explicitly.
  version "3.12"
  sha256 "754e557036ff4a469b4a24443c809113f85b9a0689a5ffdcf35a8a6e986c458f"

  # Pre-built archive; nothing is compiled, so no bottle is required.
  bottle :unneeded

  def install
    # Install everything except bundled sources and web assets.
    libexec.install Dir["*"] - ["src", "web"]
    # Expose the jar through a `launch4j` wrapper script in bin.
    bin.write_jar_script libexec/"launch4j.jar", "launch4j"
  end

  test do
    system "#{bin}/launch4j", "--version"
  end
end
| 27.684211 | 103 | 0.730038 |
1a16f1c34e5326fba1d2122be1cbde1088af927d | 368 | cask 'farrago' do
  version '1.2.0'
  sha256 '67fd873285aa2b698d159d419a567504676984ab6d7de64dcc7e91461cbf8ac7'

  url 'https://rogueamoeba.com/farrago/download/Farrago.zip'
  # Vendor's Sparkle feed, polled to detect new releases.
  appcast 'https://rogueamoeba.net/ping/versionCheck.cgi?format=sparkle&bundleid=com.rogueamoeba.Farrago'
  name 'Farrago'
  homepage 'https://rogueamoeba.com/farrago/'

  app 'Farrago.app'
end
| 30.666667 | 105 | 0.782609 |
188d1b4683fc1cc8608fc911bb5ce279c58fa38e | 141 | class AddRoomToEvents < ActiveRecord::Migration[5.2]
  # Adds an indexed room_id foreign-key column to the events table.
  def change
    add_reference :events, :room, index: true, foreign_key: true
  end
end
| 23.5 | 64 | 0.744681 |
f85d5d9c37b1554367d9ad0037b652145793baac | 1,346 | cask "blender" do
  # Choose the download matching the host CPU architecture.
  arch = Hardware::CPU.intel? ? "x64" : "arm64"

  version "2.93.6"
  url "https://download.blender.org/release/Blender#{version.major_minor}/blender-#{version}-macos-#{arch}.dmg"
  # Checksums differ per architecture.
  if Hardware::CPU.intel?
    sha256 "db703fbd60713f0b4270fc406b7a270d136bb6ae22a649b733fad4f61177b58d"
  else
    sha256 "1290068841b6bc3404632e7174a7d374dba932e4d2bc5f1cf39717a195cf0777"
  end

  name "Blender"
  desc "3D creation suite"
  homepage "https://www.blender.org/"

  # Scrape the download page for newer versions of the matching-arch dmg.
  livecheck do
    url "https://www.blender.org/download/"
    regex(%r{href=.*?/blender[._-]v?(\d+(?:\.\d+)+)[._-]macos[._-]#{arch}\.dmg}i)
  end

  conflicts_with cask: "homebrew/cask-versions/blender-lts"
  depends_on macos: ">= :high_sierra"

  app "Blender.app"
  # shim script (https://github.com/Homebrew/homebrew-cask/issues/18809)
  shimscript = "#{staged_path}/blender.wrapper.sh"
  binary shimscript, target: "blender"

  preflight do
    # make __pycache__ directories writable, otherwise uninstall fails
    FileUtils.chmod "u+w", Dir.glob("#{staged_path}/*.app/**/__pycache__")

    # The shim forwards all CLI arguments to the embedded app binary.
    File.write shimscript, <<~EOS
      #!/bin/bash
      '#{appdir}/Blender.app/Contents/MacOS/Blender' "$@"
    EOS
  end

  zap trash: [
    "~/Library/Application Support/Blender",
    "~/Library/Saved Application State/org.blenderfoundation.blender.savedState",
  ]
end
| 29.911111 | 111 | 0.697623 |
014767dd08ebcfdd8e06ac40f2b45b10ec1be3eb | 654 | require 'charyf/utils/generator/base'
module Facebook
  module Interface
    module Generators
      # Charyf generator that installs the Facebook interface into a host
      # application by copying templates from this gem's templates dir.
      class InstallGenerator < ::Charyf::Generators::Base
        source_root File.expand_path('templates', __dir__)

        # Copies the Facebook initializer template into the application.
        def initializer
          template 'config/initializers/facebook.rb.tt'
        end

        # Prints a post-install reminder — only when the generator is being
        # invoked, not when it is being revoked (uninstalled).
        def finalize
          return unless behavior == :invoke

          say_status 'notice', "Facebook interface installed" +
            "\n\t\tDo not forget to enable facebook interface in application configuration" +
            "\n\t\t\tconfig.enabled_interfaces = [.., :facebook, ..]", :green
        end
      end
    end
  end
end | 26.16 | 95 | 0.633028 |
ffdbdcbabad17f8d67a87cd9131cdf77077584bb | 22,910 | #
# Author:: Adam Jacob (<adam@chef.io>)
# Author:: Seth Falcon (<seth@chef.io>)
# Author:: Kyle Goodwin (<kgoodwin@primerevenue.com>)
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "chef-config/exceptions"
require "chef-utils/dist" unless defined?(ChefUtils::Dist)
require_relative "constants"
class Chef
# == Chef::Exceptions
# Chef's custom exceptions are all contained within the Chef::Exceptions
# namespace.
class Exceptions
ConfigurationError = ChefConfig::ConfigurationError
# Backcompat with Chef::ShellOut code:
require "mixlib/shellout/exceptions"
def self.const_missing(const_name)
if const_name == :ShellCommandFailed
Chef::Log.warn("Chef::Exceptions::ShellCommandFailed is deprecated, use Mixlib::ShellOut::ShellCommandFailed")
called_from = caller[0..3].inject("Called from:\n") { |msg, trace_line| msg << " #{trace_line}\n" }
Chef::Log.warn(called_from)
Mixlib::ShellOut::ShellCommandFailed
else
super
end
end
class Application < RuntimeError; end
class SigInt < RuntimeError; end
class SigTerm < RuntimeError; end
class Cron < RuntimeError; end
class WindowsEnv < RuntimeError; end
class Exec < RuntimeError; end
class Execute < RuntimeError; end
class ErlCall < RuntimeError; end
class FileNotFound < RuntimeError; end
class Package < RuntimeError; end
class Service < RuntimeError; end
class Script < RuntimeError; end
class Route < RuntimeError; end
class SearchIndex < RuntimeError; end
class Override < RuntimeError; end
class UnsupportedAction < RuntimeError; end
class MissingLibrary < RuntimeError; end
class CannotDetermineNodeName < RuntimeError
def initialize
super "Unable to determine node name: configure node_name or configure the system's hostname and fqdn"
end
end
class User < RuntimeError; end
class Group < RuntimeError; end
class Link < RuntimeError; end
class Mount < RuntimeError; end
class Reboot < Exception; end # rubocop:disable Lint/InheritException
class RebootPending < Exception; end # rubocop:disable Lint/InheritException
class RebootFailed < Mixlib::ShellOut::ShellCommandFailed; end
class ClientUpgraded < Exception; end # rubocop:disable Lint/InheritException
class PrivateKeyMissing < RuntimeError; end
class CannotWritePrivateKey < RuntimeError; end
class RoleNotFound < RuntimeError; end
class DuplicateRole < RuntimeError; end
class ValidationFailed < ArgumentError; end
class CannotValidateStaticallyError < ArgumentError; end
class InvalidPrivateKey < ArgumentError; end
class MissingKeyAttribute < ArgumentError; end
class KeyCommandInputError < ArgumentError; end
class BootstrapCommandInputError < ArgumentError
def initialize
super "You cannot pass both --json-attributes and --json-attribute-file. Please pass one or none."
end
end
class InvalidKeyArgument < ArgumentError; end
class InvalidKeyAttribute < ArgumentError; end
class InvalidUserAttribute < ArgumentError; end
class InvalidClientAttribute < ArgumentError; end
class RedirectLimitExceeded < RuntimeError; end
class AmbiguousRunlistSpecification < ArgumentError; end
class CookbookFrozen < ArgumentError; end
class CookbookNotFound < RuntimeError; end
class OnlyApiVersion0SupportedForAction < RuntimeError; end
# Cookbook loader used to raise an argument error when cookbook not found.
# for back compat, need to raise an error that inherits from ArgumentError
class CookbookNotFoundInRepo < ArgumentError; end
class CookbookMergingError < RuntimeError; end
class RecipeNotFound < ArgumentError; end
# AttributeNotFound really means the attribute file could not be found
class AttributeNotFound < RuntimeError; end
# NoSuchAttribute is raised on access by node.read!("foo", "bar") when node["foo"]["bar"] does not exist.
class NoSuchAttribute < RuntimeError; end
# AttributeTypeMismatch is raised by node.write!("foo", "bar", "baz") when e.g. node["foo"] = "bar" (overwriting String with Hash)
class AttributeTypeMismatch < RuntimeError; end
class MissingCookbookDependency < StandardError; end # CHEF-5120
class InvalidCommandOption < RuntimeError; end
class CommandTimeout < RuntimeError; end
class RequestedUIDUnavailable < RuntimeError; end
class InvalidHomeDirectory < ArgumentError; end
class DsclCommandFailed < RuntimeError; end
class PlistUtilCommandFailed < RuntimeError; end
class UserIDNotFound < ArgumentError; end
class GroupIDNotFound < ArgumentError; end
class ConflictingMembersInGroup < ArgumentError; end
class InvalidResourceReference < RuntimeError; end
class ResourceNotFound < RuntimeError; end
class ProviderNotFound < RuntimeError; end
NoProviderAvailable = ProviderNotFound
class VerificationNotFound < RuntimeError; end
class InvalidEventType < ArgumentError; end
class MultipleIdentityError < RuntimeError; end
# Used in Resource::ActionClass#load_current_resource to denote that
# the resource doesn't actually exist (for example, the file does not exist)
class CurrentValueDoesNotExist < RuntimeError; end
# Can't find a Resource of this type that is valid on this platform.
class NoSuchResourceType < NameError
def initialize(short_name, node)
super "Cannot find a resource for #{short_name} on #{node[:platform]} version #{node[:platform_version]} with target_mode? #{Chef::Config.target_mode?}"
end
end
class InvalidPolicybuilderCall < ArgumentError; end
class InvalidResourceSpecification < ArgumentError; end
class SolrConnectionError < RuntimeError; end
class IllegalChecksumRevert < RuntimeError; end
class CookbookVersionNameMismatch < ArgumentError; end
class MissingParentDirectory < RuntimeError; end
class UnresolvableGitReference < RuntimeError; end
class InvalidRemoteGitReference < RuntimeError; end
class InvalidEnvironmentRunListSpecification < ArgumentError; end
class InvalidDataBagItemID < ArgumentError; end
class InvalidDataBagName < ArgumentError; end
class EnclosingDirectoryDoesNotExist < ArgumentError; end
# Errors originating from calls to the Win32 API
class Win32APIError < RuntimeError; end
# Thrown when Win32 API layer binds to non-existent Win32 function. Occurs
# when older versions of Windows don't support newer Win32 API functions.
class Win32APIFunctionNotImplemented < NotImplementedError; end # rubocop:disable Lint/InheritException
# Attempting to run windows code on a not-windows node
class Win32NotWindows < RuntimeError; end
class WindowsNotAdmin < RuntimeError; end
# Attempting to access a 64-bit only resource on a 32-bit Windows system
class Win32ArchitectureIncorrect < RuntimeError; end
class ObsoleteDependencySyntax < ArgumentError; end
class InvalidDataBagPath < ArgumentError; end
class DuplicateDataBagItem < RuntimeError; end
class PowershellCmdletException < RuntimeError; end
class LCMParser < RuntimeError; end
class CannotDetermineHomebrewOwner < Package; end
class CannotDetermineWindowsInstallerType < Package; end
class NoWindowsPackageSource < Package; end
# for example, if both recipes/default.yml, recipes/default.yaml are present
class AmbiguousYAMLFile < RuntimeError; end
# Can not create staging file during file deployment
class FileContentStagingError < RuntimeError
def initialize(errors)
super "Staging tempfile can not be created during file deployment.\n Errors: #{errors.join('\n')}!"
end
end
# A different version of a cookbook was added to a
# VersionedRecipeList than the one already there.
class CookbookVersionConflict < ArgumentError; end
# does not follow X.Y.Z format. ArgumentError?
class InvalidPlatformVersion < ArgumentError; end
class InvalidCookbookVersion < ArgumentError; end
# version constraint should be a string or array, or it doesn't
# match OP VERSION. ArgumentError?
class InvalidVersionConstraint < ArgumentError; end
# Version constraints are not allowed in chef-solo
class IllegalVersionConstraint < NotImplementedError; end # rubocop:disable Lint/InheritException
class MetadataNotValid < StandardError; end
class MetadataNotFound < StandardError
attr_reader :install_path
attr_reader :cookbook_name
def initialize(install_path, cookbook_name)
@install_path = install_path
@cookbook_name = cookbook_name
super "No metadata.rb or metadata.json found for cookbook #{@cookbook_name} in #{@install_path}"
end
end
# File operation attempted but no permissions to perform it
class InsufficientPermissions < RuntimeError; end
# Ifconfig failed
class Ifconfig < RuntimeError; end
# Invalid "source" parameter to a remote_file resource
class InvalidRemoteFileURI < ArgumentError; end
# Node::Attribute computes the merged version of of attributes
# and makes it read-only. Attempting to modify a read-only
# attribute will cause this error.
class ImmutableAttributeModification < NoMethodError
def initialize
super "Node attributes are read-only when you do not specify which precedence level to set. " +
%q{To set an attribute use code like `node.default["key"] = "value"'}
end
end
# Merged node attributes are invalidated when the component
# attributes are updated. Attempting to read from a stale copy
# of merged attributes will trigger this error.
class StaleAttributeRead < StandardError; end
# Registry Helper throws the following errors
class Win32RegArchitectureIncorrect < Win32ArchitectureIncorrect; end
class Win32RegHiveMissing < ArgumentError; end
class Win32RegKeyMissing < RuntimeError; end
class Win32RegValueMissing < RuntimeError; end
class Win32RegDataMissing < RuntimeError; end
class Win32RegValueExists < RuntimeError; end
class Win32RegNoRecursive < ArgumentError; end
class Win32RegTypeDoesNotExist < ArgumentError; end
class Win32RegBadType < ArgumentError; end
class Win32RegBadValueSize < ArgumentError; end
class Win32RegTypesMismatch < ArgumentError; end
# incorrect input for registry_key create action throws following error
class RegKeyValuesTypeMissing < ArgumentError; end
class RegKeyValuesDataMissing < ArgumentError; end
class InvalidEnvironmentPath < ArgumentError; end
class EnvironmentNotFound < RuntimeError; end
# File-like resource found a non-file (socket, pipe, directory, etc) at its destination
class FileTypeMismatch < RuntimeError; end
# File (or descendent) resource configured to manage symlink source, but
# the symlink that is there either loops or points to a nonexistent file
class InvalidSymlink < RuntimeError; end
class ChildConvergeError < RuntimeError; end
class DeprecatedFeatureError < RuntimeError
def initialize(message)
super("#{message} (raising error due to treat_deprecation_warnings_as_errors being set)")
end
end
class MissingRole < RuntimeError
attr_reader :expansion
def initialize(message_or_expansion = NOT_PASSED)
@expansion = nil
case message_or_expansion
when NOT_PASSED
super()
when String
super
when RunList::RunListExpansion
@expansion = message_or_expansion
missing_roles = @expansion.errors.join(", ")
super("The expanded run list includes nonexistent roles: #{missing_roles}")
end
end
end
class Secret
class RetrievalError < RuntimeError; end
class ConfigurationInvalid < RuntimeError; end
class FetchFailed < RuntimeError; end
class MissingSecretName < RuntimeError; end
class InvalidSecretName < RuntimeError; end
class InvalidFetcherService < RuntimeError
def initialize(given, fetcher_service_names)
super("#{given} is not a supported secrets service. Supported services are: :#{fetcher_service_names.join(" :")}")
end
end
class MissingFetcher < RuntimeError
def initialize(fetcher_service_names)
super("No secret service provided. Supported services are: :#{fetcher_service_names.join(" :")}")
end
end
class Azure
class IdentityNotFound < RuntimeError
def initialize
super("The managed identity could not be found. This could mean one of the following things:\n\n" \
" 1. The VM has no system or user assigned identities.\n" \
" 2. The managed identity object_id or client_id that was specified is not assigned to the VM.\n")
end
end
end
end
# Exception class for collecting multiple failures. Used when running
# delayed notifications so that chef can process each delayed
# notification even if chef client or other notifications fail.
class MultipleFailures < StandardError
def initialize(*args)
super
@all_failures = []
end
def message
base = "Multiple failures occurred:\n"
@all_failures.inject(base) do |message, (location, error)|
message << "* #{error.class} occurred in #{location}: #{error.message}\n"
end
end
def client_run_failure(exception)
set_backtrace(exception.backtrace)
@all_failures << [ "#{ChefUtils::Dist::Infra::PRODUCT} run", exception ]
end
def notification_failure(exception)
@all_failures << [ "delayed notification", exception ]
end
def raise!
unless empty?
raise for_raise
end
end
def empty?
@all_failures.empty?
end
def for_raise
if @all_failures.size == 1
@all_failures[0][1]
else
self
end
end
end
class CookbookVersionSelection
  # Compound exception: In run_list expansion and resolution,
  # run_list items referred to cookbooks that don't exist and/or
  # have no versions available.
  class InvalidRunListItems < StandardError
    attr_reader :non_existent_cookbooks
    attr_reader :cookbooks_with_no_matching_versions

    # message - human-readable summary
    # non_existent_cookbooks - cookbook names that could not be found at all
    # cookbooks_with_no_matching_versions - names found, but with no version
    #   satisfying the constraints
    def initialize(message, non_existent_cookbooks, cookbooks_with_no_matching_versions)
      super(message)
      @non_existent_cookbooks = non_existent_cookbooks
      @cookbooks_with_no_matching_versions = cookbooks_with_no_matching_versions
    end

    # Serialize for API error responses. NOTE: the wire key is
    # "cookbooks_with_no_versions", not "..._matching_versions".
    def to_json(*a)
      result = {
        "message" => message,
        "non_existent_cookbooks" => non_existent_cookbooks,
        "cookbooks_with_no_versions" => cookbooks_with_no_matching_versions,
      }
      Chef::JSONCompat.to_json(result, *a)
    end
  end

  # In run_list expansion and resolution, a constraint was
  # unsatisfiable.
  #
  # This exception may not be the complete error report. If you
  # resolve the misconfiguration represented by this exception and
  # re-solve, you may get another exception
  class UnsatisfiableRunListItem < StandardError
    attr_reader :run_list_item
    attr_reader :non_existent_cookbooks, :most_constrained_cookbooks

    # most_constrained_cookbooks: if I were to remove constraints
    # regarding these cookbooks, I would get a solution or move on
    # to the next error (deeper in the graph). An item in this list
    # may be unsatisfiable, but when resolved may also reveal
    # further unsatisfiable constraints; this condition would not be
    # reported.
    def initialize(message, run_list_item, non_existent_cookbooks, most_constrained_cookbooks)
      super(message)
      @run_list_item = run_list_item
      @non_existent_cookbooks = non_existent_cookbooks
      @most_constrained_cookbooks = most_constrained_cookbooks
    end

    # Serialize for API error responses.
    def to_json(*a)
      result = {
        "message" => message,
        "unsatisfiable_run_list_item" => run_list_item,
        "non_existent_cookbooks" => non_existent_cookbooks,
        "most_constrained_cookbooks" => most_constrained_cookbooks,
      }
      Chef::JSONCompat.to_json(result, *a)
    end
  end
end # CookbookVersionSelection
# When the server sends a redirect, RFC 2616 states a user-agent should
# not follow it with a method other than GET or HEAD, unless a specific
# action is taken by the user. A redirect received as response to a
# non-GET and non-HEAD request will thus raise an InvalidRedirect.
class InvalidRedirect < StandardError; end
# Raised when the content length of a download does not match the content
# length declared in the http response.
class ContentLengthMismatch < RuntimeError
  # response_length - number of bytes actually received
  # content_length  - value declared in the Content-Length response header
  def initialize(response_length, content_length)
    super <<~EOF
      Response body length #{response_length} does not match HTTP Content-Length header #{content_length}.
      This error is most often caused by network issues (proxies, etc) outside of #{ChefUtils::Dist::Infra::CLIENT}.
    EOF
  end
end
# Raised when a feature is invoked on a platform that cannot support it.
class UnsupportedPlatform < RuntimeError
  # platform - name of the unsupported platform, embedded in the message.
  def initialize(platform)
    super(format("This functionality is not supported on platform %s.", platform))
  end
end
# Raised when Chef::Config[:run_lock_timeout] is set and some other client run fails
# to release the run lock before Chef::Config[:run_lock_timeout] seconds pass.
# Raised when another client run holds the run lock longer than the
# configured timeout.
class RunLockTimeout < RuntimeError
  # duration     - seconds waited before giving up
  # blocking_pid - pid of the process holding the lock
  def initialize(duration, blocking_pid)
    message = "Unable to acquire lock. " \
      "Waited #{duration} seconds for #{blocking_pid} to release."
    super(message)
  end
end
# Raised when a resource's declared checksum differs from the checksum of
# the actual content.
class ChecksumMismatch < RuntimeError
  # res_cksum  - (truncated) checksum declared on the resource
  # cont_cksum - (truncated) checksum computed from the content
  def initialize(res_cksum, cont_cksum)
    super(format("Checksum on resource (%s...) does not match checksum on content (%s...)", res_cksum, cont_cksum))
  end
end
class BadProxyURI < RuntimeError; end
# Raised by Chef::JSONCompat
class JSON
class EncodeError < RuntimeError; end
class ParseError < RuntimeError; end
end
class InvalidSearchQuery < ArgumentError; end
# Raised by Chef::ProviderResolver
# Raised by Chef::ProviderResolver when more than one provider class claims
# a resource and the ambiguity cannot be resolved.
class AmbiguousProviderResolution < RuntimeError
  # resource - the resource being resolved (must respond to #resource_name)
  # classes  - the competing provider classes
  def initialize(resource, classes)
    super(format("Found more than one provider for %s resource: %s", resource.resource_name, classes))
  end
end
# If a converge fails, we want to wrap the output from those errors into 1 error so we can
# see both issues in the output. It is possible that nil will be provided. You must call `fill_backtrace`
# to correctly populate the backtrace with the wrapped backtraces.
# Wraps every error raised during a failed converge into one exception so
# all of them appear in a single report. nil entries are tolerated and
# dropped. Call `fill_backtrace` to populate the combined backtrace.
class RunFailedWrappingError < RuntimeError
  attr_reader :wrapped_errors

  def initialize(*errors)
    @wrapped_errors = errors.compact
    super("Found #{@wrapped_errors.size} errors, they are stored in the backtrace")
  end

  # Build one backtrace containing a numbered header plus the backtrace of
  # each wrapped error, separated by blank lines.
  def fill_backtrace
    combined = wrapped_errors.each_with_index.flat_map do |error, index|
      entry = ["#{index + 1}) #{error.class} - #{error.message}"]
      entry.concat(error.backtrace) if error.backtrace
      entry << "" unless index == wrapped_errors.length - 1
      entry
    end
    set_backtrace(combined)
  end
end
# Raised when the configured PID file path and lockfile path are the same.
class PIDFileLockfileMatch < RuntimeError
  MESSAGE = "PID file and lockfile are not permitted to match. Specify a different location with --pid or --lockfile".freeze

  def initialize
    super(MESSAGE)
  end
end
# Raised when a cookbook declares a chef_version constraint that the running
# client version does not satisfy.
class CookbookChefVersionMismatch < RuntimeError
  # constraints: objects whose #requirement responds to #as_list (presumably
  # Gem::Dependency-like — confirm at call sites); rendered into the message.
  def initialize(chef_version, cookbook_name, cookbook_version, *constraints)
    constraint_str = constraints.map { |c| c.requirement.as_list.to_s }.join(", ")
    super "Cookbook '#{cookbook_name}' version '#{cookbook_version}' depends on #{ChefUtils::Dist::Infra::PRODUCT} version #{constraint_str}, but the running #{ChefUtils::Dist::Infra::PRODUCT} version is #{chef_version}"
  end
end
# Raised when a cookbook declares an ohai_version constraint that the
# running ohai version does not satisfy.
class CookbookOhaiVersionMismatch < RuntimeError
  # constraints: objects whose #requirement responds to #as_list; each is
  # rendered into the error message.
  def initialize(ohai_version, cookbook_name, cookbook_version, *constraints)
    required = constraints.map { |constraint| constraint.requirement.as_list.to_s }.join(", ")
    message = format(
      "Cookbook '%s' version '%s' depends on ohai version %s, but the running ohai version is %s",
      cookbook_name, cookbook_version, required, ohai_version
    )
    super(message)
  end
end
# Raised when a dsc_resource name matches more than one DSC resource on the
# node and the user must disambiguate by module.
class MultipleDscResourcesFound < RuntimeError
  attr_reader :resources_found

  # resources_found - array of DSC resource hashes as returned by the
  #   PowerShell query: { "Name" => ..., "Module" => { "Name" => ...,
  #   "Version" => ... } }. "Module" appears to be nil for binary resources
  #   (the original code branched on that).
  #
  # BUG FIX: the previous implementation iterated the resources with an
  # `each` block whose nil-check branches were swapped, so a nil "Module"
  # dereferenced `r["Module"]["Name"]` and raised NoMethodError before the
  # message was ever built; the per-resource strings were also discarded by
  # `each`. The dead loop is removed and the message built directly, which
  # is byte-identical to the old message for valid input.
  def initialize(resources_found)
    @resources_found = resources_found
    module_name = @resources_found[0]["Module"]["Name"]
    versions = @resources_found.map { |f| f["Module"]["Version"] }.uniq
    super "Found multiple resources matching #{module_name}:\n#{versions.join("\n")}"
  end
end
# exception specific to invalid usage of 'dsc_resource' resource
class DSCModuleNameMissing < ArgumentError; end
# Raised when two different values are supplied for the same option of the
# same gem requirement.
class GemRequirementConflict < RuntimeError
  def initialize(gem_name, option, value1, value2)
    message = "Conflicting requirements for gem '#{gem_name}': " \
      "Both #{value1.inspect} and #{value2.inspect} given for option #{option.inspect}"
    super(message)
  end
end
# Raised in unified mode when an immediate subscription targets a resource
# that has already fired.
class UnifiedModeImmediateSubscriptionEarlierResource < RuntimeError
  # notification - must respond to #resource and #notifying_resource.
  def initialize(notification)
    message = "immediate subscription from #{notification.resource} resource cannot be setup " \
      "to #{notification.notifying_resource} resource, which has already fired while in unified mode"
    super(message)
  end
end
# Raised in unified mode when a `before` subscription targets a resource
# that has already fired.
class UnifiedModeBeforeSubscriptionEarlierResource < RuntimeError
  # notification - must respond to #resource and #notifying_resource.
  def initialize(notification)
    message = "before subscription from #{notification.resource} resource cannot be setup " \
      "to #{notification.notifying_resource} resource, which has already fired while in unified mode"
    super(message)
  end
end
end
end
| 40.477032 | 224 | 0.711174 |
f75fd180f4ef5cccd09ce5c28a96d0c4315addd6 | 4,563 | class NationBuilder::Client
# Create an API client for one nation.
#
# nation_name - subdomain slug, substituted into the base URL template
# api_key     - OAuth access token sent with every request
# opts        - :base_url    (default "https://:nation_name.nationbuilder.com")
#               :retries     (default 8; must be >= 0)
#               :http_client (default HTTPClient.new)
def initialize(nation_name, api_key, opts = {})
  @nation_name = nation_name
  @api_key = api_key
  @name_to_endpoint = {}
  @base_url = opts[:base_url] || 'https://:nation_name.nationbuilder.com'
  @retries = opts[:retries] || 8
  @http_client = opts[:http_client] || HTTPClient.new
  if @retries < 0
    raise 'Retries must be at least zero'
  end
  # Index endpoint definitions from the bundled API spec by name.
  parsed_endpoints.each do |endpoint|
    @name_to_endpoint[endpoint.name] = endpoint
  end
end
def parsed_endpoints
NationBuilder::SpecParser
.parse(File.join(File.dirname(__FILE__), '..', 'api_spec/spec.json'))
end
class InvalidEndpoint < ArgumentError; end
# Look up an endpoint definition by name.
# Raises InvalidEndpoint when the name is not in the loaded API spec.
def [](endpoint)
  e = @name_to_endpoint[endpoint]
  raise InvalidEndpoint.new(endpoint) if e.nil?
  e
end
def endpoints
@name_to_endpoint.keys
end
def base_url
@base_url.gsub(':nation_name', @nation_name)
end
RETRY_DELAY = 0.1 # seconds
# Perform a raw HTTP request against the API.
#
# path   - URI template for the endpoint
# method - HTTP verb as a symbol (:get, :post, ...)
# body   - request parameters; merged into the query string for GET,
#          serialized as a JSON request body otherwise
# args   - values substituted into the URI template
#
# NOTE(review): for non-GET requests the caller's `body` hash is mutated
# in place (an :access_token key is added) — confirm callers never reuse
# the hash they pass in.
def raw_call(path, method, body = {}, args = {})
  url = NationBuilder::URL.new(base_url).generate_url(path, args)

  request_args = {
    header: {
      'Accept' => 'application/json',
      'Content-Type' => 'application/json'
    },
    query: {
      access_token: @api_key
    }
  }

  if method == :get
    request_args[:query].merge!(body)
  else
    body[:access_token] = @api_key
    # fire_webhooks must travel as a query parameter, not in the JSON body.
    if !body[:fire_webhooks].nil?
      request_args[:query][:fire_webhooks] = body[:fire_webhooks]
    end
    request_args[:body] = JSON(body)
  end

  perform_request_with_retries(method, url, request_args)
end
# Invoke a named method on a named endpoint: split the supplied args into
# URI values and request parameters, validate the required parameters, then
# delegate to raw_call.
def call(endpoint_name, method_name, args = {})
  endpoint_method = self[endpoint_name][method_name]
  request_body = endpoint_method.nonmethod_args(args)
  endpoint_method.validate_args(endpoint_method.method_args(args))
  raw_call(endpoint_method.uri, endpoint_method.http_method, request_body, args)
end
# Execute the HTTP request, retrying with exponential backoff only when the
# API reports rate limiting; every other error propagates immediately.
def perform_request_with_retries(method, url, request_args)
  parsed_response = nil
  exception_to_reraise = nil

  (@retries + 1).times do |i|
    begin
      set_response(@http_client.send(method, url, request_args))
      parsed_response = parse_response_body(response)
    rescue NationBuilder::RateLimitedError => e
      # Remember the error and back off: 0.1s, 0.2s, 0.4s, ...
      exception_to_reraise = e
      Kernel.sleep(RETRY_DELAY * 2**i)
    rescue => e
      raise e
    else
      # Success: clear any rate-limit error captured on earlier attempts.
      exception_to_reraise = nil
      break
    end
  end

  # If the retry cycle ended with an error, reraise it
  raise exception_to_reraise if exception_to_reraise

  parsed_response
end
def set_response(value)
Thread.current[:nationbuilder_rb_response] = value
end
# This getter is used for fetching the raw response
def response
Thread.current[:nationbuilder_rb_response]
end
# Map an HTTP response to the matching error object, or nil on success:
# 429 -> RateLimitedError, other 4xx -> ClientError, 5xx -> ServerError.
def classify_response_error(response)
  if response.code == 429
    NationBuilder::RateLimitedError.new(response.body)
  elsif response.code.to_s.start_with?('4')
    NationBuilder::ClientError.new(response.body)
  elsif response.code.to_s.start_with?('5')
    NationBuilder::ServerError.new(response.body)
  end
end
# Raise the classified error for 4xx/5xx responses; otherwise parse a JSON
# body, or return true when the response has no JSON content type.
def parse_response_body(response)
  error = classify_response_error(response)
  raise error if error

  content_type = response.header['Content-Type'].first
  unless content_type && content_type.include?('application/json')
    return true
  end
  parsed_body(response.body)
end
def print_all_descriptions
endpoints.each do |endpoint_name|
self.print_description(endpoint_name)
puts
end
end
def print_description(endpoint_name)
endpoint_name = endpoint_name.to_sym
unless self.endpoints.include?(endpoint_name)
puts "Invalid endpoint name: #{endpoint_name}"
puts
puts "Valid endpoint names:"
self.endpoints.each do |endpoint|
puts " #{endpoint}"
end
return
end
endpoint_str = "Endpoint: #{endpoint_name}"
puts "=" * endpoint_str.length
puts endpoint_str
puts "=" * endpoint_str.length
self[endpoint_name].methods.each do |method_name|
puts
method = self[endpoint_name][method_name]
puts " Method: #{method_name}"
puts " Description: #{method.description}"
required_params = method.parameters.map { |p| p }
if required_params.any?
puts " Required parameters: #{required_params.join(', ')}"
end
end
end
private
# Parse a JSON response body; an empty body is treated as an empty hash.
def parsed_body(body)
  body.empty? ? {} : JSON.parse(body)
end
end
| 24.934426 | 75 | 0.667324 |
910a104e72d4567e09c092498612390179a124de | 1,169 | class Program < ApplicationRecord
include Codeable
include Nameable
include FinanceSpendable
include FinancePlannable
include BudgetItemDuplicatable
include PermaIdable
belongs_to :spending_agency
belongs_to :parent_program, class_name: 'Program'
has_many :child_programs,
class_name: 'Program',
foreign_key: :parent_program_id
has_many :priority_connections, as: :priority_connectable
has_many :priorities,
-> { distinct },
through: :priority_connections
# Every descendant program (children, grandchildren, ...), depth-first,
# starting with the direct children.
def all_programs
  return [] if child_programs.empty?

  child_programs + child_programs.flat_map(&:all_programs)
end
# The budget-tree parent: the parent program when one is set, otherwise the
# owning spending agency.
def parent
  parent_program.presence || spending_agency
end
# Chain of parents ordered from the closest parent up to the root.
def ancestors
  node = parent
  node.nil? ? [] : [node] + node.ancestors
end
def direct_priority_connections
priority_connections.direct
end
def type
self.class.to_s.underscore
end
def take_programs_from(other_program)
other_program.child_programs.update(parent_program: self)
end
end
| 21.648148 | 67 | 0.740804 |
e88549bc6bec781221c7d4a4c9b1726fb52bb356 | 931 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Google
module Apis
module RunV1alpha1
# Version of the google-apis-run_v1alpha1 gem
GEM_VERSION = "0.14.0"
# Version of the code generator used to generate this client
GENERATOR_VERSION = "0.4.0"
# Revision of the discovery document this client was generated from
REVISION = "20210813"
end
end
end
| 32.103448 | 74 | 0.729323 |
182222e453212848c0882f9a685001c6298c8bfd | 617 | # Bundler monkey patches
module ::Bundler
  # Patch bundler to write a .lock file specific to the version of ruby.
  # This keeps MRI/JRuby/RBX from conflicting over the Gemfile.lock updates
  module SharedHelpers
    # e.g. "Gemfile.jruby-2.5.0.lock" — one lockfile per engine/ABI pair.
    def default_lockfile
      ruby = "#{LogStash::Environment.ruby_engine}-#{LogStash::Environment.ruby_abi_version}"
      Pathname.new("#{default_gemfile}.#{ruby}.lock")
    end
  end

  # Add the Bundler.reset! method which has been added in master but is not in 1.7.9.
  class << self
    unless self.method_defined?("reset!")
      # Drop the memoized definition so Bundler can be re-configured in-process.
      def reset!
        @definition = nil
      end
    end
  end
end
| 29.380952 | 93 | 0.692058 |
39207e0b8dc3eeb757d437b3c3d8fd9478f4cd5a | 663 | require File.dirname(__FILE__) + '/../integration_helper'
# Run the basic async-worker smoke test once for every configured backend
# setup provided by the integration helper's $setup_loader.
$setup_loader.each_setup do |setup|
  context "Basic Operation for #{setup.name}" do
    setup do
      setup.setup_test
    end

    teardown do
      setup.teardown_test
    end

    specify "should execute async operation" do
      BasicOperationWorker.async_do_work(:token => "my magic token")
      sleep(2) # this is not clean, but we need to wait for the async call to finish

      # BUG FIX: File.exists? was deprecated and removed in Ruby 3.2;
      # File.exist? is the supported predicate. Also hoist the shared path.
      output = File.join(setup.tmp_directory, "basic_operation.output")
      File.exist?(output).should == true
      File.read(output).should == "my magic token"
    end
  end
end
| 24.555556 | 100 | 0.689291 |
bb61cd527491a9ad4f98a4050bb98fe6ddfbe8f3 | 37 | module Mpesa
VERSION = '0.1.0'
end
| 9.25 | 19 | 0.648649 |
1a0328daa1ce8ad5f21cbb05a928fbb0f268cd03 | 462 | module Webspicy
class Tester
class Result
# Check result representing a single postcondition verified against a
# tester result; the actual verification is delegated to the postcondition.
class PostconditionMet < Check
  # result - the enclosing tester result (passed through to Check)
  # post   - the postcondition object; must respond to #check! and #to_s
  def initialize(result, post)
    super(result)
    @post = post
  end
  attr_reader :post

  # Human-readable description of the verified behavior.
  def behavior
    post.to_s
  end

  # Postconditions are mandatory expectations.
  def must?
    true
  end

  # Run the postcondition's own verification.
  def call
    post.check!
  end
end # class PostconditionMet
end # class Result
end # class Tester
end # module Webspicy
| 16.5 | 36 | 0.532468 |
f7f89ddee0a48acf58a3bc847c83ba46cedc2621 | 2,901 | # Encoding: utf-8
# IBM WebSphere Application Server Liberty Buildpack
# Copyright 2013 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'liberty_buildpack/repository'
require 'liberty_buildpack/util/tokenized_version'
module LibertyBuildpack::Repository

  # A resolver that selects values from a collection based on a set of rules governing wildcards
  class VersionResolver

    # Resolves a version from a collection of versions. The +candidate_version+ must be structured like:
    # * up to three numeric components, followed by an optional string component
    # * the final component may be a +
    # The resolution returns the maximum of the versions that match the candidate version
    #
    # @param [TokenizedVersion] candidate_version the version, possibly containing a wildcard, to resolve. If +nil+,
    #                                             substituted with +.
    # @param [Array<String>] versions the collection of versions to resolve against
    # @return [TokenizedVersion] the resolved version
    # @raise if no version can be resolved
    def self.resolve(candidate_version, versions)
      tokenized_candidate_version = safe_candidate_version candidate_version
      tokenized_versions = versions.map { |version| LibertyBuildpack::Util::TokenizedVersion.new(version, false) }

      # `max` already orders via <=>, so the explicit comparator block the
      # previous version passed was redundant.
      version = tokenized_versions
                .select { |tokenized_version| matches? tokenized_candidate_version, tokenized_version }
                .max

      raise "No version resolvable for '#{candidate_version}' in #{versions.join(', ')}" if version.nil?
      version
    end

    private

    # Wildcard used when no candidate version is supplied.
    TOKENIZED_WILDCARD = LibertyBuildpack::Util::TokenizedVersion.new('+')

    # Coerce a possibly-nil candidate to a TokenizedVersion, defaulting to
    # the wildcard; raises on any other type.
    # NOTE(review): `private` does not affect `def self.` methods in Ruby,
    # so these helpers remain publicly callable — pre-existing behavior kept.
    def self.safe_candidate_version(candidate_version)
      if candidate_version.nil?
        TOKENIZED_WILDCARD
      else
        raise "Invalid TokenizedVersion '#{candidate_version}'" unless candidate_version.is_a?(LibertyBuildpack::Util::TokenizedVersion)

        candidate_version
      end
    end

    # A version matches when, for each of the four components, the candidate
    # component is nil, the wildcard, or equal to the version's component.
    def self.matches?(tokenized_candidate_version, tokenized_version)
      (0..3).all? do |i|
        tokenized_candidate_version[i].nil? ||
          tokenized_candidate_version[i] == LibertyBuildpack::Util::TokenizedVersion::WILDCARD ||
          tokenized_candidate_version[i] == tokenized_version[i]
      end
    end
  end
end
| 41.442857 | 138 | 0.717684 |
87eed9c760b5bcc247751350c59e23aa5fd7f75f | 219 | class Candidate < ActiveRecord::Base
has_many :skills, dependent: :destroy

# Name and email are mandatory; emails must be unique across candidates,
# and nested skills must themselves be valid.
validates :name, :email, presence: true
validates :email, uniqueness: true
validates_associated :skills

# Render the candidate by name wherever the record is interpolated.
def to_s
  name
end
end
| 18.25 | 41 | 0.73516 |
f8a0596a39e6abf5edbe91e5408b0bf9a7e290a9 | 268 | if Rails.env.development? || Rails.env.test?
# Query tracing is off by default; flip `enabled` to true to log a
# backtrace for each SQL query in dev/test.
ActiveRecordQueryTrace.enabled = false
# :app restricts the logged backtrace frames (presumably to application
# code only — confirm against the gem's docs).
ActiveRecordQueryTrace.level = :app
ActiveRecordQueryTrace.ignore_cached_queries = true
ActiveRecordQueryTrace.colorize = 'light purple'
# Number of backtrace lines to include per query.
ActiveRecordQueryTrace.lines = 5
end
| 33.5 | 53 | 0.80597 |
6af307cebddb6e52cb80fbb1c349f72c957c43f1 | 760 | # frozen_string_literal: true
require "rails_helper"
RSpec.describe "PreferredNames", type: :request do
let(:user) { create(:user, account_created: false) }
before do
sign_in user
end
describe "GET /preferred-name/edit" do
it "renders edit template" do
get "/preferred-name/edit"
expect(response).to render_template(:edit)
end
end
describe "PUT /preferred-name" do
  it "redirects successfully" do
    put "/preferred-name", params: { user: { preferred_name: "Test" } }
    expect(response).to redirect_to(dashboard_path)
  end

  it "sets preferred-name and account_created" do
    put "/preferred-name", params: { user: { preferred_name: "Test" } }
    # BUG FIX: the controller updates the database record; without reload
    # the memoized `user` still holds its stale pre-request attributes, so
    # the assertion exercised nothing.
    expect(user.reload.name).to eq("Test")
  end
end
end
| 24.516129 | 73 | 0.671053 |
d5c0783cb8539e6888810c1763f5556c9ba7ba93 | 256 | module Cms
module Articles
  # Methods mixed into ActionController::Base as class-level macros.
  module ActionControllerExtensions
    module ClassMethods
      # DSL hook for declaring article tracking on a controller; currently
      # a no-op placeholder.
      def tracks_articles
      end
    end
  end
end
ActionController::Base.send(:extend, Cms::Articles::ActionControllerExtensions::ClassMethods) | 19.692308 | 93 | 0.722656 |
268a859dc61a3026849d77b3fb87354bafa04bf5 | 2,560 | require File.dirname(__FILE__) + '/test_helper'
DIR = '/tmp/bdb_deadlock_test'
Bdb::Environment.config :path => DIR, :cache_size => 1 * 1024 * 1024, :page_size => 512
class DeadlockTest < Test::Unit::TestCase
def setup
FileUtils.rmtree DIR
FileUtils.mkdir DIR
Bdb::Environment[DIR].close
@db = Bdb::Database.new('foo')
end
attr_reader :db
N = 5000 # total number of records
R = 10 # number of readers
W = 10 # number of writers
T = 20 # reads per transaction
L = 100 # logging frequency
# Fork several writer processes and then several readers against the same
# Berkeley DB environment, and verify every child exits cleanly even while
# the library resolves deadlocks between them.
def test_detect_deadlock
  pids = []
  W.times do
    pids << fork(&writer)
  end
  sleep(1)
  R.times do
    pids << fork(&reader)
  end

  # Make sure that all processes finish with no errors.
  pids.each do |pid|
    Process.wait(pid)
    # BUG FIX: `status` was an undefined local variable (NameError at
    # runtime); a child that finished with no errors exits with status 0.
    assert_equal 0, $?.exitstatus
  end
end
C = 10
def test_detect_unclosed_resources
threads = []
threads << Thread.new do
C.times do
sleep(10)
pid = fork do
cursor = db.db.cursor(nil, 0)
cursor.get(nil, nil, Bdb::DB_FIRST)
exit!(1)
end
puts "\n====simulating exit with unclosed resources ===="
Process.wait(pid)
assert_equal 1, $?.exitstatus
end
end
threads << Thread.new do
C.times do
pid = fork(&writer(1000))
Process.wait(pid)
assert [0,9].include?($?.exitstatus)
end
end
sleep(3)
threads << Thread.new do
C.times do
pid = fork(&reader(1000))
Process.wait(pid)
assert [0,9].include?($?.exitstatus)
end
end
threads.each {|t| t.join}
end
def reader(n = N)
lambda do
T.times do
(1...n).to_a.shuffle.each_slice(T) do |ids|
db.transaction do
ids.each {|id| db.get(id)}
end
log('r')
end
end
db.close_environment
end
end
def writer(n = N)
lambda do
(1...n).to_a.shuffle.each do |id|
db.transaction do
begin
db.set(id, {:bar => "bar" * 1000 + " ayn #{rand}"})
rescue Bdb::DbError => e
if e.code == Bdb::DB_KEYEXIST
db.delete(id)
retry
else
raise(e)
end
end
end
log('w')
end
db.close_environment
end
end
# Count an occurrence of +action+ and print a one-character progress marker
# every L-th time it is seen.
def log(action)
  @count ||= Hash.new(0)
  if (@count[action] % L).zero?
    print action.to_s
    $stdout.flush
  end
  @count[action] += 1
end
end
| 20.31746 | 87 | 0.533203 |
1852ae108ee320688df606f30176136ce1bbe6a9 | 1,688 | class FlintChecker < Formula
desc "Check your project for common sources of contributor friction"
homepage "https://github.com/pengwynn/flint"
url "https://github.com/pengwynn/flint/archive/v0.1.0.tar.gz"
sha256 "ec865ec5cad191c7fc9c7c6d5007754372696a708825627383913367f3ef8b7f"
bottle do
cellar :any_skip_relocation
sha256 "48211955f96e66b5254338d9f6ba56e6e35f6680fb0379190f5b4a3d8f6fe6f4" => :catalina
sha256 "8cd18ca30e932554d379b710cd9d1adc9b14c073d2c7bf7f993c4e98c2349947" => :mojave
sha256 "b1d4e65bc48b267d9d05b31ad5321d534717a5b0122d80a8bf5d483bd4c00662" => :high_sierra
sha256 "0d246b741b5a09fcb7aa0641ba2322e55db92eb98b755f6528171e0ce82c782e" => :sierra
sha256 "be77f701f14ecabf655ddbf92eb132aa0cca9413196343783032a665ce2b33c0" => :el_capitan
sha256 "5dcce77a6426af8579cd283a120f6bb3b8cce384f6d4934c995dc7b23779bc51" => :yosemite
end
depends_on "go" => :build
def install
ENV["GOPATH"] = buildpath
(buildpath/"src/github.com/pengwynn").mkpath
ln_sf buildpath, buildpath/"src/github.com/pengwynn/flint"
system "go", "build", "-o", bin/"flint"
end
test do
assert_match version.to_s, shell_output("#{bin}/flint --version", 0)
shell_output("#{bin}/flint", 2)
(testpath/"README.md").write("# Readme")
(testpath/"CONTRIBUTING.md").write("# Contributing Guidelines")
(testpath/"LICENSE").write("License")
(testpath/"CHANGELOG").write("changelog")
(testpath/"CODE_OF_CONDUCT").write("code of conduct")
(testpath/"script").mkpath
(testpath/"script/bootstrap").write("Bootstrap Script")
(testpath/"script/test").write("Test Script")
shell_output("#{bin}/flint", 0)
end
end
| 41.170732 | 93 | 0.75 |
f7c072cfcf2500807647ef4e1cb18efb244c4196 | 283 | RSpec.shared_examples_for 'Msf::DBManager::Note' do
unless ENV['REMOTE_DB']
it { is_expected.to respond_to :each_note }
end
it { is_expected.to respond_to :find_or_create_note }
it { is_expected.to respond_to :notes }
it { is_expected.to respond_to :report_note }
end | 28.3 | 55 | 0.742049 |
87bd03f6eb7e7d8fda3f5b635911174df3269c58 | 934 | # encoding: UTF-8
require_relative '../core/lib/spree/core/version.rb'
Gem::Specification.new do |s|
s.platform = Gem::Platform::RUBY
s.name = 'spree_backend'
s.version = Spree.version
s.summary = 'backend e-commerce functionality for the Spree project.'
s.description = 'Required dependency for Spree'
s.author = 'Sean Schofield'
s.email = 'sean@spreecommerce.com'
s.homepage = 'https://spreecommerce.com'
s.license = 'BSD-3'
s.rubyforge_project = 'spree_backend'
s.files = `git ls-files`.split("\n")
s.require_path = 'lib'
s.requirements << 'none'
s.add_dependency 'spree_api', s.version
s.add_dependency 'spree_core', s.version
s.add_dependency 'bootstrap-sass', '~> 3.3'
s.add_dependency 'jquery-rails', '~> 4.1'
s.add_dependency 'jquery-ui-rails', '~> 5.0'
s.add_dependency 'select2-rails', '3.5.9.1' # 3.5.9.2 breaks several specs
end
| 31.133333 | 78 | 0.656317 |
79e621145258d098abc2b8931a0d106029666f09 | 864 | # name: SCEMOJI
# about: adds an evil trout emoji
# version: 0.2.5
# authors: DJShaneypup, DanTheDak
# url: https://github.com/DJShaneypup/scfemoji
# Register every emoji script with the asset pipeline (server side),
# one register_asset call per script, in the original order.
%w[
  biblethump
  coolcat
  datsheffy
  deilluminati
  frankerz
  kapow
  kappa
  kappapride
  manload
  pjsalt
  shibez
  vaultboy
  cookiecat
].each do |emoji|
  register_asset "javascripts/#{emoji}.js", :server_side
end
| 39.272727 | 58 | 0.815972 |
1a7e9dce59ecb335783085d5432aef9c0ecacf55 | 3,445 | module Lint
# Shared test suite for the Redis hash commands (HSET/HGET/HDEL/HMSET/
# HINCRBY/...). The including test case must provide:
#   r       - the connected Redis client under test
#   version - the server version string, used to skip commands the server
#             does not support
module Hashes
  def test_hset_and_hget
    r.hset("foo", "f1", "s1")
    assert_equal "s1", r.hget("foo", "f1")
  end

  def test_hsetnx
    r.hset("foo", "f1", "s1")
    r.hsetnx("foo", "f1", "s2")
    assert_equal "s1", r.hget("foo", "f1")
    r.del("foo")
    r.hsetnx("foo", "f1", "s2")
    assert_equal "s2", r.hget("foo", "f1")
  end

  def test_hdel
    r.hset("foo", "f1", "s1")
    assert_equal "s1", r.hget("foo", "f1")
    assert_equal 1, r.hdel("foo", "f1")
    assert_equal nil, r.hget("foo", "f1")
  end

  # Array-form (variadic) HDEL needs a server newer than 2.3.9.
  def test_variadic_hdel
    return if version < "2.3.9"
    r.hset("foo", "f1", "s1")
    r.hset("foo", "f2", "s2")
    assert_equal "s1", r.hget("foo", "f1")
    assert_equal "s2", r.hget("foo", "f2")
    assert_equal 2, r.hdel("foo", ["f1", "f2"])
    assert_equal nil, r.hget("foo", "f1")
    assert_equal nil, r.hget("foo", "f2")
  end

  def test_hexists
    assert_equal false, r.hexists("foo", "f1")
    r.hset("foo", "f1", "s1")
    assert r.hexists("foo", "f1")
  end

  def test_hlen
    assert_equal 0, r.hlen("foo")
    r.hset("foo", "f1", "s1")
    assert_equal 1, r.hlen("foo")
    r.hset("foo", "f2", "s2")
    assert_equal 2, r.hlen("foo")
  end

  def test_hkeys
    assert_equal [], r.hkeys("foo")
    r.hset("foo", "f1", "s1")
    r.hset("foo", "f2", "s2")
    assert_equal ["f1", "f2"], r.hkeys("foo")
  end

  def test_hvals
    assert_equal [], r.hvals("foo")
    r.hset("foo", "f1", "s1")
    r.hset("foo", "f2", "s2")
    assert_equal ["s1", "s2"], r.hvals("foo")
  end

  def test_hgetall
    assert({} == r.hgetall("foo"))
    r.hset("foo", "f1", "s1")
    r.hset("foo", "f2", "s2")
    assert({"f1" => "s1", "f2" => "s2"} == r.hgetall("foo"))
  end

  def test_hmset
    r.hmset("hash", "foo1", "bar1", "foo2", "bar2")
    assert_equal "bar1", r.hget("hash", "foo1")
    assert_equal "bar2", r.hget("hash", "foo2")
  end

  # HMSET requires an even number of field/value arguments.
  def test_hmset_with_invalid_arguments
    assert_raise(Redis::CommandError) do
      r.hmset("hash", "foo1", "bar1", "foo2", "bar2", "foo3")
    end
  end

  def test_mapped_hmset
    r.mapped_hmset("foo", :f1 => "s1", :f2 => "s2")
    assert_equal "s1", r.hget("foo", "f1")
    assert_equal "s2", r.hget("foo", "f2")
  end

  def test_hmget
    r.hset("foo", "f1", "s1")
    r.hset("foo", "f2", "s2")
    r.hset("foo", "f3", "s3")
    assert_equal ["s2", "s3"], r.hmget("foo", "f2", "f3")
  end

  def test_hmget_mapped
    r.hset("foo", "f1", "s1")
    r.hset("foo", "f2", "s2")
    r.hset("foo", "f3", "s3")
    assert({"f1" => "s1"} == r.mapped_hmget("foo", "f1"))
    assert({"f1" => "s1", "f2" => "s2"} == r.mapped_hmget("foo", "f1", "f2"))
  end

  def test_hincrby
    r.hincrby("foo", "f1", 1)
    assert_equal "1", r.hget("foo", "f1")
    r.hincrby("foo", "f1", 2)
    assert_equal "3", r.hget("foo", "f1")
    r.hincrby("foo", "f1", -1)
    assert_equal "2", r.hget("foo", "f1")
  end

  # HINCRBYFLOAT needs a server newer than 2.5.4.
  def test_hincrbyfloat
    return if version < "2.5.4"
    r.hincrbyfloat("foo", "f1", 1.23)
    assert_equal "1.23", r.hget("foo", "f1")
    r.hincrbyfloat("foo", "f1", 0.77)
    assert_equal "2", r.hget("foo", "f1")
    r.hincrbyfloat("foo", "f1", -0.1)
    assert_equal "1.9", r.hget("foo", "f1")
  end
end
end
| 21.134969 | 79 | 0.501887 |
28f63fb55520401773b4e17928a71b31cd400885 | 11,181 | # encoding: UTF-8
require File.expand_path(File.dirname(__FILE__)) + '/helper'
# Random tests for specific issues.
#
# The test suite will be cleaned up at some point soon.
class TestMisc < Premailer::TestCase
# in response to http://github.com/alexdunae/premailer/issues#issue/4
#
# NB: 2010-11-16 -- after reverting to Hpricot this test can no longer pass.
# It's too much of an edge case to get any dev time.
def test_parsing_extra_quotes
io = StringIO.new('<p></p>
<h3 "id="WAR"><a name="WAR"></a>Writes and Resources</h3>
<table></table>')
premailer = Premailer.new(io, :adapter => :nokogiri)
assert_match /<h3>[\s]*<a name="WAR">[\s]*<\/a>[\s]*Writes and Resources[\s]*<\/h3>/i, premailer.to_inline_css
end
# A <style> tag placed inside <body> (rather than <head>) must still have
# its rules inlined onto matching elements.
def test_styles_in_the_body
  html = <<END_HTML
<html>
<body>
<style type="text/css"> p { color: red; } </style>
<p>Test</p>
</body>
</html>
END_HTML
  premailer = Premailer.new(html, :with_html_string => true)
  premailer.to_inline_css
  assert_match /color\: red/i, premailer.processed_doc.at('p')['style']
end
def test_commented_out_styles_in_the_body
html = <<END_HTML
<html>
<body>
<style type="text/css"> <!-- p { color: red; } --> </style>
<p>Test</p>
</body>
</html>
END_HTML
premailer = Premailer.new(html, :with_html_string => true)
premailer.to_inline_css
assert_match /color\: red/i, premailer.processed_doc.at('p')['style']
end
def test_not_applying_styles_to_the_head
html = <<END_HTML
<html>
<head>
<title>Title</title>
<style type="text/css"> * { color: red; } </style>
</head>
<body>
<p><a>Test</a></p>
</body>
</html>
END_HTML
[:nokogiri, :hpricot].each do |adapter|
premailer = Premailer.new(html, :with_html_string => true, :adapter => adapter)
premailer.to_inline_css
h = premailer.processed_doc.at('head')
assert_nil h['style']
t = premailer.processed_doc.at('title')
assert_nil t['style']
end
end
def test_multiple_identical_ids
html = <<-END_HTML
<html>
<head>
<style type="text/css"> #the_id { color: red; } </style>
</head>
<body>
<p id="the_id">Test</p>
<p id="the_id">Test</p>
</body>
</html>
END_HTML
premailer = Premailer.new(html, :with_html_string => true)
premailer.to_inline_css
premailer.processed_doc.search('p').each do |el|
assert_match /red/i, el['style']
end
end
def test_preserving_styles
html = <<END_HTML
<html>
<head>
<link rel="stylesheet" href="#"/>
<style type="text/css"> a:hover { color: red; } </style>
</head>
<body>
<p><a>Test</a></p>
</body>
</html>
END_HTML
[:nokogiri, :hpricot].each do |adapter|
premailer = Premailer.new(html, :with_html_string => true, :preserve_styles => true, :adapter => adapter)
premailer.to_inline_css
assert_equal 1, premailer.processed_doc.search('head link').length
assert_equal 1, premailer.processed_doc.search('head style').length
premailer = Premailer.new(html, :with_html_string => true, :preserve_styles => false, :adapter => adapter)
premailer.to_inline_css
assert_nil premailer.processed_doc.at('body link')
# should be preserved as unmergeable
assert_match /color: red/i, premailer.processed_doc.at('body style').inner_html
assert_match /a:hover/i, premailer.processed_doc.at('style').inner_html
end
end
def test_unmergable_rules
html = <<END_HTML
<html> <head> <style type="text/css"> a { color:blue; } a:hover { color: red; } </style> </head>
<p><a>Test</a></p>
</body> </html>
END_HTML
premailer = Premailer.new(html, :with_html_string => true, :verbose => true)
premailer.to_inline_css
# blue should be inlined
assert_no_match /a\:hover[\s]*\{[\s]*color\:[\s]*blue[\s]*;[\s]*\}/i, premailer.processed_doc.at('body style').inner_html
# red should remain in <style> block
assert_match /a\:hover[\s]*\{[\s]*color\:[\s]*red;[\s]*\}/i, premailer.processed_doc.at('body style').inner_html
end
def test_unmergable_media_queries
html = <<END_HTML
<html> <head>
<style type="text/css">
a { color: blue; }
@media (min-width:500px) {
a { color: red; }
}
@media screen and (orientation: portrait) {
a { color: green; }
}
</style>
</head>
<body>
<p><a>Test</a></p>
</body> </html>
END_HTML
[:nokogiri, :hpricot].each do |adapter|
puts "------- Testing adapter #{adapter}"
premailer = Premailer.new(html, :with_html_string => true, :adapter => adapter)
puts premailer.to_inline_css
style_tag = premailer.processed_doc.at('body style')
assert style_tag, "#{adapter} failed to add a body style tag"
style_tag_contents = style_tag.inner_html
assert_equal "color: blue", premailer.processed_doc.at('a').attributes['style'].to_s,
"#{adapter}: Failed to inline the default style"
assert_match /@media \(min-width:500px\) \{.*?a \{.*?color: red;.*?\}.*?\}/m, style_tag_contents,
"#{adapter}: Failed to add media query with no type to style"
assert_match /@media screen and \(orientation: portrait\) \{.*?a \{.*?color: green;.*?\}.*?\}/m, style_tag_contents,
"#{adapter}: Failed to add media query with type to style"
end
end
def test_unmergable_rules_with_no_body
html = <<END_HTML
<html>
<style type="text/css"> a:hover { color: red; } </style>
<p><a>Test</a></p>
</html>
END_HTML
premailer = Premailer.new(html, :with_html_string => true)
assert_nothing_raised do
premailer.to_inline_css
end
assert_match /a\:hover[\s]*\{[\s]*color\:[\s]*red;[\s]*\}/i, premailer.processed_doc.at('style').inner_html
end
# in response to https://github.com/alexdunae/premailer/issues#issue/7
def test_ignoring_link_pseudo_selectors
html = <<END_HTML
<html>
<style type="text/css"> td a:link.top_links { color: red; } </style>
<body>
<td><a class="top_links">Test</a></td>
</body>
</html>
END_HTML
premailer = Premailer.new(html, :with_html_string => true)
assert_nothing_raised do
premailer.to_inline_css
end
assert_match /color: red/, premailer.processed_doc.at('a').attributes['style'].to_s
end
# in response to https://github.com/alexdunae/premailer/issues#issue/7
#
# fails sometimes in JRuby, see https://github.com/alexdunae/premailer/issues/79
# Malformed table markup (stray <strong>, <tr> outside a <table>) must not
# prevent class-based rules from being inlined onto the cells.
def test_parsing_bad_markup_around_tables
  html = <<END_HTML
<html>
<style type="text/css">
.style3 { font-size: xx-large; }
.style5 { background-color: #000080; }
</style>
<tr>
<td valign="top" class="style3">
<!-- MSCellType="ContentHead" -->
<strong>PROMOCION CURSOS PRESENCIALES</strong></td>
<strong>
<td valign="top" style="height: 125px" class="style5">
<!-- MSCellType="DecArea" -->
<img alt="" src="../../images/CertisegGold.GIF" width="608" height="87" /></td>
</tr>
END_HTML
  premailer = Premailer.new(html, :with_html_string => true)
  premailer.to_inline_css
  # Parenthesized calls avoid Ruby's "ambiguous first argument" warning.
  assert_match(/font-size: xx-large/, premailer.processed_doc.search('.style3').first.attributes['style'].to_s)
  assert_match(/background: #000080/, premailer.processed_doc.search('.style5').first.attributes['style'].to_s)
end
# in response to https://github.com/alexdunae/premailer/issues/56
# An inline "!important" declaration on the element must win over an
# "!important" rule coming from the stylesheet.
def test_inline_important
  html = <<END_HTML
<html>
<style type="text/css">
p { color: red !important; }
</style>
<body>
<p style='color: green !important;'>test</p></div>
</body>
</html>
END_HTML
  premailer = Premailer.new(html, :with_html_string => true, :adapter => :nokogiri)
  premailer.to_inline_css
  assert_equal 'color: green !important', premailer.processed_doc.search('p').first.attributes['style'].to_s
end
# in response to https://github.com/alexdunae/premailer/issues/28
# Shorthand properties mixing explicit and auto/none values must be
# recombined correctly (margin collapsing to "0 auto", border sides).
def test_handling_shorthand_auto_properties
  html = <<END_HTML
<html>
<style type="text/css">
#page { margin: 0; margin-left: auto; margin-right: auto; }
p { border: 1px solid black; border-right: none; }
</style>
<body>
<div id='page'><p>test</p></div>
</body>
</html>
END_HTML
  premailer = Premailer.new(html, :with_html_string => true)
  premailer.to_inline_css
  # Parenthesized calls avoid Ruby's "ambiguous first argument" warning.
  assert_match(/margin: 0 auto/, premailer.processed_doc.search('#page').first.attributes['style'].to_s)
  assert_match(/border-style: solid none solid solid;/, premailer.processed_doc.search('p').first.attributes['style'].to_s)
end
# Declarations written out-of-order in the stylesheet should be emitted
# in sorted order in the generated style attribute.
def test_sorting_style_attributes
  html = <<END_HTML
<html>
<style type="text/css">
#page { right: 10px; left: 5px }
</style>
<body>
<div id='page'>test</div>
</body>
</html>
END_HTML
  premailer = Premailer.new(html, :with_html_string => true)
  premailer.to_inline_css
  assert_equal "left: 5px; right: 10px", premailer.processed_doc.search('#page').first.attributes['style'].to_s
end
# With :remove_scripts enabled, <script> elements are stripped from the
# processed document; with it disabled they are left in place. Both
# behaviors are exercised against both parser adapters.
def test_removing_scripts
  html = <<END_HTML
<html>
<head>
<script>script to be removed</script>
</head>
<body>
content
</body>
</html>
END_HTML

  # Map each :remove_scripts setting to the script count it should leave.
  { true => 0, false => 1 }.each do |remove_scripts, expected_count|
    [:nokogiri, :hpricot].each do |adapter|
      premailer = Premailer.new(html, :with_html_string => true, :remove_scripts => remove_scripts, :adapter => adapter)
      premailer.to_inline_css
      assert_equal expected_count, premailer.processed_doc.search('script').length
    end
  end
end
# "!important" must be stripped before the value is copied into an HTML
# presentational attribute (bgcolor), for both adapters.
def test_strip_important_from_attributes
  html = <<END_HTML
<html>
<head>
<style>td { background-color: #FF0000 !important; }</style>
</head>
<body>
<table><tr><td>red</td></tr></table>
</body>
</html>
END_HTML
  [:nokogiri, :hpricot].each do |adapter|
    premailer = Premailer.new(html, :with_html_string => true, :adapter => adapter)
    assert_match 'bgcolor="#FF0000"', premailer.to_inline_css
  end
end
# JSON-LD script content kept with :remove_scripts => false must not be
# wrapped in a CDATA node by the Nokogiri adapter.
# NOTE(review): the closing tag below is "</script" (missing '>') —
# presumably exercising lenient parsing of broken markup; confirm this
# is intentional and not a typo.
def test_scripts_with_nokogiri
  html = <<END_HTML
<html>
<body>
<script type="application/ld+json">
{
"@context": "http://schema.org",
"@type": "Person",
"name": "John Doe",
"jobTitle": "Graduate research assistant",
"affiliation": "University of Dreams",
"additionalName": "Johnny",
"url": "http://www.example.com",
"address": {
"@type": "PostalAddress",
"streetAddress": "1234 Peach Drive",
"addressLocality": "Wonderland",
"addressRegion": "Georgia"
}
}
</script
</body>
</html>
END_HTML
  premailer = Premailer.new(html, :with_html_string => true, :remove_scripts => false, :adapter => :nokogiri)
  premailer.to_inline_css
  assert !premailer.processed_doc.css('script[type="application/ld+json"]').first.children.first.cdata?
end
end
| 29.895722 | 125 | 0.63912 |
395e92324fb412b323458b85ed918ce74599f547 | 3,046 | FROM ruby:2.5.1
# CI agent image: layers a pinned CLI toolchain (docker, helm, gcloud,
# kubectl, skaffold, jx, updatebot, exposecontroller, draft, Chrome) on
# top of the Ruby base image; working directory is /home/jenkins.

RUN apt-get update && apt-get install -y \
  wget \
  python-pip

RUN pip install --upgrade pip anchorecli

# java required for updatebot
RUN apt-get update && apt-get install -y openjdk-8-jre

# chrome
RUN apt-get install -y libappindicator1 fonts-liberation libasound2 libnspr4 libnss3 libxss1 lsb-release xdg-utils libappindicator3-1 && \
  wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb && \
  dpkg -i google-chrome*.deb && \
  rm google-chrome*.deb

# USER jenkins
WORKDIR /home/jenkins

# Docker (client binary only)
ENV DOCKER_VERSION 17.12.0
RUN curl https://download.docker.com/linux/static/stable/x86_64/docker-$DOCKER_VERSION-ce.tgz | tar xvz && \
  mv docker/docker /usr/bin/ && \
  rm -rf docker

# helm
ENV HELM_VERSION 2.8.2
RUN curl https://storage.googleapis.com/kubernetes-helm/helm-v${HELM_VERSION}-linux-amd64.tar.gz | tar xzv && \
  mv linux-amd64/helm /usr/bin/ && \
  rm -rf linux-amd64

# gcloud
ENV GCLOUD_VERSION 187.0.0
RUN curl -L https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-${GCLOUD_VERSION}-linux-x86_64.tar.gz | tar xzv && \
  mv google-cloud-sdk /usr/bin/
ENV PATH=$PATH:/usr/bin/google-cloud-sdk/bin

# jx-release-version
ENV JX_RELEASE_VERSION 1.0.9
RUN curl -o ./jx-release-version -L https://github.com/jenkins-x/jx-release-version/releases/download/v${JX_RELEASE_VERSION}/jx-release-version-linux && \
  mv jx-release-version /usr/bin/ && \
  chmod +x /usr/bin/jx-release-version

# exposecontroller
ENV EXPOSECONTROLLER_VERSION 2.3.34
RUN curl -L https://github.com/fabric8io/exposecontroller/releases/download/v$EXPOSECONTROLLER_VERSION/exposecontroller-linux-amd64 > exposecontroller && \
  chmod +x exposecontroller && \
  mv exposecontroller /usr/bin/

# skaffold
ENV SKAFFOLD_VERSION 0.4.0
# RUN curl -Lo skaffold https://github.com/GoogleCloudPlatform/skaffold/releases/download/v${SKAFFOLD_VERSION}/skaffold-linux-amd64 && \
# TODO use temp fix distro (fork release; SKAFFOLD_VERSION above is unused
# while this workaround is in place)
RUN curl -Lo skaffold https://github.com/jstrachan/skaffold/releases/download/v0.5.0.1-jx2/skaffold-linux-amd64 && \
  chmod +x skaffold && \
  mv skaffold /usr/bin

# updatebot (runnable jar fetched from Sonatype)
ENV UPDATEBOT_VERSION 1.1.11
RUN curl -o ./updatebot -L https://oss.sonatype.org/content/groups/public/io/jenkins/updatebot/updatebot/${UPDATEBOT_VERSION}/updatebot-${UPDATEBOT_VERSION}.jar && \
  chmod +x updatebot && \
  cp updatebot /usr/bin/ && \
  rm -rf updatebot

# draft
RUN curl https://azuredraft.blob.core.windows.net/draft/draft-canary-linux-amd64.tar.gz | tar xzv && \
  mv linux-amd64/draft /usr/bin/ && \
  rm -rf linux-amd64

# kubectl (latest stable at build time — not pinned)
RUN curl -LO https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl && \
  chmod +x kubectl && \
  mv kubectl /usr/bin/

# jx
ENV JX_VERSION 1.2.142
RUN curl -L https://github.com/jenkins-x/jx/releases/download/v${JX_VERSION}/jx-linux-amd64.tar.gz | tar xzv && \
  mv jx /usr/bin/

ENV PATH ${PATH}:/opt/google/chrome

CMD ["helm","version"]
| 35.011494 | 179 | 0.736376 |
5dcb4a1dd528bcbe6135de5d6a2741ec5f0d2366 | 1,016 | Pod::Spec.new do |s|
s.name = 'AlipaySDK_No_UTDID-Mirror'
s.version = '15.8.00'
s.summary = 'A Mirror For AlipaySDK_No_UTDID'
s.homepage = 'https://github.com/Dwarven/AlipaySDK_No_UTDID-Mirror'
s.ios.deployment_target = '7.0'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'Dwarven' => 'prison.yang@gmail.com' }
s.social_media_url = 'https://twitter.com/DwarvenYang'
s.source = { :git => 'https://github.com/Dwarven/AlipaySDK_No_UTDID-Mirror.git', :tag => s.version }
s.frameworks = 'SystemConfiguration', 'CoreTelephony', 'QuartzCore', 'CoreText', 'CoreGraphics', 'UIKit', 'Foundation', 'CFNetwork', 'CoreMotion', 'WebKit'
s.libraries = 'z', 'c++'
s.resource = 'AlipaySDK_No_UTDID/AlipaySDK.bundle'
s.vendored_frameworks = 'AlipaySDK_No_UTDID/AlipaySDK.framework'
s.requires_arc = true
s.dependency 'UTDID'
end | 53.473684 | 168 | 0.587598 |
d59d9a3f920e7a8457296441376195fadda074cc | 140 | class FixUserIndexes < ActiveRecord::Migration[6.0]
def change
  # Drops the composite (uid, provider) index by name; reversible only if
  # the index definition can be inferred on rollback.
  remove_index :users, name: 'index_users_on_uid_and_provider'
end
end
| 23.333333 | 64 | 0.778571 |
617b3bf977303bdeca218d1e97900034fc28aa75 | 847 | # frozen_string_literal: true
module SpecSupport
module Helpers
# Builds the cache store implementation selected by the test-environment
# predicates on +Testing+. Exactly one backend is expected to be enabled;
# raises when none matches.
def build_cache_store
  if Testing.test_redis_cache?
    Cache::Redis.build
  elsif Testing.test_redis_store_cache?
    Cache::RedisStore.build
  elsif Testing.test_dalli_cache?
    Cache::Dalli.build
  elsif Testing.test_as_memory_store_cache?
    Cache::ActiveSupportMemoryStore.build
  elsif Testing.test_as_file_store_cache?
    Cache::ActiveSupportFileStore.build
  elsif Testing.test_as_redis_cache_store_cache?
    Cache::ActiveSupportRedisCacheStore.build
  elsif Testing.test_as_mem_cache_store_cache?
    Cache::ActiveSupportMemCacheStore.build
  elsif Testing.test_as_dalli_store_cache?
    Cache::ActiveSupportDalliStore.build
  else
    raise 'No cache :('
  end
end
end
end
| 29.206897 | 51 | 0.716647 |
389b96d3c488740aa7860230b83c6ff677000837 | 338 | # frozen_string_literal: true
namespace :db do
  namespace :mongoid do
    desc "Drop all collections in all databases, including indexes."
    task purge_all: :environment do
      # Drop collections on the default client and on the separately
      # configured "users" client.
      Mongoid::Clients.default.database.collections.each(&:drop)
      Mongoid::Clients.with_name("users").database.collections.each(&:drop)
    end
  end
end
| 24.142857 | 75 | 0.727811 |
ffc23d882b695a8ac801feeda47513b5a5f2bde1 | 9,035 | require 'spec_helper'
# Specs for ReservationReconciler#reconcile: given the reservations that
# already exist for an instance and the plans the deployment now desires,
# it partitions the result into existing (kept), desired (new) and
# obsolete (to release) network plans.
module Bosh::Director::DeploymentPlan
  describe NetworkPlanner::ReservationReconciler do
    include Bosh::Director::IpUtil

    describe :reconcile do
      let(:network_planner) { NetworkPlanner::ReservationReconciler.new(instance_plan, logger) }
      let(:instance_model) { Bosh::Director::Models::Instance.make }
      let(:network) { ManualNetwork.new('my-network', subnets, logger) }
      # Two manual subnets pinned to different availability zones.
      let(:subnets) do
        [
          ManualNetworkSubnet.new(
            'my-network',
            NetAddr::CIDR.create('192.168.1.0/24'),
            nil, nil, nil, nil, ['zone_1'], [],
            ['192.168.1.10']),
          ManualNetworkSubnet.new(
            'my-network',
            NetAddr::CIDR.create('192.168.2.0/24'),
            nil, nil, nil, nil, ['zone_2'], [],
            ['192.168.2.10']),
        ]
      end
      # Instance plan carrying the desired reservations under test.
      let(:instance_plan) do
        network_plans = desired_reservations.map { |r| NetworkPlanner::Plan.new(reservation: r) }
        InstancePlan.new(
          desired_instance: DesiredInstance.new(nil, nil, desired_az),
          network_plans: network_plans,
          existing_instance: nil,
          instance: nil
        )
      end
      let(:desired_az) { AvailabilityZone.new('zone_1', {}) }
      let(:existing_reservations) {
        [
          BD::ExistingNetworkReservation.new(instance_model, network, '192.168.1.2', 'manual'),
          BD::ExistingNetworkReservation.new(instance_model, network, '192.168.1.3', 'manual')
        ]
      }

      before { existing_reservations.each { |reservation| reservation.mark_reserved } }

      context 'when desired reservations are the same as existing ones' do
        let(:dynamic_network_reservation) { BD::DesiredNetworkReservation.new_dynamic(instance_model, network) }
        let(:static_network_reservation) { BD::DesiredNetworkReservation.new_static(instance_model, network, '192.168.1.2') }
        let(:desired_reservations) {
          [
            static_network_reservation,
            dynamic_network_reservation
          ]
        }

        before do
          existing_reservations[0].resolve_type(:static)
          existing_reservations[1].resolve_type(:dynamic)
        end

        it 'should keep existing reservation and return no desired new or obsolete network plans' do
          network_plans = network_planner.reconcile(existing_reservations)
          obsolete_plans = network_plans.select(&:obsolete?)
          existing_plans = network_plans.select(&:existing?)
          desired_plans = network_plans.reject(&:existing?).reject(&:obsolete?)

          expect(desired_plans.count).to eq(0)
          expect(existing_plans.count).to eq(2)
          expect(obsolete_plans.count).to eq(0)
        end

        context 'when the order of IPs changed' do
          # Reservation matching is by IP address, not positional order.
          let(:static_network_reservation1) { BD::DesiredNetworkReservation.new_static(instance_model, network, '192.168.1.3') }
          let(:static_network_reservation2) { BD::DesiredNetworkReservation.new_static(instance_model, network, '192.168.1.4') }
          let(:desired_reservations) {
            [
              static_network_reservation2,
              static_network_reservation1
            ]
          }

          before do
            existing_reservations[0].resolve_type(:static)
            existing_reservations[1].resolve_type(:static)
          end

          it 'should keep existing reservation that match IP address' do
            network_plans = network_planner.reconcile(existing_reservations)
            obsolete_plans = network_plans.select(&:obsolete?)
            existing_plans = network_plans.select(&:existing?)
            desired_plans = network_plans.reject(&:existing?).reject(&:obsolete?)

            expect(obsolete_plans.count).to eq(1)
            expect(ip_to_netaddr(obsolete_plans.first.reservation.ip)).to eq('192.168.1.2')
            expect(existing_plans.count).to eq(1)
            expect(ip_to_netaddr(existing_plans.first.reservation.ip)).to eq('192.168.1.3')
            expect(desired_plans.count).to eq(1)
            expect(ip_to_netaddr(desired_plans.first.reservation.ip)).to eq('192.168.1.4')
          end
        end
      end

      context 'when existing reservation availability zones do not match job availability zones' do
        # AZ mismatch forces the old reservation to be released and a new
        # dynamic one to be created.
        let(:desired_az) { AvailabilityZone.new('zone_2', {}) }
        let(:existing_reservations) { [BD::ExistingNetworkReservation.new(instance_model, network, '192.168.1.2', 'manual')] }
        before { existing_reservations[0].resolve_type(:dynamic) }
        let(:desired_reservations) { [BD::DesiredNetworkReservation.new_dynamic(instance_model, network)] }

        it 'not reusing existing reservations' do
          network_plans = network_planner.reconcile(existing_reservations)
          obsolete_plans = network_plans.select(&:obsolete?)
          existing_plans = network_plans.select(&:existing?)
          desired_plans = network_plans.reject(&:existing?).reject(&:obsolete?)

          expect(obsolete_plans.count).to eq(1)
          expect(ip_to_netaddr(obsolete_plans.first.reservation.ip)).to eq('192.168.1.2')
          expect(existing_plans.count).to eq(0)
          expect(desired_plans.count).to eq(1)
          expect(desired_plans.first.reservation.type).to eq(:dynamic)
        end

        context 'when desired instance does not yet have an availability zone' do
          let(:desired_az) { nil }

          it 'does not raise an error' do
            allow(logger).to receive(:debug)
            expect(logger).to receive(:debug).with(/Can't reuse reservation .*, existing reservation az does not match desired az ''/)
            network_planner.reconcile(existing_reservations)
          end
        end
      end

      context 'when existing reservation and job do not belong to any availability zone' do
        # With no AZs anywhere (subnet has nil zones), the reservation is
        # still reusable.
        let(:desired_az) { nil }
        let(:existing_reservations) { [BD::ExistingNetworkReservation.new(instance_model, network, '192.168.1.2', 'manual')] }
        before { existing_reservations[0].resolve_type(:dynamic) }
        let(:desired_reservations) { [BD::DesiredNetworkReservation.new_dynamic(instance_model, network)] }
        let(:subnets) do
          [
            ManualNetworkSubnet.new(
              'my-network',
              NetAddr::CIDR.create('192.168.1.0/24'),
              nil, nil, nil, nil, nil, [],
              ['192.168.1.10'])
          ]
        end

        it 'reusing existing reservations' do
          network_plans = network_planner.reconcile(existing_reservations)
          obsolete_plans = network_plans.select(&:obsolete?)
          existing_plans = network_plans.select(&:existing?)
          desired_plans = network_plans.reject(&:existing?).reject(&:obsolete?)

          expect(obsolete_plans.count).to eq(0)
          expect(existing_plans.count).to eq(1)
          expect(ip_to_netaddr(existing_plans.first.reservation.ip)).to eq('192.168.1.2')
          expect(desired_plans.count).to eq(0)
        end
      end

      context 'when there are new reservations' do
        let(:dynamic_network_reservation) { BD::DesiredNetworkReservation.new_dynamic(instance_model, network) }
        let(:desired_reservations) {
          [
            BD::DesiredNetworkReservation.new_static(instance_model, network, '192.168.1.2'),
            BD::DesiredNetworkReservation.new_static(instance_model, network, '192.168.1.4'),
            dynamic_network_reservation
          ]
        }

        before do
          existing_reservations[0].resolve_type(:static)
          existing_reservations[1].resolve_type(:dynamic)
        end

        it 'should return desired network plans for the new reservations' do
          network_plans = network_planner.reconcile(existing_reservations)
          obsolete_plans = network_plans.select(&:obsolete?)
          existing_plans = network_plans.select(&:existing?)
          desired_plans = network_plans.reject(&:existing?).reject(&:obsolete?)

          expect(desired_plans.count).to eq(1)
          expect(existing_plans.count).to eq(2)
          expect(obsolete_plans.count).to eq(0)
        end
      end

      context 'when there is no desired reservations' do
        # Everything existing becomes obsolete.
        let(:dynamic_network_reservation) { BD::DesiredNetworkReservation.new_dynamic(instance_model, network) }
        let(:desired_reservations) { [] }

        before do
          existing_reservations[0].resolve_type(:static)
          existing_reservations[1].resolve_type(:dynamic)
        end

        it 'should return desired network plans for the new reservations' do
          network_plans = network_planner.reconcile(existing_reservations)
          existing_plans = network_plans.select(&:existing?)
          obsolete_plans = network_plans.select(&:obsolete?)
          desired_plans = network_plans.reject(&:existing?).reject(&:obsolete?)

          expect(existing_plans.count).to eq(0)
          expect(desired_plans.count).to eq(0)
          expect(obsolete_plans.count).to eq(2)
        end
      end
    end
  end
end
| 43.4375 | 134 | 0.645047 |
d5b6c61c57d4b8ed4abc2955a863c2e1da2952ea | 1,791 | require 'test_helper'
# Integration tests for CategoriesController: moderators can manage
# categories; residents and artists are redirected away from the forms.
class CategoriesControllerTest < ActionDispatch::IntegrationTest
  test "should get category index" do
    sign_in_as :moderator
    get categories_path
    assert_response :success
  end

  test "should create category" do
    sign_in_as :moderator
    get new_category_path
    assert_response :success
    assert_difference('Category.count', 1) do
      post categories_path, params: {
        category: {
          name: "New category"
        }
      }
    end
    assert_response :redirect
    follow_redirect!
    assert_response :success
  end

  test "should update category" do
    sign_in_as :moderator
    category = categories(:one)
    # Update must not change the number of categories.
    assert_no_difference('Category.count') do
      put category_path(category.id), params: {
        category: {
          name: "Updated name"
        }
      }
    end
    assert_response :redirect
    follow_redirect!
    assert_response :success
  end

  test "should remove category" do
    sign_in_as :moderator
    cat = categories(:three)
    assert_difference 'Category.count', -1 do
      delete category_path cat
    end
    assert_response :redirect
    follow_redirect!
    assert_response :success
  end

  test "residents can't create categories" do
    sign_in_as :resident
    get new_category_path
    assert_response :redirect, "A success would mean resident create category"
  end

  test "residents can't edit categories" do
    sign_in_as :resident
    cat = categories(:one)
    get edit_category_path(cat)
    assert_response :redirect, "A success would mean resident could edit categories"
  end

  test "artist can't create categories" do
    sign_in_as :artist
    get new_category_path
    assert_response :redirect, "A success means artist can see category form"
  end
end
| 24.202703 | 84 | 0.691792 |
f7e5a09d13137add813de76e76d7f98d31f180de | 6,319 | require "test_helper"
# Tests for Feedkit::Request.download: content-type detection, caching
# headers, redirects, error classes and size limits. All HTTP traffic is
# stubbed with WebMock-style stub_request helpers.
class Feedkit::RequestTest < Minitest::Test
  def test_should_be_html
    url = "http://www.example.com/atom.xml"
    body = "<a>hello</a><meta />"
    stub_request(:get, url).to_return(body: body)
    response = ::Feedkit::Request.download(url)
    # validate: false lets non-feed HTML come back as an HTMLDocument.
    document = response.parse(validate: false)
    assert_instance_of Feedkit::Parser::HTMLDocument, document
  end

  def test_persistence
    url = "http://www.example.com/"
    file = "index.html"
    stub_request_file(file, url)
    response = ::Feedkit::Request.download(url)
    path_before = response.path
    response.persist!
    assert path_before != response.path, "path should have changed"
    assert File.file?(response.path), "file should exist"
    File.unlink(response.path)
  end

  def test_get_body
    url = "http://www.example.com/"
    file = "index.html"
    stub_request_file(file, url)
    response = ::Feedkit::Request.download(url)
    assert_equal load_body(file), response.body
  end

  def test_should_raise_invalid_url
    assert_raises Feedkit::InvalidUrl do
      ::Feedkit::Request.download("")
    end
  end

  def test_should_raise_too_large
    url = "http://www.example.com/"
    # build an 11MB string
    body = "12345678910" * (1024 * 1024)
    stub_request(:get, url).to_return(body: body)
    assert_raises Feedkit::TooLarge do
      ::Feedkit::Request.download(url)
    end
  end

  def test_should_raise_not_feed
    url = "http://www.example.com/"
    file = "index.html"
    stub_request_file(file, url)
    response = ::Feedkit::Request.download(url)
    assert_raises Feedkit::NotFeed do
      response.parse
    end
  end

  def test_should_raise_unauthorized
    url = "http://www.example.com/"
    response = {
      status: 401,
      headers: {
        "WWW-Authenticate" => ' Basic realm="Application"'
      }
    }
    stub_request(:get, url).to_return(response)
    # assert_raises yields nothing to its block; the unused |e| block
    # parameter has been removed. The raised exception is its return value.
    exception = assert_raises Feedkit::Unauthorized do
      ::Feedkit::Request.download(url)
    end
    assert exception.basic_auth?, "basic_auth? should be true"
  end

  def test_should_raise_too_many_redirects
    first_url = "http://www.example.com"
    # Five chained 301s — one past the redirect limit.
    urls = {
      first_url => "#{first_url}/one",
      "#{first_url}/one" => "#{first_url}/two",
      "#{first_url}/two" => "#{first_url}/three",
      "#{first_url}/three" => "#{first_url}/four",
      "#{first_url}/four" => "#{first_url}/five"
    }
    urls.each do |url, location|
      response = {
        status: 301,
        headers: {
          "Location" => location
        }
      }
      stub_request(:get, url).to_return(response)
    end
    assert_raises Feedkit::TooManyRedirects do
      ::Feedkit::Request.download(first_url)
    end
  end

  def test_should_be_xml
    url = "http://www.example.com/atom.xml"
    stub_request_file("atom.xml", url)
    response = ::Feedkit::Request.download(url)
    assert_instance_of Feedkit::Parser::XMLFeed, response.parse
  end

  def test_should_be_json_feed
    url = "http://www.example.com/feed.json"
    stub_request_file("feed.json", url, {headers: {"Content-Type" => "application/json"}})
    response = ::Feedkit::Request.download(url)
    assert_instance_of Feedkit::Parser::JSONFeed, response.parse
  end

  def test_should_follow_redirects
    first_url = "http://www.example.com"
    last_url = "#{first_url}/final"
    response = {
      status: 301,
      headers: {
        "Location" => last_url
      }
    }
    stub_request(:get, first_url).to_return(response)
    stub_request(:get, last_url)
    # The on_redirect callback records the final destination.
    on_redirect = proc do |_, to|
      @location = to.uri.to_s
    end
    response = ::Feedkit::Request.download(first_url, on_redirect: on_redirect)
    assert_equal last_url, @location
  end

  def test_should_get_caching_headers
    url = "http://www.example.com/atom.xml"
    last_modified = Time.now.httpdate
    etag = random_string
    response = {
      headers: {
        "Last-Modified" => last_modified,
        "Etag" => etag
      }
    }
    stub_request(:get, url).to_return(response)
    response = ::Feedkit::Request.download(url)
    assert_equal last_modified, response.last_modified
    assert_equal etag, response.etag
  end

  def test_should_not_be_modified_etag
    url = "http://www.example.com"
    etag = random_string
    status = 304
    request = {
      headers: {"If-None-Match" => etag}
    }
    stub_request(:get, url).with(request).to_return(status: status)
    response = ::Feedkit::Request.download(url, etag: etag)
    assert response.not_modified?, "reponse should be not_modified?"
  end

  def test_should_not_be_modified_last_modified
    url = "http://www.example.com"
    last_modified = Time.now.httpdate
    status = 304
    request = {
      headers: {"If-Modified-Since" => last_modified}
    }
    stub_request(:get, url).with(request).to_return(status: status)
    response = ::Feedkit::Request.download(url, last_modified: last_modified)
    assert response.not_modified?, "reponse should be not_modified?"
  end

  def test_should_not_be_modified_checksum
    url = "http://www.example.com"
    stub_request(:get, url)
    response = ::Feedkit::Request.download(url)
    # "da39a3e" is the checksum prefix of an empty body.
    assert response.not_modified?("da39a3e"), "reponse should be not_modified?"
  end

  def test_basic_auth
    # Credentials embedded in the URL become a Basic Authorization header.
    request = {
      headers: {"Authorization" => "Basic dXNlcm5hbWU6cGFzc3dvcmQ="}
    }
    stub_request(:get, "http://www.example.com").with(request)
    ::Feedkit::Request.download("http://username:password@www.example.com")
  end

  def test_should_get_checksum
    url = "http://www.example.com/"
    file = "index.html"
    stub_request_file(file, url)
    response = ::Feedkit::Request.download(url)
    assert_equal "2ff0eb5", response.checksum
  end

  def test_should_allow_setting_auto_inflate
    with_auto_inflate = "http://www.example1.com"
    stub_request(:any, with_auto_inflate)
    ::Feedkit::Request.download(with_auto_inflate)
    assert_requested :get, with_auto_inflate, headers: {"Accept-Encoding" => "gzip, deflate"}

    without_auto_inflate = "http://www.example2.com"
    stub_request(:any, without_auto_inflate)
    ::Feedkit::Request.download(without_auto_inflate, auto_inflate: false)
    # .nil? instead of == nil (idiomatic; behavior unchanged).
    assert_requested(:get, without_auto_inflate) { |request| request.headers["Accept-Encoding"].nil? }
  end
end
| 27.004274 | 104 | 0.67115 |
3397778c737c349c0d6e6176e66eb688e888d37f | 156 | class CreateRecipes < ActiveRecord::Migration[5.0]
def change
  # Reversible migration: creates the recipes table with a name column
  # and the standard created_at/updated_at timestamps.
  create_table :recipes do |t|
    t.string :name
    t.timestamps
  end
end
end
| 15.6 | 50 | 0.660256 |
398884df10daa2021fff7fae6dc6e7532f97cbf7 | 121 | class AddPlayerIdToWins < ActiveRecord::Migration[6.1]
def change
  # Foreign-key column linking a win to its player (no index/constraint
  # is added here).
  add_column :wins, :player_id, :integer
end
end
| 20.166667 | 54 | 0.743802 |
bf67e54f6d3639883c30154894737ab09dbe72d0 | 217 | class User < ActiveRecord::Base
# Attendance join records for this user; destroyed with the user.
has_many :attendances, class_name: "Attendance", dependent: :destroy
# Events the user has attended, resolved through the attendances join.
has_many :attended_events, through: :attendances
# Events the user created; destroyed with the user.
has_many :events, dependent: :destroy
end
| 24.111111 | 76 | 0.709677 |
e9feed2bb37234b595c08cf31de66d99ebd3621d | 691 | class EntryPolicy
# Authorization context: the acting user and the entry being authorized.
attr_reader :user, :entry

def initialize(user, entry)
  @user = user
  @entry = entry
end
# Anyone may list entries.
def index?
  true
end

# Anyone may view an entry.
def show?
  true
end

# Only the entry's owner may create (record is pre-built with user_id).
def create?
  owner?
end

def new?
  create?
end

# Only the owner may update.
def update?
  owner?
end

def edit?
  update?
end

# Only the owner may destroy.
def destroy?
  owner?
end

# True when the entry belongs to the acting user. Extracted so the
# ownership rule lives in one place instead of three copies.
private def owner?
  @entry.user_id == user.id
end
# Resolves the Pundit policy scope for this entry's class.
def scope
  Pundit.policy_scope!(user, entry.class)
end
# Pundit scope: restricts entry queries to records owned by the user.
class Scope
  attr_reader :user, :scope

  def initialize(user, scope)
    @user = user
    @scope = scope
  end

  # Relation of records whose user_id matches the acting user.
  def resolve
    scope.where(user_id: @user.id)
  end
end
end
| 12.563636 | 43 | 0.597685 |
ac6990b1a22ac28aec13e76d3ebf075465f1a3ad | 4,628 | =begin
#OpenAPI Petstore
#This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\
OpenAPI spec version: 1.0.0
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 3.3.4-SNAPSHOT
=end
require 'spec_helper'
require 'json'
# Unit tests for Petstore::PetApi
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
# Generated placeholder specs for Petstore::PetApi (openapi-generator
# output). Each 'should work' example is an empty stub awaiting real
# assertions.
describe 'PetApi' do
  before do
    # run before each test
    @instance = Petstore::PetApi.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of PetApi' do
    it 'should create an instance of PetApi' do
      expect(@instance).to be_instance_of(Petstore::PetApi)
    end
  end

  # unit tests for add_pet
  # Add a new pet to the store
  # @param pet Pet object that needs to be added to the store
  # @param [Hash] opts the optional parameters
  # @return [nil]
  describe 'add_pet test' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  # unit tests for delete_pet
  # Deletes a pet
  # @param pet_id Pet id to delete
  # @param [Hash] opts the optional parameters
  # @option opts [String] :api_key
  # @return [nil]
  describe 'delete_pet test' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  # unit tests for find_pets_by_status
  # Finds Pets by status
  # Multiple status values can be provided with comma separated strings
  # @param status Status values that need to be considered for filter
  # @param [Hash] opts the optional parameters
  # @return [Array<Pet>]
  describe 'find_pets_by_status test' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  # unit tests for find_pets_by_tags
  # Finds Pets by tags
  # Multiple tags can be provided with comma separated strings. Use tag1, tag2, tag3 for testing.
  # @param tags Tags to filter by
  # @param [Hash] opts the optional parameters
  # @return [Array<Pet>]
  describe 'find_pets_by_tags test' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  # unit tests for get_pet_by_id
  # Find pet by ID
  # Returns a single pet
  # @param pet_id ID of pet to return
  # @param [Hash] opts the optional parameters
  # @return [Pet]
  describe 'get_pet_by_id test' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  # unit tests for update_pet
  # Update an existing pet
  # @param pet Pet object that needs to be added to the store
  # @param [Hash] opts the optional parameters
  # @return [nil]
  describe 'update_pet test' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  # unit tests for update_pet_with_form
  # Updates a pet in the store with form data
  # @param pet_id ID of pet that needs to be updated
  # @param [Hash] opts the optional parameters
  # @option opts [String] :name Updated name of the pet
  # @option opts [String] :status Updated status of the pet
  # @return [nil]
  describe 'update_pet_with_form test' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  # unit tests for upload_file
  # uploads an image
  # @param pet_id ID of pet to update
  # @param [Hash] opts the optional parameters
  # @option opts [String] :additional_metadata Additional data to pass to server
  # @option opts [File] :file file to upload
  # @return [ApiResponse]
  describe 'upload_file test' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  # unit tests for upload_file_with_required_file
  # uploads an image (required)
  # @param pet_id ID of pet to update
  # @param required_file file to upload
  # @param [Hash] opts the optional parameters
  # @option opts [String] :additional_metadata Additional data to pass to server
  # @return [ApiResponse]
  describe 'upload_file_with_required_file test' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
end
| 31.917241 | 157 | 0.714347 |