query (string, 7–9.5k chars) | document (string, 10–1.07M chars) | negatives (sequence of 19 strings) | metadata (dict) |
---|---|---|---|
GET /efemerides GET /efemerides.json | def index
@efemerides = Efemeride.all
end | [
"def index\n @efimerides = Efimeride.all\n end",
"def show\n @evemt = Evemt.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @evemt }\n end\n end",
"def index\n @electors = Elector.all\n\n render json: @electors\n end",
"def show\n @etf = Etf.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @etf }\n end\n end",
"def list_ideas\n json_out(Idea.all)\n end",
"def index\n @user = User.find(params[:user_id])\n @ideas = @user.ideas\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @ideas }\n end\n end",
"def show\n @emergencia = Emergencia.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @emergencia }\n end\n end",
"def index\n @efectividads = Efectividad.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @efectividads }\n end\n end",
"def show\n @idiom = Idiom.find(params[:id])\n @essays = Essay.where(idiom_id: params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @idiom }\n end\n end",
"def show\n @ed = Ed.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @ed }\n end\n end",
"def index\n @eicons = Eicon.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @eicons }\n end\n end",
"def index\n @ef_pares = EfPare.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @ef_pares }\n end\n end",
"def index\n @epidemics = Epidemic.all\n end",
"def show\n @etape = Etape.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @etape }\n end\n end",
"def index\n @ephems = Ephem.page(params[:page])\n end",
"def index\n @ejes = Eje.all\n end",
"def show\n render json: @elector\n end",
"def show\n @etsy = Etsy.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @etsy }\n end\n end",
"def index\n @eegs = Eeg.all\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /efemerides POST /efemerides.json | def create
@efemeride = Efemeride.new(efemeride_params)
respond_to do |format|
if @efemeride.save
format.html { redirect_to @efemeride, notice: 'Efemeride ha sido creada.' }
format.json { render :show, status: :created, location: @efemeride }
else
format.html { render :new }
format.json { render json: @efemeride.errors, status: :unprocessable_entity }
end
end
end | [
"def create\n @efemeride = Efemeride.new(efemeride_params)\n\n respond_to do |format|\n if @efemeride.save\n format.html { redirect_to @efemeride, notice: \"Efemeride was successfully created.\" }\n format.json { render :show, status: :created, location: @efemeride }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @efemeride.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @edrife = Edrive.new(edrife_params)\n\n respond_to do |format|\n if @edrife.save\n format.html { redirect_to @edrife, notice: 'Edrive was successfully created.' }\n format.json { render :show, status: :created, location: @edrife }\n else\n format.html { render :new }\n format.json { render json: @edrife.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @ephem = Ephem.new(ephem_params)\n\n respond_to do |format|\n if @ephem.save\n format.html { redirect_to @ephem, notice: 'Ephem was successfully created.' }\n format.json { render action: 'show', status: :created, location: @ephem }\n else\n format.html { render action: 'new' }\n format.json { render json: @ephem.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @efimeride = Efimeride.new(efimeride_params)\n\n respond_to do |format|\n if @efimeride.save\n format.html { redirect_to @efimeride, notice: 'Efimeride was successfully created.' }\n format.json { render :show, status: :created, location: @efimeride }\n else\n format.html { render :new }\n format.json { render json: @efimeride.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @episodio = Episodio.new(episodio_params)\n\n respond_to do |format|\n if @episodio.save\n format.html { redirect_to @episodio, notice: 'Episodio was successfully created.' }\n format.json { render :show, status: :created, location: @episodio }\n else\n format.html { render :new }\n format.json { render json: @episodio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @eeg = Eeg.new(eeg_params)\n\n respond_to do |format|\n if @eeg.save\n format.html { redirect_to @eeg, notice: 'Eeg was successfully created.' }\n format.json { render :show, status: :created, location: @eeg }\n else\n format.html { render :new }\n format.json { render json: @eeg.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @eje = Eje.new(eje_params)\n\n respond_to do |format|\n if @eje.save\n format.html { redirect_to @eje, notice: 'Eje was successfully created.' }\n format.json { render :show, status: :created, location: @eje }\n else\n format.html { render :new }\n format.json { render json: @eje.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @estanteev = Estanteev.new(estanteev_params)\n\n respond_to do |format|\n if @estanteev.save\n format.html { redirect_to @estanteev, notice: 'Estanteev was successfully created.' }\n format.json { render :show, status: :created, location: @estanteev }\n else\n format.html { render :new }\n format.json { render json: @estanteev.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @epidemic = Epidemic.new(epidemic_params)\n\n respond_to do |format|\n if @epidemic.save\n format.html { redirect_to @epidemic, notice: 'Epidemic was successfully created.' }\n format.json { render :show, status: :created, location: @epidemic }\n else\n format.html { render :new }\n format.json { render json: @epidemic.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @evemt = Evemt.new(params[:evemt])\n\n respond_to do |format|\n if @evemt.save\n format.html { redirect_to @evemt, notice: 'Evemt was successfully created.' }\n format.json { render json: @evemt, status: :created, location: @evemt }\n else\n format.html { render action: \"new\" }\n format.json { render json: @evemt.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @electee = Electee.new(electee_params)\n\n respond_to do |format|\n if @electee.save\n format.html { redirect_to @electee, notice: 'Electee was successfully created.' }\n format.json { render :show, status: :created, location: @electee }\n else\n format.html { render :new }\n format.json { render json: @electee.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @deed = Deed.create(deed_params)\n\n if @deed.save\n render json: @deed, status: :created, location: @deed\n else\n render json: @deed.errors, status: :unprocessable_entity\n end\n end",
"def create\n @donee = Donee.new(donee_params)\n\n respond_to do |format|\n if @donee.save\n format.html { redirect_to @donee, notice: 'Donee was successfully created.' }\n format.json { render :show, status: :created, location: @donee }\n else\n format.html { render :new }\n format.json { render json: @donee.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @donee = Donee.new(params[:donee])\n\n respond_to do |format|\n if @donee.save\n format.html { redirect_to @donee, notice: 'Donee was successfully created.' }\n format.json { render json: @donee, status: :created, location: @donee }\n else\n format.html { render action: \"new\" }\n format.json { render json: @donee.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @etape = Etape.new(params[:etape])\n\n respond_to do |format|\n if @etape.save\n format.html { redirect_to @etape, notice: 'Etape was successfully created.' }\n format.json { render json: @etape, status: :created, location: @etape }\n else\n format.html { render action: \"new\" }\n format.json { render json: @etape.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @eess = Eess.new(eess_params)\n\n respond_to do |format|\n if @eess.save\n format.html { redirect_to @eess, notice: 'Eess was successfully created.' }\n format.json { render :show, status: :created, location: @eess }\n else\n format.html { render :new }\n format.json { render json: @eess.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @enade = Enade.new(enade_params)\n\n respond_to do |format|\n if @enade.save\n format.html { redirect_to @enade, notice: 'Enade was successfully created.' }\n format.json { render :show, status: :created, location: @enade }\n else\n format.html { render :new }\n format.json { render json: @enade.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @ezii_seed = EziiSeed.new(ezii_seed_params)\n\n respond_to do |format|\n if @ezii_seed.save\n format.html { redirect_to @ezii_seed, notice: 'Ezii seed was successfully created.' }\n format.json { render :show, status: :created, location: @ezii_seed }\n else\n format.html { render :new }\n format.json { render json: @ezii_seed.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @ephemeri = Ephemeri.new(ephemeri_params)\n\n respond_to do |format|\n if @ephemeri.save\n format.html { redirect_to @ephemeri, notice: \"Ephemeri was successfully created.\" }\n format.json { render :show, status: :created, location: @ephemeri }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @ephemeri.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PATCH/PUT /efemerides/1 PATCH/PUT /efemerides/1.json | def update
respond_to do |format|
if @efemeride.update(efemeride_params)
format.html { redirect_to @efemeride, notice: 'Efemeride ha sido actualizada.' }
format.json { render :show, status: :ok, location: @efemeride }
else
format.html { render :edit }
format.json { render json: @efemeride.errors, status: :unprocessable_entity }
end
end
end | [
"def update\n respond_to do |format|\n if @efemeride.update(efemeride_params)\n format.html { redirect_to @efemeride, notice: \"Efemeride was successfully updated.\" }\n format.json { render :show, status: :ok, location: @efemeride }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @efemeride.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @ephem.update(ephem_params)\n format.html { redirect_to @ephem, notice: 'Ephem was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @ephem.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @especy = Especie.find(params[:id])\n\n respond_to do |format|\n if @especy.update_attributes(params[:especy])\n format.html { redirect_to @especy, notice: 'Especie was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @especy.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fife.update(fife_params)\n format.html { redirect_to @fife, notice: 'Five was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @fife.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @eveent.update(eveent_params)\n format.html { redirect_to @eveent, notice: 'Eveent was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @eveent.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @donee = Donee.find(params[:id])\n\n respond_to do |format|\n if @donee.update_attributes(params[:donee])\n format.html { redirect_to @donee, notice: 'Donee was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @donee.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @efimeride.update(efimeride_params)\n format.html { redirect_to @efimeride, notice: 'Efimeride was successfully updated.' }\n format.json { render :show, status: :ok, location: @efimeride }\n else\n format.html { render :edit }\n format.json { render json: @efimeride.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @especy.update(especy_params)\n format.html { redirect_to @especy, notice: 'Especie was successfully updated.' }\n format.json { render :show, status: :ok, location: @especy }\n else\n format.html { render :edit }\n format.json { render json: @especy.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @eje.update(eje_params)\n format.html { redirect_to @eje, notice: 'Eje was successfully updated.' }\n format.json { render :show, status: :ok, location: @eje }\n else\n format.html { render :edit }\n format.json { render json: @eje.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @estudiante = Estudiante.find(params[:id])\n\n if @estudiante.update(params[:estudiante])\n head :no_content\n else\n render json: @estudiante.errors, status: :unprocessable_entity\n end\n end",
"def update\n @etape = Etape.find(params[:id])\n\n respond_to do |format|\n if @etape.update_attributes(params[:etape])\n format.html { redirect_to @etape, notice: 'Etape was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @etape.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @oferta = Oferta.find(params[:id])\n\n respond_to do |format|\n if @oferta.update_attributes(params[:oferta])\n format.html { redirect_to [:admin, @oferta], :notice => 'Exemplo was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @oferta.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @episodio.update(episodio_params)\n format.html { redirect_to @episodio, notice: 'Episodio was successfully updated.' }\n format.json { render :show, status: :ok, location: @episodio }\n else\n format.html { render :edit }\n format.json { render json: @episodio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @referee = Referee.find(params[:id])\n\n respond_to do |format|\n if @referee.update_attributes(params[:referee])\n format.html { redirect_to @referee, notice: 'Referee was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @referee.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @egreso = Egreso.find(params[:id])\n\n respond_to do |format|\n if @egreso.update_attributes(params[:egreso])\n format.html { redirect_to @egreso, notice: 'Egreso was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @egreso.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @set_aside.update(set_aside_params)\n format.html { redirect_to @set_aside, notice: 'Set aside was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @set_aside.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @emily.update(emily_params)\n format.html { redirect_to @emily, notice: 'Emily was successfully updated.' }\n format.json { render :show, status: :ok, location: @emily }\n else\n format.html { render :edit }\n format.json { render json: @emily.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @epcformulary.update(epcformulary_params)\n format.html { redirect_to @epcformulary, notice: 'Epcformulary was successfully updated.' }\n format.json { render :show, status: :ok, location: @epcformulary }\n else\n format.html { render :edit }\n format.json { render json: @epcformulary.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @espec = Espec.find(params[:id])\n\n respond_to do |format|\n if @espec.update_attributes(params[:espec])\n format.html { redirect_to especs_path, notice: 'Espec was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @espec.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /efemerides/1 DELETE /efemerides/1.json | def destroy
@efemeride.destroy
respond_to do |format|
format.html { redirect_to efemerides_url, notice: 'Efemeride ha sido eliminada.' }
format.json { head :no_content }
end
end | [
"def destroy\n @efemeride.destroy\n respond_to do |format|\n format.html { redirect_to efemerides_url, notice: \"Efemeride was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end",
"def destroy\n @ephem.destroy\n respond_to do |format|\n format.html { redirect_to ephems_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @economia1 = Economia1.find(params[:id])\n @economia1.destroy\n\n respond_to do |format|\n format.html { redirect_to economia1s_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @agente = Agente.find(params[:id])\n @agente.destroy\n\n respond_to do |format|\n format.html { redirect_to agenti_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @json.destroy\n\n head :no_content\n end",
"def destroy\n @ecoetiqueta.destroy\n respond_to do |format|\n format.html { redirect_to ecoetiquetes_url, notice: 'Ecoetiqueta was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @esclerosi = Esclerosi.find(params[:id])\n @esclerosi.destroy\n\n respond_to do |format|\n format.html { redirect_to esclerosis_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @enonce = Enonce.find(params[:id])\n @enonce.destroy\n\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @etf = Etf.find(params[:id])\n @etf.destroy\n\n respond_to do |format|\n format.html { redirect_to etfs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @clientepedido = Clientepedido.find(params[:id])\n @clientepedido.destroy\n\n respond_to do |format|\n format.html { redirect_to clientepedidos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @egreso = Egreso.find(params[:id])\n @egreso.destroy\n\n respond_to do |format|\n format.html { redirect_to egresos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @ejercicio1 = Ejercicio1.find(params[:id])\n @ejercicio1.destroy\n\n respond_to do |format|\n format.html { redirect_to ejercicio1s_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @episodio.destroy\n respond_to do |format|\n format.html { redirect_to episodios_url, notice: 'Episodio was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @etape = Etape.find(params[:id])\n @etape.destroy\n\n respond_to do |format|\n format.html { redirect_to etapes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @emergencia = Emergencia.find(params[:id])\n @emergencia.destroy\n\n respond_to do |format|\n format.html { redirect_to emergencias_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @edetail = Edetail.find_by(id: params[:id])\n @edetail.destroy\n respond_to do |format|\n format.html { redirect_to edetails_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @eveent.destroy\n respond_to do |format|\n format.html { redirect_to eveents_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @economia6 = Economia6.find(params[:id])\n @economia6.destroy\n\n respond_to do |format|\n format.html { redirect_to economia6s_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @estatuto = Estatuto.find(params[:id])\n @estatuto.destroy\n\n respond_to do |format|\n format.html { redirect_to estatutos_url }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /empresas_grandes GET /empresas_grandes.json | def index
@empresas_grandes = EmpresasGrande.all
end | [
"def index\n @grupoassuntos = Grupoassunto.all\n\n render json: @grupoassuntos\n end",
"def returnGrupoEmpresas\n\t\tempresas_array = Array.new\n\t\t@grupo = Grupo.find(params[:id])\n\t\t@grupo.empresas.each do |item|\n\t\t\tempresas_array << item\n\t\tend\n \t\trender :json => (empresas_array.sort!).to_json.to_s.html_safe\n\tend",
"def index\n @grados = Grado.order(\"nivel, grado\")\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @grados }\n end\n end",
"def show\n @grupo_de_despesa = GrupoDeDespesa.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @grupo_de_despesa }\n end\n end",
"def index\n @grupy = Grupa.all\n\n respond_to do |format|\n format.html # index.html.erb\n #format.json { render json: @grupy }\n end\n end",
"def index\n @degres = Degre.all\n end",
"def index\n grades = SisGrade.by_newest\n\n grades = scope_grades(grades)\n sis_grades = {}\n\n if params[:page].present?\n grades = grades.paginate(page: @page, per_page: @per_page)\n sis_grades[:total_pages] = grades.total_pages\n end\n\n sis_grades[:grades] = grades\n\n render json: sis_grades\n end",
"def index\n @luggages = luggages.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @luggages }\n end\n end",
"def show\n render json: @grupoassunto\n end",
"def index\n @grants =\n if @company\n @company.grants(params[:filter_by], params[:sort_by], params[:desc])\n else\n Grant.kept.order(name: :asc)\n end\n\n headers[\"Total\"] = @grants.count\n headers[\"Per-Page\"] = params[:per_page]\n\n render json: (params[:page] == 'all' ? @grants : paginator(@grants)), has_type: false\n end",
"def index\n @admin_grampanchyats = Admin::Grampanchyat.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @admin_grampanchyats }\n end\n end",
"def index\n @grades = Grade.all\n render json: {data: @grades, status: \"SUCCESS\"}, :status => 200\n end",
"def index\n respond_to :html, :json\n @organismes = Organisme.all\n end",
"def show\n @grup = Grup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @grup }\n end\n end",
"def index\r\n @a_gudangs = AGudang.page(params[:page]).per(PAGINATE)\r\n\r\n respond_to do |format|\r\n format.html # index.html.erb\r\n format.json { render json: @a_gudangs }\r\n end\r\n end",
"def index\n @ultimo_grado_de_estudios = UltimoGradoDeEstudio.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @ultimo_grado_de_estudios }\n end\n end",
"def index\n @gruppens = Gruppen.all\n end",
"def show\n @partecipanti_gruppo = PartecipantiGruppo.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @partecipanti_gruppo }\n end\n end",
"def index\n @humedals = Humedal.all\n render json: @humedals\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /empresas_grandes POST /empresas_grandes.json | def create
@empresas_grande = EmpresasGrande.new(empresas_grande_params)
respond_to do |format|
if @empresas_grande.save
format.html { redirect_to @empresas_grande, notice: 'Empresas grande was successfully created.' }
format.json { render :show, status: :created, location: @empresas_grande }
else
format.html { render :new }
format.json { render json: @empresas_grande.errors, status: :unprocessable_entity }
end
end
end | [
"def create\n @gran_unidad = GranUnidad.new(gran_unidad_params)\n\n respond_to do |format|\n if @gran_unidad.save\n format.html { redirect_to @gran_unidad, notice: 'Gran unidad was successfully created.' }\n format.json { render action: 'show', status: :created, location: @gran_unidad }\n else\n format.html { render action: 'new' }\n format.json { render json: @gran_unidad.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @grupo_demandas = GrupoDemandas.new(grupo_demandas_params)\n\n respond_to do |format|\n if @grupo_demandas.save\n format.html { redirect_to @grupo_demandas, notice: 'Grupo demandas was successfully created.' }\n format.json { render :show, status: :created, location: @grupo_demandas }\n else\n format.html { render :new }\n format.json { render json: @grupo_demandas.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @grupo_de_despesa = GrupoDeDespesa.new()\n\t@grupo_de_despesa[:descricao] = params[:grupo_de_despesa][:descricao]\n\t@grupo_de_despesa[:data_das_despesas] = params[:grupo_de_despesa][:data_das_despesas]\n\t@grupo_de_despesa[:usuario] = Usuario.find(params[:grupo_de_despesa][:usuario].to_i)\n\n respond_to do |format|\n if @grupo_de_despesa.save\n format.html { redirect_to @grupo_de_despesa, notice: 'Grupo de despesa was successfully created.' }\n format.json { render json: @grupo_de_despesa, status: :created, location: @grupo_de_despesa }\n else\n format.html { render action: \"new\" }\n format.json { render json: @grupo_de_despesa.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @grantee = Grantee.new(params[:grantee])\n\n respond_to do |format|\n if @grantee.save\n format.html { redirect_to @grantee, notice: 'Grantee was successfully created.' }\n format.json { render json: @grantee, status: :created, location: @grantee }\n else\n format.html { render action: \"new\" }\n format.json { render json: @grantee.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @grantee = Grantee.new(grantee_params)\n\n respond_to do |format|\n if @grantee.save\n format.html { redirect_to @grantee, notice: 'Grantee was successfully created.' }\n format.json { render :show, status: :created, location: @grantee }\n else\n format.html { render :new }\n format.json { render json: @grantee.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @etablissement_ght = EtablissementGht.new(etablissement_ght_params)\n\n respond_to do |format|\n if @etablissement_ght.save\n format.html { redirect_to @etablissement_ght, notice: 'Etablissement ght was successfully created.' }\n format.json { render :show, status: :created, location: @etablissement_ght }\n else\n format.html { render :new }\n format.json { render json: @etablissement_ght.errors, status: :unprocessable_entity }\n end\n end\n end",
"def index\n @empresas_grandes = EmpresasGrande.all\n end",
"def create\n @sadt_grupo = SadtGrupo.new(sadt_grupo_params)\n\n respond_to do |format|\n if @sadt_grupo.save\n format.html { redirect_to @sadt_grupo, notice: 'Sadt Grupo was successfully created.' }\n format.json { render :show, status: :created, location: @sadt_grupo }\n else\n format.html { render :new }\n format.json { render json: @sadt_grupo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @aggrupation = Aggrupation.new(aggrupation_params)\n\n respond_to do |format|\n if @aggrupation.save\n format.html { redirect_to @aggrupation, notice: 'Aggrupation was successfully created.' }\n format.json { render :show, status: :created, location: @aggrupation }\n else\n format.html { render :new }\n format.json { render json: @aggrupation.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @gestor = Gestor.new(params[:gestor])\n @gestor.password = \"12345678\"\n @gestor.password_confirmation = \"12345678\"\n grupo_gestor = Grupo.where(internal_id: Grupo::GESTOR).first\n @gestor.grupo = grupo_gestor\n\n respond_to do |format|\n if @gestor.save\n format.html { redirect_success(\"Gestor adicionado com sucesso!\",:gestor, :index)}\n format.json { render json: @gestor, status: :created, location: @gestor }\n else\n format.html { redirect_error(\"Erro ao adicionar o gestor!\",:gestor, :index)}\n format.json { render json: @gestor.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @grupoassunto = Grupoassunto.new(grupoassunto_params)\n\n if @grupoassunto.save\n render json: @grupoassunto, status: :created, location: @grupoassunto\n else\n render json: @grupoassunto.errors, status: :unprocessable_entity\n end\n end",
"def create\n @grade_horario = GradeHorario.new(grade_horario_params)\n\n respond_to do |format|\n if @grade_horario.save\n format.html { redirect_to @grade_horario, notice: 'Grade horario was successfully created.' }\n format.json { render :show, status: :created, location: @grade_horario }\n else\n format.html { render :new }\n format.json { render json: @grade_horario.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @subgrupo = Subgrupo.new(params[:subgrupo])\n @subgrupo.empresa = session[:usuario].empresa\n respond_to do |format|\n if @subgrupo.save\n format.html { redirect_to @subgrupo, notice: 'Subgrupo was successfully created.' }\n format.json { render json: @subgrupo, status: :created, location: @subgrupo }\n else\n format.html { render action: \"new\" }\n format.json { render json: @subgrupo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @grade_scheduel = GradeScheduel.new(grade_scheduel_params)\n\n respond_to do |format|\n if @grade_scheduel.save\n format.html { redirect_to root_path, notice: 'Ugeskemaet er gemt.' }\n format.json { render :show, status: :created, location: @grade_scheduel }\n else\n format.html { render :new }\n format.json { render json: @grade_scheduel.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @grup = Grup.new(params[:grup])\n\n respond_to do |format|\n if @grup.save\n format.html { redirect_to @grup, notice: 'Grup was successfully created.' }\n format.json { render json: @grup, status: :created, location: @grup }\n else\n format.html { render action: \"new\" }\n format.json { render json: @grup.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @anmeldung = Anmeldung.new(anmeldung_params)\n\n respond_to do |format|\n if @anmeldung.save\n format.html { redirect_to @anmeldung, notice: 'Anmeldung was successfully created.' }\n format.json { render :show, status: :created, location: @anmeldung }\n else\n format.html { render :new }\n format.json { render json: @anmeldung.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @groep = Groep.new(params[:groep])\n @lesgevers = Lesgever.order('name').all\n @dags = Dag.all\n @niveaus = Niveau.order('position').all\n respond_to do |format|\n if @groep.save\n format.html { redirect_to @groep, notice: 'Groep werd succesvol aangemaakt.' }\n format.json { render json: @groep, status: :created, location: @groep }\n else\n format.html { render action: \"new\" }\n format.json { render json: @groep.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @galletum = Galletum.new(galletum_params)\n\n respond_to do |format|\n if @galletum.save\n format.html { redirect_to @galletum, notice: 'Galletum was successfully created.' }\n format.json { render :show, status: :created, location: @galletum }\n else\n format.html { render :new }\n format.json { render json: @galletum.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @profesor_grado = ProfesorGrado.new(profesor_grado_params)\n\n respond_to do |format|\n if @profesor_grado.save\n format.html { redirect_to @profesor_grado, notice: 'Profesor grado was successfully created.' }\n format.json { render :show, status: :created, location: @profesor_grado }\n else\n format.html { render :new }\n format.json { render json: @profesor_grado.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PATCH/PUT /empresas_grandes/1 PATCH/PUT /empresas_grandes/1.json | def update
respond_to do |format|
if @empresas_grande.update(empresas_grande_params)
format.html { redirect_to @empresas_grande, notice: 'Empresas grande was successfully updated.' }
format.json { render :show, status: :ok, location: @empresas_grande }
else
format.html { render :edit }
format.json { render json: @empresas_grande.errors, status: :unprocessable_entity }
end
end
end | [
"def update\n respond_to do |format|\n if @grupo_demandas.update(grupo_demandas_params)\n format.html { redirect_to @grupo_demandas, notice: 'Grupo demandas was successfully updated.' }\n format.json { render :show, status: :ok, location: @grupo_demandas }\n else\n format.html { render :edit }\n format.json { render json: @grupo_demandas.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @emprestimos.update(emprestimo_params)\n format.html { redirect_to @emprestimo, notice: 'emprestimo atualizado com sucesso.' }\n format.json { render :show, status: :ok, location: @emprestimo }\n else\n format.html { render :edit }\n format.json { render json: @emprestimo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @gran_unidad.update(gran_unidad_params)\n format.html { redirect_to @gran_unidad, notice: 'Gran unidad was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @gran_unidad.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @emprego.update(emprego_params)\n format.html { redirect_to @emprego, notice: 'Emprego was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @emprego.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @tipo_egreso = TipoEgreso.find(params[:id])\n\n respond_to do |format|\n if @tipo_egreso.update_attributes(params[:tipo_egreso])\n format.html { redirect_to @tipo_egreso, notice: 'Tipo egreso was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @tipo_egreso.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @egreso = Egreso.find(params[:id])\n\n respond_to do |format|\n if @egreso.update_attributes(params[:egreso])\n format.html { redirect_to @egreso, notice: 'Egreso was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @egreso.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @emprestimo = Emprestimo.find(params[:id])\n\n respond_to do |format|\n if @emprestimo.update_attributes(params[:emprestimo])\n format.html { redirect_to @emprestimo, notice: 'Emprestimo was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @emprestimo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @compra_venta_especial.update(compra_venta_especial_params)\n format.html { redirect_to @compra_venta_especial, notice: 'Compra venta especial was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @compra_venta_especial.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @modif_repar.update(modif_repar_params.permit(:sb,:Objet,:ref,:date_rel,:fait_par,:id_machine))\n format.html { redirect_to @modif_repar, notice: 'Modif repar was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @modif_repar.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @estudiante = Estudiante.find(params[:id])\n\n if @estudiante.update(params[:estudiante])\n head :no_content\n else\n render json: @estudiante.errors, status: :unprocessable_entity\n end\n end",
"def update\n @emergencia = Emergencia.find(params[:id])\n\n respond_to do |format|\n if @emergencia.update_attributes(params[:emergencia])\n format.html { redirect_to @emergencia, notice: 'Emergencia was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @emergencia.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n @metodosdepago.empresa = current_user.empresa\n if @metodosdepago.update(metodosdepago_params)\n format.html { redirect_to @metodosdepago, notice: 'Metodosdepago was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @metodosdepago.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @empaque.update(empaque_params)\n format.html { redirect_to @empaque, notice: 'Empaque was successfully updated.' }\n format.json { render :show, status: :ok, location: @empaque }\n else\n format.html { render :edit }\n format.json { render json: @empaque.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @egreso.update(egreso_params)\n format.html { redirect_to @egreso, notice: 'Egreso was successfully updated.' }\n format.json { render :show, status: :ok, location: @egreso }\n else\n format.html { render :edit }\n format.json { render json: @egreso.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @grupo_de_despesa = GrupoDeDespesa.find(params[:id])\n\n respond_to do |format|\n if @grupo_de_despesa.update_attributes(params[:grupo_de_despesa])\n format.html { redirect_to @grupo_de_despesa, notice: 'Grupo de despesa was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @grupo_de_despesa.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @empresa = Empresa.find(params[:id])\n\n if @empresa.update(empresa_params)\n head :no_content\n else\n render json: @empresa.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @datos_generales_empresa.update(datos_generales_empresa_params)\n format.html { redirect_to @datos_generales_empresa, notice: 'Datos generales empresa was successfully updated.' }\n format.json { render :show, status: :ok, location: @datos_generales_empresa }\n else\n format.html { render :edit }\n format.json { render json: @datos_generales_empresa.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @pessoa.update(pessoa_params)\n format.html { redirect_to admin_pessoas_path, notice: 'Pessoa foi atualizada com sucesso.' }\n format.json { head :no_content }\n else\n get_dependencies\n format.html { render action: 'edit' }\n format.json { render json: @pessoa.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @supermercado.update(supermercado_params)\n format.html { redirect_to @supermercado, notice: 'Supermercado was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @supermercado.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /empresas_grandes/1 DELETE /empresas_grandes/1.json | def destroy
@empresas_grande.destroy
respond_to do |format|
format.html { redirect_to empresas_grandes_url, notice: 'Empresas grande was successfully destroyed.' }
format.json { head :no_content }
end
end | [
"def destroy\n @gran_unidad.destroy\n respond_to do |format|\n format.html { redirect_to gran_unidad_index_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @emprestimo = Emprestimo.find(params[:id])\n @emprestimo.destroy\n\n respond_to do |format|\n format.html { redirect_to emprestimos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @agronomiagalera = Agronomiagalera.find(params[:id])\n @agronomiagalera.destroy\n\n respond_to do |format|\n format.html { redirect_to agronomiagaleras_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @json.destroy\n\n head :no_content\n end",
"def destroy\n @egreso = Egreso.find(params[:id])\n @egreso.destroy\n\n respond_to do |format|\n format.html { redirect_to egresos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @estatuto = Estatuto.find(params[:id])\n @estatuto.destroy\n\n respond_to do |format|\n format.html { redirect_to estatutos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @tipo_egreso = TipoEgreso.find(params[:id])\n @tipo_egreso.destroy\n\n respond_to do |format|\n format.html { redirect_to tipo_egresos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @asignatura.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @estacion = Estacion.find(params[:id])\n @estacion.destroy\n\n respond_to do |format|\n format.html { redirect_to estaciones_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @emergencia = Emergencia.find(params[:id])\n @emergencia.destroy\n\n respond_to do |format|\n format.html { redirect_to emergencias_url }\n format.json { head :no_content }\n end\n end",
"def destroy\r\n checar_egresso_super\r\n @egresso.destroy\r\n respond_to do |format|\r\n format.html { redirect_to egressos_url, notice: 'Egresso excluído com sucesso.' }\r\n format.json { head :no_content }\r\n end\r\n end",
"def destroy\n @seguidore = Seguidore.find(params[:id])\n @seguidore.destroy\n\n respond_to do |format|\n format.html { redirect_to seguidores_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @humanidades1 = Humanidades1.find(params[:id])\n @humanidades1.destroy\n\n respond_to do |format|\n format.html { redirect_to humanidades1s_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @tipo_demanda = TipoDemanda.find(params[:id])\n @tipo_demanda.destroy\n\n respond_to do |format|\n format.html { redirect_to tipo_demandas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @empregado.destroy\n respond_to do |format|\n format.html { redirect_to empregados_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @emprego.destroy\n respond_to do |format|\n format.html { redirect_to empregos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @grupo_de_despesa = GrupoDeDespesa.find(params[:id])\n @grupo_de_despesa.destroy\n\n respond_to do |format|\n format.html { redirect_to grupo_de_despesas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n authorize!(:edit, Heb412Gen::Plantillahcm)\n @plantillahcm.destroy\n respond_to do |format|\n format.html do\n redirect_to(\n Rails.configuration.relative_url_root,\n notice: \"Plantillahcm eliminado.\",\n )\n end\n format.json { head(:no_content) }\n end\n end",
"def destroy\n @estudiante = Estudiante.find(params[:id])\n @estudiante.destroy\n\n respond_to do |format|\n format.html { redirect_to estudiantes_url }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
walks all files and converts them all to the new format | def transform_files!(files, type)
files.each do |file|
@file = file
@type = type
parsed = ""
@dirnames = Base.get_namespace(file,type) # directories after the app/type/ directory
namespaces = LANGUAGES.collect do |lang|
if type == 'views'
namespace = [lang] + @dirnames
else
namespace = [lang, type] + @dirnames
end
puts "Converting: " + file + " into namespace: "
puts namespace.map {|x| "[\"#{x}\"]"}.join("")
Namespace.new(namespace,lang)
end
contents = Base.get_file_as_string(file)
parsed << GettextI18nConvertor.string_to_i18n(contents, namespaces, type)
# write the app/type/file with new i18n format instead of gettext
File.open(file, 'w') { |file| file.write(parsed)}
namespaces.each do |ns|
new_file_handler(ns)
end
end
end | [
"def transform_files\n extract\n reorder_files\n transform @attendance_file, 'att' unless @attendance_file.blank?\n transform @enroll_file, 'enroll' unless @enroll_file.blank?\n transform @ili_file, 'ili' unless @ili_file.blank?\n end",
"def git_convert_all!\n Dir[\"#{@path}/**/*.erb\"].each do |file|\n haml_file = file.gsub(/\\.erb$/, '.haml')\n temp_file = file.gsub(/\\.erb$/, '.tmp')\n begin\n puts \"Copying #{file} to #{temp_file}\"\n File.copy_stream file, temp_file\n puts \"Renaming #{file} to #{haml_file}\"\n `git mv #{file} #{haml_file}`\n puts \"Converting #{File.basename(file)} to #{File.basename(haml_file)}\"\n `html2haml -rx #{temp_file} #{haml_file}`\n puts \"Removing #{temp_file}\"\n File.delete temp_file\n rescue\n File.rename(temp_file, file)\n File.delete(temp_file)\n end\n end\n end",
"def convert\n # collect all doc ids and enable replacement of known doc ids with\n # valid references to adoc files\n manage_doc_ids if @options[:resolveDocid]\n\n # register add-on for handling searchability\n manage_searchability(@options) if @options[:makeSearchable]\n\n # traverse the src file tree and convert all files deemed as\n # adoc files\n conv_error = false\n if @paths.src_root_abs.directory?\n Find.find(@paths.src_root_abs) do |path|\n p = Pathname.new(path)\n begin\n to_asciidoc(p) if adocfile? p\n rescue StandardError => e\n str = String.new(\"Error when converting file \"\\\n \"#{path}: #{e.message}\\nBacktrace:\\n\")\n e.backtrace.each { |l| str << \" #{l}\\n\" }\n Giblog.logger.error { str }\n conv_error = true\n end\n end\n end\n\n # create necessary search assets if needed\n create_search_assets if @options[:makeSearchable]\n\n # build index and other fancy stuff if not suppressed\n unless @options[:suppressBuildRef]\n # build a dependency graph (only if we resolve docids...)\n dep_graph_exist = @options[:resolveDocid] && build_graph_page\n\n # build a reference index\n build_index_page(dep_graph_exist)\n end\n conv_error\n end",
"def iterate_and_format(dir)\n file_types = {h: true, m: true, cpp: true, c: true}\n Dir.foreach(dir) do |filename|\n next if filename.start_with?('.') || filename.length == 0 || filename == \"Pods\" || filename.start_with?('Carthage') || filename == \"build\" || filename.start_with?('Third Party') || filename.end_with?('.framework') \n \n full_path = dir + '/' + filename\n if File.directory?(full_path) \n iterate_and_format(full_path)\n next\n end\n \n extension = filename.split('.').last\n next if !extension\n next if !file_types[extension.downcase.to_sym]\n\n begin\n new_file = ''\n skip = true\n File.foreach(full_path) do |line|\n if skip && (/^\\/\\//.match(line) || line.strip.length == 0)\n \n else\n skip = false\n new_file << line\n end\n end\n File.delete(full_path)\n f = File.new(full_path, 'w')\n f.write(new_file)\n f.close\n rescue Errno::EISDIR\n end\n puts \"formatting #{full_path}\"\n `clang-format -style=file -i \"#{full_path}\"`\n end\nend",
"def transform_files!(type) \n files = Files.send(type.to_s + \"_files\")\n files.each do |file|\n parsed = \"\"\n namespace = [DEFAULT_LANGUAGE, 'txt', type] + Base.get_namespace(file, type)\n puts \"Converting: \" + file + \" into namespace: \"\n puts namespace.map {|x| \"[\\\"#{x}\\\"]\"}.join(\"\")\n \n namespace = Namespace.new(namespace)\n contents = File.read(file)\n parsed << GettextI18nConvertor.string_to_i18n(contents, namespace)\n\n File.open(file, 'w') { |file| file.write(parsed)}\n \n namespace.merge(@translations)\n end\n end",
"def scan_dir(from_dir, to_dir)\n Dir.foreach(from_dir) do |file|\n from_path = from_dir + File::SEPARATOR + file\n to_path = to_dir + File::SEPARATOR + file\n if file =~ /^\\./\n next\n elsif FileTest.directory?(from_path)\n scan_dir(from_path, to_path) \n elsif file =~ /\\.java$/\n File.makedirs(to_dir)\n output_file = to_path.sub(/\\.java$/, '.cs')\n print \"Translating #{from_path} to #{output_file}\\n\"\n transmogrify(from_path, output_file)\n else\n File.makedirs(to_dir)\n print \"Copying #{from_path} to #{to_path}\\n\"\n File.copy(from_path, to_path)\n end\n end\nend",
"def run\n\t\t@files.each do |file|\n\t\t\textname = File.extname(file)\n\t\t\tbasename = File.basename(file, extname)\n\t\t\tdirname = File.dirname(file)\n\t\t\tnew_path = File.join(dirname, basename + \".html\")\n\t\t\t\n\t\t\tputs \"Converting to #{File.basename(new_path)}\"\n\t\t\t%x[markdown #{file.shellescape} > #{new_path.shellescape}]\n\n\t\tend\n\n\tend",
"def convert!\n puts \"Searching for ch*.html files from Atlas yeah\"\n Dir[\"ch*.html\"].each do |fn|\n out = html_to_ipynb(fn)\n # Compute the new filename, which is the original filename \n # with the \".html\" (last 5 chars) replaced with \".ipynb\". \n title_fn = self.string_to_filename(out['metadata']['name'])\n ipynb_fn = \"#{fn[0,fn.length-5]}_#{title_fn}.ipynb\"\n puts \"... Converting #{fn} to #{ipynb_fn}\"\n # Create the file\n f = File.open(ipynb_fn, 'w')\n f.write JSON.pretty_generate(out)\n f.close\n end\n puts \"Done!\"\n end",
"def transform files\n files.map do |f|\n v = nil\n if !f.is_a?(CDR::File)\n v = CDR::File.new(f,search: true)\n v.unzip! if v.zip?\n else\n v = f\n end\n v\n end\n end",
"def redact_files\n setup_output\n Dir.glob(@dir.to_s + '/*.gz').each do |p|\n redact_file(p)\n end\n end",
"def erb_convert\n files = `find app/views -name '*.rhtml'`\n file_ary = files.split(\"\\n\")\n file_ary.each do |file|\n new_file = file.gsub(\"rhtml\", \"html.erb\") \n FileUtils.mv file, new_file\n end\n puts \"#{file_ary.length} files converted.\"\n puts \"Don't forget to update your scm with the new files!\"\n end",
"def process_and_convert\n all_locales_filenames = @locales_array.map { |file| File.basename(file) }\n\n locale_patterns = all_locales_filenames.map { |filename| filename.scan(/\\..*\\./) }.uniq.flatten\n # e.g. => [\".en.\", \".cs.\", ...]\n\n locale_patterns.each do |pattern|\n @locale = pattern[1...-1]\n locale_files = @locales_array.select { |filename| File.basename(filename).include?(pattern) }\n\n # join contents of locale_files to one hash of variables\n variables_hash = yaml_files_to_hash(locale_files)\n # process these variables\n processed_vars = process(variables_hash.to_yaml)\n\n # now process tokens and make new files from them\n @tokens_array.each do |token_file|\n @file = File.basename(token_file)\n\n output_filename_base = \"#{File.basename(token_file, File.extname(token_file))}.#{@locale}\"\n yaml_file = \"#{@yaml_target}/#{output_filename_base}.yml\"\n json_file = \"#{@json_target}/#{output_filename_base}.json\"\n\n template = { @locale => yaml_files_to_hash([token_file]) }\n\n processed_locale = process_template(template.to_yaml, processed_vars)\n processed_locale = processed_locale.gsub(\"---\\n\", '')\n File.open(yaml_file, 'w') { |f| f.write processed_locale }\n\n json_locale = convert_to_json(yaml_file)\n File.open(json_file, 'w') { |f| f.write json_locale }\n end\n end\n\n rescue Liquid::SyntaxError, Psych::SyntaxError, Liquid::UndefinedVariable, Liquid::UndefinedFilter => e\n file = @file ? \"(file: #{@file}) \" : ''\n raise LiqamlError.new(\"Error for locale '#{@locale}'#{file}- #{e}\")\n end",
"def convert(path)\n if path.file?\n convert_file! path\n return\n end\n\n path.children.each do |child|\n if self.should_convert_all\n convert(child)\n next\n end\n\n cli.choose do |menu|\n menu.prompt = \"What to do next?\"\n menu.choice(:convert, \"Convert #{child}\") do\n convert(child)\n end\n\n menu.choice(:exit) do\n raise ExitError\n end\n\n menu.choice(:convert_all) do\n self.should_convert_all = true\n convert(child)\n end\n end\n end\n end",
"def convert( srcdir = SRCDIR, dstdir = DSTDIR )\n list( srcdir ).each do |old|\n puts \"for <#{old}>\"\n #\n tmp = \"\"\n newfn = nil\n\n # \"wiki/A5D7A5EAA5ADA5E5A5A2.txt\" => \"A5D7A5EAA5ADA5E5A5A2\"\n #\n #\n tmp = File.basename( old, SRCEXT )\n\n # \"A5D7A5EAA5ADA5E5A5A2\" (EUC-coded string)\n # => \"プリキュア\" (string in UTF-8)\n #\n tmp = conv_fn2utf8( tmp )\n\n # escape for file name convention (in HFS+).\n tmp = Regexp.escape( tmp ) # paren, $, etc.\n #tmp = tmp.gsub(':', '\\:') # no needs to escape with colon.\n tmp = tmp.gsub('\\\\ ', ' ') # unescape with spaces.\n tmp = tmp.gsub('\\\\.', '.') # unescape with periods.\n tmp = tmp.gsub('\\\\-', '-') # unescape with hyphens.\n tmp = tmp.gsub('\\\\(', '(') # unescape with parens(open).\n tmp = tmp.gsub('\\\\)', ')') # unescape with parens(close).\n\n\n # Create directories when slashes are contained in filename.\n # (We cannot use slashes in HFS+)\n #\n if tmp.match('/')\n # # If you want flat filename, substitute it with some string.\n # # In this case, digging directories should not be done.\n # #\n # tmp.gsub('/', \"%2F\")\n\n # dig directories.\n puts \" create dirs for #{tmp}.\"\n tmpdir = dstdir+\"/\"+File.dirname( tmp )\n FileUtils.makedirs( tmpdir )\n if not(Dir.exists?( tmpdir ))\n raise \"Cannot create dir\"\n else\n puts \" Dir #{tmpdir} is created.\"\n end\n\n newfn = File.basename( tmp ) # filename w/o directory and ext.\n end\n\n if not( newfn )\n newfn = dstdir + \"/\" + tmp + DSTEXT\n else\n newfn = dstdir + \"/\" + File.dirname( tmp ) + \"/\" + newfn + DSTEXT\n end\n\n\n puts \" Processing #{newfn}...\"\n #`cp -f #{old} #{newfn}`\n `echo \"# #{newfn}\" > \"#{newfn}\"`\n #`iconv -f #{SRCCPS} -t #{DSTCPS} #{old} >> \"#{newfn}\"`\n `iconv -f #{SRCCPS} -t #{DSTCPS} #{old} | ./nf-pw2md.rb >> \"#{newfn}\"`\n puts \" Done.\"\n\n end\nend",
"def reformatFiles(theArgs)\n\n\t# Get the state we need\n\tpathConfig = getConfig(theArgs);\n\ttheFiles = getFiles( theArgs);\n\n\n\n\t# Reformat the files\n\ttheFiles.each do |theFile|\n\t\treformatFile(theArgs, theFile)\n\tend\n\t\n\tFileUtils.rm_rf(pathConfig);\n\nend",
"def transform_pages(dir = '')\n base = File.join(self.source, dir)\n entries = Dir.entries(base)\n entries = entries.reject { |e| ['.', '_'].include?(e[0..0]) }\n \n entries.each do |f|\n if File.directory?(File.join(base, f))\n transform_pages(File.join(dir, f))\n else\n first3 = File.open(File.join(self.source, dir, f)) { |fd| fd.read(3) }\n \n if first3 == \"---\"\n page = Page.new(self.source, dir, f)\n page.add_layout(self.layouts, site_payload)\n page.write(self.dest)\n else\n FileUtils.mkdir_p(File.join(self.dest, dir))\n FileUtils.cp(File.join(self.source, dir, f), File.join(self.dest, dir, f))\n end\n end\n end\n end",
"def convert\n @assets.each do |asset|\n # Convert asset multiple times if more than one converter is found\n finished = false\n while finished == false\n # Find a converter to use\n klass = JAPR::Converter.subclasses.select do |c|\n c.filetype == File.extname(asset.filename).downcase\n end.last\n\n # Convert asset if converter is found\n if klass.nil?\n finished = true\n else\n begin\n # Convert asset content\n converter = klass.new(asset)\n\n # Replace asset content and filename\n asset.content = converter.converted\n asset.filename = File.basename(asset.filename, '.*')\n\n # Add back the output extension if no extension left\n if File.extname(asset.filename) == ''\n asset.filename = \"#{asset.filename}#{@type}\"\n end\n rescue Exception => e\n puts \"Asset Pipeline: Failed to convert '#{asset.filename}' \" \\\n \"with '#{klass}': #{e.message}\"\n raise e\n end\n end\n end\n end\n end",
"def rnformat\n\n\t# Get the state we need\n\tcheckTools();\n\t\n\ttheArgs = getArguments();\n\n\n\n\t# Reformat the files\n\treformatFiles(theArgs);\n\nend",
"def transform_links_in_files files\n files.each do |filename|\n text = File.read filename\n content = transform_links_in_text text\n File.open(filename, \"w\") { |file| file << content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
dumps the translation strings into yml files | def dump_yaml!(filepath,translations)
return if translations.blank?
FileUtils.mkdir_p(@dirpath) unless File.exists?(@dirpath)
File.open(filepath, 'w+') { |f| YAML::dump(translations, f) }
end | [
"def dump_yaml!\n FileUtils.mkdir_p LOCALE_DIR\n File.open(template_file,'w+'){ |f| YAML::dump(@translations, f) } \n end",
"def dump_yaml!\n FileUtils.mkdir_p LOCALE_DIR\n File.open(STANDARD_LOCALE_FILE,'w+') { |f| YAML::dump(@translations, f) } \n end",
"def yml_to_po\r\nDir.glob(Rails.root.join('po', self.locale_code, '*.po')).each do |t|\r\n @trans_file = File.basename(t)\r\n my_file = Rails.root.join('po', self.locale_code, \"#{@trans_file}_trans.yml\")\r\n msgid\t = { @trans_file => {} }\r\n if File.exist? my_file\r\n msgid_temp = YAML.load(File.open(my_file, 'r'))\r\n if msgid_temp\r\n msgid_temp.each do |l, ldata|\r\n mmsgid = l.gsub(/\\n/, \"\\\\n\").gsub(/\\r/,'')\r\n msgid[@trans_file][mmsgid] = ldata.gsub(/\\r/,'')\r\n end\r\n end\r\n self.generate_po_files(msgid)\r\n end\r\nend\r\nend",
"def save!\n FileUtils.mkdir_p File.dirname(self.file)\n\n File.open(self.file, \"w+\") do |f|\n f << %(#{self.namespace}.translations || (#{self.namespace}.translations = {});\\n)\n self.translations.each do |locale, translations_for_locale|\n f << %(#{self.namespace}.translations[\"#{locale}\"] = #{translations_for_locale.to_json};\\n);\n end\n end\n end",
"def save!\n FileUtils.mkdir_p File.dirname(self.file)\n\n File.open(self.file, \"w+\") do |f|\n f << %(#{self.namespace}.translations || (#{self.namespace}.translations = {});\\n)\n self.translations.each do |locale, translations|\n f << %(#{self.namespace}.translations[\"#{locale}\"] = #{print_json(translations)};\\n);\n end\n end\n end",
"def po_to_yml\r\nAppLocale.po_files(self.locale_code).each do |t|\r\n @trans_file = File.basename(t)\r\n @gettext_trans = true\r\n @gettext_untrans = @gettext_fuzzy = @gettext_obsol = false\r\n @translation = self.gettext_fct(\"trans\")[:translation]\r\n @translation.each do |file, data|\r\n yml_gettext = Rails.root.join('po', self.locale_code, \"#{file}_trans.yml\")\r\n File.open(yml_gettext, \"w+\") do |f|\r\n YAML.dump(data.inject({}) {|acc,v| acc[v[0]] = v[1] ; acc}, f)\r\n end\r\n end\r\n @gettext_untrans = true\r\n @gettext_trans = @gettext_fuzzy = @gettext_obsol = false\r\n @translation = self.gettext_fct(\"untrans\")[:translation]\r\n @translation.each do |file, data|\r\n yml_gettext = Rails.root.join('po', self.locale_code, file + '_' + 'untrans.yml')\r\n File.open(yml_gettext, \"w+\") do |f|\r\n YAML.dump(data.inject({}) {|acc,v| acc[v[0]] = v[1] ; acc}, f)\r\n end\r\n end\r\n @gettext_fuzzy = true\r\n @gettext_untrans = @gettext_trans = @gettext_obsol = false\r\n @translation = self.gettext_fct(\"fuzzy\")[:translation]\r\n @translation.each do |file, data|\r\n yml_gettext = Rails.root.join('po', self.locale_code, file + '_' + 'fuzzy.yml')\r\n File.open(yml_gettext, \"w+\") do |f|\r\n YAML.dump(data.inject({}) {|acc,v| acc[v[0]] = v[1] ; acc}, f)\r\n end\r\n end\r\n @gettext_obsol = true\r\n @gettext_untrans = @gettext_trans = @gettext_fuzzy = false\r\n @translation = self.gettext_fct(\"obsol\")[:translation]\r\n @translation.each do |file, data|\r\n yml_gettext = Rails.root.join('po', self.locale_code, file + '_' + 'obsol.yml')\r\n File.open(yml_gettext, \"w+\") do |f|\r\n YAML.dump(data.inject({}) {|acc,v| acc[v[0]] = v[1] ; acc}, f)\r\n end\r\n end\r\nend\r\nend",
"def dump\n files = Dir[Rails.root.join(\"config\", \"locales\", \"*.yml\").to_s]\n delete_all unless Interpret.soft\n\n records = []\n files.each do |f|\n ar = YAML.load_file f\n locale = ar.keys.first\n records += parse_hash(ar.first[1], locale)\n end\n\n # TODO: Replace with activerecord-import bulk inserts\n transaction do\n records.each {|x| x.save(:validate => false)}\n end\n end",
"def write_file(lang, yaml_content)\n tmp_dir = File.join(I18nParserConfig.root,'tmp')\n FileUtils.mkdir_p(tmp_dir) unless File.exists?(tmp_dir)\n tmp_yaml_path = File.join(tmp_dir, lang + '_tmp.yml')\n yaml_file = File.open(tmp_yaml_path, 'w+')\n yaml_file.write(\"---\\n\")\n yaml_content.each do |lang, content| \n yaml_file.write(lang + \":\\n\")\n content.each do |key, value|\n yaml_file.write(\" %s: \\\"%s\\\"\\n\" % [key,value])\n end\n end\n yaml_file.close\n lang_path = File.join( I18nParserConfig.root, I18nParserConfig.lang_dir, lang + '.yml')\n FileUtils.mv(tmp_yaml_path,lang_path)\n end",
"def generate_yaml(locale_name, translations)\n yaml = YamlDocument.new(\"config/locales/translation_#{locale_name}.yml\", locale_name)\n each_value [], translations do |parents, value|\n node = parents.inject(yaml[locale_name]) {|node, parent| node[parent]}\n node.value = value\n end\n yaml\n end",
"def save(result, dpath)\n result.each do |key, language_hash| \n path = \"#{dpath}/#{key}.lproj\"\n Dir.mkdir(path) unless File.directory? path\n fout = File.open(\"#{path}/messages.strings\", 'w') \n\n language_hash.each do |key, string|\n text = \"\\\"#{key}\\\" = \\\"#{string}\\\";\\n\"\n fout.puts text\n end\n\n fout.close()\n end\nend",
"def write_to_file keys\n init_translations_and_ignore_app_mode_file_dump if self.class.mode == :origin\n # Hash to capture the files updated on origin mode and the keys for each one\n result = {}\n keys.each do |key, value|\n #\n # Search the files where the translation will be applied to\n decide_filenames(key).each do |filename| \n (result[filename] ||= []) << key\n # Apply the current translation to the filenames\n #\n # It will save a key 'ubiquo.categories.index.title' with a value 'Title'\n # mergin the content of $filename with it\n #\n # Load the file\n hash = YAML.load_file(filename)\n # Morph the translation key\n # from: 'ubiquo.categories.index.title'\n # to: { :ubiquo => {\n # :categories => {\n # :index => {\n # :title => 'Title'\n # }\n # }\n # }\n # }\n # }\n branch_hash = Translate::Keys.to_deep_hash({key => value})\n #\n # Cast all the hash keys to String\n #\n branch_hash = Translate::File.deep_stringify_keys({self.locale => branch_hash})\n #\n # Merge the translation with the content of the file\n #\n #\n hash.deep_merge!(branch_hash)\n #\n # Save to file updated to disk\n Translate::File.new(filename).write(hash)\n end \n end\n result\n end",
"def copy_translate_file\n copy_file \"../../../config/locales/devise.en.yml\", \"config/locales/devise.en.yml\"\n end",
"def load_translations\n @yaml_backend.load_translations\n end",
"def load_translations\n self.yaml_backend.load_translations\n end",
"def missing_translations_to_yaml\n missing_translations_to_hash.deep_stringify_keys.ya2yaml\n end",
"def write_locale(directory, file, strings, plurals, locale, date); end",
"def output_config_for_locale(locale)\n save_config_file(locale, \"config\")\n save_config_file(locale, \"faq\")\n save_config_file(locale, \"form\")\nend",
"def create_yml_file_for_locale_mission(main_language, id, title, instructions)\n File.open(yml_path(main_language), 'w+') do |file|\n write_locale_file_headers(file, main_language)\n write_mission_to_locale(file, id, instructions, title)\n end\nend",
"def po_to_json\n files = read_dir(@config['paths']['to_json']['src'])\n\n files.each do |file|\n # Setup, read file\n _lang = File.basename(file, '.po')\n _file = File.open(file, \"r:iso-8859-1:utf-8\")\n _content = _file.read\n\n # Parse translations\n _translation = _content.scan(/^msgid \\\"([^\\n]+)\\\"\\n^msgstr \\\"([^\\n]+)\\\"$/)\n\n # Transform to JSON and return\n _json = { _lang => {} }\n _translation.each { |key,value| _json[_lang][key] = value }\n \n # Save file\n save_file(_json.to_json, _lang +\".json\", @config['paths']['to_json']['dst'])\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
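A minimal stand-alone sketch of the YAML-dump pattern in the record above, assuming a config/locales target directory, an en.yml filename, and a sample translations hash (none of which come from the record itself):

    require 'yaml'
    require 'fileutils'

    # Hypothetical inputs: a nested locale hash and a path under config/locales.
    translations = { 'en' => { 'app' => { 'greeting' => 'Hello' } } }
    dirpath  = File.join('config', 'locales')
    filepath = File.join(dirpath, 'en.yml')

    # Same shape as the record's dump_yaml!: skip blank input, create the
    # directory if needed, then serialize the hash into the YAML file.
    unless translations.empty?
      FileUtils.mkdir_p(dirpath) unless File.exist?(dirpath)
      File.open(filepath, 'w+') { |f| YAML.dump(translations, f) }
    end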
Render the html rows for the given records and an optional named partial. Returns HTML string or nil | def render_response_html(records, partial: 'row', locals: {} )
output = render_to_string(partial: partial, locals: { records: records }.merge(locals))
# if the query has no results, it will return a string which causes jquery to crash
output = nil unless records.any?
output
end | [
"def render_record_from_config(record, renderer = record_select_config.label)\n case renderer\n when Symbol, String\n # return full-html from the named partial\n render :partial => renderer.to_s, :locals => {:record => record}\n\n when Proc\n # return an html-cleaned descriptive string\n h renderer.call(record)\n end\n end",
"def render_record_from_config(record, renderer = record_select_config.label)\n case renderer\n when Symbol, String\n # return full-html from the named partial\n render :partial => renderer.to_s, :locals => {:record => record}\n\n when Proc\n # return an html-cleaned descriptive string\n instance_exec record, &renderer\n end\n end",
"def render_record(record)\n render_records(record)\n end",
"def collection_index_display(column_titles, items, item_name, row_partial, row_partial_local_opts = {}, pagination_opts = {})\n src = ''\n src << \"<table id=\\\"#{item_name.downcase.pluralize}\\\" class='data'>\"\n src << '<thead>'\n #src << pagination_display(items, item_name, column_titles.size, pagination_opts) unless items.nil?\n src << '<tr>'\n for title in column_titles do\n src << \"<th>#{title}</th>\"\n end\n src << '</tr>'\n src << '</thead>'\n if (!items.blank?) then\n src << '<tbody>'\n src << render(:partial => row_partial, :collection => items, :locals => row_partial_local_opts)\n src << '</tbody>'\n \n src << '<tfoot>'\n #src << pagination_display(items, item_name, column_titles.size, pagination_opts)\n src << '</tfoot>'\n end\n src << '</table>'\n src\n end",
"def render_table(arel)\n return if arel.first.nil?\n partial :table, :locals => {:model => arel.first.class, :arel => arel}\n end",
"def render_row(files)\n rendered_files = files.collect {|file| render_post(file)}.join(\"\\n\")\n if split_rows\n Html::Div.new(:class => 'row') << rendered_files\n else\n rendered_files\n end\n end",
"def modify_submission_records_for_html!(records, columns = nil)\n columns = submission_result_indices(columns)\n state_idx = SUBMISSION_COLUMNS.index(:state)\n data_idx = SUBMISSION_COLUMNS.index(:emma_data)\n records.map! { |record|\n # Skip evaluation of the first (schema) row.\n unless record.first == 'id'\n next if submission_incomplete?(record[state_idx])\n next if submission_bogus?(record[data_idx])\n end\n record.values_at(*columns)\n }.compact!\n records\n end",
"def render_no_records\n render partial: 'utility/no_records_found'\n end",
"def render_holdings_table(holdings_records)\n\n unless holdings_records.empty?\n render_holdings_table = \"\"\n\n bjl_items_records = []\n kdl_items_records = []\n map_items_records = []\n wise_items_records = []\n blaydes_items_records = []\n chemistry_items_records = [] \n other_records = [] \n\n bjl_items_records = holdings_records.select { |record| record.local_location.downcase.include? \"bjl\" }\n kdl_items_records = holdings_records.select { |record| record.local_location.downcase.include? \"kdl\" }\n wise_items_records = holdings_records.select { |record| record.local_location.downcase.include? \"wise\" }\n map_items_records = holdings_records.select { |record| record.local_location.downcase.include? \"map\" }\n blaydes_items_records = holdings_records.select { |record| record.local_location.downcase.include? \"blaydes\" }\n chemistry_items_records = holdings_records.select { |record| record.local_location.downcase.include? \"chemistry\" }\n\n #All others...\n holdings_records.each do |record|\n unless((record.local_location.downcase.include? \"bjl\") || (record.local_location.downcase.include? \"kdl\") || (record.local_location.downcase.include? \"wise\") || (record.local_location.downcase.include? \"map\") || (record.local_location.downcase.include? \"blaydes\") || (record.local_location.downcase.include? \"chemistry\")) then\n other_records << record\n end\n end \n\n #BJL Ground Floor Reference (R) \tG 1818 C93 P5 \tLIB USE ONLY\n #location_list = {\"bjl\" => \"Brymor Jones Library\", \"kdl\" => \"Keith Donaldson Library\"}\n\n # Playing more generic solutions...\n # availability = {}\n # location_list.each do |key,value|\n # holdings_records.each do |record|\n # if record.local_location.downcase.include? key.downcase\n # \n # availability.merge!( Hash[ key => [:name => value, :list =>[ holdings_records.index(record) => [record.local_location, record.call_number, record.availability]]]])\n # end \n # end\n # end\n \n\n render_holdings_table << holdings_table(bjl_items_records, \"Brynmor Jones Library\") << holdings_table(kdl_items_records, \"Keith Donaldson Library\") << holdings_table(wise_items_records, \"Wilberforce Institute\") << holdings_table(map_items_records, \"Map Library\") << holdings_table(blaydes_items_records, \"Blaydes house\") << holdings_table(chemistry_items_records, \"Chemistry Department\") << holdings_table(other_records, \"Library / Department Only\")\n \n return render_holdings_table.html_safe\n end\n end",
"def html_db_record(fields, row: nil, start_col: 1, **opt)\n css = '.database-record'\n fields = :empty if fields.blank?\n type =\n case fields\n when Array then :array\n when Hash then :hierarchy\n when Symbol then fields\n end\n first = opt.delete(:first) || start_col\n last = opt.delete(:last)\n # noinspection RubyMismatchedArgumentType\n last ||= first + [(fields.size - 1 if type == :array).to_i, 0].max\n classes = []\n classes << \"row-#{row}\" if row\n classes << 'row-first' if row && (row == first)\n classes << 'row-last' if row && (row == last)\n rec_opt = prepend_css(opt, css, type, classes)\n html_div(rec_opt) do\n if type == :array\n opt[:first] = first\n opt[:last] = last\n fields.map.with_index(start_col) do |field, col|\n html_db_column(field, row: row, col: col, **opt)\n end\n elsif type != :empty\n html_db_column(fields, row: row, **opt)\n end\n end\n end",
"def render\n renderer.render(cells)\n end",
"def format_rows_only_report(req, dims, rows, show_sql)\n # Format the select by and order by list\n facts = []\n outer_select_list, inner_select_list, group_by_list, order_by_list, format_list, ctrl_brk_list = \n format_select_group_order_by_list(\"\", rows, facts, false)\n \n # Format the from list (each table only once)\n from_list = format_from_list(\"\", dims, rows, [], facts)\n\n # Format the where clause\n where_list = format_where_clause(\"\", dims, facts)\n\n # Format the display and executable SQL\n display_sql, execute_sql = \n format_the_SQL_for_display_and_execute(\n outer_select_list, inner_select_list, from_list, where_list, group_by_list, order_by_list)\n\n # Display the SQL if it's wanted\n if show_sql == \"true\"\n @get += display_sql + \"<br><hr>\"\n end\n # Execute the SQL and display the returned result set\n if @dont_execute_sql == \"false\" then\n begin\n @get += report_title(req, dims)\n @get += execute_and_display_results(\"Row\", execute_sql, format_list, ctrl_brk_list, rows, facts)\n rescue => ex\n @get += \"Unable to retrieve requested data.<br>\" + build_query_error_message(ex)\n end\n end\n # @get += \"<br>Control breaks: #{ctrl_brk_list.to_s}\"\n end",
"def display_errors_for(record)\n render :partial => \"site/display_errors_for\", :locals => {:record => record}\n end",
"def render_partial(*args, **kwargs); end",
"def render_holdings_html_partial(library_item)\n unless library_item.holdings_records_collection.nil?\n content_tag(:div, :class => \"holdings\") do\n render :partial => 'library_items/holdings_table'\n end\n end\n end",
"def actings_for(record)\n returning '' do |html|\n record.acting_roles.each do |role|\n view_path = record.class.acting_view_path(role)\n if File.exists?(view_path)\n html << @template.render(:file => view_path, :locals => {:object => record, :record => record})\n end\n end\n end\n end",
"def pagination_display(items, item_name, columns, paginate_opts)\n # show a row before the column header row, which contains a \"wordy total\" of the displayed results\n #(note: we don't include the %head in this partial, because it needs to be put in the calling file)\n src = ''\n src << \"<tr>\"\n # try to go half_way (in the case of odd column totals, this will yeild the integer closest to half way)\n half_way = (columns.to_i)/2\n src << \"<th colspan = '#{half_way}'>\"\n src << (will_paginate(items, paginate_opts) || \"\")\n src << \"</th>\"\n src << \"<th colspan = '#{half_way + columns.to_i.remainder(2)}', style = 'text-align: right'>\"\n src << page_entries_info(items, :entry_name => item_name)\n src << \"</th>\"\n src << \"</tr>\"\n src\n end",
"def render_simple_index(aggregation, options = {})\n return \"<p>No Records Found.</p>\" if aggregation[0].class.name == 'NilClass'\n options.symbolize_keys!\n options[:exclude] ||= []\n options[:ignored_foreign_keys] ||= []\n if options[:prefix]\n options[:prefix] += '_'\n else\n options[:prefix] = ''\n end\n linkcols = 0\n [options[:display_show], options[:display_edit], options[:display_destroy]].each do |opt|\n opt = true if opt.nil?\n linkcols += 1 if opt == true\n end\n options[:display_show] = true if options[:display_show].nil?\n options[:display_edit] = true if options[:display_edit].nil?\n options[:display_destroy] = true if options[:display_destroy].nil?\n haml_tag :table do\n haml_tag :thead do\n aggregation[0].class.columns.each do |c|\n next if options[:exclude].include? c.name.to_sym\n haml_tag :th do\n haml_concat h(c.name.humanize)\n end\n end\n linkcols > 0 ? haml_tag(:th, \"Options\", {:colspan => linkcols}) : nil\n end\n\n haml_tag :tbody do\n aggregation.each do |instance|\n haml_tag :tr do\n instance.class.columns.each do |c|\n haml_tag :td do\n if ((c.name.split('_')[-1] == 'id' && c.name != 'id') && (!options[:ignored_foreign_keys].include?(c.name.to_sym)))\n # handle foreign key fields\n foreign_model = (options[:prefix].to_s + c.name.gsub(/_id$/, '')).camelize.constantize\n if foreign_model.column_names.include?('name')\n haml_concat h(instance.__send__(foreign_model.to_s.underscore).name)\n elsif foreign_model.column_names.include?('description')\n haml_concat h(instance.__send__(foreign_model.to_s.underscore).description)\n else\n haml_concat h(instance.__send__(c.name))\n end\n else\n haml_concat h(instance.__send__(c.name))\n end # big if\n end #td\n end #columns.each\n if options[:display_show]\n haml_tag :td do\n haml_concat link_to('Show', url_for(instance))\n end\n end\n if options[:display_edit]\n haml_tag :td do\n haml_concat link_to('Edit', url_for(instance) + \"/edit\")\n end\n end\n if options[:display_destroy]\n haml_tag :td do\n haml_concat link_to('Destroy', url_for(instance), :confirm => \"Are You Sure?\", :method => :delete)\n end\n end\n end #tr\n end #aggregation\n end #tbody\n end #table\n end",
"def render_for_webquery( statement, group_by_columns )\n\n report_state_hash = YAML.load(self.view_state)\n conn = User.connection\n recordset = conn.select_all(Globals.cleanup_where(statement))\n\n if group_by_columns.nil? || group_by_columns.empty?\n if report_state_hash[:columns_list].nil? || report_state_hash[:columns_list].empty?\n if recordset.empty?\n keys = ['No records found']\n else\n keys = recordset[0].keys\n end\n else\n # Use the provided list of columns.\n keys = report_state_hash[:columns_list].map {|k| k.gsub('\"', '').strip }\n end\n else\n keys = group_by_columns\n # Include the sum or count columns.\n unless recordset.empty?\n extra_cols = recordset[0].keys - keys\n keys += extra_cols\n end\n end\n\n s = \"<table><tr><th>#{keys.join('</th><th>')}</th></tr>\"\n recordset.each do |record|\n s << '<tr>'\n keys.each do |k|\n s << \"<td>#{format_for_spreadsheet(record[k])}</td>\"\n end\n s << '</tr>'\n end\n s << '</table>'\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
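A hedged sketch of how a controller action might drive the render_response_html helper in the record above; the Widget model, the query, and the 'row' partial are illustrative assumptions:

    # Hypothetical Rails controller: render the matching rows as an HTML
    # fragment for an AJAX table refresh, or answer 204 when there are none.
    class WidgetsController < ApplicationController
      def index
        records = Widget.where('name LIKE ?', "%#{params[:q]}%")
        html = render_response_html(records, partial: 'row', locals: { compact: true })

        if html
          render html: html.html_safe
        else
          head :no_content  # helper returned nil because no records matched
        end
      end
    end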
TeleSign RestClient, useful for making generic RESTful requests against the API. +customer_id+ Your customer_id string associated with your account. +api_key+ Your api_key string associated with your account. +rest_endpoint+ (optional) Override the default rest_endpoint to target another endpoint. +timeout+ (optional) How long to wait for the server to send data before giving up, as a float. | def initialize(customer_id,
api_key,
rest_endpoint: 'https://rest-api.telesign.com',
proxy: nil,
timeout: 10)
@customer_id = customer_id
@api_key = api_key
@rest_endpoint = rest_endpoint
@http = Net::HTTP::Persistent.new(name: 'telesign', proxy: proxy)
unless timeout.nil?
@http.open_timeout = timeout
@http.read_timeout = timeout
end
end | [
"def get_testrail_api(credentials)\n client = TestRail::APIClient.new(TESTRAIL_URL)\n client.user = credentials[\"testrail_username\"]\n client.password = credentials[\"testrail_password\"]\n\n return client\n end",
"def api\n RestClient::Resource.new(@delicious[:api],\n @delicious[:username],\n @delicious[:password])\n end",
"def rest(method, uri)\n log_level = @options['log_level'] == :warn ? :error : @options['log_level'].to_sym # Default to :error\n client_setup('log_level' => log_level)\n uri_copy = uri.dup\n uri_copy.prepend('/') unless uri_copy.start_with?('/')\n if @options['data']\n begin\n data = { body: JSON.parse(@options['data']) }\n rescue JSON::ParserError => e\n fail_nice(\"Failed to parse data as JSON\\n#{e.message}\")\n end\n end\n data ||= {}\n response = @client.rest_api(method, uri_copy, data)\n if response.code.to_i.between?(200, 299)\n case @options['format']\n when 'yaml'\n puts JSON.parse(response.body).to_yaml\n when 'json'\n puts JSON.pretty_generate(JSON.parse(response.body))\n else # raw\n puts response.body\n end\n else\n body = JSON.pretty_generate(JSON.parse(response.body)) rescue response.body\n fail_nice(\"Request failed: #{response.inspect}\\nHeaders: #{response.to_hash}\\nBody: #{body}\")\n end\n rescue OneviewSDK::InvalidRequest => e\n fail_nice(e.message)\n end",
"def build_rest(rest_resource, rest_action, rest_payload = nil, rest_auth_type = 'basic', rest_verify_ssl = false)\n # set rest url\n rest_url = URI.join(@rest_base_url, rest_resource).to_s\n log(:info, \"Used rest_base_url: <#{@rest_base_url}>, and rest_resource: <#{rest_resource}>, to generate rest_url: <#{rest_url}>\")\n\n # set params for api call\n params = {\n :method => rest_action,\n :url => rest_url,\n :verify_ssl => rest_verify_ssl,\n :headers => {\n :content_type => @rest_content_type,\n :accept => @rest_return_type\n }\n }\n\n # set the authorization header based on the type requested\n if rest_auth_type == 'basic'\n params[:headers][:authorization] = \"Basic #{Base64.strict_encode64(\"#{@rest_api_user}:#{@rest_api_password}\")}\"\n else\n #\n # code for extra rest_auth_types goes here. currently only supports basic authentication\n #\n end\n\n # generate payload data\n if @rest_content_type.to_s == 'json'\n # generate our body in JSON format\n params[:payload] = JSON.generate(rest_payload) unless rest_payload.nil?\n else\n # generate our body in XML format\n params[:payload] = Nokogiri::XML(rest_payload) unless rest_payload.nil?\n end\n\n # get the rest_response and set it on the parent object\n rest_results = execute_rest(rest_url, params)\n end",
"def initialize(access_token, client_options={}, base_url='https://api.icmobile.singlewire.com/api/v1-DEV', path=nil, resource=nil)\n @access_token = access_token.freeze or raise ArgumentError, 'must pass :access_token'\n @base_url = base_url.freeze or raise ArgumentError, 'must pass :base_url'\n @path = path.freeze\n @resource = resource || RestClient::Resource.new(base_url, {:headers => {\n :authorization => \"Bearer #{access_token}\",\n :x_client_version => 'RubyClient 0.0.1'\n }}.merge(client_options || {}))\n end",
"def api_call(endpoint)\n url = config[:nomad] + endpoint\n headers = {}\n if config[:token]\n headers['X-Nomad-Token'] = config[:token]\n end\n\n begin\n response = RestClient.get(url, headers)\n rescue RestClient::ExceptionWithResponse => e\n critical \"Error #{e.http_code}: #{e.response}\"\n rescue => e\n critical \"Unable to connect to Nomad: #{e}\"\n else\n begin\n return JSON.parse(response)\n rescue => e\n critical \"Unable to parse JSON in response: #{e}\"\n end\n end\n end",
"def initialize_api_client(options = {})\n require 'right_api_client'\n\n options = {\n rl10: true,\n timeout: 20 * 60\n }.merge options\n\n client = RightApi::Client.new(options)\n client.log(Chef::Log.logger)\n client\n end",
"def api_client\n @api_client ||= ::Lokalise.client LokaliseRails.api_token, {enable_compression: true}.merge(LokaliseRails.timeouts)\n end",
"def initialize(key, logger = nil, options = {})\n @key = key\n @logger = logger\n @api = RestClient::Resource.new('https://api.octopus.energy/v1/', options.merge({ user: @key, password: '' }))\n end",
"def rest_client_get(api_call)\n response = nil\n with_retries(max_tries: 3, rescue: RestClient::Exception,\n base_sleep_seconds: 3.0, max_sleep_seconds: 10.1) do\n response = RestClient.get api_call\n fail RestClient::Exception if response.nil?\n end\n return response\n end",
"def http_client\n Faraday.new(url: API_URL, request: { timeout: 5, open_timeout: 5 }) do |conn|\n conn.request :oauth2, token, token_type: :bearer\n conn.response :json\n\n conn.adapter Faraday.default_adapter\n end\n end",
"def build_rest_client(url)\n RestClient::Resource.new(URI.encode(url), :verify_ssl => OpenSSL::SSL::VERIFY_NONE)\n end",
"def load_rest_client(rest_client = RestClient.new)\n @rest_client ||= rest_client\n end",
"def make_request(method, endpoint, payload={})\n params = [\"#{API_URL_BASE}/#{endpoint}\", payload, @request_header]\n begin\n JSON.parse RestClient.send(method.to_sym, *params.reject { |p| p.empty? })\n rescue RestClient::Exception => e\n return { \"error\" => e.message }\n end\n end",
"def test_client\n TflApi::Client.new(app_id: 12345, app_key: 6789, host: 'https://somehost', log_location: '/dev/null')\n end",
"def make_api_call(method, url, token, params = nil, payload = {}, custom_headers=nil)\n\n conn_params = {\n url: @api_host % { version: @version }\n }\n\n if @enable_fiddler\n conn_params[:proxy] = 'http://127.0.0.1:8888'\n conn_params[:ssl] = {:verify => false}\n end\n\n conn = Faraday.new(conn_params) do |faraday|\n # Uses the default Net::HTTP adapter\n faraday.adapter Faraday.default_adapter\n faraday.response :logger if @debug\n end\n\n conn.headers = {\n 'Authorization' => \"Bearer #{token}\",\n 'Accept' => \"application/json\",\n\n # Client instrumentation\n # See https://msdn.microsoft.com/EN-US/library/office/dn720380(v=exchg.150).aspx\n 'User-Agent' => @user_agent,\n 'client-request-id' => SecureRandom.uuid,\n 'return-client-request-id' => \"true\"\n }\n\n if custom_headers && custom_headers.class == Hash\n conn.headers = conn.headers.merge( custom_headers )\n end\n \n case method.upcase\n when \"GET\"\n response = conn.get do |request|\n request.url url, params\n end\n when \"POST\"\n conn.headers['Content-Type'] = \"application/json\"\n response = conn.post do |request|\n request.url url, params\n request.body = JSON.dump(payload)\n end\n when \"PATCH\"\n conn.headers['Content-Type'] = \"application/json\"\n response = conn.patch do |request|\n request.url url, params\n request.body = JSON.dump(payload)\n end\n when \"DELETE\"\n response = conn.delete do |request|\n request.url url, params\n end\n end\n\n if response.status >= 300\n error_info = if response.body.empty?\n ''\n else\n begin\n JSON.parse( response.body )\n rescue JSON::ParserError => _e\n response.body\n end\n end\n return JSON.dump({\n 'ruby_outlook_error' => response.status,\n 'ruby_outlook_response' => error_info })\n end\n\n response.body\n end",
"def callAPI(endpoint, params = {})\n\t\t\tif !@api_client\n\t\t\t\t@api_client = ApiClient.new()\n\t\t\tend\n\n\t\t\tres = @api_client.call(@username, @api_key, endpoint, params)\n\n\t\t\t# Set up the return value\n\t\t\tretval = res['data']\n\n\t\t\t# Update the rate limits from the headers\n\t\t\t@rate_limit = res['rate_limit']\n\t\t\t@rate_limit_remaining = res['rate_limit_remaining']\n\n\t\t\tcase res['response_code']\n\t\t\twhen 200\n\t\t\t\t# Do nothing\n\t\t\twhen 401\n\t\t\t\t# Authentication failure\n\t\t\t\traise AccessDeniedError, retval.has_key?('error') ? retval['error'] : 'Authentication failed'\n\t\t\twhen 403\n\t\t\t\t# Check the rate limit\n\t\t\t\traise RateLimitExceededError, retval['comment'] if @rate_limit_remaining == 0\n\t\t\t\t# Rate limit is ok, raise a generic exception\n\t\t\t\traise APIError.new(403), retval.has_key?('error') ? retval['error'] : 'Unknown error'\n\t\t\telse\n\t\t\t\traise APIError.new(res['http_code']), retval.has_key?('error') ? retval['error'] : 'Unknown error'\n\t\t\tend\n\n\t\t\tretval\n\t\tend",
"def rest_client=(value)\n raise Amfetamine::ConfigurationInvalid, 'Invalid value for rest_client' if ![:get,:put,:delete,:post].all? { |m| value.respond_to?(m) }\n @rest_client = value\n end",
"def resource\n @resource ||= RestClient::Resource.new(self.url, {\n :user => self.access_key,\n :password => self.secret_key\n })\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
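Putting the constructor documented above to use, a short construction sketch; the Telesign::RestClient constant is an assumption about the enclosing module (the record shows only method bodies), and the credentials are placeholders:

    # Illustrative construction only; customer_id and api_key are placeholders.
    client = Telesign::RestClient.new(
      'YOUR-CUSTOMER-ID',
      'YOUR-BASE64-API-KEY',
      rest_endpoint: 'https://rest-api.telesign.com',  # default shown in the record
      timeout: 10                                      # open/read timeout in seconds
    )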
Generic TeleSign REST API POST handler. +resource+ The partial resource URI to perform the request against, as a string. +params+ Body params to perform the POST request with, as a hash. | def post(resource, **params)
execute(Net::HTTP::Post, 'POST', resource, **params)
end | [
"def post(resource, params)\n case resource\n when \"pedidos\", \"place_order\", \"new_order\" then url = \"/pedidos\"\n when \"envios\", \"shipping\" then url = \"/envios\"\n else url = \"/#{resource}\"\n end\n\n post_request(url, params)\n end",
"def post(resource, params)\n response = @sess.post(resource_url(resource), \"[#{params.to_json}]\", \"Content-Type\" => \"application/json\")\n JSON.parse(response.body).try(:first).try(:symbolize_keys)\n rescue => exception\n nil\n end",
"def post(resource, body = \"\", headers = {})\n prepare_request(:post, resource, body, headers)\n end",
"def send_post(resource, data)\n\n url = URI.parse(primavera_path(resource))\n req = Net::HTTP::Post.new(url.to_s, initheader = {'Content-Type' => 'application/json'})\n req.body = data\n\n puts 'Sending POST request to ' + url.to_s\n\n send_request(url, req)\n end",
"def post\n resource.post(request, response)\n end",
"def execute(method_function, method_name, resource, **params)\n\n resource_uri = URI.parse(\"#{@rest_endpoint}#{resource}\")\n\n encoded_fields = ''\n if %w[POST PUT].include? method_name\n request = method_function.new(resource_uri.request_uri)\n if content_type == \"application/x-www-form-urlencoded\"\n unless params.empty?\n encoded_fields = URI.encode_www_form(params, Encoding::UTF_8)\n request.set_form_data(params)\n end\n else\n encoded_fields = params.to_json\n request.body = encoded_fields\n request.set_content_type(\"application/json\")\n end\n else\n resource_uri.query = URI.encode_www_form(params, Encoding::UTF_8)\n request = method_function.new(resource_uri.request_uri)\n end\n\n headers = RestClient.generate_telesign_headers(@customer_id,\n @api_key,\n method_name,\n resource,\n content_type,\n encoded_fields,\n user_agent: @@user_agent)\n\n headers.each do |k, v|\n request[k] = v\n end\n\n http_response = @http.request(resource_uri, request)\n\n Response.new(http_response)\n end",
"def create_request(endpoint, resource, data_fmt = :json_data_from)\n req = Net::HTTP::Post.new(endpoint.path)\n req[\"Content-Type\"] = \"application/vnd.api+json\"\n req[\"Authorization\"] = \"Bearer #{@api_key}\"\n req[\"User-Agent\"] = \"imgix #{@library}-#{@version}\"\n\n if data_fmt.is_a?(Proc)\n req.body = data_fmt.call(resource)\n elsif data_fmt.is_a?(Symbol)\n req.body = send(data_fmt, resource)\n else\n fmt_arg_error = \"`fmt' is required to be of class Symbol or \" \\\n \"Proc but was found to be\\n\\s\\sof class #{data_fmt.class}\\n\"\n raise ArgumentError, fmt_arg_error\n end\n\n req\n end",
"def api_gateway_post(path, params)\n api_gateway_body_fwd = params.to_json\n rack_input = StringIO.new(api_gateway_body_fwd)\n\n post path, real_params = {}, 'rack.input' => rack_input\nend",
"def raw_post_request raw_params\n json_body = raw_params.to_json\n Rubix.logger.log(Logger::DEBUG, \"SEND: #{json_body}\") if Rubix.logger\n Net::HTTP::Post.new(uri.path).tap do |req|\n req['Content-Type'] = 'application/json-rpc'\n req.body = json_body\n end\n end",
"def api_gateway_post(path, params)\n api_gateway_body_fwd = params.to_json\n rack_input = StringIO.new(api_gateway_body_fwd)\n\n post path, real_params = {}, {\"rack.input\" => rack_input}\nend",
"def add(resource, object)\n LOG.debug \"add: #{resource}\"\n raise \"Invalid resource\" if not valid_resource?(resource)\n\n # https://groups.google.com/forum/?fromgroups=#!topic/httparty-gem/4sA4YxakqSU\n # options = { :body => JSON.dump(object), :headers => @my_headers}\n self.class.post(\n \"#{@my_base_url}/#{resource}\", \n :body => JSON.dump(object), :headers => @my_headers)\n # response = self.class.post(\"/#{resource}\", options)\n # case response.code\n # when 201\n # return response[\"result\"]\n # else\n # raise \"#{response[\"statusMessage\"]} (Status code: #{response[\"statusCode\"]})\"\n # end\n end",
"def post endpoint, data\n do_request :post, endpoint, data\n end",
"def segment_post(params)\n call_api(:post, __method__, params)\n end",
"def post_http_request(resource_name)\n ChefSpec::Matchers::ResourceMatcher.new(:http_request, :post, resource_name)\n end",
"def post_request endpoint, opts={}, &block\n connection.post endpoint, opts, &block\n end",
"def send_post_request(endpoint, request_params)\n perform_and_handle_exceptions('u_hh_1', 'POST request failed') do\n base_params = get_base_params(endpoint, request_params)\n uri = post_api_uri(endpoint)\n http = setup_request(uri)\n r_params = base_params.merge(request_params)\n query_string = Rack::Utils.build_nested_query(r_params)\n escaped_query_string = URI.escape(query_string, '*')\n if @api_spec\n return OSTSdk::Util::Result.success({data: {request_uri: uri.to_s, request_type: 'POST', request_params: escaped_query_string}})\n else\n result = http.post(uri.path, escaped_query_string)\n return format_response(result)\n end\n end\n end",
"def create(client, resource: required(\"resource\"), target: required(\"target\"), options: {}, **data)\n with_params = data.merge(resource: resource, target: target).reject { |_,v| v.nil? || Array(v).empty? }\n self.new(parse(client.post(\"/webhooks\", body: with_params, options: options)).first, client: client)\n end",
"def post payload, path = \"\" \n make_request(path, \"post\", payload)\n end",
"def post(path, params={}); make_request(:post, host, port, path, params); end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
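A sketch of calling the POST helper above; the /v1/resource path and the body fields are hypothetical, and client is assumed to be an instance built as in the construction sketch earlier:

    # Body params are passed as keyword arguments; the shared execute handler
    # form-encodes them (or JSON-encodes them, depending on the content type)
    # before signing and sending the request.
    response = client.post('/v1/resource', phone_number: '15555551234', note: 'example')

    # The Response wrapper's attributes are not shown in this section, so the
    # sketch only inspects the returned object.
    p response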
Generic TeleSign REST API GET handler. +resource+ The partial resource URI to perform the request against, as a string. +params+ Body params to perform the GET request with, as a hash. | def get(resource, **params)
execute(Net::HTTP::Get, 'GET', resource, **params)
end | [
"def send_get_request(resource, params)\n api_resource = 'https://api.twitter.com/1.1'\n uri = URI(api_resource + resource) \n uri.query = URI.encode_www_form(params) # add parameters to resource\n #send HTTPS GET request\n response = Net::HTTP.start(uri.hostname, uri.port,\n :use_ssl => uri.scheme == 'https') do |http|\n req = Net::HTTP::Get.new uri\n req = add_headers(req)\n req['Authorization'] = \"Bearer #{@@access_token}\"\n res = http.request req\n JSON.parse(res.body) \n end\n \n if response.empty? || response.nil?\n raise EmptyResponse\n end\n \n response\n end",
"def get(resource, headers = {})\n prepare_request(:get, resource, nil, headers)\n end",
"def get(path, params={}, &block)\n\t\t\tapi_request(Net::HTTP::Get, build_uri(path, params), &block)\n\t\tend",
"def get_resource\n execute(resource_path, method: :get)\n end",
"def get(params = {})\n request! do\n api[url.path + to_query(params)].get(API_HEADERS)\n end\n end",
"def do_get url, params\n \n # Convert all the params to strings\n request_params = convert_params_for_request(params)\n \n # Add in the common params\n append_common_params(request_params)\n \n queryString = \"\"\n request_params.keys.each_with_index do |key, index|\n queryString << \"#{index == 0 ? '?' : '&'}#{key}=#{::URI.escape(request_params[key])}\"\n end\n \n # Create the request\n http = Net::HTTP.new(url.host, url.port)\n response = http.start() {|req| req.get(url.path + queryString)}\n \n return Episodic::Platform::HTTPResponse.new(response.code, response.body)\n end",
"def domainResourceGet args\n \tif not args.has_key?(:ResourceID)\n \t\traise \"ResourceID argument missing from argument list\"\n \tend\n \t\n \tmake_request this_method, args\n end",
"def send_get(resource)\n\n url = URI.parse(primavera_path(resource))\n req = Net::HTTP::Get.new(url.to_s)\n\n puts 'Sending GET request to ' + url.to_s\n\n send_request(url, req)\n end",
"def get(device, instance, *args)\n *get_args, last = *args\n instance_plural = convert_plural(instance)\n\n if get_args.empty? or last.empty?\n get_method = instance_plural.downcase + \"_get_with_http_info\"\n else\n instance_var_name = get_mapped_object_name(instance)\n get_method = instance_plural.downcase + \"_\" + instance_var_name + \"_get_with_http_info\"\n end\n\n auth_header = get_auth_header(device)\n\n #\n # Form the instance based on the URL passed and invoke the appropriate SwaggerClient SDK.\n # Each provider will pass the object type as the instance and we would instantiate the appropriate SwaggerClient object.\n #\n object_instance = \"SwaggerClient::#{instance}Api\"\n\n instance_method = Object.const_get(object_instance).new(device).method(get_method)\n response,status_code,headers = instance_method.call(auth_header, *args)\n parsed_response = JSON.parse(response)\n\n Puppet.debug(self.class.to_s.split(\"::\").last + \": Response received from WAF for GET operation: #{parsed_response}\")\n\n if response.to_s.empty?\n fail(self.class.to_s.split(\"::\").last + \": Not able to process the request. Please check the request parameters.\")\n end\n\n failure?(parsed_response, status_code, \"GET\")\n return parsed_response, status_code, headers\n end",
"def api_get(action, data)\n api_request(action, data, 'GET')\n end",
"def get(path, params={}); make_request(:get, host, port, path, params); end",
"def resource(resource, action, data = { :id => nil, :params => nil, :data => nil })\n case action\n when \"get\", \"update\", \"delete\"\n endpoint = resource + \"/\" + data[:id]\n when \"insert\", \"list\"\n endpoint = resource\n else\n return { :error => true }\n end\n\n # Check if Shoper Token is still valid\n self.get_token if Time.now > @configuration[:shoper][:token_expires]\n\n r = RestClient::Resource.new(@configuration[:shoper][:api] + endpoint,\n :headers => {\n :authorization => \"Bearer \" +\n @configuration[:shoper][:token],\n })\n\n case action\n when \"get\"\n res = r.get :params => data[:params]\n when \"update\"\n res = r.put data[:data].to_json, :content_type => :json\n when \"delete\"\n res = r.delete\n when \"insert\"\n res = r.post data[:data].to_json, :content_type => :json\n when \"list\"\n res = r.get\n else\n return { :error => true }\n end\n end",
"def get endpoint\n do_request :get, endpoint\n end",
"def get(path, params=nil, &blk) \n registrations << Registration.new(:get, path, \n handler_generator.for(params, &blk))\n end",
"def query_get endpoint, params = nil\n if not @ssl\n # fixme \"#{self}::#{__method__} Allow HTTPS requests\"\n begin\n node = ::Net::HTTP.new @host, @port\n uri = URI.parse \"http://#{host}:#{port}/api/#{endpoint}\"\n if not params.nil?\n uri.query = URI.encode_www_form params\n end\n request = ::Net::HTTP::Get.new uri\n response = node.request request\n @active = true\n result = JSON::parse response.body\n rescue Timeout::Error => e\n @active = false\n p \"Can't connect to the Lisk node: Timeout!\"\n rescue Errno::EHOSTUNREACH => e\n @active = false\n p \"Can't connect to the Lisk node: Host Unreachable!\"\n rescue Errno::ECONNREFUSED => e\n @active = false\n p \"Can't connect to the Lisk node: Connection Refused!\"\n end\n end\n end",
"def get(path, params = {}, env = {})\n request(path, env.merge(method: Rack::GET, params: params))\n end",
"def get(url, resource_name, options = {})\n build_response(resource_name) do\n connection.get do |req|\n req.url url\n req.params = options\n end\n end\n end",
"def get_resource(id, type)\n\t\t@client.method(type).call.get(id)\n\tend",
"def GET(inputs)\n url = self.class.sign(self.class.parameterize(self.class.scope(@url, inputs), inputs))\n OutsideIn.logger.debug(\"Requesting #{url}\") if OutsideIn.logger\n response = HTTParty.get(url)\n unless response.code < 300\n raise ForbiddenException if response.code == 403\n raise NotFoundException if response.code == 404\n if response.headers.include?('x-mashery-error-code')\n raise ServiceException, response.headers['x-mashery-error-code']\n else\n raise QueryException.new(JSON[response.body])\n end\n end\n JSON[response.body]\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
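The GET helper goes through the same execute handler but with no body, so params are encoded into the query string; a sketch with a hypothetical path and identifier:

    reference_id = 'ABC123'  # placeholder identifier
    # Params become the URL query string rather than a request body.
    response = client.get("/v1/resource/#{reference_id}", verbose: true)
    p response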
Generic TeleSign REST API PUT handler. +resource+ The partial resource URI to perform the request against, as a string. +params+ Body params to perform the PUT request with, as a hash. | def put(resource, **params)
execute(Net::HTTP::Put, 'PUT', resource, **params)
end | [
"def put(resource, body = \"\", headers = {})\n prepare_request(:put, resource, body, headers)\n end",
"def _http_put resource, path\n uri = ::URI.parse(resource.auth_uri)\n path = _path uri, path\n request = Net::HTTP::Put.new(path)\n _build_request resource, request\nend",
"def send_put(resource, data)\n\n url = URI.parse(primavera_path(resource))\n req = Net::HTTP::Put.new(url.to_s, initheader = {'Content-Type' => 'application/json'})\n req.body = data\n\n puts 'Sending PUT request to ' + url.to_s\n\n send_request(url, req)\n end",
"def put(path, params={}); make_request(:put, host, port, path, params); end",
"def put payload, path = \"\"\n make_request(path, \"put\", payload)\n end",
"def put *args\n make_request :put, *args\n end",
"def put(id, json)\n with_endpoint do |endpoint|\n url = [endpoint, @resource_name, id].compact.join('/')\n url += \"/\" \n return HTTParty.put(url, :body => json, :timeout => 4, :headers => { 'Content-Type' => 'application/json' })\n end\n end",
"def put_http_request(resource_name)\n ChefSpec::Matchers::ResourceMatcher.new(:http_request, :put, resource_name)\n end",
"def update(resource, id, format=@default_format)\n options = { resource: resource.class, id: id, format: format }\n reply = put resource_url(options), resource, fhir_headers(options)\n reply.resource = parse_reply(resource.class, format, reply)\n reply.resource_class = resource.class\n reply\n end",
"def put(path, params = {}, env = {})\n request(path, env.merge(method: \"PUT\".freeze, params: params))\n end",
"def put(params)\n self.class.put(url, body: params)\n end",
"def _put(url=\"\", params={}, headers={}, payload)\n\t\tif !params.empty? then\n\t\t\theaders[:params] = params\n\t\tend\n\t\tresponse = RestClient.put(url, payload, headers)\n\t\thandle_response(response)\n\tend",
"def put(path, params = {}, env = {}, &block)\n env[:request_method] = \"PUT\"\n mock_request(path, params, env, &block)\n end",
"def put(params)\n\n res = @@commands.write(\n params.merge({ :action => @action })\n )\n { :result => res[:result], :object => res[:data]}\n end",
"def put_request(_context, endpoint, data)\n Puppet.runtime[:http].put(\n build_uri(endpoint),\n Puppet::Util::Json.dump(data),\n headers: {\n 'Content-Type' => 'application/json'\n },\n options: build_options,\n )\n end",
"def put(path, &block)\n route 'PUT', path, &block\n end",
"def http_put(path, entity)\n entity = JSON.dump(entity) unless entity.is_a?(String)\n\n @http ||= Net::HTTP.start(@http_url.host, @http_url.port)\n url = @http_url.dup\n url.path = path\n\n request = Net::HTTP::Put.new url\n request.body = entity\n response = @http.request request\n\n response.code.to_i\n end",
"def update(params = {})\n resource.update_by_api service_scoped(params)\n end",
"def put(*a) route 'PUT', *a end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
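For PUT, the detail worth showing is that the shared execute handler serializes the keyword arguments with params.to_json when the client's content type is application/json, instead of form-encoding them; the path and field below are placeholders:

    # Hypothetical PUT updating a resource.
    response = client.put('/v1/resource/ABC123', status: 'completed')
    p response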
Generic TeleSign REST API DELETE handler. +resource+ The partial resource URI to perform the request against, as a string. +params+ Body params to perform the DELETE request with, as a hash. | def delete(resource, **params)
execute(Net::HTTP::Delete, 'DELETE', resource, **params)
end | [
"def delete(resource, body = nil, params = {})\n connection(:delete, resource, body, params)\n end",
"def http_delete(path, data = nil, content_type = 'application/json')\n http_methods(path, :delete, data, content_type)\n end",
"def delete(path, params = {}, env = {})\n request(path, env.merge(method: \"DELETE\".freeze, params: params))\n end",
"def delete(path, params={}); make_request(:delete, host, port, path, params); end",
"def delete(resource, id, details=nil)\n raise IncompleteImplementation, \"implement a delete handler for #{self}\"\n end",
"def DELETE(path, parameters=nil, headers=nil, ignore_throttle=false)\n url = path\n if parameters\n # URI escape each key and value, join them with '=', and join those pairs with '&'. Add\n # that to the URL with an prepended '?'.\n url += '?' + parameters.map {\n |k, v|\n [k, v].map {\n |x|\n CGI.escape(x.to_s)\n }.join('=')\n }.join('&')\n end\n\n headers ||= {}\n request = Net::HTTP::Delete.new(path_join(@version, @practiceid, url))\n call(request, {}, headers, false, ignore_throttle)\n end",
"def delete(z_api_args)\n # validate the resource URL\n error_msg = bad_uri?(z_api_args.uri)\n raise ArgumentError, error_msg if error_msg\n\n # For DELETE, reject if request_body is present\n request_body = z_api_args.req_body\n if request_body\n error_msg = \"Extraneous request body argument #{request_body.pretty_inspect.chomp} found.\"\n Z_Logger.instance.log(error_msg)\n raise ArgumentError, error_msg\n end\n\n # verify tenant's credential existence\n error_msg = credentials_exist?\n raise ArgumentError, error_msg if error_msg\n\n # may or may not have query string\n query_string = z_api_args.query_string ? z_api_args.query_string.to_hash : {}\n\n begin\n # if the caller has a block, yield with response after invocation\n if block_given?\n yield @z_api.exec_delete_api(z_api_args.uri, query_string)\n else\n @z_api.exec_delete_api(z_api_args.uri, query_string)\n end\n rescue Exception => e\n Z_Logger.instance(e.message)\n Z_Logger.instance.log(e.backtrace.join(\"\\n\"))\n raise RuntimeError, \"HTTP DELETE Exception. Please see logs for details.\"\n end\n end",
"def delete(params)\n params = prepare_params(nil, params)\n add_to_actions({delete: params})\n end",
"def send_delete_request(endpoint, request_params)\n perform_and_handle_exceptions('u_hh_3', 'DELETE request Failed') do\n escaped_query_string = get_query_string(endpoint, request_params)\n raw_url = get_api_url(endpoint) + \"?#{escaped_query_string}\"\n uri = URI(raw_url)\n http = setup_request(uri)\n if @api_spec\n return {request_uri: uri.to_s.split(\"?\")[0], request_type: 'DELETE', request_params: escaped_query_string}\n else\n result = {}\n Timeout.timeout(@timeout) do\n result = http.delete(uri)\n end\n return format_response(result)\n end\n end\n end",
"def delete(resource)\n url = self.build_url(resource)\n resp = self.ua.delete(url, nil, self.extheader)\n if ( resp.status == 200 )\n return true\n else\n raise \"Could not delete #{url}: #{resp.status}\"\n end\n\n end",
"def delete(uri, params = {})\n send_request(uri, :delete, params)\n end",
"def delete(path, params = {}, env = {}, &block)\n env[:request_method] = \"DELETE\"\n mock_request(path, params, env, &block)\n end",
"def delete_route(resource_name)\n desc \"Deletes an existing #{resource_name}\"\n params do\n requires :query_parameter_id, type: String, desc: \"The id of the #{resource_name}\"\n end\n delete ':query_parameter_id' do\n delete_instance(find_instance(params[:query_parameter_id]))\n body false\n end\n end",
"def fire_delete(url_or_path, options = {}, &block)\n params = options.fetch(:params, {})\n params.merge!(mapped_params(options))\n\n url = absolute_url(url_or_path, params: params)\n headers = {:Accept => MEDIA_TYPE_JSON}.merge(options.fetch(:headers, {}))\n headers = merge_log_weasel_header(headers)\n timeout = options.fetch(:timeout, Ladon.default_request_timeout)\n response = Typhoeus::Request.delete(url, headers: headers, timeout: timeout)\n handle_response(response, method: :delete, default_data: options[:default_data], url: url,\n raise_on_error: options[:raise_on_error], &block)\n end",
"def delete(klass_or_object, path_params_or_url = {})\n if klass_or_object.respond_to?(:attributes_for_api) # klass_or_object is an object\n klass = klass_or_object.class\n if path_params_or_url.kind_of?(String)\n url = url_for(klass, path_params_or_url)\n else\n params = default_path_parameters.merge(klass_or_object.attributes_for_api)\n url = url_for(klass, params)\n end\n else # klass_or_object is a class\n klass = klass_or_object\n #logger.debug \"Calling delete with class #{klass.name} and params: #{path_params.inspect}\"\n if path_params_or_url.kind_of?(String)\n url = url_for(klass, path_params_or_url)\n else\n params = default_path_parameters.merge(path_params_or_url)\n url = url_for(klass, params)\n end\n end\n\n #logger.info \"DELETE #{url}\"\n response = client.delete(url, request_headers) do |response, request, result, &block|\n @last_response = response\n response.return!(request, result, &block)\n end\n end",
"def delete(packet, opts = {})\n no_reroute = opts.delete(:no_reroute)\n if packet.request.delete? || !opts.blank?\n id = opts.delete(:resource_id) || packet['route.resource_id']\n m = model_class.get(packet['route.resource_id'])\n before = beforeDelete(packet, m, opts)\n onDelete(packet, m, opts) if m && before\n afterDelete(packet, m, opts) if before\n orange.fire(:model_resource_deleted, packet, {:resource_id => id, :resource => @my_orange_name})\n end\n packet.reroute(@my_orange_name, :orange) unless (packet.request.xhr? || no_reroute)\n end",
"def delete(action, parameters = nil)\n @request.env['REQUEST_METHOD'] = 'DELETE'\n process action, parameters\n end",
"def delete endpoint\n do_request :delete, endpoint\n end",
"def delete(packet, opts = {})\n no_reroute = opts.delete(:no_reroute)\n if packet.request.delete? || !opts.blank?\n id = opts.delete(:resource_id) || packet['route.resource_id']\n m = model_class.get(packet['route.resource_id'])\n before = beforeDelete(packet, m, opts)\n onDelete(packet, m, opts) if m && before\n afterDelete(packet, m, opts) if before\n end\n packet.reroute(@my_orange_name, :orange) unless (packet.request.xhr? || no_reroute)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
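A matching sketch for DELETE; as with GET, any params supplied here end up in the query string inside execute. The path is hypothetical:

    # Hypothetical DELETE by resource path; extra keyword params, if given,
    # would be query-encoded exactly as in the GET case.
    response = client.delete('/v1/resource/ABC123')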
Generic TeleSign REST API request handler. +method_function+ The net/http request to perform the request. +method_name+ The HTTP method name, as an upper case string. +resource+ The partial resource URI to perform the request against, as a string. +params+ Body params to perform the HTTP request with, as a hash. | def execute(method_function, method_name, resource, **params)
resource_uri = URI.parse("#{@rest_endpoint}#{resource}")
encoded_fields = ''
if %w[POST PUT].include? method_name
request = method_function.new(resource_uri.request_uri)
if content_type == "application/x-www-form-urlencoded"
unless params.empty?
encoded_fields = URI.encode_www_form(params, Encoding::UTF_8)
request.set_form_data(params)
end
else
encoded_fields = params.to_json
request.body = encoded_fields
request.set_content_type("application/json")
end
else
resource_uri.query = URI.encode_www_form(params, Encoding::UTF_8)
request = method_function.new(resource_uri.request_uri)
end
headers = RestClient.generate_telesign_headers(@customer_id,
@api_key,
method_name,
resource,
content_type,
encoded_fields,
user_agent: @@user_agent)
headers.each do |k, v|
request[k] = v
end
http_response = @http.request(resource_uri, request)
Response.new(http_response)
end | [
"def method_missing(method_name, *arguments, &block)\n if method_name.to_s =~ /(post|get|put|patch|delete)/\n request($1.to_sym, *arguments, &block)\n else\n super\n end\n end",
"def custom_method(resource, target, name, http_verb, http_path); end",
"def rest(connection, endpoint, http_method, query_params={}, data=nil, download_collector=false)\n # Sanity Check on Endpoint\n endpoint.prepend('/') unless endpoint.start_with?'/'\n\n http_method = http_method.upcase\n\n # Build URI and add query Parameters\n uri = URI.parse(\"https://#{resource[:account]}.logicmonitor.com/santaba/rest#{endpoint}\")\n\n # For PATCH requests, we want to use opType replace so that device/device group \n # properties set outside of the module don't get deleted\n if http_method == HTTP_PATCH\n query_params['opType'] = 'replace'\n end\n\n # URL Encode Query Parameters\n uri.query = URI.encode_www_form query_params unless nil_or_empty?(query_params)\n\n # Build Request Object\n request = nil\n if http_method == HTTP_POST\n raise ArgumentError, 'Invalid data for HTTP POST request' if nil_or_empty? data\n request = Net::HTTP::Post.new uri.request_uri, {'Content-Type' => 'application/json'}\n request.body = data\n elsif http_method == HTTP_PUT\n raise ArgumentError, 'Invalid data for HTTP PUT request' if nil_or_empty? data\n request = Net::HTTP::Put.new uri.request_uri, {'Content-Type' => 'application/json'}\n request.body = data\n elsif http_method == HTTP_PATCH\n raise ArgumentError, 'Invalid data for HTTP PATCH request' if nil_or_empty? data\n request = Net::HTTP::Patch.new uri.request_uri, {'Content-Type' => 'application/json'}\n request.body = data\n elsif http_method == HTTP_GET\n request = Net::HTTP::Get.new uri.request_uri, {'Accept' => 'application/json'}\n elsif http_method == HTTP_DELETE\n request = Net::HTTP::Delete.new uri.request_uri, {'Accept' => 'application/json'}\n else\n debug(\"Error: Invalid HTTP Method: #{http_method}\")\n end\n\n # Add Authentication Information to Request\n request['Authorization'] = generate_token(endpoint, http_method, data)\n\n # Execute Request and Return Response\n if connection.nil?\n http = Net::HTTP.new(uri.host, uri.port)\n http.use_ssl = true\n http.start\n else\n http = connection\n end\n\n rate_limited = false\n begin\n response = http.request(request)\n\n if response.code == '429'\n rate_limited = true\n debug \"Error: Request Rate Limited, sleep 1 second, retry\"\n sleep 1\n raise 'Rate Limited'\n end \n\n unless response.kind_of? Net::HTTPSuccess\n alert \"Request failed: endpoint: #{endpoint}, method: #{http_method}, data: #{data}, query_param: #{uri.query}, response code: #{response.code}, response body: #{response.body}\"\n raise \"Request Failed\"\n end\n rescue Exception => e\n if rate_limited\n retry\n end\n alert \"Request Failed. http_method: #{http_method}, scheme: #{uri.scheme}, host: #{uri.host}, path: #{uri.path}, query: #{uri.query}, data: #{data}\"\n raise e\n end\n\n download_collector ? response.body : JSON.parse(response.body)\n end",
"def http_request_for_method(method_name, method_url, request_body = nil)\n content_type_header = {\"Content-Type\" => \"application/json\"}\n\n # This is a workaround for a potential problem that arises from mis-using the\n # API. If you call SoftLayer_Virtual_Guest and you call the getObject method\n # but pass a virtual guest as a parameter, what happens is the getObject method\n # is called through an HTTP POST verb and the API creates a new CCI that is a copy\n # of the one you passed in.\n #\n # The counter-intuitive creation of a new CCI is unexpected and, even worse,\n # is something you can be billed for. To prevent that, we ignore the request\n # body on a \"getObject\" call and print out a warning.\n if (method_name == :getObject) && (nil != request_body) then\n $stderr.puts \"Warning - The getObject method takes no parameters. The parameters you have provided will be ignored.\"\n request_body = nil\n end\n\n if request_body && !request_body.empty?\n url_request = Net::HTTP::Post.new(method_url.request_uri(), content_type_header)\n else\n \turl_request = Net::HTTP::Get.new(method_url.request_uri())\n end\n\n # This warning should be obsolete as we should be using POST if the user\n \t # has provided parameters. I'm going to leave it in, however, on the off\n \t # chance that it catches a case we aren't expecting.\n if request_body && !url_request.request_body_permitted?\n $stderr.puts(\"Warning - The HTTP request for #{method_name} does not allow arguments to be passed to the server\")\n else\n # Otherwise, add the arguments as the body of the request\n url_request.body = request_body\n end\n\n \t url_request\n end",
"def request(method, scenario, path = '', params = {})\n options = { accept: :json, content_type: :json }\n\n if method == :get\n options[:params] = params\n params = nil\n else\n params = params.to_json\n end\n\n JSON.parse(RestClient.public_send(\n method,\n \"#{base_url(scenario)}#{path}\",\n *[params, options].compact\n ))\n end",
"def method_missing(method, *args)\n action = method.to_s.dasherize\n params = args.first\n\n request(action, params)\n end",
"def call resource, id='', options=nil, method=:get\n raise NotAllowedException.new(\"#{method.inspect} is not allowed. Use one of [:#{ALLOWED_METHODS*', :'}]\") unless ALLOWED_METHODS.include?(method)\n\n if id.kind_of? Hash\n options = id\n id = ''\n end\n\n #no_accept = options.delete :no_accept\n @http_adapter.__send__(method) { |req|\n if method == :get && options && !options.empty?\n req.url \"/api/#{resource}/#{id}\", options\n else\n req.url \"/api/#{resource}/#{id}\"\n end\n req.headers['X-BillomatApiKey'] = @api_key\n req.headers['Accept'] = 'application/json'\n req.headers['Content-Type'] = 'application/json' if [:post, :put].include?(method)\n req.body = options if method != :get && options && !options.empty?\n }.body\n rescue Faraday::Error::ClientError => error\n if error.response && error.response.has_key?(:body) && error.response[:body]\n body = error.response[:body]\n if !body.kind_of?(Hash) && ['{', '[', '\"'].include?(body[0])\n body = JSON.parse body\n end\n Hashie::Mash.new error: error, body: body\n else\n Hashie::Mash.new error: error\n end\n end",
"def request(method, *args)\n defaults = {\n :api_key => @api_key,\n :auth_key => @auth\n }\n\n # Get parameters\n if args.last.kind_of? Hash\n params = args.pop\n else\n params = {}\n end\n\n # Construct query fragment\n query = defaults.merge(params).inject('?') { |s, (k, v)|\n if v.kind_of? Array\n v = v.join(',')\n end\n s << \"#{Vodpod::escape(k)}=#{Vodpod::escape(v)}&\"\n }[0..-2]\n\n # Join path fragments\n path = Vodpod::BASE_URI + args.map{|e| Vodpod::escape(e)}.join('/') + '.json'\n\n begin\n # Get URI\n case method\n when :get\n # GET request\n uri = URI.parse(path + query)\n res = Net::HTTP.start(uri.host, uri.port) do |http|\n http.open_timeout = @timeout\n http.read_timeout = @timeout\n http.get(uri.path + query)\n end\n when :post\n # POST request\n uri = URI.parse(path)\n res = Net::HTTP.start(uri.host, uri.port) do |http|\n http.open_timeout = @timeout\n http.read_timeout = @timeout\n http.post(uri.path, query[1..-1])\n end\n else\n # Don't know how to do that kind of request\n raise Error.new(\"Unsupported request method #{method.inspect}; should be one of :get, :post.\")\n end\n rescue => e\n raise Error.new(\"Error retrieving #{uri.path}#{query}: #{e.message}\")\n end\n\n # Parse response as JSON\n begin\n data = JSON.parse res.body\n rescue => e\n raise Error, \"server returned invalid json: #{e.message}\" + \"\\n\\n\" + res\n end\n\n # Check for errors\n if data[0] == false\n raise Error, data[1]['message']\n end\n\n # Return data section\n data[1]\n end",
"def resource(resource, action, data = { :id => nil, :params => nil, :data => nil })\n case action\n when \"get\", \"update\", \"delete\"\n endpoint = resource + \"/\" + data[:id]\n when \"insert\", \"list\"\n endpoint = resource\n else\n return { :error => true }\n end\n\n # Check if Shoper Token is still valid\n self.get_token if Time.now > @configuration[:shoper][:token_expires]\n\n r = RestClient::Resource.new(@configuration[:shoper][:api] + endpoint,\n :headers => {\n :authorization => \"Bearer \" +\n @configuration[:shoper][:token],\n })\n\n case action\n when \"get\"\n res = r.get :params => data[:params]\n when \"update\"\n res = r.put data[:data].to_json, :content_type => :json\n when \"delete\"\n res = r.delete\n when \"insert\"\n res = r.post data[:data].to_json, :content_type => :json\n when \"list\"\n res = r.get\n else\n return { :error => true }\n end\n end",
"def method_missing(method, *args)\n @uri.path = \"/#{args[0]}.#{@format.extension}\" # Should be the form of /resources\n if verb = method.to_s.match(/^http_(get|post|put|delete|head)$/)\n run_verb(verb.to_s.split(\"_\").last, args[1])\n end\n end",
"def create_request(method, uri, data = nil)\r\n method = method.upcase\r\n if(method == 'GET')\r\n return Net::HTTP::Get.new(uri)\r\n elsif(method == 'POST')\r\n request = Net::HTTP::Post.new(uri)\r\n request.body = data.to_json\r\n return request\r\n elsif(method == 'PUT')\r\n request = Net::HTTP::Put.new(uri)\r\n request.body = data.to_json\r\n return request\r\n elsif(method == 'DELETE')\r\n return Net::HTTP::Delete.new(uri)\r\n else\r\n raise CLXException, 'Unknown HTTP method'\r\n end\r\n end",
"def api_request method, params = nil\n\t\t\tconnection = ZenfolioAPI::HTTP.new()\n\t\t\t@response = connection.POST(method, params, @auth.token)\n\t\tend",
"def create_request(endpoint, resource, data_fmt = :json_data_from)\n req = Net::HTTP::Post.new(endpoint.path)\n req[\"Content-Type\"] = \"application/vnd.api+json\"\n req[\"Authorization\"] = \"Bearer #{@api_key}\"\n req[\"User-Agent\"] = \"imgix #{@library}-#{@version}\"\n\n if data_fmt.is_a?(Proc)\n req.body = data_fmt.call(resource)\n elsif data_fmt.is_a?(Symbol)\n req.body = send(data_fmt, resource)\n else\n fmt_arg_error = \"`fmt' is required to be of class Symbol or \" \\\n \"Proc but was found to be\\n\\s\\sof class #{data_fmt.class}\\n\"\n raise ArgumentError, fmt_arg_error\n end\n\n req\n end",
"def request(*)\n raise 'HttpApiBuilder::BaseClient#request must be implemented, see documentation'\n end",
"def make_request(api_path, method, data = nil)\n uri = URI.parse(@endpoint + api_path)\n http = Net::HTTP.new(uri.host, uri.port)\n http.use_ssl = true\n headers = {\n 'Authorization' => 'Token ' + @auth_token,\n 'User-Agent' => 'Fomo/Ruby/' + @version\n }\n case method\n when 'GET'\n request = Net::HTTP::Get.new(api_path, initheader=headers)\n response = http.request(request)\n return response.body\n when 'POST'\n headers['Content-Type'] = 'application/json'\n request = Net::HTTP::Post.new(api_path, initheader=headers)\n request.body = data.to_json\n response = http.request(request)\n return response.body\n when 'PATCH'\n headers['Content-Type'] = 'application/json'\n request = Net::HTTP::Patch.new(api_path, initheader=headers)\n request.body = data.to_json\n response = http.request(request)\n return response.body\n when 'DELETE'\n request = Net::HTTP::Delete.new(api_path, initheader=headers)\n response = http.request(request)\n return response.body\n else\n puts('Unknown method')\n end\n end",
"def api_execute(path, method, options = {})\n params = options[:params]\n case method\n when :get\n req = build_http_request(Net::HTTP::Get, path, params)\n when :post\n req = build_http_request(Net::HTTP::Post, path, nil, params)\n when :put\n req = build_http_request(Net::HTTP::Put, path, nil, params)\n when :delete\n req = build_http_request(Net::HTTP::Delete, path, params)\n else\n fail \"Unknown http action: #{method}\"\n end\n http = Net::HTTP.new(host, port)\n http.read_timeout = options[:timeout] || read_timeout\n setup_https(http)\n req.basic_auth(user_name, password) if [user_name, password].all?\n Log.debug(\"Invoking: '#{req.class}' against '#{path}\")\n res = http.request(req)\n Log.debug(\"Response code: #{res.code}\")\n Log.debug(\"Response body: #{res.body}\")\n process_http_request(res)\n end",
"def request(method, path, params)\n response = connection.send(method) do |request|\n case method.to_sym\n when :delete, :get\n request.url(path, params)\n when :post, :put\n request.path = path\n request.body = params unless params.empty?\n end\n end\n\n response.body\n end",
"def create_http_request(http_method,path,*arguments)\n http_method=http_method.to_sym\n if [:post,:put].include?(http_method)\n data=arguments.shift\n end\n headers=(arguments.first.is_a?(Hash) ? arguments.shift : {})\n case http_method\n when :post\n request=Net::HTTP::Post.new(path,headers)\n request[\"Content-Length\"]=0 # Default to 0\n when :put\n request=Net::HTTP::Put.new(path,headers)\n request[\"Content-Length\"]=0 # Default to 0\n when :get\n request=Net::HTTP::Get.new(path,headers)\n when :delete\n request=Net::HTTP::Delete.new(path,headers)\n when :head\n request=Net::HTTP::Head.new(path,headers)\n else\n raise ArgumentError, \"Don't know how to handle http_method: :#{http_method.to_s}\"\n end\n if data.is_a?(Hash)\n request.set_form_data(data)\n elsif data\n request.body=data.to_s\n request[\"Content-Length\"]=request.body.length\n end\n request\n end",
"def method_missing(method_name, params)\n send_request(\"/#{method_name}\", map_params(params))\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /thresholds GET /thresholds.json | def index
@thresholds = Threshold.all
end | [
"def query_distance_thresholds(options={}) path = \"/api/v2/distancethresholds\"\n get(path, options, AvaTax::VERSION) end",
"def collect_thresholds_for(threshold)\n active_thresholds = Array.new()\n threshold.indicators.each_value { |value| \n active_thresholds << value if(value.is_active)\n }\n return active_thresholds\n end",
"def show\n @threshold = Threshold.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @threshold }\n end\n end",
"def list_distance_thresholds(companyId, options={}) path = \"/api/v2/companies/#{companyId}/distancethresholds\"\n get(path, options, AvaTax::VERSION) end",
"def create\n @threshold = Threshold.new(threshold_params)\n\n respond_to do |format|\n if @threshold.save\n format.html { redirect_to thresholds_path, notice: 'Threshold was successfully created.' }\n format.json { render :show, status: :created, location: @threshold }\n else\n format.html { render :new }\n format.json { render json: @threshold.errors, status: :unprocessable_entity }\n end\n end\n end",
"def collect_active_thresholds\n @warnings = Hash.new()\n @warnings[:air_temperature] = collect_thresholds_for(@temperature_threshold)\n @warnings[:wind_speed] = collect_thresholds_for(@wind_threshold)\n @warnings[:rain] = collect_thresholds_for(@rain_threshold)\n nil\n end",
"def change_thresholds *thresholds\n unless thresholds.first.is_a? Hash\n return change_thresholds LEVELS[0, thresholds.size].zip(thresholds).to_h\n end\n thresholds.first.each do |k, new_threshold|\n next unless (badge = @badge[k])\n @badge.delete badge.threshold\n badge.threshold = new_threshold\n @badge[new_threshold] = badge\n end\n end",
"def exceeded_thresholds\n session[:merb_threshold_exceeded_thresholds] ||= []\n session[:merb_threshold_exceeded_thresholds] \n end",
"def get_threshold_params(threshold)\n case threshold\n when 'success'\n name = 'SUCCESS'\n ordinal = 0\n color = 'BLUE'\n when 'unstable'\n name = 'UNSTABLE'\n ordinal = 1\n color = 'YELLOW'\n when 'failure'\n name = 'FAILURE'\n ordinal = 2\n color = 'RED'\n end\n return name, ordinal, color\n end",
"def get_threshold\n d = self.vspecs.select(:value).where(name: 'threshold').first\n return d != nil ? d.value : d\n end",
"def warning_status_threshold\n threshold = TriageThreshold.for_group_or_defaults( self.is_halouser_of_what.first ).select {|e| e.status.downcase == \"warning\" }\n threshold = TriageThreshold.new( :status => \"warning\", :attribute_warning_hours => 48, :approval_warning_hours => 4) if threshold.blank? # default status, if one not found in definitions\n end",
"def create\n @threshold = Threshold.new(params[:threshold])\n\n respond_to do |format|\n if @threshold.save\n format.html { redirect_to @threshold, notice: 'Threshold was successfully created.' }\n format.json { render json: @threshold, status: :created, location: @threshold }\n else\n format.html { render action: \"new\" }\n format.json { render json: @threshold.errors, status: :unprocessable_entity }\n end\n end\n end",
"def threshold\n @threshold ||= Reputation::Threshold.lookup(self.action, self.sender, :tier => self.tier)\n end",
"def update\n respond_to do |format|\n if @threshold.update(threshold_params)\n format.html { redirect_to thresholds_path, notice: 'Threshold was successfully updated.' }\n format.json { render :show, status: :ok, location: @threshold }\n else\n format.html { render :edit }\n format.json { render json: @threshold.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @threshold = Threshold.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @threshold }\n end\n end",
"def limits\n login\n\n uri = @services['compute'] + 'limits'\n\n body = request Net::HTTP::Get, uri\n\n limits = body['limits']\n\n @rate_limits = limits['rate']\n @absolute_limits = limits['absolute']\n\n limits\n end",
"def get_object_temperature_callback_threshold\n send_request(FUNCTION_GET_OBJECT_TEMPERATURE_CALLBACK_THRESHOLD, [], '', 5, 'k s s')\n end",
"def get_voltage_callback_threshold\n send_request(FUNCTION_GET_VOLTAGE_CALLBACK_THRESHOLD, [], '', 9, 'k l l')\n end",
"def threshold\n @threshold\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /thresholds POST /thresholds.json | def create
@threshold = Threshold.new(threshold_params)
respond_to do |format|
if @threshold.save
format.html { redirect_to thresholds_path, notice: 'Threshold was successfully created.' }
format.json { render :show, status: :created, location: @threshold }
else
format.html { render :new }
format.json { render json: @threshold.errors, status: :unprocessable_entity }
end
end
end | [
"def create\n @threshold = Threshold.new(params[:threshold])\n\n respond_to do |format|\n if @threshold.save\n format.html { redirect_to @threshold, notice: 'Threshold was successfully created.' }\n format.json { render json: @threshold, status: :created, location: @threshold }\n else\n format.html { render action: \"new\" }\n format.json { render json: @threshold.errors, status: :unprocessable_entity }\n end\n end\n end",
"def assign_thresholds params\n\t\texisting = DiagnosticTestThreshold.where(:diagnostic_test_id => self.id)\n\t\texisting = existing.collect{|x| x.id}\n\t\tparams.keys.each do |th_id|\n\t\t\tif th_id.to_i < 0\n\t\t\t\tDiagnosticTestThreshold.create(:diagnostic_test_id=>self.id, :threshold=>params[th_id])\n\t\t\telse\n\t\t\t\texisting.delete(th_id.to_i)\n\t\t\t\tbegin\n\t\t\t\t\tdtt = DiagnosticTestThreshold.find(th_id)\n\t\t\t\t\tdtt.threshold = params[th_id]\n\t\t\t\t\tdtt.save\n\t\t\t\trescue Exception => e\n\t\t\t\t\tputs \"Caught Exception: #{e.message}\\n\\n\"\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\t# remove any of the previously existing thresholds that still remain\n\t\texisting.each do |e|\n\t\t\tDiagnosticTestThreshold.destroy(e)\n\t\tend\n\tend",
"def index\n @thresholds = Threshold.all\n end",
"def change_thresholds *thresholds\n unless thresholds.first.is_a? Hash\n return change_thresholds LEVELS[0, thresholds.size].zip(thresholds).to_h\n end\n thresholds.first.each do |k, new_threshold|\n next unless (badge = @badge[k])\n @badge.delete badge.threshold\n badge.threshold = new_threshold\n @badge[new_threshold] = badge\n end\n end",
"def create\n @triage_threshold = TriageThreshold.new(params[:triage_threshold])\n @groups = current_user.groups_where_admin\n\n respond_to do |format|\n if @triage_threshold.save\n flash[:notice] = 'TriageThreshold was successfully created.'\n format.html { redirect_to :controller => 'triage_thresholds', :action => 'show', :id => @triage_threshold }\n format.xml { render :xml => @triage_threshold, :status => :created, :location => @triage_threshold }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @triage_threshold.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @threshold.update(threshold_params)\n format.html { redirect_to thresholds_path, notice: 'Threshold was successfully updated.' }\n format.json { render :show, status: :ok, location: @threshold }\n else\n format.html { render :edit }\n format.json { render json: @threshold.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @threshold = Threshold.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @threshold }\n end\n end",
"def threshold=(value)\n @threshold = value\n end",
"def update\n @threshold = Threshold.find(params[:id])\n\n respond_to do |format|\n if @threshold.update_attributes(params[:threshold])\n format.html { redirect_to @threshold, notice: 'Threshold was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @threshold.errors, status: :unprocessable_entity }\n end\n end\n end",
"def warning_status_threshold\n threshold = TriageThreshold.for_group_or_defaults( self.is_halouser_of_what.first ).select {|e| e.status.downcase == \"warning\" }\n threshold = TriageThreshold.new( :status => \"warning\", :attribute_warning_hours => 48, :approval_warning_hours => 4) if threshold.blank? # default status, if one not found in definitions\n end",
"def create\n @prd_threshold = PrdThreshold.new(params[:prd_threshold])\n\n respond_to do |format|\n if @prd_threshold.save\n format.html { redirect_to(@prd_threshold, :notice => 'PrdThreshold was successfully created.') }\n format.xml { render :xml => @prd_threshold, :status => :created, :location => @prd_threshold }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @prd_threshold.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def reputation_threshold_action\n raise \"Reputation threshold needs to be defined in reputation_threshold_action method\"\n end",
"def exceeded_thresholds\n session[:merb_threshold_exceeded_thresholds] ||= []\n session[:merb_threshold_exceeded_thresholds] \n end",
"def get_threshold_params(threshold)\n case threshold\n when 'success'\n name = 'SUCCESS'\n ordinal = 0\n color = 'BLUE'\n when 'unstable'\n name = 'UNSTABLE'\n ordinal = 1\n color = 'YELLOW'\n when 'failure'\n name = 'FAILURE'\n ordinal = 2\n color = 'RED'\n end\n return name, ordinal, color\n end",
"def threshold\n @threshold ||= Reputation::Threshold.lookup(self.action, self.sender, :tier => self.tier)\n end",
"def collect_thresholds_for(threshold)\n active_thresholds = Array.new()\n threshold.indicators.each_value { |value| \n active_thresholds << value if(value.is_active)\n }\n return active_thresholds\n end",
"def validate_sensu_keepalives_thresholds(sensu)\n thresholds = sensu[:keepalives][:thresholds]\n must_be_a_hash_if_set(thresholds) ||\n invalid(sensu, \"sensu keepalives thresholds must be a hash\")\n if is_a_hash?(thresholds)\n must_be_an_integer_if_set(thresholds[:warning]) ||\n invalid(sensu, \"sensu keepalives warning threshold must be an integer\")\n must_be_an_integer_if_set(thresholds[:critical]) ||\n invalid(sensu, \"sensu keepalives critical threshold must be an integer\")\n end\n end",
"def collect_active_thresholds\n @warnings = Hash.new()\n @warnings[:air_temperature] = collect_thresholds_for(@temperature_threshold)\n @warnings[:wind_speed] = collect_thresholds_for(@wind_threshold)\n @warnings[:rain] = collect_thresholds_for(@rain_threshold)\n nil\n end",
"def set_Threshold(value)\n set_input(\"Threshold\", value)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PATCH/PUT /thresholds/1 PATCH/PUT /thresholds/1.json | def update
respond_to do |format|
if @threshold.update(threshold_params)
format.html { redirect_to thresholds_path, notice: 'Threshold was successfully updated.' }
format.json { render :show, status: :ok, location: @threshold }
else
format.html { render :edit }
format.json { render json: @threshold.errors, status: :unprocessable_entity }
end
end
end | [
"def update\n @threshold = Threshold.find(params[:id])\n\n respond_to do |format|\n if @threshold.update_attributes(params[:threshold])\n format.html { redirect_to @threshold, notice: 'Threshold was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @threshold.errors, status: :unprocessable_entity }\n end\n end\n end",
"def change_thresholds *thresholds\n unless thresholds.first.is_a? Hash\n return change_thresholds LEVELS[0, thresholds.size].zip(thresholds).to_h\n end\n thresholds.first.each do |k, new_threshold|\n next unless (badge = @badge[k])\n @badge.delete badge.threshold\n badge.threshold = new_threshold\n @badge[new_threshold] = badge\n end\n end",
"def update\n @triage_threshold = TriageThreshold.find(params[:id].to_i)\n @groups = current_user.groups_where_admin\n\n respond_to do |format|\n if @triage_threshold.update_attributes(params[:triage_threshold])\n flash[:notice] = 'TriageThreshold was successfully updated.'\n format.html { redirect_to :controller => 'triage_thresholds', :action => 'show', :id => @triage_threshold }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @triage_threshold.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\r\n respond_to do |format|\r\n if @measurement_threshold.update_attributes(params[resource_name.to_sym])\r\n format.html { redirect_to([patient,@measurement_threshold], :notice => I18n.t(\"notifications.measurement_thresholds.update_success\")) }\r\n format.xml { head :ok }\r\n format.js { render :json => @measurement_threshold}\r\n else\r\n format.html { render :edit }\r\n format.xml { render :xml => [patient,@measurement_threshold.errors], :status => :unprocessable_entity }\r\n end\r\n end\r\n end",
"def update\n respond_to do |format|\n if @flag_setting.update(flag_setting_params)\n\n if %w[min_accuracy min_post_count].include? @flag_setting.name\n # If an accuracy/post count requirement is changed,\n # we want to re-validate all existing FlagConditions\n # and disable them if they aren't in compliance with the\n # new settings\n RevalidateFlagConditionsJob.perform_later\n end\n\n format.html { redirect_to flag_settings_path, notice: 'Flag setting was successfully updated.' }\n format.json { render :show, status: :ok, location: @flag_setting }\n else\n format.html { render :edit, status: 422 }\n format.json { render json: @flag_setting.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update(options={})\n data = Hash.new\n data[:type] = options[:type].upcase if options[:type]\n data[:delay] = options[:delay] if options[:delay]\n data[:timeout] = options[:timeout] if options[:timeout]\n data['attemptsBeforeDeactivation'] = options[:attempts_before_deactivation] if options[:attempts_before_deactivation]\n data[:type].upcase! if data[:type]\n if ['HTTP','HTTPS'].include?(data[:type])\n data[:path] = options[:path] if options[:path]\n data['statusRegex'] = options[:status_regex] if options[:status_regex]\n data['bodyRegex'] = options[:body_regex] if options[:body_regex]\n end\n response = @connection.lbreq(\"PUT\",@lbmgmthost,\"#{@lbmgmtpath}/loadbalancers/#{CloudLB.escape(@load_balancer.id.to_s)}/healthmonitor\",@lbmgmtport,@lbmgmtscheme,{},data.to_json)\n CloudLB::Exception.raise_exception(response) unless response.code.to_s.match(/^20.$/)\n populate\n true\n end",
"def update\n @prd_threshold = PrdThreshold.find(params[:id])\n\n respond_to do |format|\n if @prd_threshold.update_attributes(params[:prd_threshold])\n format.html { redirect_to(@prd_threshold, :notice => 'PrdThreshold was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @prd_threshold.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update_distance_threshold(companyId, id, model) path = \"/api/v2/companies/#{companyId}/distancethresholds/#{id}\"\n put(path, model, {}, AvaTax::VERSION) end",
"def update(options={})\n data = Hash.new\n data['maxConnections'] = options[:max_connections] if options[:max_connections]\n data['minConnections'] = options[:min_connections] if options[:min_connections]\n data['maxConnectionRate'] = options[:max_connection_rate] if options[:max_connection_rate]\n data['rateInterval'] = options[:rate_interval] if options[:rate_interval]\n \n response = @connection.lbreq(\"PUT\",@lbmgmthost,\"#{@lbmgmtpath}/loadbalancers/#{CloudLB.escape(@load_balancer.id.to_s)}/connectionthrottle\",@lbmgmtport,@lbmgmtscheme,{},data.to_json)\n CloudLB::Exception.raise_exception(response) unless response.code.to_s.match(/^20.$/)\n populate\n true\n end",
"def update\n @deposit_threshold = DepositThreshold.find(params[:id])\n\n respond_to do |format|\n if @deposit_threshold.update_attributes(params[:deposit_threshold])\n format.html { redirect_to(@deposit_threshold, :notice => 'DepositThreshold was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @deposit_threshold.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @gauge = Gauge.find(params[:id])\n\n respond_to do |format|\n if @gauge.update_attributes(params[:gauge])\n format.html { redirect_to @gauge, notice: 'Gauge was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @gauge.errors, status: :unprocessable_entity }\n end\n end\n end",
"def assign_thresholds params\n\t\texisting = DiagnosticTestThreshold.where(:diagnostic_test_id => self.id)\n\t\texisting = existing.collect{|x| x.id}\n\t\tparams.keys.each do |th_id|\n\t\t\tif th_id.to_i < 0\n\t\t\t\tDiagnosticTestThreshold.create(:diagnostic_test_id=>self.id, :threshold=>params[th_id])\n\t\t\telse\n\t\t\t\texisting.delete(th_id.to_i)\n\t\t\t\tbegin\n\t\t\t\t\tdtt = DiagnosticTestThreshold.find(th_id)\n\t\t\t\t\tdtt.threshold = params[th_id]\n\t\t\t\t\tdtt.save\n\t\t\t\trescue Exception => e\n\t\t\t\t\tputs \"Caught Exception: #{e.message}\\n\\n\"\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\t# remove any of the previously existing thresholds that still remain\n\t\texisting.each do |e|\n\t\t\tDiagnosticTestThreshold.destroy(e)\n\t\tend\n\tend",
"def update\n respond_to do |format|\n if @metric_ping.update(metric_ping_params)\n format.html { redirect_to @metric_ping, notice: 'Metric ping was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @metric_ping.errors, status: :unprocessable_entity }\n end\n end\n end",
"def api_patch(path, data = {})\n api_request(:patch, path, :data => data)\n end",
"def update\n respond_to do |format|\n if @severity_level.update(severity_level_params)\n format.html do\n redirect_to @severity_level,\n notice: 'Severity level was successfully updated.'\n end\n format.json { render :show, status: :ok, location: @severity_level }\n else\n format.html { render :edit }\n format.json { render json: @severity_level.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n metric.update_attributes(update_params)\n\n respond_with(metric)\n end",
"def update \n if @tuning.update(tuning_params)\n render json: @tuning\n else\n render json: { error: \"Failed to update tuning\"}, status: :unprocessable_entity\n end\n end",
"def patch(path, params: {}, headers: {})\n request_json :patch, path, params, headers\n end",
"def update\n @position_threshold = PositionThreshold.find(params[:id])\n\n respond_to do |format|\n if @position_threshold.update_attributes(params[:position_threshold])\n flash[:notice] = 'PositionThreshold was successfully updated.'\n format.html { redirect_to(@position_threshold) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @position_threshold.errors, :status => :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /thresholds/1 DELETE /thresholds/1.json | def destroy
@threshold.destroy
respond_to do |format|
format.html { redirect_to thresholds_url, notice: 'Threshold was successfully destroyed.' }
format.json { head :no_content }
end
end | [
"def destroy\n @threshold = Threshold.find(params[:id])\n @threshold.destroy\n\n respond_to do |format|\n format.html { redirect_to thresholds_url }\n format.json { head :no_content }\n end\n end",
"def destroy\r\n @measurement_threshold.destroy\r\n\r\n respond_to do |format|\r\n format.html { redirect_to(measurement_thresholds_url) }\r\n format.xml { head :ok }\r\n end\r\n end",
"def destroy\n @triage_threshold = TriageThreshold.find(params[:id].to_i)\n @triage_threshold.destroy\n\n respond_to do |format|\n format.html { redirect_to :controller => 'triage_thresholds', :action => 'show', :id => @triage_threshold }\n format.xml { head :ok }\n end\n end",
"def destroy\n @prd_threshold = PrdThreshold.find(params[:id])\n @prd_threshold.destroy\n\n respond_to do |format|\n format.html { redirect_to(prd_thresholds_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @deposit_threshold = DepositThreshold.find(params[:id])\n @deposit_threshold.destroy\n\n respond_to do |format|\n format.html { redirect_to(deposit_thresholds_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @configattribincl.destroy\n respond_to do |format|\n format.html { redirect_to configattribs_path, notice: 'Configattribincl Threshold is reset to default.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @throughput_metric.destroy\n respond_to do |format|\n format.html { redirect_to throughput_metrics_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @dbs_deposit_threshold = DbsDepositThreshold.find(params[:id])\n @dbs_deposit_threshold.destroy\n\n respond_to do |format|\n format.html { redirect_to(dbs_deposit_thresholds_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @health_check = HealthCheck.find(params[:id])\n @health_check.destroy\n\n respond_to do |format|\n format.html { redirect_to health_checks_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @position_threshold = PositionThreshold.find(params[:id])\n @position_threshold.destroy\n\n respond_to do |format|\n format.html { redirect_to(position_thresholds_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @metric_config.destroy\n respond_to do |format|\n format.html { redirect_to metric_configs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @loadbalancer = Loadbalancer.find(params[:id])\n checkaccountobject(\"loadbalancers\",@loadbalancer)\n @loadbalancer.send_delete\n\n respond_to do |format|\n format.html { redirect_to loadbalancers_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @health_level.destroy\n\n respond_to do |format|\n format.html { redirect_to health_levels_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @gauge = Gauge.find(params[:id])\n @gauge.destroy\n\n respond_to do |format|\n format.html { redirect_to gauges_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @metric_http.destroy\n respond_to do |format|\n format.html { redirect_to metric_https_url }\n format.json { head :no_content }\n end\n end",
"def test_delete_not_exist_metric\n not_exist_id = '10000'\n output = `curl -X DELETE http://localhost:8080/metrics/metrics/#{not_exist_id}`\n assert_match \"<html>\", output, \"TEST 5: delete not existing metric - FAILED\"\n end",
"def destroy\n authorize! :manage, @metric\n\n @metric.destroy\n\n respond_to do |format|\n format.html { redirect_to service_metrics_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @heartbeat = Heartbeat.find(params[:id])\n @heartbeat.destroy\n\n respond_to do |format|\n format.html { redirect_to heartbeats_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @metric = Metric.find(params[:id])\n @metric.destroy\n\n head :no_content\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Method for search the surgery term | def find_surgery_term surgeryterm
# Search the surgery terms
Surgery.where(user_type: "admin").any_of({ :name => /^#{surgeryterm}/i })
end | [
"def search_surgery\n\t\t# Searching for surgery as per user entered term\n\t\tif Surgery.where(user_type: \"admin\").any_of({ :name => /^#{params[:term]}/i }).present?\n\t\t\t# if search show from the admin then the search term from surgery of admin\n\t\t\tsurgeries = Surgery.where(user_type: \"admin\").any_of({ :name => /^#{params[:term]}/i }).all.collect{|surgery| {label: surgery.name}}.uniq.to_json\n\t\telse\n\t\t\t# else the term is not equal to the admin surgery then that search from the user term\n\t\t\tsurgeries = current_user.surgeries.any_of({ :name => /^#{params[:term]}/i }).all.collect{|surgery| {label: surgery.name}}.uniq.to_json \n\t\tend\n\t\t# render to the surgery name page\n\t\trespond_to do |format|\n\t\t format.json { render :json => surgeries }\n\t\tend\n\tend",
"def searchterm\r\n @@st.get_searchterm(referer)\r\n end",
"def search\n @search_text = params[:search_text] || \"\"\n \n escaped_search_text = Riddle.escape(@search_text)\n \n @surveys = Survey.search escaped_search_text,\n :geo => [current_organization.latitude, current_organization.longitude],\n :order => '@weight desc, @geodist asc' # Sort by relevance, then distance\n end",
"def search_terms(_word)\n self.class.search_terms\n end",
"def search_term( search_id )\n param( search_id.sym )\n end",
"def search_keywords\n @developer_id = Developer.find_by_gamer_id(current_gamer.id).id\n @projects = Project.where(owner_id: @developer_id).all\n @project_id = params[:project_id]\n @search_keyword = params[\"search\"]\n if(!@search_keyword.blank?)\n @search_keyword = @search_keyword.strip\n @search_keyword = @search_keyword.split(\" \").join(\" \")\n if Keyword.find_by_name(@search_keyword)\n redirect_to search_path, search: @search_keyword\n end\n @similar_keywords =\n Keyword.get_similar_keywords(@search_keyword, [])\n end\n end",
"def search\n go_to_manage_soc\n on ManageSocPage do |page|\n page.term_code.set @term_code\n page.go_action\n end\n end",
"def search_people(searchterm,params={})\n @opensearch.search_people(searchterm,params) \n end",
"def profile_specialist_search\n\t\t# search the the speciality name according to the terms\n\t\tspecialities = Speciality.any_of({ :name => /^#{params[:term]}/i }).all.collect{|speciality| {label: speciality.name ,value: speciality.id.to_s}}.to_json \n\t\t# render to the surgery name page\n\t\trespond_to do |format|\n\t\t format.json { render :json => specialities }\n\t\tend\n\tend",
"def search(search_term)\n api.search(search_term)\n end",
"def editor_search\n @results = Soc_med.where text: params[:term]\n redirect_to '/dashboard'\n end",
"def search\n return @search\n end",
"def tag_search_term\n params[:tag]\n end",
"def advanced_search\n \n end",
"def spotifysearch\n search\n end",
"def search_terms\n return @search_terms\n end",
"def keyword_query_string\n processed_terms = []\n self.search_terms.each do |search_val|\n # spaces or dashes (-) need to be quoted to be treated as single values\n term = search_val.match?(/[\\s-]/) ? \"\\\"#{search_val}\\\"\" : search_val\n processed_terms << term\n end\n processed_terms.join(' ')\n end",
"def query\n\t\t\t[@search_term,@filter].compact.join(\" \")\n\t\tend",
"def query_param\n return unless search_query\n \"search=#{search_query}&phrase=true\"\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Renames the Service type from "type" to "service_type" to avoid conflicts | def rename_service_type(hash)
hash["service_type"] = hash["type"]
hash.delete("type")
hash
end | [
"def service_type=(value)\n @service_type = value\n end",
"def service_type_value=(value)\n value = ServiceTypeValue[value] unless value.kind_of?(ServiceTypeValue) \n self.service_type_value_name = value.name \n end",
"def _service_type\n self.class.service_type\n end",
"def shrink_type_name(type)\n name_parts = shrink_type_name_parts(type)\n\n # Isolate the Google common prefix\n name_parts = name_parts.drop(property_ns_prefix.size)\n num_parts = name_parts.flatten.size\n shrunk_names = recurse_shrink_name(name_parts,\n (1.0 * MAX_NAME / num_parts).round)\n type_name = Google::StringUtils.camelize(shrunk_names.flatten.join('_'),\n :upper)\n property_ns_prefix.concat([type_name])\n end",
"def rename_metadata(type, oldFullName, newFullName)\n type = type.to_s.camelize\n request :rename_metadata do |soap|\n soap.body = {\n :type => type,\n :old_full_name => oldFullName, \n :new_full_name => newFullName, \n }\n end\n end",
"def pp_service_type( mt_request_service_type )\n if mt_request_service_type.kind_of? MaintenanceRequest\n mt_request_service_type = mt_request_service_type.service_type\n end\n return icon_service_type(mt_request_service_type) + loc.mt_req_service_type( mt_request_service_type);\n end",
"def service_type\n return @service_type\n end",
"def service_types\n types = self.service_versions.collect{|sv| sv.service_versionified.service_type_name}.uniq\n types << \"Soaplab\" unless self.soaplab_server.nil?\n return types\n end",
"def normalize_type(ingredient_type)\n \"Alchemy::Ingredients::#{ingredient_type.to_s.classify.demodulize}\"\n end",
"def update_service(service_name, new_value)\n self.services[service_name.to_s] = new_value\n self.update_attribute :services, self.services\n end",
"def service_type_name(account_name, name)\n map_opts = get_acct_catalog_map(account_name)\n case map_opts[:provider]\n when \"hp\"\n service_name = name\n unless map_opts[:catalog].nil?\n service_catalog_key = name.to_s.downcase.gsub(' ','_').to_sym\n service_name = map_opts[:catalog][service_catalog_key] if map_opts[:catalog].has_key?(service_catalog_key)\n end\n return service_name\n else\n return name\n end\n end",
"def service_class_for( entity_type )\n entity_map_cache[ entity_type.to_sym ]\n end",
"def change_type(new_type)\n \t@type = new_type\n end",
"def resolve_service_type(service_type, residential)\n if residential && (service_type == ServiceTypes::FEDEX_GROUND)\n ServiceTypes::GROUND_HOME_DELIVERY\n else\n service_type\n end\n end",
"def set_type\n new_type = [sport.name, \"Infrastructure\"].join('')\n\n begin\n new_type.constantize\n self.type = new_type\n rescue NameError\n self.type = nil\n end\n\n end",
"def resolve_service_type(service_type, residential)\n if residential && (service_type == Fedex::ShipConstants::ServiceTypes::FEDEX_GROUND)\n Fedex::ShipConstants::ServiceTypes::GROUND_HOME_DELIVERY\n else\n service_type\n end\n end",
"def process_service_type(element, service_type, service_data)\n # all instances of a given type share the same config data. We have a default type for services that don't have a plugin.\n type = get_service_type(service_type, service_data)\n @logger.debug(\"matched service #{service_type} to plugin #{type}\")\n config = @config[type]\n xml_element = config[XML_STANZA_TYPE]\n if xml_element.nil?\n xml_element = 'none'\n @logger.warn(\"The configuration file for service type #{type} is missing the required server_xml_stanza element\")\n end\n target_array = find_autoconfig_option(service_type)\n @logger.debug(\"processing service instances of type #{type}. Config is #{config}\")\n service_data.each do |instance|\n service_instance = create_instance(element, type, config, instance)\n next if service_instance.nil?\n instance_hash = { INSTANCE => service_instance, CONFIG => config }\n target_array.push(instance_hash)\n if @service_type_instances[xml_element].nil?\n @service_type_instances[xml_element] = 1\n else\n @service_type_instances[xml_element] = @service_type_instances[xml_element] + 1\n end\n end\n end",
"def type=(type)\n type = type.downcase\n @fields['type'] = type if REFERENCE_TYPES.include?(type)\n end",
"def rename_enum(current_name, new_name)\n current_name = EnumKit.sanitize_name!(current_name)\n new_name = EnumKit.sanitize_name!(new_name)\n\n enum_execute \"ALTER TYPE #{current_name} RENAME TO #{new_name}\"\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Instantiate an instance of +klass+ and pass +output+ | def respond_with(klass, output)
klass.new(self, output)
end | [
"def output_from new_output\n return @output = new_output.new(self) if Class === new_output\n\n klass =\n case new_output.to_s\n when /^(Player::)?benchmark$/i then Benchmark\n when /^(Player::)?stream$/i then Stream\n when /^(Player::)?suite$/i then Suite\n else\n Kronk.find_const new_output\n end\n\n @output = klass.new self if klass\n end",
"def compileclass(output,classname)\n #File.open(output, \"a\") { |f| f.write \"<class>\\n <\" }\n end",
"def create_class\n generated_class = Class.new do\n def speak\n puts \"hello!\"\n end\n end\nend",
"def writer(klass, xparms = nil, &block)\n w = klass.new(@output, @params.merge(xparms || {}))\n block.call(w) if block_given?\n w\n end",
"def visitor(klass, xprops = nil, &block)\n v = klass.new(@output, @properties.merge(xprops || {}), self)\n block.call(v) if block_given?\n v\n end",
"def add_output(type, *args)\n class_name = type.to_s.capitalize\n klass = Drone::Interfaces.const_get(class_name)\n @output_modules << klass.new(*args)\n end",
"def class() end",
"def initialize(output, escape_from = nil)\n @output = output\n @escape_from = escape_from\n end",
"def GetOutputByClass(cls)\n outputsWithClass = GetOutputsByClass(cls)\n if(outputsWithClass.length == 0)\n return nil\n end\n return outputsWithClass[0]\n end",
"def create_output(arg)\n if arg.nil?\n \"\"\n elsif arg.respond_to?(:<<)\n arg\n else\n raise \"Illegal output object: #{arg.inspect}\"\n end\n end",
"def create_output(params = {})\n Bitmovin::Output.create(params)\n end",
"def initialize(output_type=:info)\n @output_type = output_type\n clear\n end",
"def _do_create_ruby_class(klass, arguments)\n klass.new *H8::arguments_to_a(arguments.to_ruby.values)\n end",
"def make_instantiater(name,klass,&ruby_block)\n # puts \"make_instantiater with name=#{name}\"\n # Set the instanciater.\n @instance_procs = [ ruby_block ]\n # Set the target instantiation class.\n @instance_class = klass\n\n # Unnamed types do not have associated access method.\n return if name.empty?\n\n obj = self # For using the right self within the proc\n\n # Create and register the general instantiater.\n High.space_reg(name) do |*args|\n # puts \"Instantiating #{name} with args=#{args.size}\"\n # If no arguments, return the system as is\n return obj if args.empty?\n # Are there any generic arguments?\n if ruby_block.arity > 0 then\n # Yes, must specialize the system with the arguments.\n # If arguments, create a new system specialized with them\n return SystemT.new(:\"\") { include(obj,*args) }\n end\n # It is the case where it is an instantiation\n # Get the names from the arguments.\n i_names = args.shift\n # puts \"i_names=#{i_names}(#{i_names.class})\"\n i_names = [*i_names]\n instance = nil # The current instance\n i_names.each do |i_name|\n # Instantiate.\n instance = obj.instantiate(i_name,*args)\n end\n # # Return the last instance.\n instance\n end\n\n # Create and register the array of instances instantiater.\n ::Array.class_eval do\n define_method(name) { |*args| make(name,*args) }\n end\n end",
"def create_class(classname, superclass); end",
"def new\n @klass = Klass.new\n end",
"def instantiate_object_for(klass, value)\n return value unless klass.is_a?(Class)\n klass.new(value)\n end",
"def new(class_name, *args, &block)\n #We need to check finalizers first, so we wont accidently reuse an ID, which will then be unset in the process.\n self.check_finalizers\n \n #Spawn and return the object.\n return self.spawn_object(class_name, nil, *args, &block)\n end",
"def new_instance(clazz, *args)\n @class_loader.newInstance(clazz, *args)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
This method is responsible for receiving the exchange_rate of a specific currency to the GBP from an API. | def find_rate()
uri_string = "https://free.currconv.com/api/v7/convert?q=GBP_"\
"#{@currency}&compact=ultra&apiKey=2d46a9b5b650dca0dbb1"
uri = URI(uri_string)
res = Net::HTTP.get_response(uri)
return(JSON.parse(res.body)["GBP_#{@currency}"]/100.0)
end | [
"def exchange_rate(to_currency)\n self.bank.exchange(self.base_currency, to_currency)\n end",
"def conv_exchange_rate\n currency_exchange.exchange_rate\n end",
"def fetch_exchange_rates(currency)\n resp = client.get('exchange-rates', { currency: currency })\n\n case resp.status\n when 200 then JSON.parse(resp.body).dig('data', 'rates')\n when 404 then raise Money::Bank::UnknownRate\n when 400\n if resp.headers['content-type'].start_with?('application/json')\n errors = JSON.parse(resp.body)&.dig('errors')\n if errors&.first&.dig('message')&.start_with?('Invalid currency')\n raise Money::Bank::UnknownRate\n end\n end\n\n raise CoinbaseExchangeFetchError\n else\n raise CoinbaseExchangeFetchError\n end\n rescue Faraday::Error, JSON::ParserError\n raise CoinbaseExchangeFetchError\n end",
"def get_exchange_rate(currencyCode)\n begin\n exhash = JSON.parse(exchange_rate)\n BigDecimal(exhash[currencyCode])\n rescue\n raise OrderCurrencyConversionFailure\n end\n end",
"def currency_exchange_rates\n @currency_exchange_rates ||= Services::CurrencyExchangeRatesService.new(@api_service)\n end",
"def get_rate(currency = 'USD')\n get_rates.reduce({}){ |h, v| h.merge v }[currency].to_f\n end",
"def currency_rates\n response['rates'][currency.target_currency.to_s]\n end",
"def update_rate(currency, currency_to, rate)\n @exchange_rates[currency][currency_to] = rate\n end",
"def currency_rates\n if session[:currency].nil?\n session[:currency] = {}\n res = JSON.parse(HTTParty.get(\"https://api.exchangeratesapi.io/latest?base=GBP\").to_json)\n\n\n session[:currency][:g2u] = res[\"rates\"][\"USD\"].round(2)\n session[:currency][:g2e] = res[\"rates\"][\"EUR\"].round(2)\n session[:currency][:g2c] = res[\"rates\"][\"CNY\"].round(2)\n session[:currency][:g2j] = res[\"rates\"][\"JPY\"].round(2)\n session[:currency][:g2r] = res[\"rates\"][\"RUB\"].round(2)\n session[:currency][:g2h] = res[\"rates\"][\"HKD\"].round(2)\n\n @g2u = res[\"rates\"][\"USD\"].round(2)\n @g2e = res[\"rates\"][\"EUR\"].round(2)\n @g2c = res[\"rates\"][\"CNY\"].round(2)\n @g2j = res[\"rates\"][\"JPY\"].round(2)\n @g2r = res[\"rates\"][\"RUB\"].round(2)\n @g2h = res[\"rates\"][\"HKD\"].round(2)\n\n else\n puts session[:currency]\n @g2u = session[:currency][\"g2u\"]\n @g2e = session[:currency][\"g2e\"]\n @g2c = session[:currency][\"g2c\"]\n @g2j = session[:currency][\"g2j\"]\n @g2r = session[:currency][\"g2r\"]\n @g2h = session[:currency][\"g2h\"]\n end\n\n end",
"def report_currency_exchange_rate\n # If exchange reate has been locked use that\n if self.exchange_rate.present?\n self.exchange_rate\n else\n Currency.get_exchange_for(self.project.account.account_setting.default_currency, self.currency)\n end\n end",
"def fetch_currency_rate(currency_type)\n if currency_type == \"EUR\"\n product_price_details\n else\n fetch_updated_currency_rate(currency_type)\n end\n end",
"def getRate currency\n accounts = Account.find(:all)\n \n accounts.each do |a|\n if a.currency == currency then\n return a.exchangeRate\n end\n end\n end",
"def add_exchange_rate_if_necessary(amount_currency)\n # get exchange rate in case currencies don't match\n Money.bank.add_rate(amount_currency, self.currency,\n ExchangeRate.get(amount_currency, self.currency)\n ) if self.currency != amount_currency && !Money.bank.get_rate(amount_currency, self.currency)\n end",
"def amount_from_exchange_rate(amount, currency:, btc_denomination: :satoshi)\n currency = self.default_currency if currency.nil?\n btc_denomination = :satoshi if btc_denomination.nil?\n currency = currency.to_s.upcase\n if currency == 'BTC'\n return Satoshi.new(amount, from_unit: btc_denomination).to_i\n end\n\n begin\n try_adapters(\n @exchange_rate_adapters,\n type: \"exchange rate\",\n priority_exception: Straight::ExchangeRate::Adapter::CurrencyNotSupported\n ) do |a|\n a.convert_from_currency(amount, currency: currency)\n end\n # At least one Bitcoin exchange adapter works, but none returned exchange rate for given currency\n rescue Straight::ExchangeRate::Adapter::CurrencyNotSupported\n amount_in_cross_currency = try_adapters(@forex_rate_adapters, type: \"forex rate\") do |a|\n a.convert_from_currency(amount, currency: currency)\n end\n try_adapters(@exchange_rate_adapters, type: \"exchange rate\") do |a|\n a.convert_from_currency(amount_in_cross_currency, currency: Straight::ExchangeRate::FiatAdapter::CROSS_RATE_CURRENCY)\n end\n end\n end",
"def rate\n if(specific_rate?)\n if(rate_cents < 0)\n task_list.default_rate.to_money\n else\n specific_rate.to_money\n end\n else\n Money.new(0, \"USD\")\n end\n end",
"def exchange_rates\n Currency::Exchange.exchange_rates\n end",
"def fetch_updated_currency_rate(currency_type)\n currency_rate = nil\n ApplicationController.new.fetch_currency_rates.select{ |currency| currency_rate = currency if currency[:currency] == currency_type }\n if currency_rate.present?\n actual_price = price.to_f * currency_rate[:rate].to_f\n currency_details = {:currency => currency_type, :price => actual_price.round(2)}\n else\n currency_details = product_price_details\n end\n currency_details\n end",
"def conversion_rate(target_currency)\n if DaMoney::Money.conversion_rates.key? self.currency\n if DaMoney::Money.conversion_rates[self.currency].key? target_currency\n DaMoney::Money.conversion_rates[self.currency][target_currency].to_f\n else\n raise Exception.new(\"Conversion rate from '#{self.currency}' to '#{target_currency}'\" +\n \" if not configured!\")\n end\n else\n raise Exception.new(\"Curreny '#{self.currency}' is not configured!\")\n end\n end",
"def get_exchange_rate(from, to)\n rate_to_default_currency = (from == Spree::Config.currency) ? 1.0 : currency_rates[from]\n rate_to_required_currency = currency_rates[to]\n\n return (1.0 / rate_to_default_currency) * rate_to_required_currency\n rescue Exception => e\n 1.0\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Get a diff that can be stored as a JSON string, persisted and retrieved to apply (by default, Diff::LCS::Changeas_json isn't very useful) | def persistable_diff(a, b)
diff = Diff::LCS.diff a, b
reify_diff_element(diff).as_json
end | [
"def to_json\n diff.to_json\n end",
"def to_diff\n # see Grit::Commit#show\n patch = to_patch\n\n # discard lines before the diff\n lines = patch.split(\"\\n\")\n while !lines.first.start_with?(\"diff --git\") do\n lines.shift\n end\n lines.pop if lines.last =~ /^[\\d.]+$/ # Git version\n lines.pop if lines.last == \"-- \" # end of diff\n lines.join(\"\\n\")\n end",
"def diff\n @diff ||= begin\n commit.diffs.collect{|diff| diff.diff}.join(\"\\n\")\n end\n end",
"def as_json(opts = nil)\n DiffLineSerializer.new.represent(self)\n end",
"def changes\n @changes ||= JSON.parse(File.read(ARGV[1]))\nend",
"def diff\n @diff ||= begin\n properties = {}\n @before.merge(@after).each { |key,value| properties.merge! diffed_property(key) }\n properties.merge({ GeojsonDiff::META_KEY => @meta })\n end\n end",
"def json\n delta_pack.to_json\n end",
"def changes_for_json\n hash = {}\n changes.each do |attr_name, (_old_value, new_value)|\n new_value = (new_value.to_f * 1000).to_i if new_value.is_a? Time\n hash[attr_name.to_s] = new_value\n end\n\n hash\n end",
"def pretty_json\n JSON.pretty_generate(delta_pack)\n end",
"def get_diffs_struct\n\t {:append => {}, :remove => {}, :update => {}, :same => {}}\n end",
"def diff(format)\n case format\n when :old\n old_diff\n when :unified\n unified_diff\n when :context\n context_diff\n when :ed\n self\n when :reverse_ed, :ed_finish\n ed_diff(format)\n else\n raise \"Unknown diff format #{format}.\"\n end\n end",
"def diff\n original_proposal_version.changeset\n end",
"def diff_as_hash(diff_command, opts = T.unsafe(nil)); end",
"def changes\n if original_change_data.nil?\n nil\n else\n HashDiff.diff(original_change_data, current_change_data)\n end\n end",
"def draft_diff(include_associations: false, parent_object_fk: nil, include_all_attributes: false, include_diff: false, diff_format: :html, recursed: false)\n draft_obj = recursed ? draft : get_draft # get_draft will create missing drafts. Based on the logic, this should only happen when you *first* call draft_diff\n get_object_changes(self, draft_obj, include_associations, parent_object_fk, include_all_attributes, include_diff, diff_format)\n end",
"def get_diff(resource, desired_data)\n diff = OneviewCookbook::Helper.get_diff(resource, desired_data)\n return '. (no diff)' if diff.to_s.empty?\n \". Diff: #{diff}\"\n end",
"def make_my_diffs_pretty!; end",
"def line_diff(old, new)\n RailsDiff.diff(old,new)\n end",
"def correction_diff(old, new)\n RailsDiff.diff(old, new, Output::SimpleCorrectionDiff)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create a Revision using revised content compared to base content | def revise_from_content(base_content, revised_content)
revisions.build(diffs: generate_diff_hash(base_content, revised_content))
end | [
"def make_revision()\n rev = self.revisions.create()\n rev.value_proposition = self.business_plan.plan_parts.find_by_title(\"Value Proposition\").content\n rev.revenue_streams = self.business_plan.plan_parts.find_by_title(\"Revenue Streams\").content\n rev.cost_structure = self.business_plan.plan_parts.find_by_title(\"Cost Structure\").content\n rev.key_resources = self.business_plan.plan_parts.find_by_title(\"Key Resources\").content\n rev.key_activities = self.business_plan.plan_parts.find_by_title(\"Key Activities\").content\n rev.customer_segments = self.business_plan.plan_parts.find_by_title(\"Customer Segments\").content\n rev.key_partners = self.business_plan.plan_parts.find_by_title(\"Key Partners\").content\n rev.channels = self.business_plan.plan_parts.find_by_title(\"Channels\").content\n rev.customer_relationships = self.business_plan.plan_parts.find_by_title(\"Customer Relationships\").content\n rev.reasoning = self.business_plan.plan_parts.find_by_title(\"Reasoning\").content\n rev.save!\n end",
"def create_revision!\n logger.info(\"Creating a new revision for article_submission id: #{self.id}\")\n rev = self.copy(false)\n unless rev.article_submission\n rev.article_submission = self \n self.update_attribute(:last_revision,false)\n else\n rev.article_submission.article_submissions.each do |as|\n as.update_attribute(:last_revision,false)\n end\n end\n \n rev.resubmitted = nil\n rev.committed = nil\n rev.last_revision = true\n rev.save(false) \n rev.set_version!\n self.article_submission_reviewers.each do |ar|\n reviewer = ar.clone\n reviewer.change_status(ArticleSubmissionReviewer::NOT_YET_INVITED)\n rev.article_submission_reviewers << reviewer\n end\n rev.change_status(ArticleSubmission::STARTED)\n rev.increment_manuscript_rev_num\n\n return rev\n end",
"def create_revision!\n revision = RevisionRecord.new(self, acts_as_revisionable_options[:encoding])\n revision.save!\n return revision\n end",
"def create_revision\n return unless self.revision_data\n \n # creating revision\n if ComfortableMexicanSofa.config.revisions_limit.to_i != 0\n self.revisions.create!(:data => self.revision_data)\n end\n \n # blowing away old revisions\n ids = [0] + self.revisions.limit(ComfortableMexicanSofa.config.revisions_limit.to_i).collect(&:id)\n self.revisions.where('id NOT IN (?)', ids).destroy_all\n end",
"def revision= new_revision\n load new_revision\n end",
"def create_revision!\n revision_options = self.class.acts_as_revisionable_options\n revision = revision_record_class.new(self, revision_options[:encoding])\n if revision_options[:meta].is_a?(Hash)\n revision_options[:meta].each do |attribute, value|\n set_revision_meta_attribute(revision, attribute, value)\n end\n elsif revision_options[:meta].is_a?(Array)\n revision_options[:meta].each do |attribute|\n set_revision_meta_attribute(revision, attribute, attribute.to_sym)\n end\n elsif revision_options[:meta]\n set_revision_meta_attribute(revision, revision_options[:meta], revision_options[:meta].to_sym)\n end\n revision.save!\n return revision\n end",
"def original_content\n last_applied = revision.ancestors.find(&:applied?)\n RevisionContent.new(last_applied) if last_applied\n end",
"def fetch_revision\n end",
"def create_revision\n @title = t 'view.documents.new_title'\n\n respond_to do |format|\n format.html { render action: (@document.new_record? ? 'new' : 'edit') }\n format.json { render json: @document }\n end\n end",
"def build_revision_from_changeset(changeset)\n rev_id = changeset[:changeset].to_s.split(':').first.to_i\n \n # Changes\n paths = (rev_id == 0) ?\n # Can't get changes for revision 0 with hg status\n changeset[:files].to_s.split.collect{|path| {:action => 'A', :path => \"/#{path}\"}} :\n status(rev_id)\n \n Revision.new({:identifier => rev_id,\n :scmid => changeset[:changeset].to_s.split(':').last,\n :author => changeset[:user],\n :time => Time.parse(changeset[:date]),\n :message => changeset[:description],\n :paths => paths\n })\n end",
"def import_revision(r)\n new_rev = (r.patchable?) ? add_revision : @revisions[0] \n new_rev.dup_modelitem r\n\n r.changed_bytes.each do |vma, byte|\n new_rev.patch_bytes(vma, [byte])\n end\n\n img = image(new_rev.ident) # Image|PatchedImage associated with Address\n r.addresses.each do |addr|\n a = addr.dup\n a.image = img\n new_rev.add_address(addr.vma, a)\n end\n end",
"def add_revision\n attributes = get_attributes(FIELDS[klass + '_revision_attributes'])\n attributes = convert_empty_id_attributes_to_nil(attributes)\n if was_changed?(attributes) || is_preview?\n self.revisions\n revision = self.revision_model.create(attributes)\n self.revisions << revision\n end\n set_correct_published_revision_number(revision)\n end",
"def build_new_revision\n new_revision = Form.new(version_independent_id: version_independent_id,\n description: description, parent_id: parent_id, status: status,\n version: version + 1, name: name, oid: oid,\n created_by: created_by, control_number: control_number)\n\n new_revision\n end",
"def create_active_revision!\n RevisionRegister.new(page, user: author, blocks_attributes: new_blocks_attributes).save_as_active_revision!\n end",
"def add_revision\n # Get only revisioned attributes (without 'id', 'created_at', etc.)\n @page_attributes = get_attributes(FIELDS['page_revision_attributes'])\n @page_parts_attributes = []\n self.parts.each do |page_part|\n attributes = page_part.get_attributes(FIELDS['page_part_revision_attributes'])\n @page_parts_attributes << attributes\n end\n \n if was_changed? || is_preview?\n revision = PageRevision.create(@page_attributes)\n self.revisions << revision\n end\n \n set_correct_published_revision_number(revision)\n \n return true\n end",
"def add_revision\n attributes = get_attributes(FIELDS['page_part_revision_attributes'])\n attributes['number'] = self.page.number_of_last_revision\n attributes['filter_id'] = nil if attributes['filter_id'].blank?\n if was_changed?(attributes)\n self.revisions << PagePartRevision.create(attributes)\n end\n return true\n end",
"def build_new_revision\n new_revision = ResponseSet.new(version_independent_id: version_independent_id,\n version: version + 1, description: description,\n status: status, name: name,\n parent_id: parent_id, oid: oid)\n responses.each do |r|\n new_revision.responses << r.dup\n end\n\n new_revision\n end",
"def revision(name, msg, options = {}, &block)\n r = Revision.new(name, msg, options, &block)\n @revisions << r\n r\n end",
"def new\n @revision = Revision.new\n @revision.background = @proposal.latest_revision.background\n @revision.body = @proposal.latest_revision.body\n @revision.references = @proposal.latest_revision.references\n @revision.rule_text = @proposal.latest_revision.rule_text\n authorize! :revise, @proposal\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @revision }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
=begin my_basket is scoped to the current user =end | def my_basket
@user = current_user
end | [
"def basket\n \t#if the user doesn't have a basket\n \t#give them an empty one\n \tif session[:basket].nil?\n \t#basket is an empty list\n \t#using set because we don't want to add the same thing twice\n \tsession[:basket]= Set.new\n end\n #actually give them the basket\n session[:basket]\n end",
"def basket\n \t#if user doesnt have a basket\n \t# give them an empty one\n \tif session[:basket].nil?\n \t\t#basket is empty list of things\n \t\t#using set bcoz we dont want to add same thing twice\n \t\tsession[:basket]= Set.new\n\n \tend\n \t# actually give them the basket\n \tsession[:basket]\n\n end",
"def basket\n\n \t# if user doesn't have a basket, we want to give them an empty one\n \tif session[:basket].nil?\n \t\t# basket is an empty list\n \t\t# using set because we don't want to add the same thing twice\n \t\tsession[:basket] = Set.new\n\n \tend\n\n \t# actually give them the basket\n \tsession[:basket]\nend",
"def basket\n @context.basket\n end",
"def set_basket\n if current_customer.basket\n # attempts to find the basket which the current customer owns\n @basket = current_customer.basket\n else\n # if the current customer does not own a basket, a basket is created for the customer which has\n # a total_cost of 0 and a customer_id equal to the current customers id\n @basket = Basket.create( {total_cost: 0, customer_id: current_customer.id} )\n end\n end",
"def current_basket\n if session[:order_id]\n order = Order.find_by({id: session[:order_id]})\n order ? order : Order.new\n else\n Order.new\n end\n end",
"def basket\n Basket.new(self)\n end",
"def find_or_create_basket\n if current_user\n if current_user.basket && session[:basket_id]\n combine_baskets\n end\n if current_user.basket\n current_user.basket\n else\n Basket.create(user: current_user)\n end\n else\n if session[:basket_id] && Basket.find(session[:basket_id])\n Basket.find(session[:basket_id])\n else\n basket = Basket.create\n session[:basket_id] = basket.id\n basket\n end\n end\n end",
"def items\n @title = 'User basket'\n @baskets = Basket.user_basket(current_user.id).with_names.not_sent.order 'title'\n end",
"def add_basket(basket)\n add_lines(basket.to_order_lines)\n self.basket = basket\n self.customer_note = basket.customer_note\n self.delivery_instructions = basket.delivery_instructions\n end",
"def basket_id\n @basket_id ||= eshelf_structure.basket_id\n end",
"def create\n @basket = Basket.new(basket_params)\n @basker.status = 0\n @cart_item.basket.set_price\n @basker.user = @current_user\n respond_to do |format|\n if @basket.save\n format.html { redirect_to @basket, notice: 'Basket was successfully created.' }\n format.json { render :show, status: :created, location: @basket }\n else\n format.html { render :new }\n format.json { render json: @basket.errors, status: :unprocessable_entity }\n end\n end\n end",
"def check_basket(basket)\n increase_support if in_basket?(basket)\n end",
"def basket_items\n @context.basket.basket_items\n end",
"def add_to_basket\n if Basket.exists?(user_id: params[:user_id], book_id: params[:book_id])\n @basket = Basket.find_by(user_id: params[:user_id], book_id: params[:book_id])\n @basket.increment!(:amount)\n else\n Basket.create(user_id: params[:user_id], book_id: params[:book_id], amount: 1)\n end\n redirect_to my_basket_path(params[:user_id])\n end",
"def set\n session[:basket_id] = params[\"basket_id\"]\n if params[\"basket_id\"] == \"\"\n redirect_to new_basket_path\n else\n redirect_to basket_path(params[\"basket_id\"])\n end\n end",
"def get_user_basket\n Basket.find(session[:basket_id])\n rescue ActiveRecord::RecordNotFound\n # Look for a basket record in the baskets table with the same id as the basket_id\n # in the session hash\n if !session[:basket_id].nil?\n basket = Basket.find(session[:basket_id])\n basket\n end \n # if one can't be found, create one and set the basket_id in the session to the id of the\n # new basket and return it\n if basket.nil?\n basket = Basket.create()\n session[:basket_id] = basket.id\n basket\n end\n end",
"def find_basket\n @basket = Basket.find(params[:basket_id])\n end",
"def in_basket?(basket)\n @item_ids.all? do |item_id|\n basket.include?(item_id)\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
=begin add_to_basket includes an if statement. If the user's basket entry for the book already exists it increments the amount; else it creates the basket entry. =end | def add_to_basket
if Basket.exists?(user_id: params[:user_id], book_id: params[:book_id])
@basket = Basket.find_by(user_id: params[:user_id], book_id: params[:book_id])
@basket.increment!(:amount)
else
Basket.create(user_id: params[:user_id], book_id: params[:book_id], amount: 1)
end
redirect_to my_basket_path(params[:user_id])
end | [
"def add_to_basket\n item = Item.find(params[:id])\n basket = current_basket\n if basket.items.include? item\n redirect_to items_path, warning: 'Item already present in the basket' and return\n else\n basket.order_items.build({item: item})\n basket.valid?\n basket.save!\n session.destroy\n session[:order_id] = basket.id\n flash[:success] = 'Item has been added to the basket'\n redirect_to items_path and return\n end\n end",
"def find_or_create_basket\n if current_user\n if current_user.basket && session[:basket_id]\n combine_baskets\n end\n if current_user.basket\n current_user.basket\n else\n Basket.create(user: current_user)\n end\n else\n if session[:basket_id] && Basket.find(session[:basket_id])\n Basket.find(session[:basket_id])\n else\n basket = Basket.create\n session[:basket_id] = basket.id\n basket\n end\n end\n end",
"def create_if_not_existing\n if !session.has_key? :basket\n session[:basket] = {}\n end\n end",
"def add_to_basket\n if (item = @basket.order_items.find_by(item_id: @item.id))\n item.quantity += 1\n item.save\n else\n @basket.order_items << OrderItem.new( item_id: @item.id, quantity: 1 )\n @basket.save\n end\n\n respond_to do |format|\n format.html { redirect_to items_url, notice: 'Item was successfully added to basket.' }\n format.json { head :no_content }\n end\n end",
"def addBook(book)\n\t\tinventories.create(book_id: book.id)\n\tend",
"def create_fruitbasket\n Fruitbasket.create(:owner_id => self.id, :owner_type => 'User')\n end",
"def add_item_to_basket(item_id)\n @session[:item_basket] << item_id unless @session[:item_basket].include? item_id\n end",
"def add_book author, title\n books = @books[author]\n if books\n if books.include? title\n puts \"That book is already in the system\"\n else\n books << title\n end\n else\n puts \"No such author\"\n end\n end",
"def basket\n \t#if user doesnt have a basket\n \t# give them an empty one\n \tif session[:basket].nil?\n \t\t#basket is empty list of things\n \t\t#using set bcoz we dont want to add same thing twice\n \t\tsession[:basket]= Set.new\n\n \tend\n \t# actually give them the basket\n \tsession[:basket]\n\n end",
"def borrow_book(user, book, library)\n book_id = book.get_id\n if library.get_available.include?(book_id) == false # check if this book is available\n then\n puts(\"Sorry! Book #{book.title} Is Already Borrowed!\") # failed notice\n else\n library.borrow_update(book_id) # successful borrow update the list of library\n user.borrow_update(book_id) # successful borrow update the list of user\n puts(\"Successfully Borrowed Book #{book.title}!\") # success notice\n end\nend",
"def add_book(book_title_str)\n # is the book not already present in @books list?\n for book_hash in @books\n if book_hash[:title] == book_title_str\n return \"Such book is already in the library\"\n end\n end\n\n new_book = {\n title: book_title_str,\n rental_details: {\n student_name: \"\",\n date: \"\"\n }}\n\n @books.push(new_book)\n\n end",
"def basket\n \t#if the user doesn't have a basket\n \t#give them an empty one\n \tif session[:basket].nil?\n \t#basket is an empty list\n \t#using set because we don't want to add the same thing twice\n \tsession[:basket]= Set.new\n end\n #actually give them the basket\n session[:basket]\n end",
"def basket\n\n \t# if user doesn't have a basket, we want to give them an empty one\n \tif session[:basket].nil?\n \t\t# basket is an empty list\n \t\t# using set because we don't want to add the same thing twice\n \t\tsession[:basket] = Set.new\n\n \tend\n\n \t# actually give them the basket\n \tsession[:basket]\nend",
"def add_to_cart\n\n\n puts \"Which of these do you want? Please enter number\"\n user = gets.chomp\n\n Book.all.each do |a|\n if (a.id == user) && ((a.genre.type == @cart_genre) || (a.author.name == @cart_author) || (a.genre.type == @category))\n print a.id + \" \"\n print a.title + \" by \"\n puts a.author.name\n puts\n input = a.title + \" by \" + a.author.name\n @shopping_cart << input\n puts \"You have bought: \"\n puts @shopping_cart\n puts\n end # end if statement\n end # end book loop\n\n intro\n\nend",
"def add_basket(basket)\n add_lines(basket.to_order_lines)\n self.basket = basket\n self.customer_note = basket.customer_note\n self.delivery_instructions = basket.delivery_instructions\n end",
"def add_book(book)\n @books << book\n puts \"We have just added the following NEW book '#{book.title}' to the Library\"\n end",
"def set_basket\n if current_customer.basket\n # attempts to find the basket which the current customer owns\n @basket = current_customer.basket\n else\n # if the current customer does not own a basket, a basket is created for the customer which has\n # a total_cost of 0 and a customer_id equal to the current customers id\n @basket = Basket.create( {total_cost: 0, customer_id: current_customer.id} )\n end\n end",
"def add_to_cart\n print \"Call #: \"\n call = STDIN.gets.chomp\n \n rs = $con.query( \"SELECT call_num,title,aid FROM books WHERE call_num='\" + call + \"' AND avail > 0\" )\n\n if rs.num_rows == 0\n print \"Unable to find book.\\n\\n\"\n else\n addtocart = true\n rs.each do |r|\n @cart.each do |s|\n if s[0].eql? r[0]\n addtocart = false\n end\n end\n\n if addtocart\n @cart << r\n else\n print \"Copy already exists in the cart.\\n\\n\"\n end\n end\n rs.free\n end\n end",
"def create\n @basket = Basket.new(basket_params)\n @basker.status = 0\n @cart_item.basket.set_price\n @basker.user = @current_user\n respond_to do |format|\n if @basket.save\n format.html { redirect_to @basket, notice: 'Basket was successfully created.' }\n format.json { render :show, status: :created, location: @basket }\n else\n format.html { render :new }\n format.json { render json: @basket.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
=begin remove_from_basket allows the user to clear down their basket by destroying the basket entry for the given user. It then redirects them to the now-empty basket. =end | def remove_from_basket
@basket = Basket.find_by(user_id: params[:user_id], book_id: params[:book_id])
@basket.destroy
redirect_to my_basket_path(params[:user_id])
end | [
"def destroy\n # Check if the quantity of the basket item the user wants to remove from their\n # basket is greater than 1. If so, reduce the quantity by 1.\n if @basket_item.quantity > 1\n @basket_item.update_attribute(:quantity, @basket_item.quantity - 1)\n else\n # Otherwuse, destroy the link between the basket and the wine\n @basket_item.destroy\n end\n respond_to do |format|\n format.html { redirect_to :back, notice: 'Item removed from basket' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @basket.destroy\n session.destroy\n flash[:success] = 'Basket emptied successfully'\n redirect_to root_path and return\n end",
"def remove_from_basket\n @basket.order_items.find_by(item_id: @item.id).destroy\n respond_to do |format|\n format.html { redirect_to purchase_orders_url, notice: 'Item was successfully removed from basket.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n # destroys the basket\n @basket.destroy\n end",
"def remove\n @basket.authtest\n @basket.remove(__getobj__)\n end",
"def remove_from_cart\n # Candy ID to be deleted gets passed in, delete it from sessions and redirect\n session.delete(params[:candy_id])\n redirect_to(customer_show_cart_path)\n end",
"def destroy\n @basket_item = BasketItem.find(params[:id])\n @basket_item.destroy\n\n respond_to do |format|\n format.html { redirect_to basket_items_url }\n format.json { head :no_content }\n end\n end",
"def remove\n\t\t@item = Item.find(params[:id])\n\t\t@item.remove\n\t\tredirect_to current_cart_url\n\tend",
"def remove_shopping_list\n user_id = current_user.id\n yummly_id = params[:yummly_id]\n\n shopping_list = ShoppingList.find_by(yummly_id: yummly_id, user_id: user_id)\n shopping_list.destroy\n\n redirect_to recipes_url, notice: \"Removed recipe from your shopping lists\"\n end",
"def destroy\n @basket_item.destroy\n respond_to do |format|\n format.html { redirect_to basket_items_url, notice: 'Basket item was successfully removed.' }\n format.json { head :no_content }\n end\n end",
"def remove_variable\n @basket = Basket.find(session[:basket_id])\n @variable_group = Variable.find(params[\"variable_id\"]).variable_group\n unless @basket.blank? | @variable_group.blank?\n @basket.variable_groups.delete @variable_group\n end\n unless @basket.blank?\n @basket.variables << Variable.find(params[\"variable_id\"])\n end\n redirect_to request.referer\n end",
"def empty_basket\n basket.basket_items.clear if basket\n end",
"def remove_from_basket\n @discount.update_attribute(:order_id, nil)\n respond_to do |format|\n format.html { redirect_to purchase_orders_url, notice: 'Promotions was successfully removed from basket.' }\n format.json { head :no_content }\n end\n end",
"def remove_item\n\tsaved = current_user.saved\n\titem = saved.items.find(params[:item])\n saved.items.delete(item)\n redirect_to :back\n\tend",
"def removeitem\n # sets a variable containing the listing with the id passed through the url.\n item = Listing.find(params[:id])\n # if the currently logged in user's order contains the item's id, destroy the entry only in the join table for listings_orders.\n # doing this will delete the relation, removing the item from the order, without destroying the listing itself.\n if current_user.order.listings.find(item.id)\n current_user.order.listings_orders.where(listing_id: item.id, order_id: current_user.order.id).destroy_all\n else \n # if no such listing is found in the order, display a notice message.\n flash[:notice] = \"This item is not in your cart.\"\n end\n # on completion of the above, redirect to the user's order.\n redirect_to myorder_path\n end",
"def destroy\n session[:user_id] = nil\n session[:shopping_cart_id] = nil\n redirect_to root_path\n end",
"def removeFromBasket(itemId, basket)\n index = 0\n # .length + 1 will make the array index out of\n # range so that .delete_at will fail and return nil\n indexFound = basket.length + 1\n basket.each do |element|\n if element[0] == itemId.to_i\n indexFound = index\n end\n index = index + 1\n end\n basket.delete_at(indexFound)\nend",
"def delete_items\n if request.post?\n current_user.destroy_items\n flash[:notice] = 'All item data destroyed.'\n end\n redirect_to root_path\n end",
"def destroy\n @temp_basket_item = TempBasketItem.find(params[:id])\n @temp_basket_item.destroy\n\n respond_to do |format|\n format.html { redirect_to temp_basket_items_url }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Remove a listening channel; the LiveQuery will automatically remove itself from the pool when there are no channels left. | def remove_listener(collection, query)
live_query = @volt_app.live_query_pool.lookup(collection, query)
live_query.remove_channel(@channel)
end | [
"def remove_listener(collection, query)\n live_query = @@live_query_pool.lookup(collection, query)\n live_query.remove_channel(@channel)\n end",
"def remove_channel(channel_name)\r\n @channels.delete(channel_name.downcase)\r\n end",
"def close!\n live_queries = @@channel_live_queries[@channel]\n\n if live_queries\n live_queries.each do |live_query|\n live_query.remove_channel(@channel)\n end\n end\n\n @@channel_live_queries.delete(@channel)\n end",
"def remove(channel_or_channel_name)\n @mutex.synchronize do\n if channel_or_channel_name.is_a?(String)\n channel_or_channel_name = find(channel_or_channel_name)\n end\n \n @channels.delete(channel_or_channel_name)\n end\n end",
"def remove_channel(cid)\n return self.channels.delete(cid)\n end",
"def unsubscribe_from_channel; end",
"def remove_channel( channel )\n @channel_map.delete channel.local_id\n end",
"def remove_channel(name)\n\t\tname.assert_kind Channel, String\n\n\t\tleave_channel(name)\n\t\t@channels.delete name if channel.kind_of? String and @channels.has_key? name\n\tend",
"def remove_channel(channel)\n @attributes[:channels].delete_if { |c| channel.resolve_id == c.resolve_id }\n end",
"def close!\n live_queries = @volt_app.channel_live_queries[@channel]\n\n if live_queries\n live_queries.each do |live_query|\n live_query.remove_channel(@channel)\n end\n end\n\n @volt_app.channel_live_queries.delete(@channel)\n end",
"def remove_listener(channel_name, listener_key)\n channel = channels[channel_name]\n return nil unless (channel)\n key = listener_key\n if (listener_key.instance_of?(Proc))\n key = ProcWrapper.new(listener_key)\n end\n if (channel.has_key?(key))\n return channel.delete(key)\n end\n return nil\n end",
"def destroy_channel name\n @channels.delete name\n remove_from_store self, gen_key(self), gen_key(self, name)\n remove_all_from_store self, gen_key(self, name)\n end",
"def destroy_channel_request\n channel_request = ChannelRequest.where(:user_id => self.subscriber_id, :channel_id => self.channel_id).first\n channel_request.destroy unless channel_request.blank?\n end",
"def unsubscribe channel\n @client.unsubscribe channel\n end",
"def unsubscribe( channel, callback )\n if @channels.include? channel\n @channels[channel].delete(callback)\n end\n if @channels[channel].empty?\n @client.unsubscribe channel\n @channels.delete(channel)\n end\n end",
"def unbind channel\n return if @channel != channel\n @channel = channel\n @unbound_at = Time.now\n end",
"def clear\n channels.each { |channel| remove_channel(channel) }\n end",
"def remove(connection)\n connections.delete(connection)\n\n trigger(\"channel_vacated\", channel: name) if connections.empty?\n end",
"def delete_channel(name)\n delete \"/api/channels/#{name}\" do |response, error|\n raise ::Pigeon::NuntiumException.new error.message if error\n\n response\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Removes a channel from all associated live queries | def close!
live_queries = @volt_app.channel_live_queries[@channel]
if live_queries
live_queries.each do |live_query|
live_query.remove_channel(@channel)
end
end
@volt_app.channel_live_queries.delete(@channel)
end | [
"def close!\n live_queries = @@channel_live_queries[@channel]\n\n if live_queries\n live_queries.each do |live_query|\n live_query.remove_channel(@channel)\n end\n end\n\n @@channel_live_queries.delete(@channel)\n end",
"def remove_channel( channel )\n @channel_map.delete channel.local_id\n end",
"def clear\n channels.each { |channel| remove_channel(channel) }\n end",
"def delete_facebook_channel(project_id, query_id)\n delete \"/projects/#{project_id}/facebookchannels/#{query_id}\"\n end",
"def remove_channel(channel_name)\r\n @channels.delete(channel_name.downcase)\r\n end",
"def remove_channel(channel)\n @attributes[:channels].delete_if { |c| channel.resolve_id == c.resolve_id }\n end",
"def remove_channel(cid)\n return self.channels.delete(cid)\n end",
"def destroy_channel name\n @channels.delete name\n remove_from_store self, gen_key(self), gen_key(self, name)\n remove_all_from_store self, gen_key(self, name)\n end",
"def del\n channel = params['channel_id']\n existing = Board.find_by(:channel => channel)\n if existing then\n existing.destroy\n return render json: { :response_type => 'in_channel',\n :text => 'Removed the current game for the channel. It was between *' + existing.player1 + '* and *' + existing.player2 + '*' }\n end\n\n return render json: { :text => 'No ongoing game in the current channel' }\n end",
"def destroy\n channels.each { |channel| remove_channel(channel) }\n auth.clear\n end",
"def remove_listener(collection, query)\n live_query = @volt_app.live_query_pool.lookup(collection, query)\n live_query.remove_channel(@channel)\n end",
"def remove(connection)\n connections.delete(connection)\n\n trigger(\"channel_vacated\", channel: name) if connections.empty?\n end",
"def unsubscribe_all\n @subscribed_channels.each do |channel_name|\n unsubscribe channel_name\n end\n end",
"def unsubscribe channel\n @client.unsubscribe channel\n end",
"def remove_listener(collection, query)\n live_query = @@live_query_pool.lookup(collection, query)\n live_query.remove_channel(@channel)\n end",
"def unsubscribe_from_channel; end",
"def channel_ids_to_remove\n @channel_ids_to_remove ||= begin\n cids = []\n if self.respond_to?(:message)\n cids << self.try(:message).try(:channel).try(:id)\n Array(self.message&.channel&.sibling_channel_ids).each do |cid|\n cids << cid unless channel_ids_to_add.include?(cid)\n end\n end\n cids\n end\n end",
"def destroy_channel_request\n channel_request = ChannelRequest.where(:user_id => self.subscriber_id, :channel_id => self.channel_id).first\n channel_request.destroy unless channel_request.blank?\n end",
"def delete\n API.delete_channel(@bot.token, @id)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tracks that this channel will be notified by the live query. | def track_channel_in_live_query(live_query)
channel_live_queries = @volt_app.channel_live_queries
channel_live_queries[@channel] ||= []
channel_live_queries[@channel] << live_query
end | [
"def track_channel_in_live_query(live_query)\n @@channel_live_queries[@channel] ||= []\n @@channel_live_queries[@channel] << live_query\n end",
"def track_channel_in_live_query(live_query)\n @@channel_live_queries[@channel] ||= []\n @@channel_live_queries[@channel] << live_query\n end",
"def notify\n changed(true)\n notify_observers(self)\n end",
"def notify\n changed(true)\n notify_observers(self)\n end",
"def subscribe_to_channel; end",
"def notify_on_changes!\n ensure_connection! { register_callback(notifier) }\n end",
"def on_subscribed(&block) @on_subscribed_callback = block end",
"def notify\n @subscribers.each { |ident, (block,obj)| block.call(obj) }\n end",
"def notify_waiting\n ActionCable.server.broadcast(\"#{uid}_waiting_channel\", action: \"started\")\n end",
"def before_query(query)\n if query.subscription? && !query.subscription_update?\n query.context.namespace(:subscriptions)[:events] = []\n end\n end",
"def notify\n self.call_next\n end",
"def detect_changed(skip_channel)\n not_added_or_removed = @previous_ids & @current_ids\n\n not_added_or_removed.each do |id|\n if @previous_results_hash[id] != (data = @results_hash[id])\n # Data hash changed\n @live_query.notify_changed(id, data, skip_channel)\n end\n end\n end",
"def watched\n @channels = current_user.watched_channels\n end",
"def notified!\n notified\n save!\n end",
"def broadcast_update\n broadcast(:after_update, self) if self.saved_changes?\n end",
"def notification_on_update\n create_notification(:update)\n end",
"def broadcast_queries\n WebsocketRails[:query].trigger 'all_searches', Query.all.most_frequent\n end",
"def on_notifications\n self.before_listen if self.respond_to?(:before_listen)\n\n self.class.connection.execute('LISTEN %s' % channel)\n loop do\n handle_notifications do |incoming|\n yield incoming\n end\n end\n ensure\n self.class.connection.execute('UNLISTEN %s' % channel)\n\n self.after_listen if self.respond_to?(:after_listen)\n\n # Make sure we close this connection since its thread will be killed!\n self.class.connection.close()\n end",
"def monitor_redis\n # redis.dup.subscribe(REDIS_CHANNEL) do |on|\n redis.subscribe(REDIS_CHANNEL) do |on|\n on.message do |_, message|\n l = REDIS_HEAD_FIELD_LENGTH\n channel = message[0, l].strip\n client_id = message[l, l].strip\n json = message[(l * 2)..-1]\n send_json_message(client_id: client_id, channel: channel, json: json)\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /company_businesses/1 DELETE /company_businesses/1.json | def destroy
@company_business.destroy
respond_to do |format|
format.html { redirect_to company_businesses_url }
format.json { head :no_content }
end
end | [
"def destroy\n @company = Company.find(params[:id])\n @company.destroy\n \n render json: @company, status: :ok \n end",
"def destroy\n @company.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @biz_company = BizCompany.find(params[:id])\n @biz_company.destroy\n\n respond_to do |format|\n format.html { redirect_to biz_companies_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @crunch_company = CrunchCompany.find(params[:id])\n @crunch_company.destroy\n\n respond_to do |format|\n format.html { redirect_to crunch_companies_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @rail_company.destroy\n respond_to do |format|\n format.html { redirect_to rail_companies_url }\n format.json { head :no_content }\n end\n end",
"def delete_tenant_circle(args = {}) \n delete(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend",
"def destroy\n @business = @user.businesses.find(params[:id])\n \n @business.destroy\n respond_to do |format|\n format.html { redirect_to root_path(@user), notice: 'Business was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @builder_company.destroy\n respond_to do |format|\n format.html { redirect_to builder_companies_url, notice: 'Company was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @business = Business.find(params[:id])\n @business.destroy\n\n respond_to do |format|\n format.html { redirect_to businesses_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @contracted__company.destroy\n respond_to do |format|\n format.html { redirect_to contracted__companies_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @primary_business = PrimaryBusiness.find(params[:id])\n @primary_business.destroy\n\n respond_to do |format|\n format.html { redirect_to primary_businesses_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @db_company.destroy\n respond_to do |format|\n format.html { redirect_to db_companies_url, notice: 'Db company was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @boilerplate.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def delete_mobile_carrier(args = {}) \n delete(\"/mobile.json/#{args[:carrierId]}\", args)\nend",
"def destroy\n @saved_company = SavedCompany.find(params[:id])\n @saved_company.destroy\n\n respond_to do |format|\n format.html { redirect_to saved_companies_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @global_company = GlobalCompany.find(params[:id])\n @global_company.destroy\n\n respond_to do |format|\n format.html { redirect_to global_companies_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @client_company.destroy\n respond_to do |format|\n format.html { redirect_to client_companies_url, notice: 'Client company was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @ins_company = InsCompany.find(params[:id])\n @ins_company.destroy\n\n respond_to do |format|\n format.html { redirect_to ins_companies_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @booking_company = BookingCompany.find(params[:id])\n @booking_company.destroy\n\n respond_to do |format|\n format.html { redirect_to booking_companies_url }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Grabs all of the yaml files found in /pages, and loads them as Page objects. | def find_all
Dir["#{pages_dir}**/*.yml"].map {|f| new f }
end | [
"def find_and_load_pages\n\t\t\tPathname.glob( @sourcedir + '**/*.page' ).each do |pagefile|\n\t\t\t\tpath_to_base = @sourcedir.relative_path_from( pagefile.dirname )\n\n\t\t\t\tpage = Manual::Page.new( self, pagefile, @layoutsdir, path_to_base )\n\t\t\t\thierpath = pagefile.relative_path_from( @sourcedir )\n\n\t\t\t\t@pages << page\n\t\t\t\t@path_index[ pagefile ] = page\n\t\t\t\t@title_index[ page.title ] = page\n\t\t\t\t@uri_index[ hierpath.to_s ] = page\n\t\t\t\t\n\t\t\t\t# Place the page in the page hierarchy by using inject to find and/or create the \n\t\t\t\t# necessary subhashes. The last run of inject will return the leaf hash in which\n\t\t\t\t# the page will live\n\t\t\t\tsection = hierpath.dirname.split[1..-1].inject( @hierarchy ) do |hier, component|\n\t\t\t\t\thier[ component ] ||= {}\n\t\t\t\t\thier[ component ]\n\t\t\t\tend\n\n\t\t\t\tsection[ pagefile.basename('.page') ] = page\n\t\t\tend\n\t\tend",
"def load_pages\n reloader(Dir.glob(@pages_parent + '/' + @pages_dir + '/*.rb').sort)\n end",
"def pages(*args)\n return if args.nil? or args.empty?\n\n test_file_dir = class_eval{ self::TestFileDir }\n default_page_dir = File.join(test_file_dir, \"pages\")\n #puts \"debug: default_page_dir :#{default_page_dir}}\"\n page_dir = default_page_dir\n\n page_files = []\n args.each do |x|\n if x.class == Hash && x[:page_dir]\n page_dir = x[:page_dir]\n else\n page_files << x\n end\n end\n\n if page_files.size == 1 && page_files[0] == :all\n Dir[File.expand_path(page_dir)+ \"/*_page.rb\"].each { |page_file|\n load page_file\n }\n return\n end\n\n page_files.each do |page|\n page_file = File.join(page_dir, page.to_s)\n load page_file\n end\n end",
"def load_pages(sitefile)\n # Setup all page objects\n raise ArgumentError, \"Site file, #{sitefile} not found!\" unless File.exists?(sitefile)\n @site = YAML::load_file(sitefile)\n\n # Return the set of pages\n @site['pages'].collect do |page|\n # load page class\n load_page([File.expand_path(File.dirname(sitefile)), page] * '/')\n end\n end",
"def retrieve_pages(dir, dot_pages)\n theme_pages = []\n dot_pages.each do |page|\n theme_pages << Page.new(@site, dir, \"/\", page)\n end\n site.pages.concat(theme_pages)\n end",
"def retrieve_pages(dir, dot_pages)\n site.pages.concat(PageReader.new(site, dir).read(dot_pages))\n end",
"def retrieve_pages(dir, dot_pages); end",
"def process_pages\n\t\t\t\t@site.page_files(true).each do |p|\n\t\t\t\t\tpage = Striker::Page.new(p, { :site_meta => @meta })\n\t\t\t\t\t# t = Template.new(page)\n\t\t\t\t\tpage.process\n\t\t\t\tend\n\t\t\tend",
"def seed_page_basics!\n page_yml_filenames = [\n \"sell.yml\", \"about.yml\", \"buy.yml\",\n \"rent.yml\", \"home.yml\", \"legal_notice.yml\",\n \"contact.yml\", \"privacy_policy.yml\",\n ]\n\n page_yml_filenames.each do |page_yml_filename|\n seed_page page_yml_filename\n end\n end",
"def make_pages\n Dir.glob(concepts_glob).each do |concept_file_path|\n Jekyll.logger.debug(\"Geolexica:\",\n \"processing concept data #{concept_file_path}\")\n concept_hash = read_concept_file(concept_file_path)\n preprocess_concept_hash(concept_hash)\n add_page ConceptPage::HTML.new(site, concept_hash)\n add_page ConceptPage::JSON.new(site, concept_hash)\n add_page ConceptPage::JSONLD.new(site, concept_hash)\n add_page ConceptPage::Turtle.new(site, concept_hash)\n end\n end",
"def scan\n excludes = %w[~ Rakefile] + Array(config[\"exclude\"])\n\n top = Dir[\"*\"] - excludes\n files = top.select { |path| File.file? path }\n files += Dir[\"{#{top.join(\",\")}}/**/*\"].reject { |f| not File.file? f }\n files.reject! { |f| f.include? \"/_\" }\n\n renderers_re = Page.renderers_re\n\n files.each do |path|\n case path\n when /(?:#{excludes.join '|'})$/\n # ignore\n when /^_layout/ then\n name = File.basename(path).sub(/\\..+$/, '')\n @layouts[name] = Page.new self, path\n when /^_/ then\n next\n when /\\.yml$/ then\n @configs[path] = Config.new self, path\n when /\\.(?:#{self.class.binary_files.join(\"|\")})$/ then\n @pages[path] = Page.new self, path, self.config\n when /\\.(?:#{self.class.text_files.join(\"|\")})$/, renderers_re then\n @pages[path] = Page.new self, path\n else\n warn \"unknown file type: #{path}\" if Rake.application.options.trace\n end\n end\n\n $website = self # HACK\n task(:virtual_pages).invoke\n\n time_prune\n\n fix_subpages\n end",
"def walk_pages (page)\n\n # extract page content\n if page[:Type] == :Pages\n callback(:begin_page_container, [page])\n res = @ohash.object(page[:Resources])\n resources.push res if res\n @ohash.object(page[:Kids]).each {|child| walk_pages(@ohash.object(child))}\n resources.pop if res\n callback(:end_page_container)\n elsif page[:Type] == :Page\n callback(:begin_page, [page])\n res = @ohash.object(page[:Resources])\n resources.push res if res\n walk_resources(current_resources)\n\n if @ohash.object(page[:Contents]).kind_of?(Array)\n contents = @ohash.object(page[:Contents])\n else\n contents = [page[:Contents]]\n end\n\n fonts = font_hash_from_resources(current_resources)\n\n if page.has_key?(:Contents) and page[:Contents]\n direct_contents = contents.map { |content| @ohash.object(content) }\n content_stream(direct_contents, fonts)\n end\n\n resources.pop if res\n callback(:end_page)\n end\n end",
"def load\n # Load everything we can have\n read_content(:posts, :pages, :attachments, :statics)\n \n # Generate our dynamic content (pages, categories, archives, etc. etc.)\n generate_content\n \n # Write slugs to config\n self.config.slugs = (@posts + @pages).collect{|c| c.metadata.slug}\n self.config.slugs.count\n end",
"def load_page_types\n logger.info ' - Loading page types'\n # Add all the load paths before loading the ruby files in case one page type needs to refer to another\n page_type_paths.each do |path|\n add_load_paths_for_page_types_in_dir(path)\n end\n # Then load up the ruby files, they woudl auto load but we need to know which page type classes get defined\n page_type_paths.each do |path|\n load_page_type_classes_in_dir(path)\n end\n self.page_type_classes = Page.send(:subclasses)\n logger.info \"Loaded the following page type classes: #{self.page_type_classes.map(&:to_s).join(', ')}\"\n end",
"def fetch_from_pages\n self.mounting_point.pages.values.each do |page|\n page.translated_in.each do |locale|\n Locomotive::Mounter.with_locale(locale) do\n unless page.template.blank?\n self.add_assets_from_string(page.template.raw_source)\n end\n end\n end\n end\n end",
"def walk_pages (page)\n\n # extract page content\n if page[:Type] == :Pages\n callback(:begin_page_container, [page])\n res = @xref.object(page[:Resources])\n resources.push res if res\n @xref.object(page[:Kids]).each {|child| walk_pages(@xref.object(child))}\n resources.pop if res\n callback(:end_page_container)\n elsif page[:Type] == :Page\n callback(:begin_page, [page])\n res = @xref.object(page[:Resources])\n resources.push res if res\n walk_resources(current_resources)\n\n if @xref.object(page[:Contents]).kind_of?(Array)\n contents = @xref.object(page[:Contents])\n else\n contents = [page[:Contents]]\n end\n\n fonts = font_hash_from_resources(current_resources)\n\n if page.has_key?(:Contents) and page[:Contents]\n contents.each do |content|\n obj = @xref.object(content)\n content_stream(obj, fonts)\n end \n end\n\n resources.pop if res\n callback(:end_page)\n end\n end",
"def template_pages\n Page.all(config, views_path)\n end",
"def collect_pages\n #find page terminator\n start_line = 0\n current_line = 0\n @lines.each_with_index do |line, i|\n current_line = i\n if end_of_page?(line, i)\n @pages << Page.new(start_line, i) # every page in raw document\n start_line = i + 1 \n end \n end #end of line.each\n \n if current_line > start_line\n page = Page.new(start_line, current_line)\n @pages << page\n end\n #puts \" collect_pages found #{@pages.length} pages\"\n @pages \n end",
"def load_page\n Game.engine.markdown.parse_file(File.join(@path, \"#{@page}.md\"))\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns a collection that represents the instances belonging to this Auto Scaling group. You can use this collection to further refine the instances you are interested in: group.ec2_instances.filter('availability-zone', 'us-east-1a').each do |i| puts i.id end | def ec2_instances
instances = EC2::InstanceCollection.new(:config => config)
instances.tagged('aws:autoscaling:groupName').tagged_values(name)
end | [
"def instances\n instance_ids = ey_instances(@cloud_info.env_name)\n if instance_ids.empty?\n instances = []\n else\n group_id = aws_group_for_instance_id(instance_ids.first)\n instances = instances_in_group(group_id)\n instances\n end\n end",
"def auto_scaling_instances(group)\n auto_scaling.describe_auto_scaling_instances.map(&:auto_scaling_instances).flatten.select do |instance|\n instance.auto_scaling_group_name == group\n end\nend",
"def get_instances(incl_stopped=false)\n \n instances = @ec2.describe_instances\n instances = instances.select { |x| x[:aws_groups].include? @group_name }\n \n if(instances.length == 0)\n raise CaTPAWS::EC2::Error::InstanceRetrieval, \"No instances found in this group\"\n end \n \n unless (incl_stopped)\n instances = instances.select {|x| x[:aws_state_code].to_i <= 16}\n end\n @instances = instances\n end",
"def describe_auto_scaling_group_running_instances(group_name)\n # create new client\n client = create_autoscale_client()\n\n pages = client.describe_auto_scaling_groups(auto_scaling_group_names: [group_name])\n \n group_description = pages.first.auto_scaling_groups.first\n \n filtered = []\n\n # list all currently existing servers in the auto scale group\n instances = group_description.instances\n\n instances.each do |instance|\n if is_working_instance?(instance.instance_id)\n filtered << instance\n end \n end\n\n return filtered\nend",
"def instances\n @instances ||= aws_client.instances(filters: instance_filters).map do |instance|\n OpenStruct.new(\n with_tags(instance, private_ip: instance.private_ip_address,\n public_ip: instance.public_ip_address,\n instance: instance.instance_id)\n )\n end\n end",
"def instance_ids\n @instance_ids ||= groups.map { |gr| gr.instances.map { |i| i.instance_id } }.flatten\n end",
"def get_instance_ids\n client.api_call('describe_instances', layer_id: id)[:instances].map{ |s| s[:instance_id] }\n end",
"def instances_from_ami(image_id, region)\n ec2 = Aws::EC2::Client.new(region: region)\n resp = ec2.describe_instances(\n filters:\n [\n {\n name: 'image-id',\n values: [image_id],\n },\n ],\n )\n\n instance_ids = []\n resp[:reservations].each { |r|\n r[:instances].each { |i|\n next unless i[:state][:name] == 'running'\n instance_ids.push(i[:instance_id])\n puts \"Instance Id :#{i[:instance_id]}\\n\"\n }\n }\n instance_ids\n end",
"def ec2_find_group_instances(tag_value)\n ec2_find_instances_by_tag('Group', tag_value)\n end",
"def asg_instances(*names)\n autoscaling.describe_auto_scaling_groups(auto_scaling_group_names: names).auto_scaling_groups.map(&:instances).flatten\n end",
"def list_instances_detail()\n response = dbreq(\"GET\", dbmgmthost, \"#{dbmgmtpath}/instances/detail\", dbmgmtport, dbmgmtscheme)\n CloudDB::Exception.raise_exception(response) unless response.code.to_s.match(/^20.$/)\n instances = CloudDB.symbolize_keys(JSON.parse(response.body)[\"instances\"])\n return instances\n end",
"def instances\n IbmCloudRest.get \"#{@uri}/instances\"\n end",
"def list_instances()\n response = dbreq(\"GET\", dbmgmthost, \"#{dbmgmtpath}/instances\", dbmgmtport, dbmgmtscheme)\n CloudDB::Exception.raise_exception(response) unless response.code.to_s.match(/^20.$/)\n instances = CloudDB.symbolize_keys(JSON.parse(response.body)[\"instances\"])\n return instances\n end",
"def describe_auto_scaling_instances(options = {})\n if instance_ids = options.delete('InstanceIds')\n options.merge!(AWS.indexed_param('InstanceIds.member.%d', [*instance_ids]))\n end\n request({\n 'Action' => 'DescribeAutoScalingInstances',\n :parser => Fog::Parsers::AWS::AutoScaling::DescribeAutoScalingInstances.new\n }.merge!(options))\n end",
"def get_instances()\n @client.describe_instances({:layer_id => @layer[:layer_id]})[:instances]\n end",
"def instances\n Egi::Fedcloud::Vmhound::Log.info \"[#{self.class}] Retrieving active instances\"\n fetch_instances\n end",
"def get_instances_ip(region, key, secret, group_name)\n credentials = {\n region: region,\n credentials: Aws::Credentials.new(key, secret)\n }\n instances_of_as = get_instances(credentials, group_name)\n autoscaling_dns = []\n ec2 = Aws::EC2::Resource.new(credentials)\n\n instances_of_as.each do |instance|\n if instance.health_status != 'Healthy'\n puts \"Autoscaling: Skipping unhealthy instance #{instance.instance_id}\"\n else\n autoscaling_dns << ec2.instance(instance.instance_id).public_ip_address\n end\n end\n\n autoscaling_dns\n end",
"def instances_by_ami(amis)\n results = {}\n amis.each do |ami|\n results[ami.image_id] = []\n end\n\n @ec2.describe_instances(\n filters: [\n {\n name: 'image-id',\n values: results.keys\n }\n ]\n )[0].each do |reservation|\n reservation.instances.each do |instance|\n results[instance.image_id] << instance\n end\n end\n\n results\n end",
"def list_asgs\n # collect the list of running instances in this zone\n ec2 = AWS::EC2.new\n region = ec2.regions[AMI_REGION]\n instances = region.instances.select { |i| i.tags.to_h[\"server\"] == APP_NAME }\n\n # now find the list of running asgs\n format = \"%-32s %s\"\n puts\n puts format % [\"Instance Groups\", \"Tags\"]\n puts format % [\"-\" * 32, \"-\" * 60]\n auto_scaling = new_auto_scaling\n count = 0\n auto_scaling.groups.each do |group|\n count = count + 1\n puts format % [group.name, tag_value(group.tags, \"env\")]\n\n instances.each do |i|\n if i.tags.to_h[\"env\"] == tag_value(group.tags, \"env\")\n puts \"\\t%s %-13s %s\" % [i.id, i.status, i.dns_name]\n end\n end\n puts\n end\n puts format % [\"-\" * 32, \"-\" * 60]\n puts \"Found #{count} ASGs\"\n puts\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Suspends processes for this Auto Scaling group. Suspend two processes by name: auto_scaling_group.suspend_processes 'Launch', 'AZRebalance' | def suspend_processes *processes
client_opts = {}
client_opts[:auto_scaling_group_name] = name
client_opts[:scaling_processes] = processes.flatten
client.suspend_processes(client_opts)
nil
end | [
"def suspend_processes(auto_scaling_group_name, options = {})\n if scaling_processes = options.delete('ScalingProcesses')\n options.merge!(AWS.indexed_param('ScalingProcesses.member.%d', [*scaling_processes]))\n end\n request({\n 'Action' => 'SuspendProcesses',\n 'AutoScalingGroupName' => auto_scaling_group_name,\n :parser => Fog::Parsers::AWS::AutoScaling::Basic.new\n }.merge!(options))\n end",
"def pause_scaling_events\n client.suspend_processes(auto_scaling_group_name: asg_name,\n scaling_processes:\n %w[ReplaceUnhealthy AlarmNotification ScheduledActions AZRebalance])\n end",
"def resume_processes(auto_scaling_group_name, options = {})\n if scaling_processes = options.delete('ScalingProcesses')\n options.merge!(AWS.indexed_param('ScalingProcesses.member.%d', [*scaling_processes]))\n end\n request({\n 'Action' => 'ResumeProcesses',\n 'AutoScalingGroupName' => auto_scaling_group_name,\n :parser => Fog::Parsers::AWS::AutoScaling::Basic.new\n }.merge!(options))\n end",
"def resume_processes *processes\n client_opts = {}\n client_opts[:auto_scaling_group_name] = name\n client_opts[:scaling_processes] = processes.flatten\n client.resume_processes(client_opts)\n nil\n end",
"def suspend\n # TODO There is no SuspendProcess API function in Windows.\n # This was described in article: http://www.codeproject.com/Articles/2964/Win-process-suspend-resume-tool\n # The suspend method isn't not save\n end",
"def resume_all_processes\n groups.each do |group|\n group.resume_processes\n end\n end",
"def resume_all_processes\n resume_processes\n end",
"def stop_instances\n @task.unsafe(\"Stopping #{@name} processes\") do\n save_to_s3(@task.bucket)\n autoscaling_group.suspend_all_processes\n end\n autoscaling_instances.each do |instance|\n @task.unsafe(\"Stopping instance #{instance.instance_id}\") do\n load_balancers.each do |elb|\n elb.instances.deregister(instance.instance_id)\n end\n instance.ec2_instance.stop\n end\n end\n end",
"def suspend\n process_status = status\n if process_status != 'terminated' && process_status != 'stopped'\n return status if Process.kill('STOP', @proc_attrs[:pid].to_i)\n end\n process_status\n rescue Errno::EPERM\n return 'non-privilaged operation'\n end",
"def suspendvms\n assert_privileges(params[:pressed])\n vm_button_operation('suspend', 'suspend')\n end",
"def suspendvms\n assert_privileges(params[:pressed])\n generic_button_operation('suspend', _('Suspend'), vm_button_action)\n end",
"def stop_instances\n @task.unsafe(\"Stopping #{@name} processes\") do\n autoscaling_group.suspend_all_processes\n end\n autoscaling_instances.each do |instance|\n @task.unsafe(\"Stopping instance #{instance.instance_id}\") do\n load_balancers.each do |elb|\n elb.instances.deregister(instance.instance_id)\n end\n instance.ec2_instance.stop\n end\n end\n end",
"def suspend\n Fission::Action::VM::Suspender.new(self).suspend\n end",
"def suspend\n param 'state' => Patriot::JobStore::JobState::SUSPEND\n end",
"def terminate_instances\n @task.unsafe(\"Stopping #{@name} Launch process\") do\n autoscaling_group.suspend_processes('Launch')\n end\n autoscaling_instances.each do |instance|\n @task.unsafe(\"Terminating instance #{instance.instance_id}\") do\n load_balancers.each do |elb|\n elb.instances.deregister(instance.instance_id)\n end\n instance.ec2_instance.terminate\n end\n end\n end",
"def suspended_processes\n data[:suspended_processes]\n end",
"def cmd_suspend(*args)\n # give'em help if they want it, or seem confused\n if args.length == 0 or (args.include? \"-h\")\n cmd_suspend_help\n return true\n end\n\n continue = args.delete(\"-c\") || false\n resume = args.delete(\"-r\") || false\n\n # validate all the proposed pids first so we can bail if one is bogus\n valid_pids = validate_pids(args)\n args.uniq!\n diff = args - valid_pids.map {|e| e.to_s}\n if not diff.empty? # then we had an invalid pid\n print_error(\"The following pids are not valid:\t#{diff.join(\", \").to_s}.\")\n if continue\n print_status(\"Continuing. Invalid args have been removed from the list.\")\n else\n print_error(\"Quitting.\tUse -c to continue using only the valid pids.\")\n return false\n end\n end\n\n targetprocess = nil\n if resume\n print_status(\"Resuming: #{valid_pids.join(\", \").to_s}\")\n else\n print_status(\"Suspending: #{valid_pids.join(\", \").to_s}\")\n end\n begin\n valid_pids.each do |pid|\n print_status(\"Targeting process with PID #{pid}...\")\n targetprocess = client.sys.process.open(pid, PROCESS_ALL_ACCESS)\n targetprocess.thread.each_thread do |x|\n if resume\n targetprocess.thread.open(x).resume\n else\n targetprocess.thread.open(x).suspend\n end\n end\n end\n rescue ::Rex::Post::Meterpreter::RequestError => e\n print_error \"Error acting on the process: #{e.to_s}.\"\n print_error \"Try migrating to a process with the same owner as the target process.\"\n print_error \"Also consider running the win_privs post module and confirm SeDebug priv.\"\n return false unless continue\n ensure\n targetprocess.close if targetprocess\n end\n return true\n end",
"def suspend\n execute(\"controlvm\", @uuid, \"savestate\")\n end",
"def exec\n t = Time.now.to_i\n asg = Asg.for(asg_name)\n\n Log.log \"Starting with desired_capacity=#{asg_settings[:desired_capacity].to_s.light_blue}, max_size=#{asg_settings[:max_size].to_s.light_blue}\"\n Log.log \"Live instances: #{@starting_instances.collect(&:instance_id)}\"\n Log.log \"Pausing autoscaling processes on #{asg_name}\"\n\n asg.pause_scaling_events\n # increment max size\n Log.log \"Scaling to desired_capacity=#{(asg_settings[:desired_capacity] + 1).to_s.light_blue}, max_size=#{(asg_settings[:max_size] + 1).to_s.light_blue}\"\n asg.scale(asg_settings[:max_size] + 1, asg_settings[:desired_capacity] + 1)\n\n\n asg.await_event\n\n @starting_instances.each do |instance|\n asg.terminate(instance)\n asg.await_event(\"Replacement\")\n end\n\n finishing_instances = asg.in_service_instances\n Log.log \"Live instances: #{finishing_instances.collect(&:instance_id)}\"\n\n res = finishing_instances.any?{ |i| starting_instances.include?(i) }\n Log.log \"Resetting ASG desired_capacity=#{asg_settings[:desired_capacity].to_s.light_blue}, max_size=#{asg_settings[:max_size].to_s.light_blue}\"\n asg.scale(asg_settings[:max_size], asg_settings[:desired_capacity])\n\n Log.log \"Replaced all instances: #{res ? \"no\".light_red : \"yes\".light_green}\"\n asg.terminate(finishing_instances.sample)\n\n asg.in_service_instances\n Log.log \"Live instances: #{asg.in_service_instances.collect(&:instance_id)}\"\n Log.log \"Resuming autoscaling processes following rollout.\"\n asg.resume_scaling_events\n\n delta = Time.now.to_i - t\n\n Log.log \"Staged rollout to auto scaling group #{asg_name} completed#{res ? \" with errors.\" : \".\"}\"\n Log.log \"Rollout completed in #{(delta/1000.0).truncate(2)}s\"\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Resumes processes for this Auto Scaling group. Resume two processes by name: auto_scaling_group.resume_processes 'Launch', 'AZRebalance' | def resume_processes *processes
client_opts = {}
client_opts[:auto_scaling_group_name] = name
client_opts[:scaling_processes] = processes.flatten
client.resume_processes(client_opts)
nil
end | [
"def resume_processes(auto_scaling_group_name, options = {})\n if scaling_processes = options.delete('ScalingProcesses')\n options.merge!(AWS.indexed_param('ScalingProcesses.member.%d', [*scaling_processes]))\n end\n request({\n 'Action' => 'ResumeProcesses',\n 'AutoScalingGroupName' => auto_scaling_group_name,\n :parser => Fog::Parsers::AWS::AutoScaling::Basic.new\n }.merge!(options))\n end",
"def suspend_processes(auto_scaling_group_name, options = {})\n if scaling_processes = options.delete('ScalingProcesses')\n options.merge!(AWS.indexed_param('ScalingProcesses.member.%d', [*scaling_processes]))\n end\n request({\n 'Action' => 'SuspendProcesses',\n 'AutoScalingGroupName' => auto_scaling_group_name,\n :parser => Fog::Parsers::AWS::AutoScaling::Basic.new\n }.merge!(options))\n end",
"def resume_all_processes\n resume_processes\n end",
"def suspend_processes *processes\n client_opts = {}\n client_opts[:auto_scaling_group_name] = name\n client_opts[:scaling_processes] = processes.flatten\n client.suspend_processes(client_opts)\n nil\n end",
"def resume_all_processes\n groups.each do |group|\n group.resume_processes\n end\n end",
"def pause_scaling_events\n client.suspend_processes(auto_scaling_group_name: asg_name,\n scaling_processes:\n %w[ReplaceUnhealthy AlarmNotification ScheduledActions AZRebalance])\n end",
"def resume\n process_status = status\n if process_status == 'stopped'\n return status if Process.kill('CONT', @proc_attrs[:pid].to_i)\n end\n process_status\n rescue Errno::EPERM\n return 'non-privilaged operation'\n end",
"def suspend\n # TODO There is no SuspendProcess API function in Windows.\n # This was described in article: http://www.codeproject.com/Articles/2964/Win-process-suspend-resume-tool\n # The suspend method isn't not save\n end",
"def exec\n t = Time.now.to_i\n asg = Asg.for(asg_name)\n\n Log.log \"Starting with desired_capacity=#{asg_settings[:desired_capacity].to_s.light_blue}, max_size=#{asg_settings[:max_size].to_s.light_blue}\"\n Log.log \"Live instances: #{@starting_instances.collect(&:instance_id)}\"\n Log.log \"Pausing autoscaling processes on #{asg_name}\"\n\n asg.pause_scaling_events\n # increment max size\n Log.log \"Scaling to desired_capacity=#{(asg_settings[:desired_capacity] + 1).to_s.light_blue}, max_size=#{(asg_settings[:max_size] + 1).to_s.light_blue}\"\n asg.scale(asg_settings[:max_size] + 1, asg_settings[:desired_capacity] + 1)\n\n\n asg.await_event\n\n @starting_instances.each do |instance|\n asg.terminate(instance)\n asg.await_event(\"Replacement\")\n end\n\n finishing_instances = asg.in_service_instances\n Log.log \"Live instances: #{finishing_instances.collect(&:instance_id)}\"\n\n res = finishing_instances.any?{ |i| starting_instances.include?(i) }\n Log.log \"Resetting ASG desired_capacity=#{asg_settings[:desired_capacity].to_s.light_blue}, max_size=#{asg_settings[:max_size].to_s.light_blue}\"\n asg.scale(asg_settings[:max_size], asg_settings[:desired_capacity])\n\n Log.log \"Replaced all instances: #{res ? \"no\".light_red : \"yes\".light_green}\"\n asg.terminate(finishing_instances.sample)\n\n asg.in_service_instances\n Log.log \"Live instances: #{asg.in_service_instances.collect(&:instance_id)}\"\n Log.log \"Resuming autoscaling processes following rollout.\"\n asg.resume_scaling_events\n\n delta = Time.now.to_i - t\n\n Log.log \"Staged rollout to auto scaling group #{asg_name} completed#{res ? \" with errors.\" : \".\"}\"\n Log.log \"Rollout completed in #{(delta/1000.0).truncate(2)}s\"\n end",
"def stop_instances\n @task.unsafe(\"Stopping #{@name} processes\") do\n save_to_s3(@task.bucket)\n autoscaling_group.suspend_all_processes\n end\n autoscaling_instances.each do |instance|\n @task.unsafe(\"Stopping instance #{instance.instance_id}\") do\n load_balancers.each do |elb|\n elb.instances.deregister(instance.instance_id)\n end\n instance.ec2_instance.stop\n end\n end\n end",
"def resume(job = ALL_JOBS)\n DRMAA.control(job, DRMAA::ACTION_RESUME)\n end",
"def resume!\n _log.info(\"Resuming EMS [#{name}] id [#{id}].\")\n\n new_zone = if zone_before_pause.nil?\n zone.maintenance? ? Zone.default_zone : zone\n else\n zone_before_pause\n end\n\n transaction do\n all_managers = [self] + child_managers\n all_managers.each do |ems|\n ems.update!(\n :zone_before_pause => nil,\n :zone => new_zone,\n :enabled => true\n )\n end\n end\n\n _log.info(\"Resuming EMS [#{name}] id [#{id}] successful.\")\n end",
"def resume(job = ALL_JOBS)\n DRMAA.control(job, DRMAA::ACTION_RESUME)\n end",
"def resume\n @state = :running\n end",
"def resume(job = ALL_JOBS)\n\t\t\tDRMAA.control(job, DRMAA::ACTION_RESUME)\n\t\tend",
"def resume\n @pauses.each do |topic, partitions|\n partitions.each do |partition, pause|\n next unless pause.paused?\n next unless pause.expired?\n\n pause.resume\n\n yield(topic, partition)\n end\n end\n end",
"def suspended_processes\n data[:suspended_processes]\n end",
"def resume_all; threads.each {|x| resume(x)}; end",
"def stop_instances\n @task.unsafe(\"Stopping #{@name} processes\") do\n autoscaling_group.suspend_all_processes\n end\n autoscaling_instances.each do |instance|\n @task.unsafe(\"Stopping instance #{instance.instance_id}\") do\n load_balancers.each do |elb|\n elb.instances.deregister(instance.instance_id)\n end\n instance.ec2_instance.stop\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Resumes all processes for this Auto Scaling group. | def resume_all_processes
resume_processes
end | [
"def resume_processes(auto_scaling_group_name, options = {})\n if scaling_processes = options.delete('ScalingProcesses')\n options.merge!(AWS.indexed_param('ScalingProcesses.member.%d', [*scaling_processes]))\n end\n request({\n 'Action' => 'ResumeProcesses',\n 'AutoScalingGroupName' => auto_scaling_group_name,\n :parser => Fog::Parsers::AWS::AutoScaling::Basic.new\n }.merge!(options))\n end",
"def resume_processes *processes\n client_opts = {}\n client_opts[:auto_scaling_group_name] = name\n client_opts[:scaling_processes] = processes.flatten\n client.resume_processes(client_opts)\n nil\n end",
"def resume_all_processes\n groups.each do |group|\n group.resume_processes\n end\n end",
"def suspend_processes(auto_scaling_group_name, options = {})\n if scaling_processes = options.delete('ScalingProcesses')\n options.merge!(AWS.indexed_param('ScalingProcesses.member.%d', [*scaling_processes]))\n end\n request({\n 'Action' => 'SuspendProcesses',\n 'AutoScalingGroupName' => auto_scaling_group_name,\n :parser => Fog::Parsers::AWS::AutoScaling::Basic.new\n }.merge!(options))\n end",
"def pause_scaling_events\n client.suspend_processes(auto_scaling_group_name: asg_name,\n scaling_processes:\n %w[ReplaceUnhealthy AlarmNotification ScheduledActions AZRebalance])\n end",
"def resume_all\n self.class.post('/command/resumeAll')\n end",
"def resume_all; threads.each {|x| resume(x)}; end",
"def resume(job = ALL_JOBS)\n DRMAA.control(job, DRMAA::ACTION_RESUME)\n end",
"def resume(job = ALL_JOBS)\n DRMAA.control(job, DRMAA::ACTION_RESUME)\n end",
"def resume_campaigns(params)\n perform_request(self, @token, 'campaigns', 'resume', params)\n end",
"def resume(job = ALL_JOBS)\n\t\t\tDRMAA.control(job, DRMAA::ACTION_RESUME)\n\t\tend",
"def resume\n with_queue_control do |control|\n control.resume\n end\n end",
"def resume\n @pauses.each do |topic, partitions|\n partitions.each do |partition, pause|\n next unless pause.paused?\n next unless pause.expired?\n\n pause.resume\n\n yield(topic, partition)\n end\n end\n end",
"def resume\n @state = :running\n end",
"def resume!\n raise Burstflow::Job::InternalError.new(self, \"Can't resume: already resumed\") if resumed?\n raise Burstflow::Job::InternalError.new(self, \"Can't resume: not suspended\") unless suspended?\n\n self.resumed_at = current_timestamp\n end",
"def resume\n process_status = status\n if process_status == 'stopped'\n return status if Process.kill('CONT', @proc_attrs[:pid].to_i)\n end\n process_status\n rescue Errno::EPERM\n return 'non-privilaged operation'\n end",
"def suspend_processes *processes\n client_opts = {}\n client_opts[:auto_scaling_group_name] = name\n client_opts[:scaling_processes] = processes.flatten\n client.suspend_processes(client_opts)\n nil\n end",
"def resume_paused_partitions\n @coordinators.resume do |topic, partition|\n @client.resume(topic, partition)\n end\n end",
"def resume_job\n # get items\n query = AnalysisJobsItem.cancelled_for_analysis_job(id)\n\n # batch update\n query.find_in_batches(batch_size: AnalysisJob.batch_size) do |items|\n items.each(&:retry!)\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
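A minimal call-site sketch for the resume_all_processes / resume_processes pair documented in the entry above. It assumes an aws-sdk v1 style AWS::AutoScaling entry point; the require name, region, constructor-and-lookup lines, and the 'web-asg' group name are all illustrative assumptions rather than values taken from the dataset.

require 'aws-sdk-v1'                      # classic v1 gem; adjust to the SDK you actually use

auto_scaling = AWS::AutoScaling.new(:region => 'us-east-1')   # region is a placeholder
group = auto_scaling.groups['web-asg']                        # look the group up by name

# Resume a specific subset of previously suspended processes...
group.resume_processes('Launch', 'Terminate')

# ...or clear every suspension at once, which is what resume_all_processes does.
group.resume_all_processes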
Enables all metrics collection for the Auto Scaling group. | def enable_all_metrics_collection
enable_metrics_collection
end | [
"def enable_metrics_collection(auto_scaling_group_name, granularity, options = {})\n if metrics = options.delete('Metrics')\n options.merge!(AWS.indexed_param('Metrics.member.%d', [*metrics]))\n end\n request({\n 'Action' => 'EnableMetricsCollection',\n 'AutoScalingGroupName' => auto_scaling_group_name,\n 'Granularity' => granularity,\n :parser => Fog::Parsers::AWS::AutoScaling::Basic.new\n }.merge!(options))\n end",
"def update_metrics(group, disable, enable)\n @aws.disable_metrics_collection({\n auto_scaling_group_name: group.name,\n metrics: disable\n })\n @aws.enable_metrics_collection({\n auto_scaling_group_name: group.name,\n metrics: enable,\n granularity: \"1Minute\"\n })\n end",
"def disable_all_metrics_collection\n disable_metrics_collection\n end",
"def init_groups\n @@client.describe_auto_scaling_groups.auto_scaling_groups\n end",
"def enabled_metrics\n @group.enabled_metrics\n end",
"def update_auto_scaling_group(auto_scaling_group_name, options = {})\n if availability_zones = options.delete('AvailabilityZones')\n options.merge!(AWS.indexed_param('AvailabilityZones.member.%d', [*availability_zones]))\n end\n request({\n 'Action' => 'UpdateAutoScalingGroup',\n 'AutoScalingGroupName' => auto_scaling_group_name,\n :parser => Fog::Parsers::AWS::AutoScaling::Basic.new\n }.merge!(options))\n end",
"def autoscaling(value)\n set_bool(:autoscaling, value)\n end",
"def enable_all\n @services.each_key(&method(:enable))\n end",
"def enable_monitoring\n client.monitor_instances(:instance_ids => [id])\n nil\n end",
"def describe_auto_scaling_groups(options = {})\n if auto_scaling_group_names = options.delete('AutoScalingGroupNames')\n options.merge!(AWS.indexed_param('AutoScalingGroupNames.member.%d', [*auto_scaling_group_names]))\n end\n request({\n 'Action' => 'DescribeAutoScalingGroups',\n :parser => Fog::Parsers::AWS::AutoScaling::DescribeAutoScalingGroups.new\n }.merge!(options))\n end",
"def auto_scaling_instances(group)\n auto_scaling.describe_auto_scaling_instances.map(&:auto_scaling_instances).flatten.select do |instance|\n instance.auto_scaling_group_name == group\n end\nend",
"def update_averages_for_metrics!\n Metric.all.each do |metric|\n update_averages_for_metric!(metric)\n end\n end",
"def update_auto_scaling_group(auto_scaling_group_name, options={})\n request_hash = amazonize_list('AvailabilityZones.member', options[:availability_zones])\n request_hash['AutoScalingGroupName'] = auto_scaling_group_name\n request_hash['LaunchConfigurationName'] = options[:launch_configuration_name] if options[:launch_configuration_name]\n request_hash['MinSize'] = options[:min_size] if options[:min_size]\n request_hash['MaxSize'] = options[:max_size] if options[:max_size]\n request_hash['Cooldown'] = options[:cooldown] if options[:cooldown]\n link = generate_request(\"UpdateAutoScalingGroup\", request_hash)\n request_info(link, RightHttp2xxParser.new(:logger => @logger))\n end",
"def remove_auto_scaling_group_properties\n properties = []\n properties << :AvailabilityZones\n properties << :HealthCheckGracePeriod\n properties << :HealthCheckType\n add_patch Patches::RemoveProperty.new 'AWS::AutoScaling::AutoScalingGroup', properties\n end",
"def enable_all_alerts\n self.service_subscriptions.each do |subscription|\n subscription.update_attribute :sms_enabled, true\n subscription.update_attribute :email_enabled, true\n end\n end",
"def describe_auto_scaling_groups(*auto_scaling_group_names)\n auto_scaling_group_names = auto_scaling_group_names.flatten.compact\n request_hash = amazonize_list('AutoScalingGroupNames.member', auto_scaling_group_names)\n link = generate_request(\"DescribeAutoScalingGroups\", request_hash)\n request_cache_or_info(:describe_auto_scaling_groups, link, DescribeAutoScalingGroupsParser, @@bench, auto_scaling_group_names.blank?)\n end",
"def enable\n {\n method: \"Performance.enable\"\n }\n end",
"def enable\n\t\t# if already enabled, nothing to do\n\t\tif @enabled == true\n\t\t\treturn false\n\t\tend\n\n\t\t# for each aspect, install in the class\n\t\t@aspects.each do |_aspect, arr|\n\t\t\tarr.each do |_class|\n\t\t\t\t_aspect.install(_class)\n\t\t\tend\n\t\tend\n\t\t@enabled = true\n\n\t\treturn true\n\tend",
"def run\n collector_config = config['metric_collectors']\n metric_plugins = instantiate_plugins collector_config\n metric_plugins.each do |metric_collector|\n metric_collector.collect\n @alert_manager.send_alerts! metric_collector.alerts\n @ship_manager.ship metric_collector.metrics\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Disables all metrics collection for the Auto Scaling group. | def disable_all_metrics_collection
disable_metrics_collection
end | [
"def enable_all_metrics_collection\n enable_metrics_collection\n end",
"def update_metrics(group, disable, enable)\n @aws.disable_metrics_collection({\n auto_scaling_group_name: group.name,\n metrics: disable\n })\n @aws.enable_metrics_collection({\n auto_scaling_group_name: group.name,\n metrics: enable,\n granularity: \"1Minute\"\n })\n end",
"def zero_autoscale_group\n Tapjoy::AutoscalingBootstrap::AWS::Autoscaling::Group.resize\n\n wait_for_asg_to_quiet\n\n Tapjoy::AutoscalingBootstrap::AWS::Autoscaling::Group.delete\n\n wait_for_asg_to_delete\n\n abort(\"#{@scaler_name} still exists\") if exists\n end",
"def enable_metrics_collection(auto_scaling_group_name, granularity, options = {})\n if metrics = options.delete('Metrics')\n options.merge!(AWS.indexed_param('Metrics.member.%d', [*metrics]))\n end\n request({\n 'Action' => 'EnableMetricsCollection',\n 'AutoScalingGroupName' => auto_scaling_group_name,\n 'Granularity' => granularity,\n :parser => Fog::Parsers::AWS::AutoScaling::Basic.new\n }.merge!(options))\n end",
"def clear\n @mutex.synchronize do\n @metrics.each do |key, metric|\n metric.stop if metric.respond_to?(:stop)\n end\n\n @metrics = {}\n end\n end",
"def remove_auto_scaling_group_properties\n properties = []\n properties << :AvailabilityZones\n properties << :HealthCheckGracePeriod\n properties << :HealthCheckType\n add_patch Patches::RemoveProperty.new 'AWS::AutoScaling::AutoScalingGroup', properties\n end",
"def reset\n @collected_metrics = []\n end",
"def metrics_to_disable\n @aws.enabled_metrics.map { |k| k.metric }.deep_sort - @local.enabled_metrics.deep_sort\n end",
"def disable_monitoring\n client.unmonitor_instances(:instance_ids => [id])\n nil\n end",
"def zero_autoscale\n autoscale = AWS::AutoScaling.new(:region => @task.region)\n cf_stack.resources.each do |resource|\n next unless resource.logical_resource_id == 'ScalingGroup'\n\n scale_group = autoscale.groups[resource.physical_resource_id]\n if scale_group.min_size == 0 && scale_group.max_size == 0\n @task.debug { \"Stack #{@name} scale group #{scale_group.name} already zeroed\" }\n next\n end\n\n @task.unsafe(\"Change autoscale #{resource.physical_resource_id} to zero\") do\n s3_object = s3_bucket.objects[\"cloudformation/#{@name}/autoscale/#{scale_group.name}.json\"]\n s3_object.write JSON.generate(\n :min_size => scale_group.min_size,\n :max_size => scale_group.max_size,\n :desired_capacity => scale_group.desired_capacity\n )\n scale_group.update(:min_size => 0, :max_size => 0, :desired_capacity => 0)\n end\n end\n end",
"def disable!\n @mutex.synchronize do\n @advised.each { | x | x.disable! }\n end\n self\n end",
"def turn_off\n $logger.debug \"Turning group #{id} off\"\n put_request($uri + \"/#{$key}/groups/#{id}/action\", {:on => false})\n end",
"def clear_runtime\n @runtime_groups.each do |name, group|\n group.release_all\n end\n self\n end",
"def disable\n {\n method: \"Performance.disable\"\n }\n end",
"def reset_metrics\n flush_cache\n self\n end",
"def disable_all_mediation!\n self.__graph_mediator_enabled = false\n end",
"def invalidate_groupings(groupings)\n groupings.each(&:invalidate_grouping)\n end",
"def stop_collectors\n ::PrometheusExporter::Instrumentation::Process.stop\n ::Gruf::Prometheus::Collector.stop\n end",
"def stop_instances\n @task.unsafe(\"Stopping #{@name} processes\") do\n autoscaling_group.suspend_all_processes\n end\n autoscaling_instances.each do |instance|\n @task.unsafe(\"Stopping instance #{instance.instance_id}\") do\n load_balancers.each do |elb|\n elb.instances.deregister(instance.instance_id)\n end\n instance.ec2_instance.stop\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
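One way the enable/disable pair from the two entries above might be exercised, again assuming an aws-sdk v1 style group object; the group name and region are placeholders, and the enabled_metrics reader mirrors the attribute used in one of the negative snippets rather than anything shown in the positive documents.

require 'aws-sdk-v1'

group = AWS::AutoScaling.new(:region => 'us-east-1').groups['web-asg']   # placeholder names

# Start collecting CloudWatch metrics for every supported group metric...
group.enable_all_metrics_collection

# ...inspect what is currently being collected...
puts group.enabled_metrics.inspect

# ...and turn it all back off when it is no longer needed.
group.disable_all_metrics_collection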
Update one or more attributes on the Auto Scaling group. | def update options = {}
group_opts = group_options(options)
# tags must be updated using a separate request from the
# other attributes, *sigh*
if tags = group_opts.delete(:tags)
tags.map(&:to_hash).each do |tag|
tag[:resource_type] = 'auto-scaling-group'
tag[:resource_id] = name
end
client.create_or_update_tags(:tags => tags)
end
unless group_opts.empty?
client_opts = group_opts.merge(:auto_scaling_group_name => name)
client.update_auto_scaling_group(client_opts)
end
nil
end | [
"def update_auto_scaling_group(auto_scaling_group_name, options = {})\n if availability_zones = options.delete('AvailabilityZones')\n options.merge!(AWS.indexed_param('AvailabilityZones.member.%d', [*availability_zones]))\n end\n request({\n 'Action' => 'UpdateAutoScalingGroup',\n 'AutoScalingGroupName' => auto_scaling_group_name,\n :parser => Fog::Parsers::AWS::AutoScaling::Basic.new\n }.merge!(options))\n end",
"def update_auto_scaling_group(auto_scaling_group_name, options={})\n request_hash = amazonize_list('AvailabilityZones.member', options[:availability_zones])\n request_hash['AutoScalingGroupName'] = auto_scaling_group_name\n request_hash['LaunchConfigurationName'] = options[:launch_configuration_name] if options[:launch_configuration_name]\n request_hash['MinSize'] = options[:min_size] if options[:min_size]\n request_hash['MaxSize'] = options[:max_size] if options[:max_size]\n request_hash['Cooldown'] = options[:cooldown] if options[:cooldown]\n link = generate_request(\"UpdateAutoScalingGroup\", request_hash)\n request_info(link, RightHttp2xxParser.new(:logger => @logger))\n end",
"def update options = {}\n client_opts = scaling_policy_options(group, name, options)\n resp = client.put_scaling_policy(client_opts)\n static_attributes[:arn] = resp.policy_arn\n nil\n end",
"def update(group, diffs)\n hash = group.to_aws(Configuration.instance.autoscaling.force_size)\n if !Configuration.instance.autoscaling.override_launch_config_on_sync\n hash.delete(:launch_configuration_name)\n end\n\n update_group = false\n\n diffs.each do |diff|\n case diff.type\n when AutoScalingChange::MIN\n update_group = true\n\n # Override the min size value because it could be different from the actual\n # configured min size if there were scheduled actions\n hash[:min_size] = diff.local\n when AutoScalingChange::MAX\n update_group = true\n\n # Override the max size value because it could be different from the actual\n # configured max size if there were scheduled actions\n hash[:max_size] = diff.local\n when AutoScalingChange::DESIRED\n update_group = true\n\n # Override the desired size value because it could be different from the actual\n # configured desired size if there were scheduled actions\n hash[:desired_capacity] = diff.local\n when AutoScalingChange::COOLDOWN,\n AutoScalingChange::CHECK_TYPE,\n AutoScalingChange::CHECK_GRACE,\n AutoScalingChange::SUBNETS,\n AutoScalingChange::TERMINATION,\n AutoScalingChange::LAUNCH\n update_group = true\n when AutoScalingChange::TAGS\n update_tags(group, diff.tags_to_remove, diff.tags_to_add)\n when AutoScalingChange::LOAD_BALANCER\n update_load_balancers(group, diff.load_balancers_to_remove, diff.load_balancers_to_add)\n when AutoScalingChange::METRICS\n update_metrics(group, diff.metrics_to_disable, diff.metrics_to_enable)\n when AutoScalingChange::SCHEDULED\n remove = diff.changes.removed.map { |d| d.aws.scheduled_action_name }\n update = (diff.changes.added + diff.changes.modified).map { |d| d.local.name }\n\n update_scheduled_actions(group, remove, update)\n when AutoScalingChange::POLICY\n remove = diff.policy_diffs.reject do |d|\n d.type != PolicyChange::UNMANAGED\n end.map { |d| d.aws.policy_name }\n update = diff.policy_diffs.select do |d|\n d.type != PolicyChange::UNMANAGED and d.type != PolicyChange::ALARM\n end.map { |d| d.local.name }\n update_scaling_policies(group, remove, update)\n\n # update alarms for existing policies\n alarms = diff.policy_diffs.select { |d| d.type == PolicyChange::ALARM }\n alarms.each do |policy_diff|\n remove = policy_diff.alarm_diffs.reject do |d|\n d.type != AlarmChange::UNMANAGED\n end.map { |d| d.aws.alarm_name }\n update = policy_diff.alarm_diffs.select do |d|\n d.type != AlarmChange::UNMANAGED\n end.map { |u| u.local.name }\n\n update_alarms(policy_diff.local, policy_diff.policy_arn, remove, update)\n end\n\n # create alarms for new policies\n new_policies = diff.policy_diffs.select { |d| d.type == PolicyChange::ADD }\n new_policies.each do |policy_diff|\n config = policy_diff.local\n policy_arn = @aws.describe_policies({\n auto_scaling_group_name: group.name,\n policy_names: [config.name]\n }).scaling_policies[0].policy_arn\n update_alarms(config, policy_arn, [], config.alarms.map {|k , v| k })\n end\n end\n end\n\n @aws.update_auto_scaling_group(hash) if update_group\n end",
"def update\n\t\t@attribute_group = AttributeGroup.find(params[:id])\n\n\t\trespond_to do |format|\n\t\t\tif @attribute_group.update_attributes(params[:attribute_group])\n\t\t\t\tformat.html { redirect_to attribute_groups_url, notice: 'Attribute Group was successfully updated.' }\n\t\t\t\tformat.json\t{ head :ok }\n\t\t\telse\n\t\t\t\tformat.html { render action: \"edit\" }\n\t\t\t\tformat.json\t{ render json: @attribute_group.errors, status: :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend",
"def update\n @attribute_group = AttributeGroup.find(params[:id])\n\n respond_to do |format|\n if @attribute_group.update_attributes(params[:attribute_group])\n format.html { redirect_to(@attribute_group, :notice => 'Attribute group was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @attribute_group.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n\t\t@auto_scaling_trigger = AutoScalingTrigger.find(params[:id], :include => [ :auto_scaling_group ])\n\t\t@auto_scaling_group = @auto_scaling_trigger.auto_scaling_group\n\t\tast_params = params[:auto_scaling_trigger]\n\t\t\n\t\tunless ast_params.nil?\n\t\t\tast_params.each do |k,v|\n\t\t\t\tast_params[k] = v.chomp\n\t\t\tend\n\t\tend\n\t\t\n\t\trespond_to do |format|\n\t\t\tif @auto_scaling_trigger.update_attributes(ast_params)\n\t\t\t\tp = @auto_scaling_group\n\t\t\t\to = @auto_scaling_trigger\n\t\t\t\tAuditLog.create_for_parent(\n\t\t\t\t\t:parent => p,\n\t\t\t\t\t:auditable_id => o.id,\n\t\t\t\t\t:auditable_type => o.class.to_s,\n\t\t\t\t\t:auditable_name => o.name,\n\t\t\t\t\t:author_login => current_user.login,\n\t\t\t\t\t:author_id => current_user.id,\n\t\t\t\t\t:summary => \"updated '#{o.name}'\",\n\t\t\t\t\t:changes => o.tracked_changes,\n\t\t\t\t\t:force => false\n\t\t\t\t)\n\t\t\telse\n\t\t\t\t@error_messages = @auto_scaling_trigger.errors.collect{ |attr,msg| \"#{attr.humanize} - #{msg}\" }\n\t\t\t\tflash[:error] = @error_messages.join('<br/>')\n\t\t\tend\n\t\t\tformat.json { render :json => @auto_scaling_trigger }\n\t\tend\n\tend",
"def update_affinity_group(name, label, options = {})\n raise 'Label name cannot be empty' if label.nil? || label.empty?\n if affinity_group(name)\n body = Serialization.resource_to_xml(label, options)\n request_path = \"/affinitygroups/#{name}\"\n request = @client.management_request(:put, request_path, body)\n request.call\n Azure::Loggerx.info \"Affinity Group #{name} is updated.\"\n end\n end",
"def update_affinity_group(name, label, options = {})\n raise 'Label name cannot be empty' if label.nil? || label.empty?\n if affinity_group(name)\n body = Serialization.resource_to_xml(label, options)\n request_path = \"/affinitygroups/#{name}\"\n request = Azure::BaseManagement::ManagementHttpRequest.new(:put, request_path, body)\n request.call\n Azure::Loggerx.info \"Affinity Group #{name} is updated.\"\n end\n end",
"def update options = {}\n\n client_opts = options.dup\n client_opts[:scheduled_action_name] = name\n client_opts[:auto_scaling_group_name] = auto_scaling_group_name\n\n # convert these options to timestamps \n [:start_time, :end_time].each do |opt|\n if client_opts[opt].is_a?(Time)\n client_opts[opt] = client_opts[opt].iso8601\n end\n end\n\n client.put_scheduled_update_group_action(client_opts)\n\n nil\n\n end",
"def update_attribute(attr_name, attr_value)\n update_attributes(attr_name => attr_value)\n end",
"def update_group(group_name, options = {})\n request({\n 'Action' => 'UpdateGroup',\n 'GroupName' => group_name,\n :parser => Fog::Parsers::AWS::IAM::UpdateGroup.new\n }.merge!(options))\n end",
"def update_attributes attribute_hash\n bulk_assign(attribute_hash)\n save\n end",
"def modify_target_group_attributes(tg_id, attributes)\n params = {}\n params.merge!(Fog::AWS.indexed_param('Attributes.member', attributes))\n request({\n 'Action' => 'ModifyTargetGroupAttributes',\n 'TargetGroupArn' => tg_id,\n :parser => Fog::Parsers::AWS::ELBV2::ModifyTargetGroupAttributes.new\n }.merge(params))\n end",
"def update_attributes(attrs)\n nattrs = attrs.reduce({}) do |acc, a|\n ak, av = a\n case ak\n when :group, :actor\n else\n acc[ak] = av\n end\n\n acc\n end\n\n super(nattrs)\n end",
"def update\n update_cloudformation if valid?\n end",
"def save\n if group.id\n update\n true\n else\n raise \"New #{self.class} are created when a new Fog::Rackspace::AutoScale::Group is created\"\n end\n end",
"def update_tags(group, remove, add)\n @aws.delete_tags({\n tags: remove.map { |k, v|\n {\n key: k,\n resource_id: group.name,\n resource_type: \"auto-scaling-group\"\n }\n }\n })\n @aws.create_or_update_tags({\n tags: add.map { |k, v|\n {\n key: k,\n value: v,\n resource_id: group.name,\n resource_type: \"auto-scaling-group\",\n propagate_at_launch: true\n }\n }\n })\n end",
"def update_metrics(group, disable, enable)\n @aws.disable_metrics_collection({\n auto_scaling_group_name: group.name,\n metrics: disable\n })\n @aws.enable_metrics_collection({\n auto_scaling_group_name: group.name,\n metrics: enable,\n granularity: \"1Minute\"\n })\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
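A sketch of how the update call above could be driven, assuming the v1-style option names (:min_size, :max_size, :desired_capacity, :tags) and the same placeholder group as the earlier sketch; the :tags entry reuses the :key / :value / :propagate_at_launch shape that the tag-handling branch of the method expects.

require 'aws-sdk-v1'

group = AWS::AutoScaling.new(:region => 'us-east-1').groups['web-asg']   # placeholder names

# Resize the group and retag it; the tag change is sent as a separate
# create_or_update_tags request under the hood, as the method above shows.
group.update(
  :min_size         => 2,
  :max_size         => 6,
  :desired_capacity => 3,
  :tags => [{ :key => 'env', :value => 'staging', :propagate_at_launch => true }]
)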
Removes all tags from this Auto Scaling group. | def delete_all_tags
delete_tags(self.tags)
nil
end | [
"def delete_all_tags\n for tag in tags\n tag.reload\n tag.unlink\n end\n tags.clear\n end",
"def clear_tags\n\t\tif @tags\n\t\t\t@tags.each { |tag, __unused| self.class.remove_object_with_tag(self, tag) }\n\t\t\t@tags.clear\n\t\tend\n\t\treturn self\n\tend",
"def clear_all_instance_tags\r\n self.tag_types.each do |tag_type|\r\n self.instance_variable_set(\"@acts_as_taggable_#{tag_type.to_s.singularize}_tags\", nil)\r\n end\r\n self.instance_variable_set(\"@acts_as_taggable_tags\", nil)\r\n end",
"def rimuovi_associazioni_tags\n self.tags.delete_all\n end",
"def delete_tags\n @tags.delete_tags(@filename) unless @tags.nil?\n end",
"def delete\n resource.delete_tags([self])\n nil\n end",
"def delete_tags(tags)\n params = {}\n tags.each_with_index do |tag, i|\n tag.each do |key, value|\n params[\"Tags.member.#{i+1}.#{key}\"] = value unless value.nil?\n end\n end\n request({\n 'Action' => 'DeleteTags',\n :parser => Fog::Parsers::AWS::AutoScaling::Basic.new\n }.merge!(params))\n end",
"def destroy\n tag_ids.clone.each { |tag_id| destroy_tag(tag_id) }\n super\n end",
"def purge_unused_tags!\n unused_tags.each do |tag|\n logger.info(\"Purging tag '#{tag}'\")\n entries_with(label: tag).each do |entry|\n payload = Protocol::Device::SetTagLabels.new(tags: id_to_tags_field(entry.tag_id), label: '')\n context.send_message(target: Target.new(site_id: entry.site_id),\n payload: payload,\n acknowledge: true)\n end\n end\n Timeout.timeout(5) do\n while !unused_tags.empty?\n sleep 0.1\n end\n end\n end",
"def remove_all_asset_group_tags(id, opts = {})\n data, _status_code, _headers = remove_all_asset_group_tags_with_http_info(id, opts)\n data\n end",
"def purge_unused_tags!\n context.purge_unused_tags!\n end",
"def delete_unused_tags\n return if tagger.nil?\n tagger.destroy if (tagger.unwrangled? && tagger.taggings == [self])\n end",
"def remove_tags(tags)\n @interface.remove_tags_to_image(self, tags)\n end",
"def remove_tagging(tagging)\n taggings.destroy(tagging) if tags.size > MINIMUM_TAG_COUNT\n end",
"def unset_all_tags\n @deployment.servers_no_reload.each do |s|\n # can't unset ALL tags, so we must set a bogus one\n s.tags = [{\"name\"=>\"removeme:now=1\"}]\n s.save\n end\n end",
"def delete_image tags\n all_images_by_tags( tags ).each { | image | image.deregister } \n end",
"def clear_all_tags()\n puts \"Deleting Tags...\"\n Tag.delete_all()\n puts \"Finished deleting all tags.\"\nend",
"def delete_image tags\n all_images_by_tags( tags ).each do | image | \n @ec2.deregister_image image[ :aws_id ]\n end \n end",
"def delete_tags(name); end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
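A short sketch for delete_all_tags, with the same assumed aws-sdk v1 setup and placeholder group name as the sketches above.

require 'aws-sdk-v1'

group = AWS::AutoScaling.new(:region => 'us-east-1').groups['web-asg']   # placeholder names

puts "#{group.tags.to_a.size} tag(s) before cleanup"   # tags is enumerable, per the method above
group.delete_all_tags                                  # removes every tag from the group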
Deletes the Auto Scaling group. If you pass +:force+ as true then all the instances associated with this group will also be terminated. | def delete options = {}
client_opts = {}
client_opts[:force_delete] = options[:force] == true
client_opts[:auto_scaling_group_name] = name
client.delete_auto_scaling_group(client_opts)
nil
end | [
"def delete_auto_scaling_group(auto_scaling_group_name)\n request({\n 'Action' => 'DeleteAutoScalingGroup',\n 'AutoScalingGroupName' => auto_scaling_group_name,\n :parser => Fog::Parsers::AWS::AutoScaling::Basic.new\n })\n end",
"def delete!\n client.delete_app_group!(app_group_id)\n self\n end",
"def delete_asg name\n auto_scaling = new_auto_scaling\n groups = auto_scaling.groups\n raise \"unable to delete asg, #{name}. asg not found!\" if groups[name].nil? \n\n asg = groups[name]\n\n puts \"deleting asg, #{asg.name}\"\n asg.delete({:force => true})\n delete_launch_configs\nend",
"def delete_affinity_group(name)\n if affinity_group(name)\n request_path = \"/affinitygroups/#{name}\"\n request = @client.management_request(:delete, request_path)\n request.call\n Azure::Loggerx.info \"Deleted affinity group #{name}.\"\n end\n end",
"def delete_resource_group(group_name)\n cmdline = Schott::AzureCLI::Commandlines.delete_resource_group(group_name)\n sh(cmdline)\n end",
"def delete\n client.delete_group(:group_name => name)\n nil\n end",
"def destroy_google_group\n result = Gandalf::GoogleApiClient.delete_google_group(self.apps_id)\n result.data\n end",
"def delete_affinity_group(name)\n if affinity_group(name)\n request_path = \"/affinitygroups/#{name}\"\n request = Azure::BaseManagement::ManagementHttpRequest.new(:delete, request_path)\n request.call\n Azure::Loggerx.info \"Deleted affinity group #{name}.\"\n end\n end",
"def delete\n rg_exists = @resource_client.resource_groups.check_resource_group_exists(@rg_name)\n if !rg_exists\n OOLog.info(\"The Resource Group #{@rg_name} does not exist. Moving on...\")\n else\n @resource_client.resource_groups.get(@rg_name).destroy\n end\n end",
"def delete\n client_opts = {}\n client_opts[:auto_scaling_group_name] = group.name\n client_opts[:policy_name] = name\n client.delete_policy(client_opts)\n nil\n end",
"def delete\n client.delete_security_group(:group_id => id)\n nil\n end",
"def destroy\n @client.resource_groups.delete(@resource_group)\n end",
"def delete_app_group(group,app)\n\t\t\tresults = submit_cmd('delete app group',:db,\" -env #{self.env} -domain #{self.domain} -plant #{self.plant} -group #{group} -app_instance #{app}\")\n\t\t\tputs results\n\tend",
"def delete_group(group_id)\n start.uri('/api/group')\n .url_segment(group_id)\n .delete()\n .go()\n end",
"def delete_group(group_id)\n start.uri('/api/group')\n .url_segment(group_id)\n .delete()\n .go()\n end",
"def destroy\n start = Time.now\n debug(\"Deleting device group: \\\"#{resource[:full_path]}\\\"\")\n connection = self.class.get_connection(resource[:account])\n device_group = get_device_group(connection, resource[:full_path], 'id')\n if device_group\n delete_device_group = rest(connection,\n Puppet::Provider::Logicmonitor::DEVICE_GROUP_ENDPOINT % device_group['id'],\n Puppet::Provider::Logicmonitor::HTTP_DELETE)\n valid_api_response?(delete_device_group) ? nil : alert(delete_device_group)\n end\n debug \"Finished in #{(Time.now-start)*1000.0} ms\"\n end",
"def delete\n begin\n response = @client.resource_groups.delete(@rg_name).value!\n rescue MsRestAzure::AzureOperationError => e\n OOLog.fatal(\"Error deleting resource group: #{e.body}\")\n rescue => ex\n OOLog.fatal(\"Error deleting resource group: #{ex.message}\")\n end\n end",
"def destroy\n requires :name\n\n connection.delete_security_group(name)\n true\n end",
"def destroy(name, &each_group)\n list(name, &each_group) if each_group\n ret = @@ec2.delete_security_group(:group_name => name)\n (ret && ret['return'] == 'true')\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
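The force semantics described in the entry above might be used like this; the group name and region are placeholders, and :force => true is the only option the documented method inspects.

require 'aws-sdk-v1'

group = AWS::AutoScaling.new(:region => 'us-east-1').groups['web-asg']   # placeholder names

# A plain group.delete is rejected by AWS while instances are still attached;
# :force => true terminates the remaining instances and then removes the group.
group.delete(:force => true)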
GET /course_proposal_questions GET /course_proposal_questions.json | def index
@course_proposal_questions = CourseProposalQuestion.all
end | [
"def questions\n self.class.get('/2.2/questions', @options)\n end",
"def index\n authorize CourseProposal\n @course_proposals = CourseProposal.all\n end",
"def manage_questions\n @course = Course.find(params[:course_id])\n @questions = @course.questions.order(\"id\")\n end",
"def questions\n @data[:questions]\n end",
"def show\n @question_course = QuestionCourse.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @question_course }\n end\n end",
"def destroy\n @course_proposal_question.destroy\n respond_to do |format|\n format.html { redirect_to course_proposal_questions_url, notice: 'Course proposal question was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def index\n \n @prework_questions = @module.prework_questions\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @prework_questions }\n end\n end",
"def index\n @course_prereqs = CoursePrereq.all\n end",
"def index\n @experiment_proposals = ExperimentProposal.all\n respond_with(@experiment_proposals) do |format|\n format.json { render :json => @experiment_proposals.as_json }\n end\n end",
"def create\n @course_proposal_question = CourseProposalQuestion.new(course_proposal_question_params)\n\n respond_to do |format|\n if @course_proposal_question.save\n format.html { redirect_to @course_proposal_question, notice: 'Course proposal question was successfully created.' }\n format.json { render :show, status: :created, location: @course_proposal_question }\n else\n format.html { render :new }\n format.json { render json: @course_proposal_question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def get_questions\n result = QuizQuestion.includes(:quiz_answers).where(quiz_id: params[:id])\n\n @questions = result.map { |question| QuestionPresenter.new(question) }\n end",
"def questions\n # Get a list of questionnaires that belong to instances of the current race\n if @course\n questionnaire_ids = @course.questionnaire_ids\n elsif @instance\n questionnaire_ids = @instance.questionnaire_ids\n else\n questionnaire_ids = []\n end\n\n # Collect question_ids that are used in those questionnaires\n question_ids = Set.new\n Questionnaire.where(:id => questionnaire_ids).find_each do |questionnaire|\n question_ids.merge(questionnaire.question_ids)\n end\n\n @questions = Question.find(question_ids.to_a)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @questions.to_json }\n end\n end",
"def responses\n @proposal = current_user.proposals.find(params[:id])\n @responses = @proposal.responses\n\n respond_to do |format|\n format.html # responses.html.erb\n format.xml { render :xml => @responses }\n end\n end",
"def show\n @question = @exam.questions.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @question }\n end\n end",
"def prebooking_questions\n\n if @url.blank? or @api_token.blank?\n return []\n end\n\n [\n {\n question: \"What is your trip purpose?\", \n choices: purpose_choices, \n code: \"purpose\"\n },\n {\n question: \"Are you traveling with anyone?\", \n choices: passenger_choices, \n code: \"guests\"\n },\n { question: \"What is your apartment number?\",\n choices: ['string'],\n code: \"pickup_unit_number\"\n },\n { question: \"What is the apartment number at your destination?\",\n choices: ['string'],\n code: \"dropoff_unit_number\"\n }\n ]\n end",
"def index\n @proposals = Proposal.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @proposals }\n end\n end",
"def index\n @proposals = Proposal.all\n end",
"def questions\n return @questions\n end",
"def index\n @questions = Question.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @questions }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /course_proposal_questions POST /course_proposal_questions.json | def create
@course_proposal_question = CourseProposalQuestion.new(course_proposal_question_params)
respond_to do |format|
if @course_proposal_question.save
format.html { redirect_to @course_proposal_question, notice: 'Course proposal question was successfully created.' }
format.json { render :show, status: :created, location: @course_proposal_question }
else
format.html { render :new }
format.json { render json: @course_proposal_question.errors, status: :unprocessable_entity }
end
end
end | [
"def index\n @course_proposal_questions = CourseProposalQuestion.all\n end",
"def create\n @course = Course.find(params[:course_id])\n @question = @course.questions.build(params[:question])\n @question.save\n\n respond_to do |format|\n if @question.save\n format.html { redirect_to @course, notice: 'Question was successfully created.' }\n format.json { render json: @course, status: :created, location: @question }\n else\n format.html { render action: \"new\" }\n format.json { render json: @question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @question = Question.new(params[:question])\n\t\t@exam.questions << @question\n\n respond_to do |format|\n if @question.save\n format.html { redirect_to [:teacher, @course, @exam, @question], :notice => 'Question was successfully created.' }\n format.json { render :json => @question, :status => :created, :location => @question }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @question.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def destroy\n @course_proposal_question.destroy\n respond_to do |format|\n format.html { redirect_to course_proposal_questions_url, notice: 'Course proposal question was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def create\n @course_prereq = CoursePrereq.new(course_prereq_params)\n\n respond_to do |format|\n if @course_prereq.save\n format.html { redirect_to @course_prereq, notice: 'Course prereq was successfully created.' }\n format.json { render :show, status: :created, location: @course_prereq }\n else\n format.html { render :new }\n format.json { render json: @course_prereq.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @question = @extraction_forms_projects_section.questions.new(question_params)\n authorize(@question)\n\n # !!! Check for params 'q_type' and build values based on the type.\n respond_to do |format|\n if @question.save\n format.html do\n redirect_to edit_question_path(@question),\n notice: t('success')\n end\n format.json { render json: { id: @question.id }, status: 200 }\n else\n format.html { render :new }\n format.json { render json: @question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def quiz_questionnaire num_quiz_questions\n\n # New questionnaire from params\n questionnaire = QuizQuestionnaire.new(questionnaire_params)\n\n # Set min and max score\n questionnaire.max_question_score = 1\n questionnaire.min_question_score = 0\n\n # Set author team\n author_team = AssignmentTeam.team(Participant.find(params[:pid]))\n questionnaire.instructor_id = author_team.id\n\n # Create each quiz question.\n create_quiz_questions(num_quiz_questions) do |question|\n\n # Add the question to the questionnaire\n questionnaire.quiz_questions << question\n\n # Create each question choice\n create_quiz_question_choices(question) do |choice|\n\n # Add the choice to the question\n question.quiz_question_choices << choice\n end\n end\n\n questionnaire\n end",
"def create\n @question_course = QuestionCourse.new(params[:question_course])\n\n respond_to do |format|\n if @question_course.save\n format.html { redirect_to @question_course, notice: 'Question course was successfully created.' }\n format.json { render json: @question_course, status: :created, location: @question_course }\n else\n format.html { render action: \"new\" }\n format.json { render json: @question_course.errors, status: :unprocessable_entity }\n end\n end\n end",
"def add_new_questions\n questionnaire_id = params[:id]\n # If the questionnaire is being used in the active period of an assignment, delete existing responses before adding new questions\n if AnswerHelper.check_and_delete_responses(questionnaire_id)\n flash[:success] = 'You have successfully added a new question. Any existing reviews for the questionnaire have been deleted!'\n else\n flash[:success] = 'You have successfully added a new question.'\n end\n\n questionnaire = Questionnaire.find(questionnaire_id)\n current_num_of_questions = questionnaire.questions.size\n max_seq = 0\n Questionnaire.find(questionnaire_id).questions.each do |question|\n if !question.seq.nil? && question.seq > max_seq\n max_seq = question.seq\n end\n end\n ((current_num_of_questions + 1)..(current_num_of_questions + params[:question][:total_num].to_i)).each do\n max_seq += 1\n # Create question object based on type using question_factory\n question = question_factory(params[:question][:type], questionnaire_id, max_seq)\n if question.is_a? ScoredQuestion\n question.weight = params[:question][:weight]\n question.max_label = Question::MAX_LABEL\n question.min_label = Question::MIN_LABEL\n end\n\n if Question::SIZES.key?(question.class.name)\n question.size = Question::SIZES[question.class.name]\n end\n if Question::ALTERNATIVES.key?(question.class.name)\n question.alternatives = Question::ALTERNATIVES[question.class.name]\n end\n\n begin\n question.save\n rescue StandardError => e\n flash[:error] = e.message\n end\n end\n redirect_to edit_questionnaire_path(questionnaire_id.to_sym)\n end",
"def create\n @question = @extraction_forms_projects_section.questions.new(question_params)\n\n if @question.question_rows.count == 0\n @question.question_rows.new\n new_qrc = @question.question_rows.first.question_row_columns.new(question_row_column_type: QuestionRowColumnType.find_by(name: 'text'))\n QuestionRowColumnOption.all.each do |opt|\n new_qrc.question_row_column_options << opt\n end\n end\n\n # !!! Check for params 'q_type' and build values based on the type.\n\n respond_to do |format|\n if @question.save\n format.html { redirect_to edit_question_path(@question),\n notice: t('success') }\n format.json { render :show, status: :created, location: @question }\n else\n format.html { render :new }\n format.json { render json: @question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @apt_question = AptQuestion.new(apt_question_params)\n\n respond_to do |format|\n if @apt_question.save\n format.html { redirect_to @apt_question, notice: 'Apt question was successfully created.' }\n format.json { render :show, status: :created, location: @apt_question }\n else\n format.html { render :new }\n format.json { render json: @apt_question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @test_question = TestQuestion.new(test_question_params)\n\n respond_to do |format|\n if @test_question.save\n format.html { redirect_to @test_question, notice: 'Test question was successfully created.' }\n format.json { render :show, status: :created, location: @test_question }\n else\n format.html { render :new }\n format.json { render json: @test_question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @proposal = Proposal.new(proposal_params)\n\n respond_to do |format|\n if @proposal.save\n format.html { redirect_to proposals_path }\n format.json { render :show, status: :created, location: @proposal }\n else\n format.html { render :new }\n format.json { render json: @proposal.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @experiment_proposal = ExperimentProposal.new(experiment_proposal_params)\n\n respond_to do |format|\n if @experiment_proposal.save\n format.json { render :show, status: :created, location: @experiment_proposal }\n else\n format.json { render json: @experiment_proposal.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @agreement_source_study.build_questions\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @agreement_source_study }\n end\n end",
"def new\n @prework_question = @module.prework_questions.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @prework_question }\n end\n end",
"def create\n @proposal = Proposal.new(params[:proposal])\n\n respond_to do |format|\n if @proposal.save\n format.html { redirect_to @proposal, notice: 'Proposal was successfully created.' }\n format.json { render json: @proposal, status: :created, location: @proposal }\n else\n format.html { render action: \"new\" }\n format.json { render json: @proposal.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_single_quiz_question(course_id,quiz_id,question__question_name__,question__question_text__,question__question_type__,opts={})\n query_param_keys = [\n \n ]\n\n form_param_keys = [\n :question__question_name__,\n :question__question_text__,\n :question__quiz_group_id__,\n :question__question_type__,\n :question__position__,\n :question__points_possible__,\n :question__correct_comments__,\n :question__incorrect_comments__,\n :question__neutral_comments__,\n :question__text_after_answers__,\n :question__answers__,\n \n ]\n\n # verify existence of params\n raise \"course_id is required\" if course_id.nil?\n raise \"quiz_id is required\" if quiz_id.nil?\n raise \"question__question_name__ is required\" if question__question_name__.nil?\n raise \"question__question_text__ is required\" if question__question_text__.nil?\n raise \"question__question_type__ is required\" if question__question_type__.nil?\n # set default values and merge with input\n options = underscored_merge_opts(opts,\n :course_id => course_id,\n :quiz_id => quiz_id,\n :question__question_name__ => question__question_name__,\n :question__question_text__ => question__question_text__,\n :question__question_type__ => question__question_type__\n )\n\n # resource path\n path = path_replace(\"/v1/courses/{course_id}/quizzes/{quiz_id}/questions\",\n :course_id => course_id,\n :quiz_id => quiz_id)\n headers = nil\n form_params = select_params(options, form_param_keys)\n query_params = select_params(options, query_param_keys)\n if opts[:next_page]\n pagination_params = page_params_load(:post, path)\n query_params.merge! pagination_params if pagination_params\n end\n response = mixed_request(:post, path, query_params, form_params, headers)\n page_params_store(:post, path)\n QuizQuestion.new(response)\n end",
"def prebooking_questions\n\n if @url.blank? or @api_token.blank?\n return []\n end\n\n [\n {\n question: \"What is your trip purpose?\", \n choices: purpose_choices, \n code: \"purpose\"\n },\n {\n question: \"Are you traveling with anyone?\", \n choices: passenger_choices, \n code: \"guests\"\n },\n { question: \"What is your apartment number?\",\n choices: ['string'],\n code: \"pickup_unit_number\"\n },\n { question: \"What is the apartment number at your destination?\",\n choices: ['string'],\n code: \"dropoff_unit_number\"\n }\n ]\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PATCH/PUT /course_proposal_questions/1 PATCH/PUT /course_proposal_questions/1.json | def update
respond_to do |format|
if @course_proposal_question.update(course_proposal_question_params)
format.html { redirect_to @course_proposal_question, notice: 'Course proposal question was successfully updated.' }
format.json { render :show, status: :ok, location: @course_proposal_question }
else
format.html { render :edit }
format.json { render json: @course_proposal_question.errors, status: :unprocessable_entity }
end
end
end | [
"def update\n @question = Question.find(params[:id])\n\n respond_to do |format|\n if @question.update_attributes(params[:question])\n format.html { redirect_to @question.course, notice: 'Question was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @question = @exam.questions.find(params[:id])\n\n respond_to do |format|\n if @question.update_attributes(params[:question])\n format.html { redirect_to [:teacher, @course, @exam, @question], :notice => 'Question was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @question.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n \t@questions = Question.find(params[:id])\n \trespond_to do |format|\n \t\tif @questions.update(questions_params)\n \t\t\tformat.html { redirect_to @questions, notice: 'questions was successfully updated.' }\n \t\t\tformat.json { head :no_content }\n \t\telse\n \t\t\tformat.html { render action: 'edit' }\n \t\t\tformat.json { render json: @questions.errors, status: :unprocessable_entity }\n \t\tend\n \tend\n end",
"def update\n @question_course = QuestionCourse.find(params[:id])\n\n respond_to do |format|\n if @question_course.update_attributes(params[:question_course])\n format.html { redirect_to @question_course, notice: 'Question course was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @question_course.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @question.update(question_params)\n format.json { head :no_content }\n else\n format.json { render json: @question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @questions_option = QuestionsOption.find(params[:id])\n\n respond_to do |format|\n if @questions_option.update_attributes(params[:questions_option])\n format.html { redirect_to @questions_option, notice: 'Questions option was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @questions_option.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @test_question = TestQuestion.find(params[:id])\n\n respond_to do |format|\n if @test_question.update_attributes(params[:test_question])\n format.html { redirect_to @test_question, :notice => 'Test question was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @test_question.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @interview_question = InterviewQuestion.find(params[:id])\n\n respond_to do |format|\n if @interview_question.update_attributes(params[:interview_question])\n format.html { redirect_to @interview_question, notice: 'Interview question was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @interview_question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @apt_question.update(apt_question_params)\n format.html { redirect_to @apt_question, notice: 'Apt question was successfully updated.' }\n format.json { render :show, status: :ok, location: @apt_question }\n else\n format.html { render :edit }\n format.json { render json: @apt_question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @lab_question = LabQuestion.find(params[:id])\n\n respond_to do |format|\n if @lab_question.update_attributes(params[:lab_question])\n format.html { redirect_to @lab_question, notice: 'Lab question was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @lab_question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @critical_question = CriticalQuestion.find(params[:id])\n\n respond_to do |format|\n if @critical_question.update_attributes(params[:critical_question])\n format.html { redirect_to @critical_question, notice: 'Critical question was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @critical_question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @course = Course.find(params[:course_id])\n @customized_concept = CustomizedConcept.find(params[:id])\n if !params[:customized_concept][:test_paper_questions_attributes].blank?\n update_test_paper_option(params[:customized_concept][:test_paper_questions_attributes])\n end\n # remove the test_paper_options\n respond_to do |format|\n if @customized_concept.update!(customized_concept_params)\n format.html { redirect_to group_course_path(@course.group, @course), notice: 'Customized concept was successfully updated.' }\n format.json { render :show, status: :ok, location: @customized_concept }\n else\n format.html { render :edit }\n format.json { render json: @customized_concept.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @prework_question = @module.prework_questions.find(params[:id])\n\n respond_to do |format|\n if @prework_question.update_attributes(params[:prework_question])\n format.html { redirect_to prework_module_path(@module), notice: 'Prework question was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @prework_question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def edit_question\n\t\t\tquizzes = current_instructor.quizzes\n\t\t\t@found = 0\n\t\t\tquizzes.each do |quiz|\n\t\t\t\tif(quiz.questions.exists?(:id => params[:question_id]))\n\t\t\t\t\t@found = @found + 1\n\t\t\t\tend \n\t\t\tend\n\t\t\tif (@found > 0)\n\t\t\t\tquestion = Question.find(params[:question_id])\n\t\t\t\tif (question.update(question_params))\n\t\t\t\t\trender json: { success: true, data: { :question => question }, info:{} }, status: 200\n\t\t\t\telse\n\t\t\t\t\trender json: { error: question.errors }, status: 422 \n\t\t\t\tend\t\n\t\t\telse\n\t\t\t\trender json: { error:\"Question is not found\" }, status: 422\n\t\t\tend\n\t\tend",
"def update\n respond_to do |format|\n if @course_prereq.update(course_prereq_params)\n format.html { redirect_to @course_prereq, notice: 'Course prereq was successfully updated.' }\n format.json { render :show, status: :ok, location: @course_prereq }\n else\n format.html { render :edit }\n format.json { render json: @course_prereq.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @admin_interview_question.update(admin_interview_question_params)\n format.html { redirect_to (params[:ref] || @admin_interview_question), notice: t('crud.updated_successfully!', name: Admin::InterviewQuestion.model_name.human) }\n format.json { render :show, status: :ok, location: @admin_interview_question }\n else\n format.html { render :edit }\n format.json { render json: @admin_interview_question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @enq_question = EnqQuestion.find(params[:id])\n\n respond_to do |format|\n if @enq_question.update_attributes(params[:enq_question])\n format.html { redirect_to @enq_question, notice: 'Enq question was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @enq_question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @parent_question.update(parent_question_params)\n format.html { redirect_to @parent_question, notice: 'Parent question was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @parent_question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @interview_question.update(interview_question_params)\n format.html { redirect_to @interview_question, notice: 'Interview question was successfully updated.' }\n format.json { render :show, status: :ok, location: @interview_question }\n else\n format.html { render :edit }\n format.json { render json: @interview_question.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /course_proposal_questions/1 DELETE /course_proposal_questions/1.json | def destroy
@course_proposal_question.destroy
respond_to do |format|
format.html { redirect_to course_proposal_questions_url, notice: 'Course proposal question was successfully destroyed.' }
format.json { head :no_content }
end
end | [
"def destroy\n @question_course = QuestionCourse.find(params[:id])\n @question_course.destroy\n\n respond_to do |format|\n format.html { redirect_to question_courses_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @question = @exam.questions.find(params[:id])\n @question.destroy\n\n respond_to do |format|\n format.html { redirect_to teacher_course_exam_questions_url(@course, @exam) }\n format.json { head :no_content }\n end\n end",
"def destroy\n @admin_interview_question.destroy\n respond_to do |format|\n format.html { redirect_to admin_interview_questions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @exam_question = ExamQuestion.find(params[:id])\n @exam_question.destroy\n\n respond_to do |format|\n format.html { redirect_to exam_questions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @examquestion.destroy\n respond_to do |format|\n format.html { redirect_to examquestions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @v1_question = V1::Question.find(params[:id])\n @v1_question.destroy\n\n head :no_content\n end",
"def destroy\n @lab_question = LabQuestion.find(params[:id])\n @lab_question.destroy\n\n respond_to do |format|\n format.html { redirect_to lab_questions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @enq_question = EnqQuestion.find(params[:id])\n @enq_question.destroy\n\n respond_to do |format|\n format.html { redirect_to enq_questions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @test_question = TestQuestion.find(params[:id])\n @test_question.destroy\n\n respond_to do |format|\n format.html { redirect_to test_questions_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @course_prereq.destroy\n respond_to do |format|\n format.html { redirect_to course_prereqs_url, notice: 'Course prereq was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @exam_course = ExamCourse.find(params[:id])\n @exam_course.destroy\n\n respond_to do |format|\n format.html { redirect_to exam_courses_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @question = Question.find(params[:id])\n @question.destroy\n\n respond_to do |format|\n format.html { redirect_to api_v1_questions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @critical_question = CriticalQuestion.find(params[:id])\n @critical_question.destroy\n\n respond_to do |format|\n format.html { redirect_to critical_questions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @good_question = GoodQuestion.find(params[:id])\n @good_question.destroy\n\n respond_to do |format|\n format.html { redirect_to good_questions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @ttest_question.destroy\n respond_to do |format|\n format.html { redirect_to ttest_questions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n if @v1_question.destroy\n render json: {'message': 'Deleted question successfully'}, status: :ok\n else\n render json: get_errors, status: :unprocessable_entity\n end\n\n end",
"def destroy\n @questionnaire.destroy\n respond_to do |format|\n format.html { redirect_to questionnaires_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @attempt_question.destroy\n respond_to do |format|\n format.html { redirect_to attempt_questions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @interview_question = InterviewQuestion.find(params[:id])\n @interview_question.destroy\n\n respond_to do |format|\n format.html { redirect_to interview_questions_url }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Set a default Chef environment name for our chef_search calls. | def chef_env(env)
chef_scope :chef_environment, env
end | [
"def chef_environment_name(export)\n if chef_environment_parameter?\n name = export[:stack][:options][:parameters][options[:chef_environment_parameter]]\n end\n name || DEFAULT_CHEF_ENVIRONMENT\n end",
"def default_environment=(env); end",
"def set_default_env(name, value)\n default_env[name.to_s] = value\n end",
"def default_environment_name\n return nil unless config?\n config.default_environment\n end",
"def default_environment\n return nil unless default_environment_name\n environment(default_environment_name)\n end",
"def env_default=(new_data)\n reset\n @env_default = VividMash.new(new_data, self, __node__, :env_default)\n end",
"def default_orgname\n ENV['ECM_DEFAULT_ORGNAME']\n end",
"def set_cfoundry_environment\n org = @client.organizations.find { |org|\n org.name == @default_org\n }\n @client.current_organization = org\n\n space = @client.spaces.find { |space|\n space.name == @default_space\n }\n @client.current_space = space\n\nend",
"def default_environment\n environments = Bebox::Environment.list(project_root)\n if environments.count > 0\n return environments.include?('vagrant') ? 'vagrant' : environments.first\n else\n return ''\n end\n end",
"def cli_name name, type=self\n environments[name] = type\n end",
"def lookup_in_env(env_variable_name, project_name = nil, default = nil)\n env_variable_name = \"#{env_variable_name.upcase.gsub('-', '_')}\"\n project_specific_name = \"#{project_name.upcase.gsub('-', '_')}_#{env_variable_name}\" if project_name\n project_name && ENV[project_specific_name] || ENV[env_variable_name] || default # rubocop:disable Style/FetchEnvVar\n end",
"def node_name\n name = Chef::Config[:node_name] || ohai[:fqdn] || ohai[:machinename] || ohai[:hostname]\n Chef::Config[:node_name] = name\n\n raise Chef::Exceptions::CannotDetermineNodeName unless name\n\n name\n end",
"def read_env(name, default = nil, filter = /[^\\w]*/)\n raw = ENV[name].nil? ? default : ENV[name]\n\n raw = raw.downcase.gsub(filter, '\\1') unless raw.nil?\n raw\n end",
"def determine_chef_server(request)\n \n # Return specified Chef server, if given and match found\n if request.params[\"chef-server\"]\n EdmundsChefRailsProxy::Application.config.chef_servers.each do |chef_server|\n if chef_server[:url] =~ /#{request.params[\"chef-server\"]}/\n return chef_server\n end\n end\n end\n \n # Return matching Chef server if environment is specified\n if request.params[\"chef-environment\"] || request.fullpath =~ /\\/environments\\/.*/\n chef_environment = request.params[\"chef-environment\"] || request.fullpath[/\\/environments\\/([a-zA-Z0-9-]*).*/, 1]\n logger.debug \" determine_chef_server: chef_environment = #{chef_environment}\"\n EdmundsChefRailsProxy::Application.config.chef_servers.each do |chef_server|\n logger.debug chef_server\n chef_server[:envs].each do |chef_server_env|\n if chef_environment =~ /#{chef_server_env}/\n return chef_server\n end\n end\n end\n end\n\n # At this point, return default Chef server\n return EdmundsChefRailsProxy::Application.config.chef_servers[0]\n\nend",
"def environment(name = nil)\n environments.with_name(name || default_environment)\n end",
"def env_default=(new_data)\n reset\n @env_default = VividMash.new(self, new_data)\n end",
"def setup!(requesting_actor_id, options=Hash.new)\n create_database!\n\n policy = OrgAuthPolicy.new(self, requesting_actor_id, options)\n policy.apply!\n\n # Environments are in erchef / SQL. Make an HTTP request to create the default environment\n headers = {:headers => {'x-ops-request-source' => 'web'}}\n rest = Chef::REST.new(Chef::Config[:chef_server_host_uri],\n Chef::Config[:web_ui_proxy_user],\n Chef::Config[:web_ui_private_key], headers)\n rest.post_rest(\"organizations/#{name}/environments\",\n {\n 'name' => '_default',\n 'description' => 'The default Chef environment'\n })\n end",
"def __apply_default_env(options)\n options = options.dup\n default_env = options.delete(:default_env)\n default_env = true if default_env.nil?\n if default_env\n env_key = options.key?(:env) ? :env : :environment\n options[env_key] = {\n \"LC_ALL\" => __config[:internal_locale],\n \"LANGUAGE\" => __config[:internal_locale],\n \"LANG\" => __config[:internal_locale],\n __env_path_name => default_paths,\n }.update(options[env_key] || {})\n end\n options\n end",
"def nodes_for_environment(name)\n ridley.partial_search(:node, \"chef_environment:#{name}\", [\"fqdn\", \"cloud.public_hostname\", \"name\", \"os\"])\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns the first packet in +dump+ | def packet dump
Capp.offline(dump).loop.first
end | [
"def packet\n @packet ||= nil\n end",
"def first()\n return from(network().host_address.add(@ip_bits.host_ofs), @prefix)\n end",
"def first_hop()\n @hops.find { |hop| !hop.ip.nil? && hop.ip != \"0.0.0.0\" }\n end",
"def read_packet\n return @packets.pop unless @packets.empty?\n\n while @packets.empty?\n page = read_page\n raise EOFError.new(\"End of file reached\") if page.nil?\n input = StringIO.new(page.data)\n\n page.segment_table.each do |seg|\n @partial ||= \"\"\n\n @partial << input.read(seg)\n if seg != 255\n @packets.insert(0, @partial)\n @partial = nil\n end\n end\n end\n\n return @packets.pop\n end",
"def first\n @msg_frames.first\n end",
"def get_out_packet_head\n debug \">> #{__method__}\"\n path = OUTPACKETS_HEAD_PATH\n resp = get_object_to_network(@nwc_id, path)\n unless resp.status_code == Response::OK\n return nil\n end\n begin\n return Packet.from_object_hash(resp.body)\n rescue => ex\n error 'Exception: Get Invalid Message'\n error \" #{ex.message} #{ex.backtrace}\"\n return nil\n end\n end",
"def first_line\n line_from_ip(0)\n end",
"def get_in_packet_head\n debug \">> #{__method__}\"\n path = INPACKETS_HEAD_PATH\n resp = get_object_to_network(@nwc_id, path)\n unless resp.status_code == Response::OK\n return nil\n end\n begin\n return Packet.from_object_hash(resp.body)\n rescue => ex\n error 'Exception: Get Invalid Message'\n error \" #{ex.message} #{ex.backtrace}\"\n return nil\n end\n end",
"def read_packet; end",
"def last_packet\n Packet.where(\n 'created_at < ? AND device_id = ? AND type = ?',\n @packet.created_at, @device_id, @packet.type\n ).last\n end",
"def first\n object_from_address(@base[:first])\n end",
"def get_packet\n\t\t\t\tfirst_number = read(1).unpack(\"C\")[0]\n\t\t\t\t# get the 'mask' property\n\t\t\t\tpacket_mask = first_number >> 6\n\t\t\t\t# get the 'frame1' property\n\t\t\t\tframe_number = first_number & 0x3F\n\t\t\t\tif frame_number == 0\n\t\t\t\t\t# if frame1 is equal to 0 then 'frame' is equal to 'frame2'\n\t\t\t\t\tframe_number = read(1).unpack(\"C\")[0]\n\t\t\t\telsif frame_number == 1\n\t\t\t\t\t# if frame1 is equal to 1 then 'frame' is equal to 'frame3'\n\t\t\t\t\tframe_number = read(2).unpack(\"n\")[0]\n\t\t\t\tend\n\t\t\t\t# init a 'frame stream' if it doesn't exist yet\n\t\t\t\tif ! @frames_in.has_key? frame_number\n\t\t\t\t\t@frames_in[frame_number] = Frame.new(0,0,0,0)\n\t\t\t\t\tif packet_mask != 0\n\t\t\t\t\t\traise StandardError, \"packet error\"\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\t# for logging purpose\n\t\t\t\t@bytes_in += 1\n\t\t\t\t\n\t\t\t\t# reads the 'time', 'datasize', 'rtmpdatatype' and 'streamid' properties from the socket\n\t\t\t\t# and put them into the 'frame stream' archive\n\t\t\t\t\n\t\t\t\tcase packet_mask\n\t\t\t\twhen 0\n\t\t\t\t\t@frames_in[frame_number].timer = getMediumInt()\n\t\t\t\t\t@frames_in[frame_number].size = getMediumInt()\n\t\t\t\t\t@frames_in[frame_number].data_type = read(1).unpack(\"C\")[0]\n\t\t\t\t\t@frames_in[frame_number].obj = read(4).unpack(\"N\")[0]\n\t\t\t\t\t@bytes_in += 11\n\t\t\t\twhen 1\n\t\t\t\t\t@frames_in[frame_number].timer = getMediumInt()\n\t\t\t\t\t@frames_in[frame_number].size = getMediumInt()\n\t\t\t\t\t@frames_in[frame_number].data_type = read(1).unpack(\"C\")[0]\n\t\t\t\t\t@bytes_in += 7\n\t\t\t\twhen 2\n\t\t\t\t\t@frames_in[frame_number].timer = getMediumInt()\n\t\t\t\t\t@bytes_in += 3\n\t\t\t\twhen 3\n\t\t\t\t\n\t\t\t\telse\n\t\t\t\t\n\t\t\t\tend\n\t\t\t\t# fix the CONNECTION_PACKET bug when its size is larger than 128 bytes (see caution 4.4.6)\n\t\t\t\tif ! @connected\n\t\t\t\t\tdata_length = @frames_in[frame_number].size\n\t\t\t\t\n\t\t\t\t\tif data_length < 129\n\t\t\t\t\t\tdata = read(data_length)\n\t\t\t\t\telsif data_length == 129\n\t\t\t\t\t\tdata = read(data_length+1)\n\t\t\t\t\t\tdata = data[0..-2]\n\t\t\t\t\telse data_length > 129\n\t\t\t\t\t\tdata = read(data_length+1)\n\t\t\t\t\t\tdata = data[0..127] << data[129..-1]\n\t\t\t\t\tend\n\t\t\t\telse\n\t\t\t\t\tdata = read(@frames_in[frame_number].size)\n\t\t\t\tend\n\t\t\t\t# for logging purpose\n\t\t\t\t@bytes_in += data.length\n\t\t\t\t@msg_in += 1\n\t\t\t\t# return a RTMP_PACKET with all its properties (implicit ones included)\n\t\t\t\treturn RTMPPacket.new(\tframe_number,\n\t\t\t\t\t\t\t@frames_in[frame_number].timer,\n\t\t\t\t\t\t\tdata,\n\t\t\t\t\t\t\t@frames_in[frame_number].data_type,\n\t\t\t\t\t\t\t@frames_in[frame_number].obj)\n\t\tend",
"def getPacket(socket)\n\tpacket = Packet.new\n\tsize = 128\n\tbegin\n\t\tpacket = Packet.new(socket.recvfrom_nonblock(size)[0])\n\trescue Errno::EAGAIN\n\t\tIO.select([socket])\n\t\tretry\n\tend\n\treturn packet\nend",
"def extract_opcode(dump)\n dump.each_line do |line|\n # If the line is a line of disassembled code...\n m = /\\A\\s+[0-9a-f]+:/i.match line\n\n # Extract the opcode field and remove any internal spaces\n return line.split(/\\t/)[1].split.join unless m.nil?\n end\nend",
"def first\n return nil if @list.empty?\n @list[head]\n end",
"def get_first\r\n # if the list is empty, head is nil\r\n if @head.nil?\r\n return nil\r\n else \r\n value = @head.data\r\n return value\r\n end\r\n end",
"def first(count = 1)\n bytes = []\n File.open(@file) do |file|\n count.times { bytes << file.getbyte }\n end\n count == 1 ? bytes.first : bytes\n end",
"def peek\n return nil if empty?\n _decode_message(self.first)\n end",
"def getPacket(socket)\n\tpacket = Packet.new\n\tsize = 2048 + 6\n\tbegin\n\t\tpacket = Packet.new(socket.recvfrom_nonblock(size)[0])\n\trescue Errno::EAGAIN\n\t\tIO.select([socket])\n\t\tretry\n\tend\n\n\treturn packet\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Reads /proc/mounts and finds all tmpfs. It returns a Hash. But if the info isn't available or /proc/mounts is not readable, it will return an empty Hash. | def tmpfs
ret = {}
mounts.each { |x|
ret.merge!({x.split[1] => x}) if x.start_with?('tmpfs '.freeze)
}
ret
end | [
"def read_mounts\n begin\n mountfh = File.open(\"/proc/mounts\", NONBLOCK)\n mounts = mountfh.read\n mountfh.close\n rescue\n return nil\n end\n\n mntpoint = {}\n\n # Read all entries in /proc/mounts. The second column is the\n # mountpoint and the third column is the filesystem type.\n # We skip rootfs because it is always mounted at /\n mounts.collect do |line|\n params = line.split(' ')\n next if params[2] == 'rootfs'\n mntpoint[params[1]] = params[2]\n end\n return mntpoint\n end",
"def read_mounts\n mounts = \"\"\n begin\n if File.method_defined? \"read_nonblock\"\n # If possible we use read_nonblock in a loop rather than read to work-\n # a linux kernel bug. See ticket #1963 for details.\n mountfh = File.open(\"/proc/mounts\")\n mounts += mountfh.read_nonblock(1024) while true\n else\n # Otherwise we shell out and let cat do it for us\n mountfh = IO.popen(\"/bin/cat /proc/mounts\")\n mounts = mountfh.read\n end\n rescue EOFError\n # that's expected\n rescue\n return nil\n ensure\n mountfh.close if mountfh\n end\n\n mntpoint = {}\n\n # Read all entries in /proc/mounts. The second column is the\n # mountpoint and the third column is the filesystem type.\n # We skip rootfs because it is always mounted at /\n mounts.each_line do |line|\n params = line.split(' ')\n next if params[2] == 'rootfs'\n mntpoint[params[1]] = params[2]\n end\n mntpoint\n end",
"def getmountpoints\n mtab = IO.readlines '/proc/mounts'\n mountpoints = mtab.map{ |line| line.split(/\\s+/)[1]}\n mountpoints.map!{ |mount| unescape(mount) }\n # Ignore common system mountpoints\n mountpoints.reject!{ |mount| mount =~ /^\\/$/ }\n mountpoints.reject!{ |mount| mount =~ /^\\/(proc|sys|usr|boot|tmp|dev|var|bin|etc|lib).*/ }\n # Mount /run/media/* but ignore other /run/ mountpoints\n mountpoints.reject!{ |mount| mount =~ /^\\/run.*/ unless mount =~ /^\\/run\\/(media.*)/ }\nend",
"def list_mounts\n disk_usage.values.map { |f| f[:mount] }\n end",
"def mounts\n JSON.parse(@vault_host.get(\"/#{VAULT_VERSION}/sys/mounts\", @header).body)\n end",
"def mounts(session=nil)\n\t\t if session\n\t\t\t make_ssh_session(session)\n\t\t\tend\n\n\t\t\tcmd = [ZFS.zfs_path].flatten + %w(get -rHp -oname,value mountpoint)\n\n\t\t\tstdout, stderr, status = @session.capture3(*cmd)\n\n\t\t\tif status.success? and stderr.empty?\n\t\t\t\tmounts = stdout.lines.collect do |line|\n\t\t\t\t\tfs, path = line.chomp.split(/\\t/, 2)\n\t\t\t\t\t[path, ZFS(fs, session)]\n\t\t\t\tend\n\t\t\t\tHash[mounts]\n\t\t\telse\n\t\t\t\traise Exception, \"something went wrong\"\n\t\t\tend\n\t\tend",
"def mountpoints\n mtab = IO.readlines '/proc/mounts'\n mountpoints = mtab.map{ |line| line.split(/\\s+/)[1]}\n mountpoints.map!{ |mount| unescape(mount) }\n # Ignore common system mountpoints.\n mountpoints.reject!{ |mount| mount =~ /^\\/$/ }\n mountpoints.reject!{ |mount| mount =~ /^\\/(proc|sys|usr|boot|tmp|dev|var|bin|etc|lib).*/ }\n # Mount /run/media/* but ignore other /run/ mountpoints.\n mountpoints.reject!{ |mount| mount =~ /^\\/run.*/ unless mount =~ /^\\/run\\/(media.*)/ }\n\n # Add home dir.\n mountpoints << home\n end",
"def mounts()\n return {} if !@conf.key?(\"mounts\")\n\n mounts = {}\n @conf[\"mounts\"].each do |name, path|\n path = File.expand_path(path)\n raise NameError(\"project_name cannot be used as a NFS mount point\") if name == @name\n raise NameError(\"Path #{path} does not exist or is not a directory\") if !File.directory?(path)\n mounts[name] = path\n end\n\n return mounts\n end",
"def find_mnt_pts\n `grep VolGroup /proc/self/mounts \\\n | awk '{print $2, $4}' | awk -F, '{print $1}' | awk '{print $1, $2}'`\n # `grep VolGroup proc_mounts.test \\\n # | awk '{print $2, $4}' | awk -F, '{print $1}' | awk '{print $1, $2}'`\n end",
"def mounts_info\n @mounts_info ||= vault_client.request(:get, \"/v1/sys/internal/ui/mounts\")\n rescue Vault::VaultError\n unable_to_determine_version\n raise\n end",
"def mount_points\n `mount`.split(\"\\n\").grep(/dev/).map { |x| x.split(\" \")[2] }\n end",
"def mounts\r\n @mounts.list\r\n end",
"def disk_space()\n\n instructions = 'df -h'\n r = @ssh ? @ssh.exec!(instructions) : `#{instructions}`\n\n @results[:disk_usage] = {}\n\n a = r.lines.grep(/\\/dev\\/root/)\n\n puts ('a: ' + a.inspect).debug if @debug\n\n if a.any? then\n size, used, avail = a[0].split(/ +/).values_at(1,2,3)\n\n @results[:disk_usage][:root] = {size: size, used: used, \n avail: avail}\n end\n\n a2 = r.lines.grep(/\\/dev\\/sda1/)\n\n puts ('a2: ' + a2.inspect).debug if @debug\n\n if a2.any? then\n size, used, avail = a2[0].split(/ +/).values_at(1,2,3)\n\n @results[:disk_usage][:sda1] = {size: size, used: used, \n avail: avail}\n end\n\n end",
"def disk_usage()\n return {} if @config.monitored_mounts.empty?\n \n disks = {}\n\n @config.monitored_mounts.each do |disk|\n disks[disk] = nil\n end\n\n diskData = `#{@config.df_command}`\n \n diskData.each_line do |line|\n fields = line.split(' ')\n name = fields[5 .. -1].join\n disks[name] = Integer(Float(fields[4][0 .. -2])) if @config.monitored_mounts.include? name\n end\n\n disks\n end",
"def other_mounts\n others = []\n fstab_lines.each do |line|\n localdevice, localmount, _fstype, _options, _freq, _pass = line.split(/\\s+/)\n others << line if localmount == mount && localdevice != device\n end\n others\n end",
"def mount_list(www_root)\n arr = []\n %x{mount -t iso9660}.scan(/\\S+ on (\\S+)/) do |a|\n mountpoint = a[0]\n arr << mountpoint if mountpoint.match %r{#{www_root}}\n end\n arr\nend",
"def list_disk_devices\n if File.executable?(\"/bin/lsblk\")\n shell_out(%Q{/bin/lsblk -i -p -r -n | egrep ' disk( |$)'}).stdout.each_line.map { |l|\n l.chomp.sub(/ .*/, '')\n }\n else\n # XXX something dumber\n nil\n end\n end",
"def is_mounted?(device)\n system(\"grep -q '#{device}' /proc/mounts\")\nend",
"def bcpc_mounted_filesystems\n bcpc_ohai_reload(:filesystem)\n\n node['filesystem'].select do |_, fs|\n fs[:mount] =~ %r{^/disk/\\d+$}\n end\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
:call-seq: scale_bilinear(width, height) Scales the image using the bilinear interpolation method. Bilinear interpolation calculates the color values in the resulting image by looking at the four nearest pixels for each pixel in the resulting image. This gives a more accurate representation than nearest-neighbor interpolation, at the expense of slightly blurring the resulting image. == Example i = Axon::JPEG('test.jpg') i.scale_bilinear(50, 75) i.width => 50 i.height => 75 | def scale_bilinear(*args)
@source = BilinearScaler.new(@source, *args)
self
end | [
"def resample_bilinear!(new_width, new_height); end",
"def bilinear_gray(image, scale_x, scale_y)\n interpolated_img = Imgrb::Image.new((image.width*scale_x).round,\n (image.height*scale_y).round,\n [0]*image.channels)\n\n (interpolated_img.height).times do |y_|\n y = y_+0.5 #Center of pixel is at idx+0.5\n y_orig = (y/scale_y.to_f - 0.5) #To find index remove 0.5 from position (see above)\n y_less = y_orig.floor\n y_more = y_orig.ceil\n y_more = y_less if y_more > image.height-1\n y_less = y_more if y_less < 0\n row_less = image.bitmap.rows[y_less]\n row_more = image.bitmap.rows[y_more]\n\n (interpolated_img.width).times do |x_|\n x = x_+0.5\n x_orig = (x/scale_x.to_f - 0.5)\n x_less = x_orig.floor\n x_more = x_orig.ceil\n x_more = x_less if x_more > image.width-1\n x_less = x_more if x_less < 0\n\n if x_less != x_more\n y_less_int = linear_int_special(row_less[x_less], row_less[x_more], x_orig)\n y_more_int = linear_int_special(row_more[x_less], row_more[x_more], x_orig)\n else\n y_less_int = row_less[x_less]\n y_more_int = row_more[x_less]\n end\n\n if y_less != y_more\n int_val = linear_int_special(y_less_int, y_more_int, y_orig)\n else\n int_val = y_less_int\n end\n\n interpolated_img[y_,x_] = int_val\n end\n end\n\n return interpolated_img\n end",
"def resample_bilinear!(new_width, new_height)\n index_x, interp_x = steps_residues(width, new_width)\n index_y, interp_y = steps_residues(height, new_height)\n\n pixels = Array.new(new_width * new_height)\n i = 0\n for y in 1..new_height\n # Clamp the indicies to the edges of the image\n y1 = [index_y[y - 1], 0].max\n y2 = [index_y[y - 1] + 1, height - 1].min\n y_residue = interp_y[y - 1]\n\n for x in 1..new_width\n # Clamp the indicies to the edges of the image\n x1 = [index_x[x - 1], 0].max\n x2 = [index_x[x - 1] + 1, width - 1].min\n x_residue = interp_x[x - 1]\n\n pixel_11 = get_pixel(x1, y1)\n pixel_21 = get_pixel(x2, y1)\n pixel_12 = get_pixel(x1, y2)\n pixel_22 = get_pixel(x2, y2)\n\n # Interpolate by Row\n pixel_top = ChunkyPNG::Color.interpolate_quick(pixel_21, pixel_11, x_residue)\n pixel_bot = ChunkyPNG::Color.interpolate_quick(pixel_22, pixel_12, x_residue)\n\n # Interpolate by Column\n\n pixels[i] = ChunkyPNG::Color.interpolate_quick(pixel_bot, pixel_top, y_residue)\n i += 1\n end\n end\n replace_canvas!(new_width.to_i, new_height.to_i, pixels)\n end",
"def scaleimage **opts\n Vips::Image.scale self, **opts\n end",
"def resample!(image, width, height)\n with_minimagick(image) do |img|\n img.combine_options do |cmd|\n yield cmd if block_given?\n cmd.resample \"#{width}x#{height}\"\n end\n end\n end",
"def resize(width,height)\n\t\t@buffer=@buffer.scale(width, height, :bilinear)\n\tend",
"def scaleImage!()\n\t\t# we scale down the image so that a cell is 1px x 1px\n\t\twidth = (@image.columns / CELL_SIZE).floor\n\t\theight = (@image.rows / CELL_SIZE).floor\n\t\t@image.scale!(width, height)\n\t\t@image = @image.posterize(2)\n\t\treturn self\n\tend",
"def resize(*scale, method)\n #TODO: Implement antialiasing.\n if scale.size == 0\n scale = [method, method]\n method = :bilinear\n elsif scale.size == 1\n scale = [scale[0], scale[0]]\n elsif scale.size != 2\n raise ArgumentError, \"wrong number of arguments (given #{scale.size+1}, expected 1..3)\"\n end\n\n raise ArgumentError, \"scale must be larger than 0.\" if scale.include? 0\n\n\n if method == :bilinear\n #TODO: Resize frame data of animations\n # if self.animated?\n # if !@animation_frames_cached\n # cache_animation_frames_apng if check_valid_apng\n # end\n # end\n return bilinear(scale[0], scale[1])\n elsif method == :nearest\n return nearest_neighbor(scale[0], scale[1])\n else\n raise ArgumentError, \"unknown method: #{method}\"\n end\n end",
"def scale_to(new_width, new_height = nil, options = {})\n raise \"cannot scale non-image\" unless image?\n\n options.reverse_merge!(\n :strip => true,\n :progressive => true,\n :quality => 85,\n :crop => new_height.present?,\n :output_type => 'jpg'\n )\n new_height ||= scaled_height_keeping_aspect_ratio(new_width)\n\n (temp = Tempfile.new(['image-scaler', \".#{options[:output_type]}\"])).close\n\n strip_meta = '-strip' if options[:strip]\n quality = \"-quality #{options[:quality]}%\" \n progressive = '-interlace Plane' if options[:progressive]\n crop = \"^ -gravity center -extent #{new_width}x#{new_height}\" if options[:crop]\n\n # -filter lanczos2sharp -distort resize ? \n resize = \"-resize #{new_width}x#{new_height}#{crop}\" \n cmd = \"#{CONVERT} #{unscaled_path} #{resize} #{progressive} #{quality} #{strip_meta} #{temp.path}\"\n result = `#{cmd}` \n if File.readable?(temp.path) && File.size(temp.path) > 0 \n return IO.read(temp.path) \n else\n Rails.logger.error \"scaling photo #{unscaled_path} failed:\\n#{result}\" \n false\n end\n end",
"def resample(img, width, height, &block)\n processor.resample!(img, width, height, &block)\n end",
"def rescale(min_val = 0, max_val = 1)\n img = self.to_f\n self_min, self_max = img.bitmap.rows.flatten.minmax\n if self_max == self_min\n return Imgrb::Image.new(img.width, img.height, [min_val]*img.channels)\n end\n diff = max_val - min_val\n rescaled = (img - self_min)\n rescaled = rescaled * (diff/(self_max - self_min)) + min_val\n\n return rescaled\n end",
"def scale_nearest(*args)\n @source = NearestNeighborScaler.new(@source, *args)\n self\n end",
"def scale(factor=0.75, quality: nil)\n \n read() do |img|\n \n img2 = img.scale(factor) \n write img2, quality\n \n end\n \n end",
"def get_scaled_size(image, width_bound, height_bound)\n width_multiplier = 1.0 * width_bound / image.columns\n height_multiplier = 1.0 * height_bound / image.rows\n\n if image.rows * width_multiplier <= height_bound\n width_multiplier\n else\n height_multiplier\n end\n end",
"def shrink_to_fit(width, height, quality, format)\n manipulate! do |image|\n img_width, img_height = image.dimensions\n\n image.format(format) do |img|\n if img_width > width || img_height > height\n ratio_width = img_width / width.to_f\n ratio_height = img_height / height.to_f\n\n if ratio_width >= ratio_height\n img.resize \"#{width}x#{(img_height / ratio_width).round}\"\n else\n img.resize \"#{(img_width / ratio_height).round}x#{height}\"\n end\n end\n\n img.quality(quality.to_s)\n image = yield(img) if block_given?\n end\n\n image\n end\n end",
"def scale_by_pixels(dimensions)\n out_pixels = sqrt(options[:width] * options[:height]).truncate\n src_pixels = sqrt(dimensions[0] * dimensions[1]).truncate\n out_pixels / src_pixels.to_f\n end",
"def resample inimage, dpi, outimage=nil\n m_begin \"resample\"\n img = get_image(inimage)\n old_dpi = (image_dpi inimage)[0]\n if old_dpi != dpi then\n if false\n out = img.resample(dpi)\n elsif \n old_dpi = (image_dpi inimage)[0]\n ratio = dpi / old_dpi\n out = img.sample(ratio)\n out.density = \"#{dpi}x#{dpi}\"\n end\n else\n out = img\n end\n outimage = inimage if outimage.nil?\n put_image(outimage, out)\n m_end \"resample\"\n end",
"def scale(ratio)\n new_width, new_height = (width * ratio).to_i, (height * ratio).to_i\n target = resize(new_width, new_height)\n block_given? ? yield(target) : target\n end",
"def resize!(w, h, resample = true)\n ptr = self.class.create_image_ptr(w, h, false)\n ::GD2::GD2FFI.send(resample ? :gdImageCopyResampled : :gdImageCopyResized,\n ptr, image_ptr, 0, 0, 0, 0, w.to_i, h.to_i, width.to_i, height.to_i)\n alpha_blending = alpha_blending?\n init_with_image(ptr)\n self.alpha_blending = alpha_blending\n self\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
:call-seq: scale_nearest(width, height) Scales the image using the nearest-neighbor interpolation method. Nearest-neighbor interpolation selects the value of the nearest pixel when calculating colors in the scaled image. == Example i = Axon::JPEG('test.jpg') i.scale_nearest(50, 75) i.width => 50 i.height => 75 | def scale_nearest(*args)
@source = NearestNeighborScaler.new(@source, *args)
self
end | [
"def resample_nearest_neighbor!(new_width, new_height); end",
"def resample_nearest_neighbor!(new_width, new_height)\n steps_x = steps(width, new_width)\n steps_y = steps(height, new_height)\n\n pixels = Array.new(new_width * new_height)\n i = 0\n for y in steps_y\n for x in steps_x\n pixels[i] = get_pixel(x, y)\n i += 1\n end\n end\n\n replace_canvas!(new_width.to_i, new_height.to_i, pixels)\n end",
"def scale_bilinear(*args)\n @source = BilinearScaler.new(@source, *args)\n self\n end",
"def scaleImage!()\n\t\t# we scale down the image so that a cell is 1px x 1px\n\t\twidth = (@image.columns / CELL_SIZE).floor\n\t\theight = (@image.rows / CELL_SIZE).floor\n\t\t@image.scale!(width, height)\n\t\t@image = @image.posterize(2)\n\t\treturn self\n\tend",
"def scale_to(new_width, new_height = nil, options = {})\n raise \"cannot scale non-image\" unless image?\n\n options.reverse_merge!(\n :strip => true,\n :progressive => true,\n :quality => 85,\n :crop => new_height.present?,\n :output_type => 'jpg'\n )\n new_height ||= scaled_height_keeping_aspect_ratio(new_width)\n\n (temp = Tempfile.new(['image-scaler', \".#{options[:output_type]}\"])).close\n\n strip_meta = '-strip' if options[:strip]\n quality = \"-quality #{options[:quality]}%\" \n progressive = '-interlace Plane' if options[:progressive]\n crop = \"^ -gravity center -extent #{new_width}x#{new_height}\" if options[:crop]\n\n # -filter lanczos2sharp -distort resize ? \n resize = \"-resize #{new_width}x#{new_height}#{crop}\" \n cmd = \"#{CONVERT} #{unscaled_path} #{resize} #{progressive} #{quality} #{strip_meta} #{temp.path}\"\n result = `#{cmd}` \n if File.readable?(temp.path) && File.size(temp.path) > 0 \n return IO.read(temp.path) \n else\n Rails.logger.error \"scaling photo #{unscaled_path} failed:\\n#{result}\" \n false\n end\n end",
"def bilinear_gray(image, scale_x, scale_y)\n interpolated_img = Imgrb::Image.new((image.width*scale_x).round,\n (image.height*scale_y).round,\n [0]*image.channels)\n\n (interpolated_img.height).times do |y_|\n y = y_+0.5 #Center of pixel is at idx+0.5\n y_orig = (y/scale_y.to_f - 0.5) #To find index remove 0.5 from position (see above)\n y_less = y_orig.floor\n y_more = y_orig.ceil\n y_more = y_less if y_more > image.height-1\n y_less = y_more if y_less < 0\n row_less = image.bitmap.rows[y_less]\n row_more = image.bitmap.rows[y_more]\n\n (interpolated_img.width).times do |x_|\n x = x_+0.5\n x_orig = (x/scale_x.to_f - 0.5)\n x_less = x_orig.floor\n x_more = x_orig.ceil\n x_more = x_less if x_more > image.width-1\n x_less = x_more if x_less < 0\n\n if x_less != x_more\n y_less_int = linear_int_special(row_less[x_less], row_less[x_more], x_orig)\n y_more_int = linear_int_special(row_more[x_less], row_more[x_more], x_orig)\n else\n y_less_int = row_less[x_less]\n y_more_int = row_more[x_less]\n end\n\n if y_less != y_more\n int_val = linear_int_special(y_less_int, y_more_int, y_orig)\n else\n int_val = y_less_int\n end\n\n interpolated_img[y_,x_] = int_val\n end\n end\n\n return interpolated_img\n end",
"def scaleimage **opts\n Vips::Image.scale self, **opts\n end",
"def scale_by_pixels(dimensions)\n out_pixels = sqrt(options[:width] * options[:height]).truncate\n src_pixels = sqrt(dimensions[0] * dimensions[1]).truncate\n out_pixels / src_pixels.to_f\n end",
"def scale_image(preferred_width, preferred_height)\n # Retrieve the current height and width\n image_data = ActiveStorage::Analyzer::ImageAnalyzer.new(image).metadata\n new_width = image_data[:width]\n new_height = image_data[:height]\n\n # Adjust the width\n if new_width > preferred_width\n new_width = preferred_width\n new_height = (new_height * new_width) / image_data[:width]\n end\n\n # Adjust the height\n if new_height > preferred_height\n old_height = new_height\n new_height = preferred_height\n new_width = (new_width * new_height) / old_height\n end\n\n # Return the resized image\n image.variant(resize_to_limit: [new_width, new_height])\n end",
"def resize_retina_image(image_filename)\n \n image_name = File.basename(image_filename)\n \n image = Magick::Image.read(image_filename).first # Read the image\n new_image = image.scale(SCALE_BY)\n \n if new_image.write(image_filename) # Overwrite image file\n puts \"Resizing Image (#{SCALE_BY}): #{image_name}\"\n else\n puts \"Error: Couldn't resize image #{image_name}\"\n end\n \nend",
"def resample_bilinear!(new_width, new_height); end",
"def preformat_image(image)\n image.density = \"72x72\" # Make 72x72\n # Convert to our default color profile (RGB, ftw!)\n if image.colorspace == Magick::CMYKColorspace\n image = image.add_profile(COLOR_PROFILE)\n end\n # If the image has alpha channel transparency, fill it with background color\n if image.alpha?\n image.alpha(BackgroundAlphaChannel)\n end\n # If the image size isn't a square, make it a square\n img_w = image.columns\n img_h = image.rows\n ratio = img_w.to_f/img_h.to_f\n if ratio < 1\n x = img_h/2-img_w/2\n image = image.extent(img_h,img_h,x=-x,y=0)\n elsif ratio > 1\n y = img_w/2-img_h/2\n image = image.extent(img_w,img_w,x=0,y=-y)\n end\n return image\n end",
"def resample_bilinear!(new_width, new_height)\n index_x, interp_x = steps_residues(width, new_width)\n index_y, interp_y = steps_residues(height, new_height)\n\n pixels = Array.new(new_width * new_height)\n i = 0\n for y in 1..new_height\n # Clamp the indicies to the edges of the image\n y1 = [index_y[y - 1], 0].max\n y2 = [index_y[y - 1] + 1, height - 1].min\n y_residue = interp_y[y - 1]\n\n for x in 1..new_width\n # Clamp the indicies to the edges of the image\n x1 = [index_x[x - 1], 0].max\n x2 = [index_x[x - 1] + 1, width - 1].min\n x_residue = interp_x[x - 1]\n\n pixel_11 = get_pixel(x1, y1)\n pixel_21 = get_pixel(x2, y1)\n pixel_12 = get_pixel(x1, y2)\n pixel_22 = get_pixel(x2, y2)\n\n # Interpolate by Row\n pixel_top = ChunkyPNG::Color.interpolate_quick(pixel_21, pixel_11, x_residue)\n pixel_bot = ChunkyPNG::Color.interpolate_quick(pixel_22, pixel_12, x_residue)\n\n # Interpolate by Column\n\n pixels[i] = ChunkyPNG::Color.interpolate_quick(pixel_bot, pixel_top, y_residue)\n i += 1\n end\n end\n replace_canvas!(new_width.to_i, new_height.to_i, pixels)\n end",
"def resize(*scale, method)\n #TODO: Implement antialiasing.\n if scale.size == 0\n scale = [method, method]\n method = :bilinear\n elsif scale.size == 1\n scale = [scale[0], scale[0]]\n elsif scale.size != 2\n raise ArgumentError, \"wrong number of arguments (given #{scale.size+1}, expected 1..3)\"\n end\n\n raise ArgumentError, \"scale must be larger than 0.\" if scale.include? 0\n\n\n if method == :bilinear\n #TODO: Resize frame data of animations\n # if self.animated?\n # if !@animation_frames_cached\n # cache_animation_frames_apng if check_valid_apng\n # end\n # end\n return bilinear(scale[0], scale[1])\n elsif method == :nearest\n return nearest_neighbor(scale[0], scale[1])\n else\n raise ArgumentError, \"unknown method: #{method}\"\n end\n end",
"def resize_photos_helper(photo)\n return nil if photo.blank?\n image = MiniMagick::Image.open(self.photo.url)\n [[:height,300], [:width, 600]].each do |param, num|\n if image.send(param) > num \n scaling_percent = (num / image.send(param).to_f)*100\n image.sample(scaling_percent.to_s + \"%\")\n end\n end\n return image\n end",
"def rescale(min_val = 0, max_val = 1)\n img = self.to_f\n self_min, self_max = img.bitmap.rows.flatten.minmax\n if self_max == self_min\n return Imgrb::Image.new(img.width, img.height, [min_val]*img.channels)\n end\n diff = max_val - min_val\n rescaled = (img - self_min)\n rescaled = rescaled * (diff/(self_max - self_min)) + min_val\n\n return rescaled\n end",
"def fit\n if self.needs_to_be_resized?\n rmagick_img.resize_to_fit!(@x, @y)\n else\n rmagick_img.resize_to_fit(@x, @y)\n end\n end",
"def resample!(image, width, height)\n with_minimagick(image) do |img|\n img.combine_options do |cmd|\n yield cmd if block_given?\n cmd.resample \"#{width}x#{height}\"\n end\n end\n end",
"def pixelize(sender)\n context = NSGraphicsContext.currentContext.CIContext\n data = image_placeholder.image.TIFFRepresentation \n current_image = CIImage.imageWithData(data)\n \n # See all available filters by calling CIFilter.filterNamesInCategories(nil).sort\n filter = CIFilter.filterWithName(\"CICrystallize\")\n filter.setDefaults\n filter.setValue(3.0, forKey: \"inputRadius\")\n \n filter.setValue(current_image, forKey: \"inputImage\") \n converted_image = ciimage_to_nsimage(filter.outputImage)\n image_placeholder.image = converted_image\n debug \"Pixelizing the image\"\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
:call-seq: jpeg_file(path [, options]) Writes the image to a new file at +path+ as compressed JPEG data. Returns the number of bytes written. If the image has an alpha channel it will be stripped. See Axon#jpeg for a description of +options+. == Example Axon.png_file("image.png") do |image| image.jpeg_file("image.jpg") saves the image to "image.jpg" end | def jpeg_file(path, *args)
File.open(path, 'wb') do |f|
jpeg(f, *args)
end
end | [
"def convert_to_jpeg input_file, output_file\n begin\n puts \"Opening temp file '#{input_file.path}'...\"\n imagick = MiniMagick::Image.open input_file.path\n if imagick.type == \"JPEG\"\n puts \"Skipping JPEG recompression. Image is already JPEG!\"\n imagick.write output_file.path\n return true\n end\n imagick.format \"jpeg\"\n imagick.quality 100\n imagick.write output_file.path\n puts \"Conversion to JPEG success...\"\n puts \"Wrote to #{output_file.path}...\"\n rescue\n puts \"Conversion to JPEG failed!!!\"\n return false\n end\n return true\n end",
"def write(path)\n image.write(path)\n image\n end",
"def write(filename, options = {})\n options.symbolize_keys!\n format = options[:format] && options[:format].to_s.strip.upcase\n quality = options[:quality] && options[:quality].to_i\n format = 'JPEG' if (quality && !format) || format == 'JPG'\n compression = nil\n \n if format == 'JPEG' || File.extname(filename) =~ /jpeg|jpg/i\n quality ||= (!@resource.quality.to_i.zero? && @resource.quality) || 80\n flatten! # TODO: other formats need flattening such as gif, bmp etc.\n elsif format == 'PNG' || File.extname(filename) =~ /png/i\n quality ||= 100\n compression = Magick::ZipCompression\n #@resource = @resource.quantize(2**24)\n end\n\n @resource.format = format if format\n \n \n success = @resource.write(filename) do\n self.format = format if format\n self.quality = quality if quality \n self.compression = compression if compression\n end\n return nil unless success\n self\n end",
"def save(path, format=nil, quality=nil)\n ext = File.extname(path)[1..-1]\n if not format\n format = (ext and ext.downcase == 'png') ? 'png' : 'jpg'\n end\n\n output_path = path.sub(/\\.\\w+$/, '') + \".#{format}\"\n\n if format == 'jpg'\n writer = ::VIPS::JPEGWriter.new @image, :quality => (quality || 80)\n else\n writer = ::VIPS::PNGWriter.new @image\n end\n\n writer.write output_path\n\n # Reset the image so we can use it again\n @image = ::VIPS::Image.new @path\n\n output_path\n end",
"def compress_img (files = [])\n\t\t\n\t\tunless which('convert')\n\t\t\tputs \"WARNING: ImageMagick is not installed on your system. Skipping image compression...\"\n\t\t\treturn\n\t\tend\n\t\t\n\t\tunless files.is_a? Array\n\t\t\tfiles = [files]\n\t\tend\n\n\t\tfiles.each do |file|\n\n\t\t\tfname = get_filename(file)\n\n\t\t\tcompress_cmd = \n\t\t\t\t\t\t\t\t\"convert -strip \" + \n\t\t\t\t\t\t\t\t# uncomment to enable gaussian blur (smaller files but blurry)\n\t\t\t\t\t\t\t\t#\"-gaussian-blur 0.01 \" +\n\t\t\t\t\t\t\t\t# uncomment to enable interlacing (progressive compression for jpeg)\n\t\t\t\t\t\t\t\t#\"-interlace Plane \" +\n\t\t\t\t\t\t\t\t\"#{fname} -resize #{$img_options[:max_width]}x#{$img_options[:max_height]}\\\\> \" + \n\t\t \t\t\t\t\t\"-compress #{$img_options[:compress_type]} -quality #{$img_options[:quality]} \" + \n\t\t \t\t\t\t\t\"#{get_raw_filename(fname) + '.' + $img_options[:output_ext]}\"\n\t\t\t\n\t\t # invoke system ImageMagick\n\t\t system(compress_cmd)\n\t\t # remove the old file (if applicable)\n\t\t if (get_ext(fname) != (\".\" + $img_options[:output_ext]))\n\t\t \tsystem(\"rm #{fname}\")\n\t\t end\n\n\t\tend\n\n\tend",
"def convert_png_to_jpg(img_path)\n print_status(\"Converting #{img_path} to JPG to reduce image quality to #{JPGQUALITY}\")\n basename = File.basename(img_path, '.png')\n img = MiniMagick::Image.open(img_path)\n img.format('JPEG')\n img.quality(JPGQUALITY)\n dst = \"#{WORKFOLDER}/#{basename}.jpg\"\n img.write(dst)\n dst\nend",
"def jpeg(quality = nil)\n size = FFI::MemoryPointer.new(:pointer)\n ptr = ::GD2::GD2FFI.send(:gdImageJpegPtr, image_ptr, size, quality || -1)\n ptr.get_bytes(0, size.get_int(0))\n ensure\n ::GD2::GD2FFI.send(:gdFree, ptr)\n end",
"def generate_jpeg(force_regen = false)\n return true unless image?\n return true unless CONFIG[\"jpeg_enable\"]\n return true unless width && height\n # Only generate JPEGs for PNGs. Don't do it for files that are already JPEGs; we'll just add\n # artifacts and/or make the file bigger. Don't do it for GIFs; they're usually animated.\n return true if (file_ext.downcase != \"png\")\n\n # We can generate the image during upload or offline. Use tempfile_path\n # if it exists, otherwise use file_path.\n path = tempfile_path\n path = file_path unless File.exist?(path)\n unless File.exist?(path)\n errors.add(:file, \"not found\")\n throw :abort\n end\n\n # If we already have the image, don't regenerate it.\n if !force_regen && jpeg_width.is_a?(Integer)\n return true\n end\n\n size = Moebooru::Resizer.reduce_to({ :width => width, :height => height }, { :width => CONFIG[\"jpeg_width\"], :height => CONFIG[\"jpeg_height\"] }, CONFIG[\"jpeg_ratio\"])\n begin\n Moebooru::Resizer.resize(file_ext, path, tempfile_jpeg_path, size, CONFIG[\"jpeg_quality\"])\n rescue => x\n errors.add \"jpeg\", \"couldn't be created: #{x}\"\n throw :abort\n end\n\n self.jpeg_width = size[:width]\n self.jpeg_height = size[:height]\n self.jpeg_size = File.size(tempfile_jpeg_path)\n self.jpeg_crc32 = Moebooru::Hasher.compute_one(tempfile_jpeg_path, :crc32)\n\n true\n end",
"def save_to_file(file, format = :png, compression_level = 0, *options)\n if format == :png\n save_png(file, compression_level, *options)\n else\n save_bmp(file)\n end\n\n #Should the method close the file, or leave responsibility to caller?\n file.close\n\n return file\n end",
"def to_image_file(filename, options = {})\n self.to_image(options).write(filename)\n end",
"def write( path )\n base_image.write( path )\n end",
"def jpeg\n i = Magick::Image.from_blob(data).first\n i.format = \"JPG\"\n i.to_blob\n end",
"def write_picture(args = {})\n if @picture[\"n\"] == 0\n raise FlacInfoError, \"There is no METADATA_BLOCK_PICTURE\"\n end\n\n if args.has_key?(:n)\n n = args[:n]\n else\n n = 1\n end\n\n # \"image/jpeg\" => \"jpeg\"\n extension = @picture[n][\"mime_type\"].split(\"/\")[1]\n\n if not args.has_key?(:outfile)\n if @tags[\"album\"] == nil or @tags[\"album\"] == \"\"\n outfile = \"flacimage#{n}.#{extension}\"\n else\n # Try to use contents of \"album\" tag for the filename\n outfile = \"#{@tags[\"album\"]}#{n}.#{extension}\"\n end\n else\n outfile = \"#{args[:outfile]}.#{extension}\"\n end\n\n in_p = File.new(@filename, \"rb\")\n out_p = is_io?(args[:outfile]) ? args[:outfile] : File.new(outfile, \"wb\")\n out_p.binmode # For Windows folks...\n\n in_p.seek(@picture[n]['raw_data_offset'], IO::SEEK_CUR)\n raw_data = in_p.read(@picture[n]['raw_data_length'])\n out_p.write(raw_data)\n\n in_p.close\n if is_io?(args[:outfile])\n out_p.rewind\n else\n out_p.close\n end\n\n nil\n end",
"def save_impl(format, file)\n ImageIO.write(@src, format, file)\n end",
"def write_image id, name_suffix=\"\"\n m_begin \"write_image\"\n filename = \"./temp/\" +id.to_s+name_suffix+\".gif\"\n get_image(id).write(filename)\n m_end \"write_image\"\n end",
"def compress_file(*path)\n compressed_path = path.dup\n compressed_path.push(\"#{compressed_path.pop}.gz\")\n base_file = File.open(for_file(path))\n create_file(compressed_path) do |file|\n compressor = Zlib::GzipWriter.new(file)\n while data = base_file.read(2048)\n compressor.write(data)\n end\n compressor.close\n end\n end",
"def png_file(path, *args)\n File.open(path, 'wb') do |f|\n png(f, *args)\n end\n end",
"def save_impl(format, file)\n write_new_image format, FileImageOutputStream.new(file)\n end",
"def save(path='result.jpg')\n @canvas.write(path)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
:call-seq: png_file(path) Writes the image to a new file at +path+ as compressed PNG data. Returns the number of bytes written. == Example Axon.jpeg_file("image.jpg") do |image| image.png_file("image.png") saves the image to "image.png" end | def png_file(path, *args)
File.open(path, 'wb') do |f|
png(f, *args)
end
end | [
"def jpeg_file(path, *args)\n File.open(path, 'wb') do |f|\n jpeg(f, *args)\n end\n end",
"def png(file)\n file.write PNG_HEADER\n\n # Make sure IEND is actually at the end (Ruby 1.9).\n iend = @chunks.delete 'IEND'\n @chunks['IEND'] = iend\n\n @chunks.each do |type, data|\n data.each do |data_part|\n file.write [data_part.length, type].pack('NA*')\n file.write data_part\n file.write [Zlib::crc32(type + data_part)].pack('N')\n end\n end\n end",
"def write(path)\n image.write(path)\n image\n end",
"def write_png(file)\n @canvas.target.write_to_png(file)\nend",
"def convert_jpg_to_png(img_path)\n print_status(\"Converting #{img_path} back to PNG\")\n basename = File.basename(img_path, '.jpg')\n img = MiniMagick::Image.open(img_path)\n img.format('PNG')\n dst = \"#{WORKFOLDER}/#{basename}.png\"\n img.write(dst)\n dst\nend",
"def save_png(filename)\n to_png.save(filename)\n end",
"def convert_png_to_jpg(img_path)\n print_status(\"Converting #{img_path} to JPG to reduce image quality to #{JPGQUALITY}\")\n basename = File.basename(img_path, '.png')\n img = MiniMagick::Image.open(img_path)\n img.format('JPEG')\n img.quality(JPGQUALITY)\n dst = \"#{WORKFOLDER}/#{basename}.jpg\"\n img.write(dst)\n dst\nend",
"def write( path )\n base_image.write( path )\n end",
"def convert_to_jpeg input_file, output_file\n begin\n puts \"Opening temp file '#{input_file.path}'...\"\n imagick = MiniMagick::Image.open input_file.path\n if imagick.type == \"JPEG\"\n puts \"Skipping JPEG recompression. Image is already JPEG!\"\n imagick.write output_file.path\n return true\n end\n imagick.format \"jpeg\"\n imagick.quality 100\n imagick.write output_file.path\n puts \"Conversion to JPEG success...\"\n puts \"Wrote to #{output_file.path}...\"\n rescue\n puts \"Conversion to JPEG failed!!!\"\n return false\n end\n return true\n end",
"def write_image id, name_suffix=\"\"\n m_begin \"write_image\"\n filename = \"./temp/\" +id.to_s+name_suffix+\".gif\"\n get_image(id).write(filename)\n m_end \"write_image\"\n end",
"def save(path, format=nil, quality=nil)\n ext = File.extname(path)[1..-1]\n if not format\n format = (ext and ext.downcase == 'png') ? 'png' : 'jpg'\n end\n\n output_path = path.sub(/\\.\\w+$/, '') + \".#{format}\"\n\n if format == 'jpg'\n writer = ::VIPS::JPEGWriter.new @image, :quality => (quality || 80)\n else\n writer = ::VIPS::PNGWriter.new @image\n end\n\n writer.write output_path\n\n # Reset the image so we can use it again\n @image = ::VIPS::Image.new @path\n\n output_path\n end",
"def save_dxruby_image(fn, img)\r\n w = img.width\r\n h = img.height\r\n png = ChunkyPNG::Image.new(w, h, ChunkyPNG::Color::TRANSPARENT)\r\n h.times do |y|\r\n w.times do |x|\r\n a, r, g, b = img[x, y]\r\n png[x, y] = ChunkyPNG::Color.rgba(r, g, b, a)\r\n end\r\n end\r\n png.save(fn, :fast_rgba)\r\nend",
"def save_to_file(file, format = :png, compression_level = 0, *options)\n if format == :png\n save_png(file, compression_level, *options)\n else\n save_bmp(file)\n end\n\n #Should the method close the file, or leave responsibility to caller?\n file.close\n\n return file\n end",
"def save_png(path=@path)\n start.puts 'set term png enhanced', \n \"set output '#{path}.png'\"\n draw\n end",
"def write_to_pgm(to_write, pixels)\n # if it already exists, delete it and create a new one\n if File.file?(to_write)\n File.delete(to_write)\n end\n # begin writing\n File.open(to_write, \"w\") do |pgm|\n # store in P2 format\n pgm.write(\"P2\\n#{WIDTH} #{HEIGHT}\\n#{MAX_PIXEL}\\n\")\n pixels.each do |i|\n i.each do |pixel|\n pgm.write(\"#{pixel} \")\n end\n end\n end\nend",
"def save_impl(format, file)\n write_new_image format, FileImageOutputStream.new(file)\n end",
"def generate(write_path, size_type)\n if SUPPORTED_FILE_TYPES.include? File.extname(read_path)\n image = Magick::Image.read(read_path).first\n write_image image, write_path, size_type\n else\n write_default_image write_path, size_type\n end\n end",
"def save_png(path, margin=4)\n scale = 8\n w = (width+margin+margin)*scale\n h = (width+margin+margin)*scale\n canvas = PNG::Canvas.new w,h\n points.each do |p|\n for x in (0..scale-1)\n for y in (0..scale-1)\n canvas.point( (p[0]+margin)*scale+x,\n h-(p[1]+margin)*scale-y,\n PNG::Color::Black )\n end\n end\n end\n\n png = PNG.new canvas\n png.save path\n end",
"def write(file_name = 'graph.png')\n to_image.write(file_name)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Changes font name of cell | def change_font_name(new_font_name = 'Verdana')
validate_worksheet
font = get_cell_font.dup
font.set_name(new_font_name)
update_font_references(font)
end | [
"def change_column_font_name(col=0, font_name='Verdana')\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified name\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_column_font(col, Worksheet::NAME, font_name, font, xf_id)\n end",
"def change_row_font_name(row=0, font_name='Verdana')\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified name\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_row_font(row, Worksheet::NAME, font_name, font, xf_id)\n end",
"def change_column_font_name(column_index = 0, font_name = 'Verdana')\n xf = get_col_xf(column_index)\n font = @workbook.fonts[xf.font_id].dup\n font.set_name(font_name)\n change_column_font(column_index, Worksheet::NAME, font_name, font, xf)\n end",
"def change_font_name(fontname)\n @font_name = fontname\n @text_entry.update_font\n self.redraw\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf, get_row_style(row_index))\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf)\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def font_name()\n validate_worksheet\n @workbook.fonts[font_id()][:font][:name][:attributes][:val]\n end",
"def default_font_name(); end",
"def font(row, col, sheet = default_sheet)\n read_cells(sheet)\n row, col = normalize(row, col)\n @fonts[sheet][[row, col]]\n end",
"def font_name; end",
"def change_row_font(row, change_type, arg, font, xf_id)\n validate_workbook\n validate_nonnegative(row)\n increase_rows(row)\n\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, xf_id[:fontId].to_s)\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id)\n xf[:fontId] = Integer(font_id)\n # Modify xf array and retrieve new xf id\n @row_styles[(row+1).to_s][:style] = modify_xf(@workbook, xf)\n\n if @sheet_data[row].nil?\n @sheet_data[row] = []\n end\n\n @sheet_data[Integer(row)].each do |c|\n unless c.nil?\n font_switch(c, change_type, arg)\n end\n end\n end",
"def default_font_name()\n end",
"def font(row, col, sheet=nil)\n sheet = @default_sheet unless sheet\n read_cells(sheet) unless @cells_read[sheet]\n row,col = normalize(row,col)\n @fonts[sheet][[row,col]]\n end",
"def fontName\n return @font_name\n end",
"def update_font\n self.contents.font.name = @window.fontName\n #self.recalculate_maxlength\n self.refresh\n end",
"def process_font_name( selector, property, value )\n\t\tprocess_normal_style_attr(selector, \"font-family\", value)\t\n\tend",
"def font=(font)\n @label.font = font\n end",
"def change_column_font_size(col=0, font_size=10)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified size\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_column_font(col, Worksheet::SIZE, font_size, font, xf_id)\n end",
"def font=(value)\n @font = value\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Changes font size of cell | def change_font_size(font_size = 10)
validate_worksheet
raise 'Argument must be a number' unless font_size.is_a?(Integer) || font_size.is_a?(Float)
font = get_cell_font.dup
font.set_size(font_size)
update_font_references(font)
end | [
"def change_font_size(font_size=10)\n validate_worksheet\n raise 'Argument must be a number' unless font_size.is_a?(Integer) || font_size.is_a?(Float)\n\n font = get_cell_font.dup\n font.set_size(font_size)\n update_font_references(font)\n end",
"def change_column_font_size(col=0, font_size=10)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified size\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_column_font(col, Worksheet::SIZE, font_size, font, xf_id)\n end",
"def font_size\n @font_size ||= [cell_height, cell_width].sort.shift * 0.8\n end",
"def change_row_font_size(row=0, font_size=10)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified size\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_row_font(row, Worksheet::SIZE, font_size, font, xf_id)\n end",
"def change_column_font_size(column_index, font_size=10)\n xf = get_col_xf(column_index)\n font = @workbook.fonts[xf.font_id].dup\n font.set_size(font_size)\n change_column_font(column_index, Worksheet::SIZE, font_size, font, xf)\n end",
"def font_size\n return sz if sz\n\n font = styles.fonts[styles.cellXfs[style].fontId] || styles.fonts[0]\n font.b || (defined?(@b) && @b) ? (font.sz * row.worksheet.workbook.bold_font_multiplier) : font.sz\n end",
"def change_font_size\n @font_size = 20*(@size_slider.value*2)+20\n #Use change_font_decor to maintain if font is bold or not\n change_font_decor\n end",
"def font_size=(size)\n font_size(size)\n end",
"def font_size\r\n @style.font_size || @default_font_size\r\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf, get_row_style(row_index))\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf)\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def set_cell_sizes(attr, sizes)\n # Prepend key_cell_size to the front of sizes\n sizes.unshift calculate_key_size(sizes)\n self.send \"#{attr}=\", sizes\n end",
"def text_size s, f = font\n f.text_size s\n end",
"def title_font_size=(value)\n @title_font.size = value\n end",
"def fontsize size\n Attribute.new \"fontsize = #{size}\"\n end",
"def text_size s, f = font\n f.text_size s.to_s\n end",
"def font_size obj_type\n case obj_type\n when CELL_NAME\n base_size = 10500\n when CELLTYPE_NAME\n base_size = 10500\n when CELL_NAME_L\n base_size = 16000\n when SIGNATURE_NAME\n base_size = 9000\n when PORT_NAME\n base_size = 9000\n when PAPER_COMMENT\n base_size = 10500\n end\n base_size * @scale_val / 100.0 * DPI / 96.0\n end",
"def reset_font_size\n @font_size = 26\n end",
"def change_font_name(new_font_name = 'Verdana')\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_name(new_font_name)\n update_font_references(font)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Changes font color of cell | def change_font_color(font_color = '000000')
validate_worksheet
Color.validate_color(font_color)
font = get_cell_font.dup
font.set_rgb_color(font_color)
update_font_references(font)
end | [
"def text_color=(color)\n @subtable.cells.text_color = color\n end",
"def change_column_font_color(col=0, font_color='000000')\n Color.validate_color(font_color)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified color\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_column_font(col, Worksheet::COLOR, font_color, font, xf_id)\n end",
"def change_column_font_color(column_index, font_color='000000')\n Color.validate_color(font_color)\n\n xf = get_col_xf(column_index)\n font = @workbook.fonts[xf.font_id].dup\n font.set_rgb_color(font_color)\n change_column_font(column_index, Worksheet::COLOR, font_color, font, xf)\n end",
"def change_row_font_color(row=0, font_color='000000')\n Color.validate_color(font_color)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified color\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_row_font(row, Worksheet::COLOR, font_color, font, xf_id)\n end",
"def change_text_color(color)\n @text_color = color\n @text_entry.refresh\n self.redraw\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</strong></font>'\n \n end",
"def after_cell_render_styling(column_name,cell_value,record)\n style_close = \"</font>\"\n \n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf, get_row_style(row_index))\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def set_font_color(range, color)\n @ws.setFontColor(range, color)\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf)\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def font_color()\n validate_worksheet\n if @workbook.fonts[font_id()][:font][:color].nil?\n '000000' #black\n else\n @workbook.fonts[font_id()][:font][:color][:attributes][:rgb]\n end\n end",
"def ansi_formatting(cell, col, row); end",
"def bold_red(text); colour(text, '91');end",
"def before_cell_render_styling(column_name, cell_value, record)\n\n \"\"\n end",
"def bold_green(text); colour(text, '92');end",
"def textColor=(color)\n @label.textColor = color\n end",
"def bold_yellow(text); colour(text, '93');end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
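The change_font_* helpers shown in these snippets are cell-level setters from a RubyXL-style worksheet API. A minimal usage sketch, assuming the rubyXL gem's public interface (the file name and cell coordinates are illustrative):

require 'rubyXL'

workbook  = RubyXL::Workbook.new
worksheet = workbook[0]
worksheet.add_cell(0, 0, 'Status')

cell = worksheet[0][0]
cell.change_font_size(14)          # numeric size, as validated above
cell.change_font_color('FF0000')   # hex RGB string, checked by Color.validate_color

workbook.write('styled.xlsx')      # illustrative output path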
Changes font italics settings of cell | def change_font_italics(italicized = false)
validate_worksheet
font = get_cell_font.dup
font.set_italic(italicized)
update_font_references(font)
end | [
"def change_font_italics(italicized=false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_italic(italicized)\n update_font_references(font)\n end",
"def change_row_italics(row=0, italicized=false)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified italics settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_italics(font, italicized)\n # Update font and xf array\n change_row_font(row, Worksheet::ITALICS, italicized, font, xf_id)\n end",
"def change_column_italics(col=0, italicized=false)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified italics settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_italics(font, italicized)\n # Update font and xf array\n change_column_font(col, Worksheet::ITALICS, italicized, font, xf_id)\n end",
"def italic_cell(options = {}, &block)\n cell({ font_style: :italic }.merge(options || {}), &block)\n end",
"def bold_italic\n @font.fullname = 'Courier-BoldOblique'\n @font\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf, get_row_style(row_index))\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf)\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def set_text_style(text, style)\n text.bold = true if (style & 1) != 0\n text.italic = true if (style & 2) != 0\n if bigger_text?\n @text.size = Font::FONT_SIZE\n @text.y += 4\n end\n end",
"def update_font_settings\n change_font_setting(:default_name, :font_name)\n change_font_setting(:default_size, :font_size)\n change_font_setting(:default_bold, :bold)\n change_font_setting(:default_italic, :italic)\n change_font_setting(:default_shadow, :shadow)\n change_font_setting(:default_outline, :outline)\n change_font_setting(:default_color, :font_color)\n change_font_setting(:default_out_color, :font_out_color)\n end",
"def bold_italic_cell(options = {}, &block)\n cell({ font_style: :bold_italic }.merge(options || {}), &block)\n end",
"def settextfontprec(*)\n super\n end",
"def font_style=(style)\n @text_options[:style] = style\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</strong></font>'\n \n end",
"def font(row, col, sheet = default_sheet)\n read_cells(sheet)\n row, col = normalize(row, col)\n @fonts[sheet][[row, col]]\n end",
"def italic_font(options = {}, &block)\n switch_font(options.merge(style: :italic), &block)\n end",
"def italic; end",
"def font(row, col, sheet=nil)\n sheet = @default_sheet unless sheet\n read_cells(sheet) unless @cells_read[sheet]\n row,col = normalize(row,col)\n @fonts[sheet][[row,col]]\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Changes font bold settings of cell | def change_font_bold(bolded = false)
validate_worksheet
font = get_cell_font.dup
font.set_bold(bolded)
update_font_references(font)
end | [
"def change_font_bold(bolded=false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_bold(bolded)\n update_font_references(font)\n end",
"def bold_cell(options = {}, &block)\n cell({ font_style: :bold }.merge(options || {}), &block)\n end",
"def enter_bold_mode\n @font = (@font + {weight: :bold})\n end",
"def bold=(value)\n @bold = value\n end",
"def bold=(b)\n self[:bold] = b ? true : false\n end",
"def bold_font(options = {}, &block)\n switch_font(options.merge(style: :bold), &block)\n end",
"def bold_format\n format({ :bold => 1 })\n end",
"def bold= bool\n self.weight = bool ? :bold : nil\n end",
"def bold_font_multiplier; end",
"def change_column_bold(col=0, bolded=false)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified bold settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_bold(font, bolded)\n # Update font and xf array\n change_column_font(col, Worksheet::BOLD, bolded, font, xf_id)\n end",
"def change_row_bold(row=0, bolded=false)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified bold settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_bold(font, bolded)\n # Update font and xf array\n change_row_font(row, Worksheet::BOLD, bolded, font, xf_id)\n end",
"def bold_italic_cell(options = {}, &block)\n cell({ font_style: :bold_italic }.merge(options || {}), &block)\n end",
"def bold; end",
"def bold=(value)\n @bold = !!value\n end",
"def change_font_italics(italicized=false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_italic(italicized)\n update_font_references(font)\n end",
"def bold_italic\n @font.fullname = 'Courier-BoldOblique'\n @font\n end",
"def change_font_italics(italicized = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_italic(italicized)\n update_font_references(font)\n end",
"def bold\n style = CharacterStyle.new\n style.bold = true\n if block_given?\n apply(style) {|node| yield node}\n else\n apply(style)\n end\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
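Alongside the cell-level setters, the same API also exposes row- and column-level switches (the negatives above show older implementations of change_row_bold and change_column_italics). A hedged sketch assuming that RubyXL-style interface; indices and header text are illustrative:

require 'rubyXL'

workbook  = RubyXL::Workbook.new
worksheet = workbook[0]
3.times { |col| worksheet.add_cell(0, col, "Header #{col}") }

worksheet.change_row_bold(0, true)        # bold the entire header row
worksheet.change_column_italics(2, true)  # italicise the third column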
Changes font underline settings of cell | def change_font_underline(underlined = false)
validate_worksheet
font = get_cell_font.dup
font.set_underline(underlined)
update_font_references(font)
end | [
"def change_font_underline(underlined=false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_underline(underlined)\n update_font_references(font)\n end",
"def change_row_underline(row=0, underlined=false)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified underline settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_underline(font, underlined)\n # Update font and xf array\n change_row_font(row, Worksheet::UNDERLINE, underlined, font, xf_id)\n end",
"def underline=(value)\n @underline = value\n end",
"def change_column_underline(col=0, underlined=false)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified underline settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_underline(font, underlined)\n # Update font and xf array\n change_column_font(col, Worksheet::UNDERLINE, underlined, font, xf_id)\n end",
"def underline(value)\n @options[:underline] = value\n end",
"def underline_cell(options = {}, &block)\n cell({ borders: [ :bottom ], border_width: 0.5 }.merge(options || {}), &block)\n end",
"def underline=(flag=false)\n @underline = flag\n end",
"def underline; end",
"def set_underline(arg = 1)\n begin\n case arg\n when 0 then @underline = 0 # off\n when 1 then @underline = 1 # Single\n when 2 then @underline = 2 # Double\n when 33 then @underline = 33 # Single accounting\n when 34 then @underline = 34 # Double accounting\n else\n raise ArgumentError,\n \"\\n\\n set_underline(#{arg.inspect})\\n arg must be 0, 1, or none, 2, 33, 34.\\n\"\n \" ( 0:OFF, 1 and none:Single, 2:Double, 33:Single accounting, 34:Double accounting )\\n\"\n end\n end\n end",
"def underline(text); colour(text, '4');end",
"def underline\n wrap_with_sgr(TERM_EFFECTS[:underline])\n end",
"def underline\n surround_with_ansi(ANSI_UNDERLINE)\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end",
"def text_underline(text); underline(text);end",
"def underline\n style = CharacterStyle.new\n style.underline = true\n if block_given?\n apply(style) {|node| yield node}\n else\n apply(style)\n end\n end",
"def underline_text(x, y, w, h, t, a)\n # set underline width by default\n new_width = w\n # unless underline is full text width\n unless font.underline_full\n # Get current width of text alone\n new_width = text_size(t).width\n # Alter underline's x based on alignment other than left\n unless a==0\n x += a == 1 ? w / 2 - new_width / 2 : w - new_width\n end\n end\n # Set underline height\n new_y = 1 + y + h / 2 + font.size / 3\n # Draw underline (and draw effect on underline if applicable\n if self.font.underline_effects == true\n if self.font.shadow == true\n fill_rect(x+2, new_y+2, new_width, 1, self.font.shadow_color)\n end\n if self.font.outline == true\n fill_rect(x-1, new_y-1, new_width+2, 3, self.font.outline_color)\n end\n end\n fill_rect(x, new_y, new_width, 1, self.font.underline_color)\n end",
"def underline\n return @underline\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</strong></font>'\n \n end",
"def strikethru_text(x, y, w, h, t, a)\n # set strikethru width by default\n new_width = w\n # unless underline is full text width\n unless font.strikethru_full\n # Get current width of text alone\n new_width = text_size(t).width\n # Alter underline's x based on alignment other than left\n unless a==0\n x += a == 1 ? w / 2 - new_width / 2 : w - new_width\n end\n end\n # set strikethru height\n new_y = y + h / 2\n # draw underline\n fill_rect(x, new_y, new_width, 1, self.font.strikethru_color)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
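Each of these change_font_* methods follows the same pattern: duplicate the cell's current font, flip one attribute on the copy, then hand the copy to update_font_references so that other cells sharing the original style are left untouched. A minimal plain-Ruby sketch of that idea (not the library's real classes; all names are illustrative):

Font = Struct.new(:size, :rgb_color, :bold, :italic, :underline)

class FontRegistry
  def initialize
    @fonts = []
  end

  # Return the index of an equal font, or append the new one and return its index.
  def register(font)
    @fonts.index(font) || (@fonts << font; @fonts.size - 1)
  end

  def [](index)
    @fonts[index]
  end
end

registry  = FontRegistry.new
shared_id = registry.register(Font.new(10, '000000', false, false, false))

updated = registry[shared_id].dup   # copy, never mutate the shared font in place
updated.underline = true
new_id  = registry.register(updated)

p new_id == shared_id               # => false; cells still using shared_id are unchanged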
Endpoint /images exists on both the v1 and v2 API. The attribute 'visibility' is used to detect whether the call has been made on v1 or v2. In the case of v2 we have all the needed information, but in the case of v1 we don't, and we have to call /images/detail to get full details | def get_all_images(env)
images_json = get(env, "#{@session.endpoints[:image]}/images")
images = JSON.parse(images_json)['images']
return images if images.empty?
is_v1 = false
unless images[0].key? 'visibility'
is_v1 = true
images_json = get(env, "#{@session.endpoints[:image]}/images/detail")
images = JSON.parse(images_json)['images']
end
images.map do |i|
i['visibility'] = i['is_public'] ? 'public' : 'private' if is_v1
Image.new(i['id'], i['name'], i['visibility'], i['size'], i['min_ram'], i['min_disk'])
end
end | [
"def describe_images(optional={})\n\t\targs = self.class.new_params\n\t\targs[:query]['Action'] = 'DescribeImages'\n\t\targs[:region] = optional[:_region] if (optional.key? :_region)\n\t\tif optional.key? :_method\n\t\t\traise ArgumentError, '_method must be GET|POST' unless 'GET|POST'.split('|').include? optional[:_method]\n\t\t\targs[:method] = optional[:_method]\n\t\tend\n\t\tif optional.key? :_scheme\n\t\t\traise ArgumentError, '_scheme must be http|https' unless 'http|https'.split('|').include? optional[:_scheme]\n\t\t\targs[:scheme] = optional[:_scheme]\n\t\tend\n\t\tif optional.key? :filter_1_key\n\t\t\targs[:query]['Filter.1.Key'] = optional[:filter_1_key]\n\t\tend\n\t\tif optional.key? :filter_1_value\n\t\t\targs[:query]['Filter.1.Value'] = optional[:filter_1_value]\n\t\tend\n\t\tif optional.key? :filter_2_key\n\t\t\targs[:query]['Filter.2.Key'] = optional[:filter_2_key]\n\t\tend\n\t\tif optional.key? :filter_2_value\n\t\t\targs[:query]['Filter.2.Value'] = optional[:filter_2_value]\n\t\tend\n\t\tif optional.key? :image_id\n\t\t\targs[:query]['ImageId'] = optional[:image_id]\n\t\tend\n\t\tif optional.key? :image_name\n\t\t\targs[:query]['ImageName'] = optional[:image_name]\n\t\tend\n\t\tif optional.key? :image_owner_alias\n\t\t\targs[:query]['ImageOwnerAlias'] = optional[:image_owner_alias]\n\t\tend\n\t\tif optional.key? :owner_account\n\t\t\targs[:query]['OwnerAccount'] = optional[:owner_account]\n\t\tend\n\t\tif optional.key? :owner_id\n\t\t\targs[:query]['OwnerId'] = optional[:owner_id]\n\t\tend\n\t\tif optional.key? :page_number\n\t\t\traise ArgumentError, 'page_number must be equal or greater than 1' unless optional[:page_number] < 1\n\t\t\targs[:query]['PageNumber'] = optional[:page_number]\n\t\tend\n\t\tif optional.key? :page_size\n\t\t\traise ArgumentError, 'page_size must be equal or greater than 1' unless optional[:page_size] < 1\n\t\t\traise ArgumentError, 'page_size must be equal or less than 100' unless optional[:page_size] > 100\n\t\t\targs[:query]['PageSize'] = optional[:page_size]\n\t\tend\n\t\tif optional.key? :resource_owner_account\n\t\t\targs[:query]['ResourceOwnerAccount'] = optional[:resource_owner_account]\n\t\tend\n\t\tif optional.key? :resource_owner_id\n\t\t\targs[:query]['ResourceOwnerId'] = optional[:resource_owner_id]\n\t\tend\n\t\tif optional.key? :show_expired\n\t\t\targs[:query]['ShowExpired'] = optional[:show_expired]\n\t\tend\n\t\tif optional.key? :snapshot_id\n\t\t\targs[:query]['SnapshotId'] = optional[:snapshot_id]\n\t\tend\n\t\tif optional.key? :status\n\t\t\targs[:query]['Status'] = optional[:status]\n\t\tend\n\t\tif optional.key? :tag_1_key\n\t\t\targs[:query]['Tag.1.Key'] = optional[:tag_1_key]\n\t\tend\n\t\tif optional.key? :tag_1_value\n\t\t\targs[:query]['Tag.1.Value'] = optional[:tag_1_value]\n\t\tend\n\t\tif optional.key? :tag_2_key\n\t\t\targs[:query]['Tag.2.Key'] = optional[:tag_2_key]\n\t\tend\n\t\tif optional.key? :tag_2_value\n\t\t\targs[:query]['Tag.2.Value'] = optional[:tag_2_value]\n\t\tend\n\t\tif optional.key? :tag_3_key\n\t\t\targs[:query]['Tag.3.Key'] = optional[:tag_3_key]\n\t\tend\n\t\tif optional.key? :tag_3_value\n\t\t\targs[:query]['Tag.3.Value'] = optional[:tag_3_value]\n\t\tend\n\t\tif optional.key? :tag_4_key\n\t\t\targs[:query]['Tag.4.Key'] = optional[:tag_4_key]\n\t\tend\n\t\tif optional.key? :tag_4_value\n\t\t\targs[:query]['Tag.4.Value'] = optional[:tag_4_value]\n\t\tend\n\t\tif optional.key? :tag_5_key\n\t\t\targs[:query]['Tag.5.Key'] = optional[:tag_5_key]\n\t\tend\n\t\tif optional.key? 
:tag_5_value\n\t\t\targs[:query]['Tag.5.Value'] = optional[:tag_5_value]\n\t\tend\n\t\tif optional.key? :usage\n\t\t\targs[:query]['Usage'] = optional[:usage]\n\t\tend\n\t\tself.run(args)\n\tend",
"def api_image_inventory\n @images = ::Queries::Image::Filter.new(\n params.permit(\n :otu_id, otu_scope: [])\n ).all.page(params[:page]).per(params[:per])\n render '/images/api/v1/index'\n end",
"def status_images\n response = JSON.parse(@client.get(\"/api/v1/status-images\").body)\n return response[\"images\"] || response\n end",
"def all details = false\n path = details ? \"/v1/images/detail\" : \"/v1/images\"\n\n response = @connection.get path\n response.body['images'].collect do |r|\n ImageData.new r\n end\n end",
"def images\n @images ||= Image.find_all_by_listing_id(listing_id, oauth)\n end",
"def viewer_images_info\n render json: helpers.viewer_images_info(presenter)\n end",
"def public_image?(image)\n # Glance v1\n return image.is_public if image.respond_to?(:is_public)\n # Glance v2\n image.visibility != 'private' if image.respond_to?(:visibility)\n end",
"def query_image_list\n get_resource_list('image', 'v2/images', 'images')\n puts 'The list of Images in this PowerVC cloud are:'\n headers = ['Images']\n print_table(headers, @resource_print_list)\n footer\n end",
"def pic \n link = params[\"api_URL\"]\n tail = \"?show-blocks=all&api-key=#{ENV[\"API_KEY\"]}\"\n response = Unirest.get(link + tail)\n @pics = response.body[\"response\"][\"content\"][\"blocks\"][\"main\"][\"elements\"][0][\"assets\"]\n @pics.each do |pic|\n if pic[\"typeData\"][\"isMaster\"] == true\n @img = pic[\"file\"]\n end\n end\n render json: {img: @img}\n end",
"def get_images\n images = collect_inventory(:private_images) { gather_data_for_this_region(@sas, 'list_all_private_images') }\n rescue ::Azure::Armrest::ApiException => err\n _log.warn(\"Unable to collect Azure private images for: [#{@ems.name}] - [#{@ems.id}]: #{err.message}\")\n else\n process_collection(images, :vms) { |image| parse_image(image) }\n end",
"def describe_images( options = {} )\n options = { :image_id => [], :owner_id => [], :executable_by => [] }.merge(options)\n params = pathlist( \"ImageId\", options[:image_id] )\n params.merge!(pathlist( \"Owner\", options[:owner_id] ))\n params.merge!(pathlist( \"ExecutableBy\", options[:executable_by] ))\n return response_generator(:action => \"DescribeImages\", :params => params)\n end",
"def list_public_virtual_machine_images\n request_path = '/services/images'\n request = ManagementHttpRequest.new(:get, request_path, nil)\n response = request.call\n Serialization.virtual_machine_images_from_xml(response)\n end",
"def images(params = {})\n response, status = BeyondApi::Request.get(@session, \"/shop/images\", params)\n\n handle_response(response, status)\n end",
"def images\n @images ||= ApiFactory.new 'Projects::Images'\n end",
"def show_by_type_and_owner\n images = Image.get_image_by_type_and_owner(params[:imageabletype], params[:imageableid]) # Obtain all the images corresponding to the type and the owner id from the model\n\n if images.length > 0 # If exist at least one image in DB\n response = { content: images, message: \"Images has been obtained successfully\" } # Return all the images\n\n render json: response, status: 200\n else # If not exist data\n response = { content: nil, message: \"No images corresponding to the type and id owner\" }\n\n render json: response, status: 204 # Return 'no content' and nil\n end\n end",
"def images\n @images ||= aws_client.images(owners: ['self'], filters: image_filters).map do |image|\n OpenStruct.new(\n with_tags(image, image_id: image.image_id,\n type: image.image_type,\n public: image.public,\n created_at: image.creation_date)\n )\n end\n end",
"def show_images\n doc = document\n index = 1\n doc.images.each do |l|\n puts \"image: name: #{l.name}\"\n puts \" id: #{l.invoke(\"id\")}\"\n puts \" src: #{l.src}\"\n puts \" index: #{index}\"\n index += 1\n end\n end",
"def fetch_thumbnail_visibility\n response = Blacklight.default_index.connection.get 'select', params: { q: \"id:#{identifier}\" }\n visibility = response[\"response\"][\"docs\"][0][\"visibility_ssi\"]\n return visibility if visibility.present?\n [\"restricted\"]\n rescue\n [\"restricted\"]\n end",
"def show_by_type\n images = Image.get_all_type_images(params[:imageabletype]) # Obtain all the images corresponding to the type from the model\n\n if images.length > 0 # If exist at least one image in DB\n response = { content: images, message: \"Images has been obtained successfully\" } # Return all the images\n\n render json: response, status: 200\n else # If not exist data\n response = { content: nil, message: \"No images corresponding to the type\" }\n\n render json: response, status: 204 # Return 'no content' and nil\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
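The v1 branch exists because Glance v1 reports is_public instead of visibility and omits details unless /images/detail is queried. A standalone sketch of just the normalisation step (the payload values are illustrative):

require 'json'

# Map a Glance v1 image hash onto the v2-style 'visibility' attribute.
def normalize_image(attrs)
  attrs = attrs.dup
  unless attrs.key?('visibility')   # v1 payloads carry 'is_public' instead
    attrs['visibility'] = attrs['is_public'] ? 'public' : 'private'
  end
  attrs
end

v1_image = JSON.parse('{"id": "42", "name": "cirros", "is_public": true}')
p normalize_image(v1_image)['visibility']   # => "public"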
create a new method called encrypt_file | def encrypt_file(filename)
#open the file for reading by passing it the filename
input = File.open(filename, 'r')
#read the whole file into a single string
contents = input.read
encrypted_contents = encrypt_string(contents)
input.close
output = File.open(filename + '.encrypted', 'w')
output.write(encrypted_contents)
output.close
end | [
"def encrypt(file_name, password); end",
"def encrypt_file(path, password = nil)\n salt = random_bytes(@salt_len)\n iv = random_bytes(@salt_len)\n aes_key, mac_key = keys(salt, password)\n\n cipher = cipher(aes_key, iv)\n hmac = OpenSSL::HMAC.new(mac_key, OpenSSL::Digest::SHA256.new)\n new_path = path + '.enc'\n\n File.open(new_path, 'wb') do |out_file|\n out_file.syswrite salt\n out_file.syswrite iv\n hmac.update iv\n\n file_chunks(path).each do |chunk|\n encrypted = cipher.update(chunk)\n hmac.update encrypted\n out_file.syswrite encrypted\n end\n encrypted = cipher.final\n\n hmac.update encrypted\n out_file.syswrite encrypted\n out_file.syswrite hmac.digest\n end\n new_path\n rescue TypeError, ArgumentError, SystemCallError, IOError => e\n error_handler e\n end",
"def encryptFile(fileIn,conf)\n\nsalt_len = 8\nbuf=''\npassword = conf[:passphrase]\ncipher = 'aes-128-cbc'\nputs aktTime()+' encrypting archive...'\nSTDOUT.flush #write out immediately\nsalt= OpenSSL::Random::pseudo_bytes(salt_len)\n\nc = OpenSSL::Cipher::Cipher.new(cipher)\nc.encrypt\n#generate key + IV from given password\nc.pkcs5_keyivgen(password, salt, 1)\nFile.open(CRYPT_TMP,'wb') do |fo|\n \n fo.write(MAGIC) #write magic string \n fo.write(salt) #write 8 bytes random salt\n File.open(fileIn,'rb') do |fi|\n while fi.read(4096,buf) \n fo.write c.update(buf)\n end\n fo.write( c.final)\n end\nend\n\n#overwrite archive with crypted archive\nputs aktTime()+' archive encrypted '\nFile.rename(CRYPT_TMP,fileIn)\nend",
"def encrypt_file(filename, key)\n File.open(filename) do |file|\n data = file.read\n encrypted64 = hex_to_base64(repeat_key_xor(data, key))\n\n File.open(\"encrypted64_\" + filename, \"w\") do |out_file|\n out_file.write(encrypted64)\n end\n end\nend",
"def encrypt_file(src, tgt=nil, options = {} )\n options = { :suffix => '.ez', :autoclean => 'true' }.update(options)\n tgt = \"#{src}#{options[:suffix]}\" unless tgt\n cipher_file :on_encrypter, src, tgt, options[:autoclean]\n end",
"def encrypt(filename, key)\r\n\t\r\n\t_original = File.open(filename, \"r\")\r\n\t_encrypted = File.open(\"encrypted.txt\", \"w+\")\r\n\t\r\n\t# initialize the keyword as an encryption key\r\n\t_key = EncryptionKey.new(key)\r\n\t\r\n\tlinecount = 1\r\n\t\r\n\t# loop through each line, and then each character, modifying it by the current key value\r\n\t# then write the modified character to the output file\r\n\t_original.each do |line|\r\n\t\tcharcount = 1\r\n\t\tline.split(\"\").each do |originalChar|\r\n\t\t\t\r\n\t\t\ttemp = originalChar.ord\r\n\t\t\ttemp = (temp + _key.nextModVal()) % 256\t\t\t\r\n\t\t\tencryptedChar = temp.chr\r\n\t\t\t_encrypted << encryptedChar\r\n\t\t\tcharcount += 1\r\n\t\tend\r\n\t\tlinecount += 1\r\n\tend\r\n\t\r\nend",
"def encrypt(file)\n unless EncryptFileW(string_check(file).wincode)\n raise SystemCallError.new(\"EncryptFile\", FFI.errno)\n end\n self\n end",
"def cipher_file(method, sourcefile, targetfile, delete_source)\n raise(ArgumentError, \"source == target #{sourcefile}\") if sourcefile == targetfile\n safe_create(targetfile,0600) do |o|\n self.send(method, o) do |c|\n safe_read(sourcefile) do |i|\n loop do\n buffer = i.read(Key.block_size) or break\n c << buffer\n end\n end\n end\n end\n safe_delete(sourcefile) if delete_source && File.exists?(targetfile)\n return targetfile\n end",
"def unencrypted_path\n yield SafeFile.safepath_to_string(@filename)\n end",
"def encryptFilename(s)\n\n return s if @orignames\n\n return s if s.start_with?(ENCRFILENAMEPREFIX)\n\n nonce = OpenSSL::Digest::SHA1.new(s).digest\n\n bf = MyAES.new(true, @encrKey2, nonce)\n bf.finish(s)\n b = bf.flush()\n\n s3 = Base64.urlsafe_encode64(b)\n\n s2 = ENCRFILENAMEPREFIX.dup\n\n s2 << s3\n\n s2\n end",
"def encrypt_and_return_object_content(public_key_file, content)\r\n public_key = OpenSSL::PKey::RSA.new(File.read(public_key_file))\r\n Base64.encode64(public_key.public_encrypt(content))\r\nend",
"def encrypt_to_file(file_path, value, encode_base64=true)\r\n\r\n # encrypt value.\r\n enc_value = encrypt(value, encode_base64)\r\n\r\n # save file.\r\n File.write(file_path, enc_value)\r\n enc_value\r\n end",
"def encrypt_files(*files)\n options[:multifile] = true\n files.each { |file| encrypt(file) }\n end",
"def output_file_encrypted\n \"#{output_file}.enc\"\n end",
"def encrypt(message)\n \n end",
"def encrypt_zip_file(filename)\n tempfile = nil\n File.open(filename, 'rb') do |file|\n data = file.read\n if !data.nil? && !data.empty?\n public_key = OpenSSL::PKey::RSA.new(File.read(\"#{CONFIG['RSA_KEYS_PATH']}/#{CONFIG['RSA_PUBLIC_KEY']}\"))\n cipher = OpenSSL::Cipher::Cipher.new(CONFIG['CIPHER_NAME'])\n cipher.encrypt\n cipher.key = cipher_key = cipher.random_key\n cipher.iv = cipher_iv = cipher.random_iv\n encrypted_data = cipher.update(data)\n encrypted_data << cipher.final\n encrypted_data = Base64.encode64(encrypted_data)\n encrypted_data << CONFIG['RSA_KEY_TAG'] + Base64.encode64(public_key.public_encrypt(cipher_key))\n encrypted_data << CONFIG['RSA_IV_TAG'] + Base64.encode64(public_key.public_encrypt(cipher_iv))\n file_basename = File.basename(filename)\n tempfile = Tempfile.new(file_basename, CONFIG['TEMP_DIR'])\n ZipOutputStream::open(tempfile.path) do |zip|\n zip.put_next_entry(file_basename)\n zip.write(encrypted_data)\n end\n end\n end\n return tempfile\n end",
"def encrypt_method\n private? ? :private_encrypt : :public_encrypt\n end",
"def write\n file = FileWriter.new(@path)\n encryptor = Encryptor.new(password: @password, file: file)\n encryptor.encrypt(@plaintext)\n file.write\n end",
"def write_safe \n # returns GPGME::Data obj\n cipher = @crypto.encrypt(self.to_s, {\n :symmetric => true,\n :password => @password,\n # :protocol => 4, #~TODO: need to set algo cipher to AES256 => how???\n # :file => @pwsafe #~TODO: this part is not working ...\n })\n # overwrite safe with write encrypted contents\n File.open(@pwsafe, \"wb\") do |fw|\n fw.puts cipher.read\n end\n # make backup copy in case something bad happened\n FileUtils.cp(@pwsafe, \"#{@pwsafe}.BAK.\" + Time.now.to_i.to_s)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
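encrypt_string is not shown in this snippet, so its actual scheme is unknown; for comparison, the same read, encrypt and write flow with standard OpenSSL AES-256-CBC might look like the sketch below (all names and the key handling are illustrative, not the author's implementation):

require 'openssl'

def encrypt_file_aes(filename, key, iv)
  cipher = OpenSSL::Cipher.new('aes-256-cbc')
  cipher.encrypt
  cipher.key = key
  cipher.iv  = iv

  ciphertext = cipher.update(File.binread(filename)) + cipher.final
  File.binwrite(filename + '.encrypted', ciphertext)
end

cipher  = OpenSSL::Cipher.new('aes-256-cbc')
key, iv = cipher.random_key, cipher.random_iv   # keep these to decrypt later
# encrypt_file_aes('notes.txt', key, iv)        # illustrative input file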
GET /idea_events/1 GET /idea_events/1.json | def show
@idea_event = IdeaEvent.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @idea_event }
end
end | [
"def show\n event_id = params[:id]\n if event_id.present?\n @event = Com::Nbos::Events::Event.active_events.where(id: event_id, tenant_id: @user.tenant_id)\n if @event.present?\n render :json => @event\n else\n render :json => {messageCode: \"event.notfound\", message: \"Event Not Found\"}, status: 404\n end\n else\n render :json => {messageCode: \"bad.request\", message: \"Bad Request\"}, status: 400\n end\n end",
"def show\n @event = Event.find(params[:id])\n render json: @event\n end",
"def show\n @angel_event = AngelEvent.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @angel_event }\n end\n end",
"def event\n begin\n JSON.parse(@omegle.post('/events', \"id=#{@id}\").body)\n rescue\n end\n end",
"def show\n @myevent = Myevent.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @myevent }\n end\n end",
"def show\n @simple_event = SimpleEvent.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @simple_event }\n end\n end",
"def show\n @atfal_event = AtfalEvent.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @atfal_event }\n end\n end",
"def show\n @important_event = ImportantEvent.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @important_event }\n end\n end",
"def show\n @inspection_event = InspectionEvent.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @inspection_event }\n end\n end",
"def show\n @current_event = CurrentEvent.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @current_event }\n end\n end",
"def show\n puts \"dddddddddddddddddddddddddddddddddddddd\"\n puts params[:id]\n @interview_event = InterviewEvent.find_by_id(params[:id])\n\n puts @interview_event\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @interview_event }\n end\n end",
"def show\n @event_req = EventReq.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event_req }\n end\n end",
"def events\n events = @vulnerability.events.map do |e|\n {\n id: e.id,\n title: e.title,\n description: e.description,\n date: e.date,\n event_type: e.event_type,\n color: e.color,\n icon: e.icon,\n notes: e.notes,\n start_hidden: e.start_hidden,\n }\n end\n render_json_for_api events\n end",
"def show\n @ticketing_event = Ticketing::Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @ticketing_event }\n end\n end",
"def new\n @idea_event = IdeaEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @idea_event }\n end\n end",
"def show\n @events2 = Events2.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @events2 }\n end\n end",
"def index\n @ticketing_events = Ticketing::Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @ticketing_events }\n end\n end",
"def show\n @expend_event = ExpendEvent.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @expend_event }\n end\n end",
"def show\n @eventtracker = Eventtracker.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @eventtracker }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
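A hypothetical client-side call against the JSON variant of this endpoint (the host, port and record id are assumptions):

require 'net/http'
require 'json'

uri  = URI('http://localhost:3000/idea_events/1.json')
body = Net::HTTP.get(uri)        # the controller renders the record as JSON
idea_event = JSON.parse(body)
puts idea_event['id']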
GET /idea_events/new GET /idea_events/new.json | def new
@idea_event = IdeaEvent.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @idea_event }
end
end | [
"def new\n @create_event = CreateEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @create_event }\n end\n end",
"def new\n @event = Event.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @add_event = AddEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @add_event }\n end\n end",
"def new\n @event_req = EventReq.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event_req }\n end\n end",
"def new\n @angel_event = AngelEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @angel_event }\n end\n end",
"def new\n @current_event = CurrentEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @current_event }\n end\n end",
"def new\n @atfal_event = AtfalEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @atfal_event }\n end\n end",
"def new\n @important_event = ImportantEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @important_event }\n end\n end",
"def new\n @recent_event = RecentEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @recent_event }\n end\n end",
"def create\n @idea_event = IdeaEvent.new(params[:idea_event])\n\n respond_to do |format|\n if @idea_event.save\n format.html { redirect_to @idea_event, notice: 'Idea event was successfully created.' }\n format.json { render json: @idea_event, status: :created, location: @idea_event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @idea_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @event_request = EventRequest.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event_request }\n end\n end",
"def new\n @event = current_event\n @attendee = Attendee.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @attendee }\n end\n end",
"def new\n @events_collection = EventsCollection.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @events_collection }\n end\n end",
"def new\n @myevent = Myevent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @myevent }\n end\n end",
"def new\n @expend_event = ExpendEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @expend_event }\n end\n end",
"def new\n @events_tag = EventsTag.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @events_tag }\n end\n end",
"def new\n @planned_event = PlannedEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @planned_event }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /idea_events POST /idea_events.json | def create
@idea_event = IdeaEvent.new(params[:idea_event])
respond_to do |format|
if @idea_event.save
format.html { redirect_to @idea_event, notice: 'Idea event was successfully created.' }
format.json { render json: @idea_event, status: :created, location: @idea_event }
else
format.html { render action: "new" }
format.json { render json: @idea_event.errors, status: :unprocessable_entity }
end
end
end | [
"def create\n @event = Event.new(event_params)\n if @event.save\n render json: @event.to_json, status: :created\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def create\n Rails.logger.debug(\"Received event #{params[:event]}\")\n head :ok\n end",
"def event\n begin\n JSON.parse(@omegle.post('/events', \"id=#{@id}\").body)\n rescue\n end\n end",
"def create\n event = Event.create(event_params)\n if event.save\n render json: event, status: 200\n else\n render json: {errors: event.errors.full_messages}, status: :unprocessable_entity\n end\n end",
"def save\n event = params\n # This assumes that all keys exists. Yay no error handling...\n toSave = Event.new(update_type: event[:event],\n start_time: event[:payload][:event][:start_time_pretty],\n end_time: event[:payload][:event][:end_time_pretty],\n location: event[:payload][:event][:location],\n invitee_name: event[:payload][:invitee][:name],\n duration: event[:payload][:event_type][:duration],\n event_kind: event[:payload][:event_type][:kind])\n toSave.save\n render json: {}, status: 200\n end",
"def push_events\n saved = []\n jsonHash = request.POST[:_json];\n jsonHash.each do |jsonEvent|\n event = Event.new\n event.race_id = jsonEvent[\"raceId\"]\n event.walker_id = jsonEvent[\"walkerId\"]\n event.eventId = jsonEvent[\"eventId\"]\n event.eventType = jsonEvent[\"type\"]\n event.eventData = jsonEvent[\"data\"]\n event.batteryLevel = jsonEvent[\"batL\"]\n event.batteryState = jsonEvent[\"batS\"]\n event.timestamp = Time.zone.parse(jsonEvent[\"time\"])\n if event.save # if new\n saved << jsonEvent[\"eventId\"]\n if event.race_id != 0 # if not unknown race_id\n after_create(event)\n end\n else # if exists\n saved << jsonEvent[\"eventId\"]\n puts \"Not Saved!\" # debug print\n puts jsonEvent # debug print \n end\n end\n render :json => {:savedEventIds => saved}\n end",
"def create\n @interview_event = InterviewEvent.new(params[:interview_event])\n\n respond_to do |format|\n if @interview_event.save\n format.html { redirect_to @interview_event, notice: 'Interview event was successfully created.' }\n format.json { render json: @interview_event, status: :created, location: @interview_event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @interview_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @ticketing_event = Ticketing::Event.new(params[:ticketing_event])\n\n respond_to do |format|\n if @ticketing_event.save\n format.html { redirect_to ticketing_events_path }\n format.json { render json: @ticketing_event, status: :created, location: @ticketing_event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @ticketing_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @atfal_event = AtfalEvent.new(params[:atfal_event])\n\n respond_to do |format|\n if @atfal_event.save\n format.html { redirect_to @atfal_event, notice: 'Atfal event was successfully created.' }\n format.json { render json: @atfal_event, status: :created, location: @atfal_event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @atfal_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n if create_issue_event(params[:event])\n render json: { message: 'Event successfully received' }, status: 200\n else\n render json: { error: 'Invalid event' }, status: 400\n end\n end",
"def create\n @dia_evento = DiaEvento.new(dia_evento_params)\n\n if @dia_evento.save\n render json: @dia_evento, status: :created, location: @dia_evento\n else\n render json: @dia_evento.errors, status: :unprocessable_entity\n end\n end",
"def create\n @angel_event = AngelEvent.new(params[:angel_event])\n\n respond_to do |format|\n if @angel_event.save\n format.html { redirect_to @angel_event, notice: 'Angel event was successfully created.' }\n format.json { render json: @angel_event, status: :created, location: @angel_event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @angel_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n event = Event.create(event_params(:admin_id, :start, :end, :max_price, :notes))\n while !event.code\n code = SecureRandom.hex(4)\n codes = Event.all.map{|event| event.code}\n if !codes.include? (code)\n event.update({code: code})\n end\n end\n if params[:events][:add].present?\n admin = Admin.find(event.admin_id)\n user = User.create({first_name: admin.first_name, last_name: admin.last_name, email: admin.email, event_id: event.id})\n user.update(event_params(:ideas))\n end\n render json: event\nend",
"def create\n \t@expected_event = ExpectedEvent.new(expected_event_params)\n\n \trespond_to do |format|\n \t\tif @expected_event.save\n \t\t\tformat.html { redirect_to @expected_event, notice: 'Incoming event was successfully created'}\n \t\telse\n \t\t\tformat.html { render action: 'new' }\n \t\tend\n \tend\n end",
"def create\n @everyday_event = EverydayEvent.new(everyday_event_params)\n\n respond_to do |format|\n if @everyday_event.save\n format.html { redirect_to @everyday_event, notice: 'Everyday event was successfully created.' }\n format.json { render action: 'show', status: :created, location: @everyday_event }\n else\n format.html { render action: 'new' }\n format.json { render json: @everyday_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @idea_event = IdeaEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @idea_event }\n end\n end",
"def create\n @add_event = AddEvent.new(params[:add_event])\n\n respond_to do |format|\n if @add_event.save\n format.html { redirect_to @add_event, notice: 'Add event was successfully created.' }\n format.json { render json: @add_event, status: :created, location: @add_event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @add_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @eatup_event = EatupEvent.new(eatup_event_params)\n\n respond_to do |format|\n if @eatup_event.save\n format.html { redirect_to @eatup_event, notice: 'Eatup event was successfully created.' }\n format.json { render :show, status: :created, location: @eatup_event }\n else\n format.html { render :new }\n format.json { render json: @eatup_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @response_event = ResponseEvent.new(params[:response_event])\n @response_event.event_id = params[:event_id]\n @response_event.user_id = current_user.id\n\n respond_to do |format|\n if @response_event.save\n format.html { redirect_to @response_event.event, notice: 'Response event was successfully created.' }\n format.json { render json: @response_event, status: :created, location: @response_event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @response_event.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
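A matching JSON create request, again hypothetical; the attribute names inside idea_event are assumptions about the model:

require 'net/http'
require 'json'

uri     = URI('http://localhost:3000/idea_events.json')
payload = { idea_event: { title: 'Demo', description: 'Example' } }.to_json

response = Net::HTTP.post(uri, payload, 'Content-Type' => 'application/json')
puts response.code   # "201" on success, "422" when validations fail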
PUT /idea_events/1 PUT /idea_events/1.json | def update
@idea_event = IdeaEvent.find(params[:id])
respond_to do |format|
if @idea_event.update_attributes(params[:idea_event])
format.html { redirect_to @idea_event, notice: 'Idea event was successfully updated.' }
format.json { head :ok }
else
format.html { render action: "edit" }
format.json { render json: @idea_event.errors, status: :unprocessable_entity }
end
end
end | [
"def update\n event = event.find(params[\"id\"]) \n event.update_attributes(event_params) \n respond_with event, json: event\n end",
"def update\n #TODO params -> strong_params\n if @event.update(params)\n head :no_content\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.json { head :no_content }\n else\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def put_events(args)\n\tapi_url = \"#{@base_url}/#{args[:collection]}/#{args[:key]}/events/#{args[:event_type]}\"\n\tputs do_the_put_call( url: api_url, user: @user, json: args[:json] )\nend",
"def update\n @angel_event = AngelEvent.find(params[:id])\n\n respond_to do |format|\n if @angel_event.update_attributes(params[:angel_event])\n format.html { redirect_to @angel_event, notice: 'Angel event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @angel_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event_req = EventReq.find(params[:id])\n\n respond_to do |format|\n if @event_req.update_attributes(params[:event_req])\n format.html { redirect_to @event_req, notice: 'Event req was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event_req.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @interview_event = InterviewEvent.find(params[:id])\n\n respond_to do |format|\n if @interview_event.update_attributes(params[:interview_event])\n format.html { redirect_to @interview_event, notice: 'Interview event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @interview_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @ticketing_event = Ticketing::Event.find(params[:id])\n\n respond_to do |format|\n if @ticketing_event.update_attributes(params[:ticketing_event])\n format.html { redirect_to ticketing_events_path }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @ticketing_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @create_event = CreateEvent.find(params[:id])\n\n respond_to do |format|\n if @create_event.update_attributes(params[:create_event])\n format.html { redirect_to @create_event, notice: 'Create event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @create_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @important_event = ImportantEvent.find(params[:id])\n\n respond_to do |format|\n if @important_event.update_attributes(params[:important_event])\n format.html { redirect_to @important_event, notice: 'Important event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @important_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def put_event(args)\n\t\t\tapi_url = \"#{@base_url}/#{args[:collection]}/#{args[:key]}/events/#{args[:event_type]}\"\n\t\t\tdo_the_put_call( url: api_url, json: args[:json] )\n\t\tend",
"def update\n @atfal_event = AtfalEvent.find(params[:id])\n\n respond_to do |format|\n if @atfal_event.update_attributes(params[:atfal_event])\n format.html { redirect_to @atfal_event, notice: 'Atfal event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @atfal_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @js_event = JsEvent.find(params[:id])\n\n respond_to do |format|\n if @js_event.update_attributes(params[:js_event])\n format.html { redirect_to @js_event, notice: 'Js event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @js_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @current_event = CurrentEvent.find(params[:id])\n\n respond_to do |format|\n if @current_event.update_attributes(params[:current_event])\n format.html { redirect_to @current_event, notice: 'Current event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @current_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @agenda_event = Agenda::Event.find(params[:id])\n\n respond_to do |format|\n if @agenda_event.update_attributes(params[:agenda_event])\n format.html { redirect_to(@agenda_event, :notice => 'Event was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @agenda_event.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n @event.update_attributes(params[:event])\n\n respond_with(:admin, @event)\n end",
"def update\n \trespond_to do |format|\n \t\tif @expected_event.update(expected_event_params)\n \t\t\tformat.html { redirect_to @expected_event, notice: 'Expected event was successfully updated' }\n \t\telse\n \t\t\tformat.html { render action: 'edit' }\n \t\tend\n \tend\n end",
"def update\n respond_to do |format|\n if @suggested_event.update(suggested_event_params)\n format.html { redirect_to @suggested_event, notice: 'Suggested event was successfully updated.' }\n format.json { render :show, status: :ok, location: @suggested_event }\n else\n format.html { render :edit }\n format.json { render json: @suggested_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @idea_event = IdeaEvent.new(params[:idea_event])\n\n respond_to do |format|\n if @idea_event.save\n format.html { redirect_to @idea_event, notice: 'Idea event was successfully created.' }\n format.json { render json: @idea_event, status: :created, location: @idea_event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @idea_event.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
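update_attributes(params[:idea_event]) is the pre-strong-parameters style; on Rails 4 and later the same controller would whitelist attributes first. A hypothetical equivalent private method (the attribute names are assumptions):

def idea_event_params
  params.require(:idea_event).permit(:title, :description)
end
# ...and the update call becomes @idea_event.update(idea_event_params)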
DELETE /idea_events/1 DELETE /idea_events/1.json | def destroy
@idea_event = IdeaEvent.find(params[:id])
@idea_event.destroy
respond_to do |format|
format.html { redirect_to idea_events_url }
format.json { head :ok }
end
end | [
"def destroy\n @event.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @atfal_event = AtfalEvent.find(params[:id])\n @atfal_event.destroy\n\n respond_to do |format|\n format.html { redirect_to atfal_events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @authevent.destroy\n respond_to do |format|\n format.html { redirect_to authevents_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @important_event = ImportantEvent.find(params[:id])\n @important_event.destroy\n\n respond_to do |format|\n format.html { redirect_to important_events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @angel_event = AngelEvent.find(params[:id])\n @angel_event.destroy\n\n respond_to do |format|\n format.html { redirect_to angel_events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.using(:shard_one).find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @myevent = Myevent.find(params[:id])\n @myevent.destroy\n\n respond_to do |format|\n format.html { redirect_to myevents_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @simple_event = SimpleEvent.find(params[:id])\n @simple_event.destroy\n\n respond_to do |format|\n format.html { redirect_to simple_events_dashboard_index_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event_req = EventReq.find(params[:id])\n @event_req.destroy\n\n respond_to do |format|\n format.html { redirect_to event_reqs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @everyday_event.destroy\n respond_to do |format|\n format.html { redirect_to everyday_events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @adminevent.destroy\n respond_to do |format|\n format.html { redirect_to adminevents_url }\n format.json { head :no_content }\n end\n end",
"def delete_event\r\n event = Event.find_by(id: params[:eventid].to_i)\r\n if event.present?\r\n event.update(status: 3)\r\n lt_update_event_status event, 'archived'\r\n render json: SuccessResponse.new(\r\n code: 200,\r\n message: 'Event Deleted.'\r\n ), adapter: :json, status: :ok\r\n else\r\n render json: ErrorResponse.new(\r\n code: 404,\r\n message: 'Event not found!'\r\n ), adapter: :json, status: :not_found\r\n end\r\n\r\n end",
"def destroy\n @calevent = Calevent.find(params[:id])\n @calevent.destroy\n\n respond_to do |format|\n format.html { redirect_to calevents_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @api_v1_clockwork_database_event.destroy\n respond_to do |format|\n format.html { redirect_to api_v1_clockwork_database_events_url, notice: 'Clockwork database event was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @eventlog.destroy\n respond_to do |format|\n format.html { redirect_to eventlogs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event_request = EventRequest.find(params[:id])\n @event_request.destroy\n\n respond_to do |format|\n format.html { redirect_to event_requests_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n get_event\n @event_exam = @event.exams.find(params[:id])\n @event_exam.destroy\n\n respond_to do |format|\n format.html { redirect_to event_exams_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @response_event = event.find(params[:id])\n @response_event.destroy\n\n respond_to do |format|\n format.html { redirect_to response_events_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @interview_event = InterviewEvent.find(params[:id])\n @interview_event.destroy\n\n respond_to do |format|\n format.html { redirect_to interview_events_url }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Convert the freebase ID for a type into an API endpoint ID | def to_endpoint id
t=id.split('/')
domain = t[1..-2].join('_')
type = t[-1]
"freebase_tsv_#{domain}__#{type}"
end | [
"def type_id(type)\n type.to_name_s('_')\n end",
"def to_global_id(type_name, id)\n Base64.strict_encode64(\"#{type_name}-#{id}\")\n end",
"def base_type_id(node)\n fixed_type_id(node).split(/[^#A-Za-z0-9_-]/).last\n end",
"def freebase_id\n to_nil _response_entity.fetch(\"freebaseId\", nil)\n end",
"def type_id() end",
"def code_for(id_type)\n if valid_id? id_type\n return ID_TYPES[id_type]\n else\n raise(ArgumentError,\n 'Invalid id_type. Please use a valid id_type for e.g. :national_id')\n end\n end",
"def get_end_point_type(endpoint_type)\n endpoint_type.split('/')[2]\nend",
"def uuid\n \"#{type}/#{uid}\"\n end",
"def make_ref_from_oid(object_type, object_id)\n return \"/#{object_type}/#{object_id}\"\n end",
"def endpoint_id; end",
"def endpoint_id=(_arg0); end",
"def identify_type(api_detail_url)\n if api_detail_url.to_s =~ /comicvine\\.gamespot\\.com\\/api\\/(\\w+)\\/?/\n $1.to_s.to_sym\n else\n nil\n end\n end",
"def account_type_to_param(account_type)\n account_type.to_s.underscore.tr(\"/\", \"_\")\n end",
"def api_id(api_name)\n @api_ids[api_name.downcase.to_s]\n end",
"def find_phoneable_type\n %w(organization person).each do |type|\n param_name = [type, 'id'].join('_').to_sym\n requested_id = params[param_name]\n return type.to_sym unless requested_id.blank?\n end\n nil\n end",
"def type_id\n @type_id ||= extract_int(@content[0...TYPE_SIZE])\n end",
"def toOapID(id)\n return id if isOapID(id)\n tmp = \"ark:/13030/#{id}\"\n return tmp if isOapID(tmp)\n return nil\nend",
"def id\n prefix = case @type\n when 'SiteEventType'\n \"site-\"\n when 'StreetEventType'\n \"street-\"\n else\n \"\"\n end\n id = \"#{prefix}#{@json[:id]}\"\n end",
"def make_id\n \"#{self.class.name.downcase}#{id}\"\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
downloads CSV of all grant data to the user's computer | def get_grant_data
if GrantsData.exists?
@grant_data = GrantsData.all_csv
respond_to do |format|
format.html
format.csv do
send_data @grant_data, filename: "all_grants_data_#{Time.now.to_s(:db)}.csv"
end
end
else
flash[:notice] = 'Grant Data Table Empty'
redirect_to :controller => 'amrc_reports', :action => 'reports'
end
end | [
"def export_csv\n # Find all user with the stored restrictions\n users = EsUser.find :all, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"users_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Activé\".trn,\"Nom\".trn,\"Prénom\".trn,\"Mail\".trn]\n users.each do |t|\n csv << [t.active,t.name,t.firstname,t.mail]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def download_ta_list\n #find all the users\n tas = Ta.order(:user_name)\n case params[:format]\n when 'csv'\n output = User.generate_csv_list(tas)\n format = 'text/csv'\n when 'xml'\n output = tas.to_xml\n format = 'text/xml'\n else\n # Raise exception?\n output = tas.to_xml\n format = 'text/xml'\n end\n send_data(output, type: format, disposition: 'inline')\n end",
"def list_all_guests_csv(args = {}) \n get(\"/guestaccess.json/all/csv\", args)\nend",
"def ExportToCsv\n\n if valid_password?\n @users = User.all\n\n csv_string = CSV.generate do |csv|\n csv << [\"Clinic\", \"Name\", \"Email\", \"TimeStamp\", \"Q1\", \"Q2\", \"Q3\", \"Q4\", \"Q5\", \"F1\", \"F2\", \"F3\", \"F4\", \"F5\"] \n for user in @users\n\n # Null entries cause problems with converting to string for the csv\n answers = user.answers.nil? ? ['','','','',''] : YAML::load(user.answers) # answers to modules\n\n # Null entries cause problems with converting to string for the csv\n ff = user.final_feedback.nil? ? ['','','','',''] : YAML::load(user.final_feedback) # final feedback responses\n\n (answers << ff).flatten!\n info = [user.clinic, user.name, user.email, user.time_stamp]\n (info << answers).flatten!\n csv << info\n end\n end \n send_data csv_string,\n :type => 'text/csv; charset=iso-8859-1; header=present',\n :disposition => \"attachment; filename=user_data.csv\"\n else\n flash.now[:danger] = \"Permission Denied - Password Incorrect\"\n render 'about' and return\n end\n end",
"def download_competitor_list\n exporter = Exporters::Competition::Swiss.new(@competition, nil)\n csv_string = CSV.generate(col_sep: \"\\t\") do |csv|\n csv << exporter.headers if exporter.headers.present?\n exporter.rows.each do |row|\n csv << row\n end\n end\n\n filename = \"competitors_#{@competition.to_s.parameterize}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def download_customers_csv\n require 'fastercsv'\n @customers = OrderUser.find(\n :all\n )\n csv_string = FasterCSV.generate do |csv|\n # Do header generation 1st\n csv << [\n \"FirstName\", \"LastName\", \"EmailAddress\"\n ]\n for c in @customers\n csv << [c.first_name, c.last_name, c.email_address]\n end\n end\n\n directory = File.join(RAILS_ROOT, \"public/system/customers\")\n file_name = Time.now.strftime(\"Customer_list-%m_%d_%Y_%H-%M\")\n file = \"#{file_name}.csv\"\n save_to = \"#{directory}/#{file}\"\n\n # make sure we have the directory to write these files to\n if Dir[directory].empty?\n FileUtils.mkdir_p(directory)\n end \n\n # write the file\n File.open(save_to, \"w\") { |f| f.write(csv_string) }\n\n send_file(save_to, :type => \"text/csv\")\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => @@default_sort\n collection = @@model.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"#{@@param_name}_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << @@name_print.map{|n| n.trn}\n collection.each do |element|\n csv << @@field_print.map{|f| element[f]}\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def download_csv\n query = params[:query]\n company_id = params[:company_id]\n file = Operation.create_csv(query, company_id) \n send_file file\n end",
"def download_competitor_list_ssv\n exporter = Exporters::Competition::Simple.new(@competition)\n csv_string = CSV.generate(col_sep: \";\") do |csv|\n csv << exporter.headers\n exporter.rows.each do |row|\n csv << row\n end\n end\n\n filename = \"competitors_#{@competition.to_s.parameterize}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => 'es_setups.path, es_setups.name'\n setups = EsSetup.find :all, :order => @sort, :conditions => session[:conditions_setup]\n # Creation of the file\n file_name = \"setups_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Chemin\".trn,\"Nom\".trn,\"Valeur\".trn,\"Type\".trn, \"Lecture seule\".trn]\n setups.each do |t|\n csv << [t.path,t.name,t.value,t.type_data,(t.read_only=='Y' ? 'V' : '')]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def download_data_as_csv_file\n send_file \"#{Rails.root}/files/new-file.csv\", type: \"application/csv\", x_sendfile: true\n end",
"def download_provider\n provider_app_detail_ids = ProviderAppDetail.where(fk_audit_trail_id: params[:audit_id]).pluck(:sys_provider_app_detail_id)\n @providers = Provider.where(\"fk_provider_app_detail_id in (?)\", provider_app_detail_ids)\n reg_app = AuditTrail.find(params[:audit_id]).registered_app\n\n respond_to do |format|\n format.html\n format.csv { send_data @providers.to_csv(reg_app, {}), :type => 'text/csv; charset=utf-8; header=present',\n :disposition => \"attachment; filename= #{reg_app.app_name}_Providers_#{DateTime.now.to_s}.csv\" }\n end\n end",
"def download_grader_students_mapping\n grade_entry_form = GradeEntryForm.find(params[:grade_entry_form_id])\n students = Student.all\n\n file_out = CsvHelper::Csv.generate do |csv|\n students.each do |student|\n # csv format is student_name, ta1_name, ta2_name, ... etc\n student_array = [student.user_name]\n grade_entry_student = grade_entry_form.grade_entry_students.find_by_user_id(student.id)\n unless grade_entry_student.nil?\n grade_entry_student.tas.each { |ta| student_array.push(ta.user_name) }\n end\n\n csv << student_array\n end\n end\n\n send_data(file_out, :type => 'text/csv', :disposition => 'inline')\n end",
"def export\n \n CsvUtils.setup_request_for_csv headers, request, \"users\"\n \n stream_csv do |csv|\n csv << [\"email\",\n \"first name\", \n \"last name\", \n \"enterprise\",\n \"allocations mgr (Y|N)\",\n \"voter (Y|N)\"\n ]\n end\n end",
"def downloaduserlist\n respond_to do |format|\n format.xls {send_data generateUserXLS(col_sep: \"\\t\") }\n end\n end",
"def export_user_request\n @user = User.find(params[:id])\n @user_requests = @user.pto_requests\n\n respond_to do |format|\n format.csv { send_data @user_requests.to_csv }\n end\n end",
"def download\n grade_entry_form = record\n send_data grade_entry_form.export_as_csv(current_role),\n disposition: 'attachment',\n type: 'text/csv',\n filename: \"#{grade_entry_form.short_identifier}_grades_report.csv\"\n end",
"def download_student_list\n students = Student.order(:user_name).includes(:section)\n case params[:format]\n when 'csv'\n output = MarkusCSV.generate(students) do |student|\n info = [student.user_name, student.last_name, student.first_name, student.id_number, student.email]\n unless student.section.nil?\n info << student.section.name\n end\n info\n end\n format = 'text/csv'\n when 'xml'\n output = students.to_xml\n format = 'text/xml'\n else\n # Raise exception?\n output = students.to_xml\n format = 'text/xml'\n end\n send_data(output, type: format, disposition: 'attachment')\n end",
"def downloaduserinfo\n\t\trespond_to do |format|\n\t\t\tformat.xls {send_data generateXLS(col_sep: \"\\t\") }\n\t\tend\n\tend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
passes all Users to the research_individual page | def research_individual
@users = Array.new()
User.all.each do |u|
if !u.admin?
@users.push(u)
end
end
end | [
"def search_users\n unless @current_admin.is_super_admin\n unless @current_admin.privilages.include? '1'\n flash[:authority_error]=\"You are not authorized to navigate to this page \"\n redirect_to admin_index_path\n empty_user_id\n return\n end\n end\n empty_user_id\n @check=0\n @searched_user=User.new\n end",
"def course_user_search\n return unless authorized_action(@account, @current_user, :read)\n can_read_course_list = @account.grants_right?(@current_user, session, :read_course_list)\n can_read_roster = @account.grants_right?(@current_user, session, :read_roster)\n can_manage_account = @account.grants_right?(@current_user, session, :manage_account_settings)\n\n unless can_read_course_list || can_read_roster\n if @redirect_on_unauth\n return redirect_to account_settings_url(@account)\n else\n return render_unauthorized_action\n end\n end\n\n js_env({\n COURSE_ROLES: Role.course_role_data_for_account(@account, @current_user)\n })\n js_bundle :account_course_user_search\n css_bundle :addpeople\n @page_title = @account.name\n add_crumb '', '?' # the text for this will be set by javascript\n js_env({\n ROOT_ACCOUNT_NAME: @account.root_account.name, # used in AddPeopleApp modal\n ACCOUNT_ID: @account.id,\n 'master_courses?' => master_courses?,\n ROOT_ACCOUNT_ID: @account.root_account.id,\n customized_login_handle_name: @account.root_account.customized_login_handle_name,\n delegated_authentication: @account.root_account.delegated_authentication?,\n SHOW_SIS_ID_IN_NEW_USER_FORM: @account.root_account.allow_sis_import && @account.root_account.grants_right?(@current_user, session, :manage_sis),\n PERMISSIONS: {\n can_read_course_list: can_read_course_list,\n can_read_roster: can_read_roster,\n can_create_courses: @account.grants_right?(@current_user, session, :manage_courses),\n can_create_enrollments: @account.grants_any_right?(@current_user, session, :manage_students, :manage_admin_users),\n can_create_users: @account.root_account.grants_right?(@current_user, session, :manage_user_logins),\n analytics: @account.service_enabled?(:analytics),\n can_masquerade: @account.grants_right?(@current_user, session, :become_user),\n can_message_users: @account.grants_right?(@current_user, session, :send_messages),\n can_edit_users: @account.grants_any_right?(@current_user, session, :manage_students, :manage_user_logins),\n can_manage_groups: @account.grants_right?(@current_user, session, :manage_groups), # access to view user groups?\n can_manage_admin_users: @account.grants_right?(@current_user, session, :manage_admin_users) # access to manage user avatars page?\n }\n })\n render html: '', layout: true\n end",
"def index\n @information_personals = InformationPersonal.find_by(user_id: current_user.id)\n \n if !@information_personals\n redirect_to new_information_personal_path\n end\n end",
"def users_in_subject_rule\n \n @users_per_subject_missing=nil\n \n idSubject= params[:subjectId]\n idSemester= params[:semesterId]\n #Usuarios registrados por cada materia y ordenados por antiguedad para garantizar los cupos\n #Listos para ser listados!\n \n materia_seleccionada=Subject.find(idSubject)\n \n @primeros=PreregisterSubject\n .includes(:subject, :semester)\n .select(\"user_id\").where(\"subject_id =\"+idSubject +\" and semester_id=\"+idSemester)\n .order(:created_at)\n .limit(materia_seleccionada.quota) \n \n @users_per_subject= User\n .where(\"id IN (?)\", @primeros.pluck(:user_id))\n .paginate :page => params[:page], :per_page => 5\n \n render :index \n end",
"def index\n @interested_users = InterestedUser.all\n end",
"def index\n @institute_users = InstituteUser.all\n end",
"def index\n @join_user_to_assistants = JoinUserToAssistant.all\n end",
"def index\n @users = User.all\n\n # If a level is passed, check if it's a number and a valid level of privileges.\n check_level_param\n @users = @users.where(:level => @level) if (!@level.blank?)\n\n # If a search query is received, filter the results\n if (!params[:q].blank?)\n # Do the search\n @query = params[:q]\n @users = @users.where(\"$or\" => [{:name => /#{@query}/i}, {:email => /#{@query}/i}])\n end\n\n # If a page number is received, save it (if not, the page is the first)\n if (!params[:page].blank?)\n page = params[:page].to_i\n page = 1 if (page < 1)\n else\n page = 1\n end\n \n # Paginate!\n @users = @users.page(page)\n\n respond_to do |format|\n format.html\n end\n end",
"def search\n user = User.where(:email => params[:search_data]).first\n\n if user #if such user exists\n redirect_to list_others_path(user.id)\n else #if such user doesn't exist\n redirect_to lists_path, :notice => \"Couldn't find anyone\"\n end\n end",
"def index\n @research_scientists = ResearchScientist.all\n end",
"def index\n @edu_docs = current_user.edu_docs\n end",
"def index\n @request_for_teaching_assistants = (RequestForTeachingAssistant.all.map do\n |request| request\n end).keep_if do |request|\n professor_can_see?(current_professor, request)\n end\n end",
"def search_user\n search = Sunspot.search User do\n fulltext search_params\n end\n r = Hash.new\n search.results.each do |u|\n r.store(u.id,u)\n end\n render :json => r.to_json\n end",
"def index\n @users_specialities = policy_scope(UsersSpeciality).where(user: current_user)\n end",
"def professional_information\n handles_not_set user.university, user.job, check: true do |university, job|\n best_in_place_if(can_edit?, user, :university, type: :input, :nil => 'University not specified', errors: university.errors) +\n best_in_place_if(can_edit?, user, :job, type: :input, :nil => 'Job not specified', errors: job.errors)\n end\n end",
"def profile_specialist_search\n\t\t# search the the speciality name according to the terms\n\t\tspecialities = Speciality.any_of({ :name => /^#{params[:term]}/i }).all.collect{|speciality| {label: speciality.name ,value: speciality.id.to_s}}.to_json \n\t\t# render to the surgery name page\n\t\trespond_to do |format|\n\t\t format.json { render :json => specialities }\n\t\tend\n\tend",
"def advance_search_emp\n @employees = Employee.adv_search(params[:search])\n @search = Employee.adv_search2(params[:search])\n authorize! :read, @employee\n end",
"def index\n authorize HigherEducationInstitution\n @higher_education_institutions = HigherEducationInstitution.order(:name).search(params[:search]).page(params[:page])\n end",
"def index\n if @user\n @pseuds = @user.pseuds.find(:all)\n else\n redirect_to people_path\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
passes all Users to the grant_individual page | def grants_individual
@users = Array.new()
User.all.each do |u|
if !u.admin?
@users.push(u)
end
end
end | [
"def grant_all\n @_granted = all_permissions\n end",
"def grant_all!\n @all_permissions=true\n end",
"def grant\n @grant \n end",
"def index\n @manage_grants = Manage::Grant.all\n end",
"def grant\n @grant\n end",
"def admin_grant_permissions\n @user = User.includes(:perms).find(params[:id])\n authorize @user\n user_perms = current_user.perms\n @perms = user_perms & [Perm.grant_permissions, Perm.modify_templates, Perm.modify_guidance, Perm.use_api, Perm.change_org_details]\n end",
"def count_grant_entries\n session[:grants_number] = GrantsData.where(userID: current_user.id).count\nend",
"def grant\n self.status = 'granted'\n save\n end",
"def index\n @relatorio_assistente_socials = RelatorioAssistenteSocial.all.page(params[:page]).per(15)\n authorize @relatorio_assistente_socials\n end",
"def index\n @loan_manager_profiles = LoanManagerProfile.all\n authorize LoanManagerProfile\n end",
"def authorize_user(id)\n #check if they are who gets access to the page \n\tif current_user.id != id && id != -1 # modified by Codefire\n\t\tredirect_to summary_sheet_url, :notice => \"Access denied.\" and return\n\tend\n end",
"def check_grant\n\n # Function get config to send notification by Email or Cybozu\n # @author: ThanhTV\n # @Date: 2015/09/07\n @admin_setting = AdminSetting.take\n\n redirect_to \"/home\" unless current_user.grant.zero?\n\n end",
"def index\n #authorize! :index, :static_pages\n \t@users = current_user.other_schools\n \tif @users.blank?\n \trender :text => 'NOT WORKING', :status => :unauthorized\n \tend\n\tend",
"def index\n @permitted_users = PermittedUser.all\n end",
"def authorize_for_all_guests\n # Clear all authorizations and create an allow-all entry\n #ContentAuthorization.transaction do\n update_privacy_level(ContentAuthorization::AuthPrivate)\n clear_accessors\n #end\n end",
"def index\n @users = policy_scope(User) #@users = UserPolicy::Scope.new(current_user, User).resolve\n authorize Access #raise \"not authorized\" unless UserPolicy.new(current_user, User).index?\n end",
"def authorise\n # checks can go here (is the application registered for example)\n grant = generate_grant\n valid_grants << grant\n grant\n end",
"def everyone_can_deposit_everywhere\n AdminSet.all.each do |admin_set|\n next if Hyrax::PermissionTemplateAccess\n .find_by(permission_template_id: admin_set.permission_template.id,\n agent_id: 'registered',\n access: 'deposit',\n agent_type: 'group')\n\n admin_set.permission_template.access_grants.create(agent_type: 'group', agent_id: 'registered', access: 'deposit')\n deposit = Sipity::Role.find_by!(name: 'depositing')\n admin_set.permission_template.available_workflows.each do |workflow|\n workflow.update_responsibilities(role: deposit, agents: Hyrax::Group.new('registered'))\n end\n end\n end",
"def grants\n data.grants\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
checks whether a specific client exists | def exists?(username)
perform_request({:action => 'client-checkexists', :username => username})
statusmsg.match /client exists/i
end | [
"def exists?\n if Client.exists?(name: self.name)\n true\n else\n false\n end\n end",
"def has_client?\n @has_client ||= false\n end",
"def exists?(username)\n perform_request(action: 'client-checkexists', username: username)\n !!statusmsg.match(/Client exists/i)\n end",
"def client_exists?(clientname)\n Chef::ApiClient.load(clientname)\n true\n rescue Net::HTTPClientException => http_error\n return false if http_error.response.code == \"404\"\n\n raise http_error\n end",
"def exists?(username)\n perform_request({:action => 'client-checkexists', :username => username})\n end",
"def reusable_client?\n @options.keys == [:client]\n end",
"def client?\n !!@client\n end",
"def registered?(item)\n @clients.has_key?(item) or @clients.has_value?(item)\n end",
"def known_client?(client_public_key)\n encoded_key = z85_encode(client_public_key)\n if @known_clients.empty?\n store_known_client(encoded_key)\n true\n else\n @known_clients.include?(encoded_key)\n end\n end",
"def is_client?\n client ? true : false\n end",
"def has_client_id?\n client_id && (client_id != '*')\n end",
"def client?\n roles.count == 1 && has_role?('client')\n end",
"def client_event_exists?(client_name, check_name)\n path = \"/events/#{client_name}/#{check_name}\"\n response = sensu_api_get_request(path)\n response.code.to_i == 200\n end",
"def client_event_exists?(client_name, check_name)\n path = \"/events/#{client_name}/#{check_name}\"\n response = sensu_api_get_request(path)\n response.code.to_i == 200\n end",
"def exist?(name, client = Client.new)\n client.droplets.all.any? { |x| x.name == name }\n end",
"def include?(client)\n @clients[client.identity].checked_in?\n end",
"def subclients?\n subclients.any?\n end",
"def client?\n role == 'client'\n end",
"def check_client\n unless client\n raise ClientNotSetup\n end\n unless client.connected?\n if raise_on_error\n raise ClientNotConnected\n else\n @logger.error 'Client not connected! Check internet connection'\n return false\n end\n end\n true\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Gets the conversationId property value. Unique ID of the conversation. Readonly. | def conversation_id
return @conversation_id
end | [
"def conversation_id\n return @conversation_id\n end",
"def conversation_id=(value)\n @conversation_id = value\n end",
"def conversation_thread_id\n return @conversation_thread_id\n end",
"def conversation_id=(conversationId)\n @conversation_id = conversationId\n end",
"def chat_id\n return @chat_id\n end",
"def conversation_thread_id=(value)\n @conversation_thread_id = value\n end",
"def message_id\n return @message_id\n end",
"def message_id\n @message_id ||= message.message_id\n end",
"def meeting_chat_id\n return @meeting_chat_id\n end",
"def conversation_identity_type\n return @conversation_identity_type\n end",
"def conversation\n @_conversation ||=\n site.conversations.build(id: params[:conversation_id] || params[:id])\n end",
"def chat_id=(value)\n @chat_id = value\n end",
"def communication_service_id\n return @communication_service_id\n end",
"def inbox_recipient_id\n @attributes[:inbox_recipient_id]\n end",
"def fetch_conversation(conversation_id)\r\n self.conversations.fetch(conversation_id.to_s)\r\n end",
"def message_id\n headers['Message-Id'] || msg['_id'] || self['_id']\n end",
"def conversation_member_user\n return @conversation_member_user\n end",
"def conference_id\n return @conference_id\n end",
"def conversation_index\n return @conversation_index\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the conversationId property value. Unique ID of the conversation. Readonly. | def conversation_id=(value)
@conversation_id = value
end | [
"def conversation_id=(conversationId)\n @conversation_id = conversationId\n end",
"def conversation_thread_id=(value)\n @conversation_thread_id = value\n end",
"def conversation_id\n return @conversation_id\n end",
"def conversation_id\n return @conversation_id\n end",
"def chat_id=(value)\n @chat_id = value\n end",
"def meeting_chat_id=(value)\n @meeting_chat_id = value\n end",
"def conversation\n @_conversation ||=\n site.conversations.build(id: params[:conversation_id] || params[:id])\n end",
"def update_conversation(id, data)\r\n @client.raw('put', \"/content/conversations/#{id}\", nil, data_transform(data), @contact_v1_url)\r\n end",
"def update_conversation!(conversation_id, params = {})\n cleaned = params.permit(:assignee_id, :inbox_id, :status, :tags)\n update(\"conversations/#{conversation_id}\", cleaned)\n end",
"def reply_to_id=(value)\n @reply_to_id = value\n end",
"def update_conversation(id, data)\n put(\"conversations/#{id}\", { body: data })\n end",
"def message_id=(value)\n @message_id = value\n end",
"def conversation_index=(conversationIndex)\n @conversation_index = conversationIndex\n end",
"def conversation_index=(value)\n @conversation_index = value\n end",
"def setParticipantId(participantId)\r\n\t\t\t\t\t@participantId = participantId\r\n\t\t\t\tend",
"def conference_id=(value)\n @conference_id = value\n end",
"def communication_service_id=(value)\n @communication_service_id = value\n end",
"def recipient_id=(recipient_id)\n @recipient_ids = [recipient_id]\n end",
"def delete_conversation(id)\n @client.raw('delete', \"/content/conversations/#{id}\")\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Gets the conversationThreadId property value. Unique ID of the conversation thread. Readonly. | def conversation_thread_id
return @conversation_thread_id
end | [
"def conversation_thread_id=(value)\n @conversation_thread_id = value\n end",
"def conversation_id\n return @conversation_id\n end",
"def conversation_id\n return @conversation_id\n end",
"def thread_id\n return @thread_id\n end",
"def thread_id\n check_connection\n @protocol.thread_id\n end",
"def get_thread id\n @threads[id]\n end",
"def conversation_id=(value)\n @conversation_id = value\n end",
"def thread_id\n WinUser.GetWindowThreadProcessId(hwnd, nil)\n end",
"def thread_id\n Thread.current[:id] || nil\n end",
"def thread_id\n return nil unless is_window?\n GetWindowThreadProcessId(handle, nil)\n end",
"def chat_id\n return @chat_id\n end",
"def current_user_id\n thread.user_id\n end",
"def thread_id=(value)\n @thread_id = value\n end",
"def conversation_identity_type\n return @conversation_identity_type\n end",
"def thread_id\n\n\t\t::Pantheios::Core.thread_id\n\tend",
"def conversation_member_user\n return @conversation_member_user\n end",
"def meeting_chat_id\n return @meeting_chat_id\n end",
"def inbox_recipient_id\n @attributes[:inbox_recipient_id]\n end",
"def reply_to_id\n return @reply_to_id\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the conversationThreadId property value. Unique ID of the conversation thread. Readonly. | def conversation_thread_id=(value)
@conversation_thread_id = value
end | [
"def conversation_id=(value)\n @conversation_id = value\n end",
"def conversation_thread_id\n return @conversation_thread_id\n end",
"def thread_id=(value)\n @thread_id = value\n end",
"def conversation_id=(conversationId)\n @conversation_id = conversationId\n end",
"def set_thread_id\n if read_attribute(:thread_id).nil? \n write_attribute(:thread_id, self.id)\n save!\n end\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def chat_id=(value)\n @chat_id = value\n end",
"def set_thread_context(thread_id)\n raise \"nyi\"\n end",
"def crash_thread_id=(value)\n if value == @defaults['crashThreadId']\n @values.delete 'crashThreadId' if @values.key? 'crashThreadId'\n else\n @values['crashThreadId'] = value\n end\n end",
"def conversation_id\n return @conversation_id\n end",
"def meeting_chat_id=(value)\n @meeting_chat_id = value\n end",
"def conversation_id\n return @conversation_id\n end",
"def reply_to_id=(value)\n @reply_to_id = value\n end",
"def set_Thread(value)\n set_input(\"Thread\", value)\n end",
"def thread=(s)\n delete_elements('thread')\n replace_element_text('thread', s) unless s.nil?\n end",
"def set_thread_for_replies\n self.thread = self.commentable.thread if self.reply_comment?\n end",
"def conversation_member_user=(value)\n @conversation_member_user = value\n end",
"def reply_chain_message_id=(value)\n @reply_chain_message_id = value\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the from property value. The from property | def from=(value)
@from = value
end | [
"def from=(value)\n @from = value\n end",
"def from= (v)\n add_attribute('from', v.to_s)\n end",
"def set_FromPath(value)\n set_input(\"FromPath\", value)\n end",
"def set_FromPath(value)\n set_input(\"FromPath\", value)\n end",
"def from_term=(value)\n @from_term = value\n end",
"def set_FromCopyRef(value)\n set_input(\"FromCopyRef\", value)\n end",
"def from(value)\n @expected_initial_value = value\n\n self\n end",
"def set_FromAddress(value)\n set_input(\"FromAddress\", value)\n end",
"def mbox_from=(value)\n @mbox_from = value\n end",
"def set_FromDate(value)\n set_input(\"FromDate\", value)\n end",
"def property(property_name, options = {})\n super\n\n if options[:from]\n if property_name == options[:from]\n fail ArgumentError, \"Property name (#{property_name}) and :from option must not be the same\"\n end\n\n translations_hash[options[:from]] ||= {}\n translations_hash[options[:from]][property_name] = options[:with] || options[:transform_with]\n\n define_method \"#{options[:from]}=\" do |val|\n self.class.translations_hash[options[:from]].each do |name, with|\n self[name] = with.respond_to?(:call) ? with.call(val) : val\n end\n end\n else\n if options[:transform_with].respond_to? :call\n transforms[property_name] = options[:transform_with]\n end\n end\n end",
"def set_FromDateTime(value)\n set_input(\"FromDateTime\", value)\n end",
"def from=(location)\n if location then\n self.from_container = location.container\n self.from_row = location.row\n self.from_column = location.column\n end\n location\n end",
"def set_FromUserID(value)\n set_input(\"FromUserID\", value)\n end",
"def set_FromIndex(value)\n set_input(\"FromIndex\", value)\n end",
"def inherited_from=(value)\n @inherited_from = value\n end",
"def from=( *strs )\n set_string_array_attr 'From', strs\n end",
"def set_PopulateProperties(value)\n set_input(\"PopulateProperties\", value)\n end",
"def setter=(property)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Gets the inReplyTo property value. Readonly. Supports $expand. | def in_reply_to
return @in_reply_to
end | [
"def in_reply_to\n envelope[:in_reply_to]\n end",
"def in_reply_to\n headers['In-Reply-To']\n end",
"def reply_to\n address_list_fetch('reply-to')\n end",
"def in_reply_to=(value)\n @in_reply_to = value\n end",
"def set_InReplyTo(value)\n set_input(\"InReplyTo\", value)\n end",
"def reply_to_email_address\n @reply_to_email_address\n end",
"def reply_to\n return @reply_to\n end",
"def reply_to\n address_impl = @message_impl.getReplyTo\n # only return an address if a reply to was specified\n Qpid::Messaging::Address.new(nil, address_impl) if address_impl\n end",
"def jms_reply_to\n getJMSReplyTo\n end",
"def reply_to_id\n return @reply_to_id\n end",
"def reply_address\n @reply_address ||= ReplyAddress.decode(recipient_param)\n end",
"def reply_to\n email_message.reply_to.map(&:to_s)\n end",
"def reply_to\n (envelope[:reply_to] || []).map{|a| address(a)}\n end",
"def external_reply_message\n return @external_reply_message\n end",
"def inbox_recipient_id\n @attributes[:inbox_recipient_id]\n end",
"def reply_to\n Stacked::User.find(reply_to_user_id) if reply_to_user_id\n end",
"def reply_to=(value)\n @reply_to = value\n end",
"def reply_to(value = nil)\n if value.nil?\n @reply_to\n else\n @reply_to = value\n end\n end",
"def replying_to\n return nil unless self.reply?\n user = self.text[0...self.text.index(\" \")]\n return nil unless user[0...1] == \"@\"\n user\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the inReplyTo property value. Readonly. Supports $expand. | def in_reply_to=(value)
@in_reply_to = value
end | [
"def set_InReplyTo(value)\n set_input(\"InReplyTo\", value)\n end",
"def in_reply_to=( *idstrs )\n set_string_array_attr 'In-Reply-To', idstrs\n end",
"def reply_to=(value)\n @reply_to = value\n end",
"def reply_to=(replyTo)\n @reply_to = replyTo\n end",
"def reply_to_id=(value)\n @reply_to_id = value\n end",
"def in_reply_to\n envelope[:in_reply_to]\n end",
"def reply_to=(address)\n address = Qpid::Messaging::Address.new \"#{address}\" if !address.is_a? Qpid::Messaging::Address\n\n @message_impl.setReplyTo address.address_impl\n end",
"def reply_to(value = nil)\n if value.nil?\n @reply_to\n else\n @reply_to = value\n end\n end",
"def jms_reply_to=(reply_to)\n setJMSReplyTo(reply_to)\n end",
"def reply_to=(address)\n Cproton.pn_message_set_reply_to(@impl, address)\n end",
"def reply_to=( *strs )\n set_string_array_attr 'Reply-To', strs\n end",
"def set_ReplyEmail(value)\n set_input(\"ReplyEmail\", value)\n end",
"def in_reply_to\n return @in_reply_to\n end",
"def in_reply_to\n headers['In-Reply-To']\n end",
"def reply_to\n address_list_fetch('reply-to')\n end",
"def enable_inbox_replies\n client.post('/api/sendreplies', id: read_attribute(:name), state: true)\n end",
"def set_ReplyID(value)\n set_input(\"ReplyID\", value)\n end",
"def reply_to_email_address\n @reply_to_email_address\n end",
"def reply_to=(addresses)\n address_list_assign('Reply-To', addresses)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Gets the newParticipants property value. Conversation participants that were added to the thread as part of this post. | def new_participants
return @new_participants
end | [
"def new_participants=(value)\n @new_participants = value\n end",
"def participants_added # :nodoc:\n @properties[ADDED].map { |id| @context.users[id] }\n end",
"def getParticipants\r\n\t\t\t\t\treturn @participants\r\n\t\t\t\tend",
"def participants\n return @participants\n end",
"def conversation_participant_ids\n self.conversation ?\n self.conversation.participant_ids + [self.conversation.user_id] : []\n end",
"def participants\n return @participants\n end",
"def participants=(value)\n @participants = value\n end",
"def participants=(value)\n @participants = value\n end",
"def participants\n\n get_participant_map.participants\n end",
"def get_conversation_participants(id)\n @client.raw('get', \"/content/conversations/#{id}/participants\")\n end",
"def get_conversation_participants(id)\r\n #TODO: Test if this method needs data in options.\r\n @client.raw('get', \"/content/conversations/#{id}/participants\", nil, nil, @contact_v1_url)\r\n end",
"def total_participant_count\n return @total_participant_count\n end",
"def participants_removed # :nodoc:\n @properties[REMOVED].map { |id| @context.users[id] }\n end",
"def my_participant_id\n return @my_participant_id\n end",
"def team_participants\n [red_participants, blue_participants]\n end",
"def participant_id\n return @participant_id\n end",
"def getParticipantId\r\n\t\t\t\t\treturn @participantId\r\n\t\t\t\tend",
"def observed_participant_id\n return @observed_participant_id\n end",
"def other_participants_than(user)\n relation = Userconversation.where(:conversation_id => self.id.to_s)\n participants = Array.new\n relation.each do |rel|\n if user != rel.user\n participants << rel.user\n end\n end\n return participants\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the newParticipants property value. Conversation participants that were added to the thread as part of this post. | def new_participants=(value)
@new_participants = value
end | [
"def setParticipants(participants)\r\n\t\t\t\t\t@participants = participants\r\n\t\t\t\tend",
"def participants=(value)\n @participants = value\n end",
"def participants=(value)\n @participants = value\n end",
"def new_participants\n return @new_participants\n end",
"def add_participant\n user = self.load_user(params)\n meeting = self.load_meeting(params)\n participant_ids = params[\"participant_ids\"]\n comment = params[\"comment\"].nil? ? \"\" : params[\"comment\"]\n\n if user != nil and meeting != nil and participant_ids.length > 0\n participant_ids.each do |participant_id|\n unless meeting.participants.exists?(participant_id)\n new_participant = User.find(participant_id)\n meeting.participants << new_participant\n # add default vote for the new added participant to each suggestion\n meeting.suggestions.each do |suggestion|\n suggestion.votes << Vote.new(:voter => new_participant, :decision => \"?\")\n end\n\n NotificationService.send_meeting_invitation(user, new_participant, meeting, comment)\n end\n end\n self.send_ok\n else\n self.send_error 401\n end\n end",
"def add_participant(_user_ids, _current_user = nil)\n update(new_members: _user_ids.is_a?(Array) ? _user_ids : [_user_ids], updated_by: _current_user)\n end",
"def participant=(participant)\n ppl = self_link\n if ppl\n ppl.participant = participant\n else\n participant_person_links.build(:relationship_code => 1, :person => self, :participant => participant, :psu => self.psu)\n end\n end",
"def add_participants_to_global_competition\n\t\tusers = User.where({:in_grand_competition=>true})\n\t\t\n\t\tinvitation_count = 0\n\t\tusers.each do |user|\n\t\t\tCompetitionParticipant.add_participant(user.id, self.id)\n\t\t\tinvitation_count += 1\n\t\tend\n\t\tAppMailer.global_race_admin_notify(self.id, users.length, invitation_count).deliver\n\t\t\n\t\treturn \"#{self.name} created. #{invitation_count}/#{users.length} users invited.\"\n\tend",
"def conversations=(value)\n @conversations = value\n end",
"def setParticipantId(participantId)\r\n\t\t\t\t\t@participantId = participantId\r\n\t\t\t\tend",
"def my_participant_id=(value)\n @my_participant_id = value\n end",
"def add_conversation_followers!(conversation_id, params = {})\n cleaned = params.permit(:teammate_ids)\n create_without_response(\"conversations/#{conversation_id}/followers\", cleaned)\n end",
"def update_participant(discussion_id, username, properties={}, headers=default_jwt_headers)\n @logger.info(\"Updating the #{username} Participant in the #{discussion_id} Discussion\")\n put(\"#{@api_url}/discussions/#{discussion_id}/participants/#{encode(username)}\", properties, headers)\n end",
"def setParticipantPerson(participantPerson)\r\n\t\t\t\t\t@participantPerson = participantPerson\r\n\t\t\t\tend",
"def participant=(part)\n @participant = part\n end",
"def change_preference_list\n\t\tcheck_if_myself\n\t\tuser = current_user\n\t\tnew_preference_list = user_pref_list_params[:preference_list]\n\t\tif new_preference_list != user.preference_list\n\t\t\tuser.update_attributes(user_pref_list_params)\n\t\t\tuser.save\n\t\t\tRecomputeMeetingParticipationsJob.perform_later (0..6).to_a, user\n\t\tend\n\t\tredirect_to settings_page_path\n\tend",
"def participants_added # :nodoc:\n @properties[ADDED].map { |id| @context.users[id] }\n end",
"def participant_id=(value)\n @participant_id = value\n end",
"def conversation_participant_ids\n self.conversation ?\n self.conversation.participant_ids + [self.conversation.user_id] : []\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Gets the receivedDateTime property value. Specifies when the post was received. The DateTimeOffset type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z | def received_date_time
return @received_date_time
end | [
"def received_date_time\n return @received_date_time\n end",
"def message_received_date_time\n return @message_received_date_time\n end",
"def received_at\n request_datetime\n end",
"def received_at\n Time.parse(params['payment_date'])\n end",
"def received_date\n @received_at.getlocal('-05:00').strftime('%Y-%m-%d-05:00')\n end",
"def received_at\n Time.now\n end",
"def received_date_time=(receivedDateTime)\n @received_date_time = receivedDateTime\n end",
"def received_at\n Time.parse params['payment_date']\n end",
"def received_date_time=(value)\n @received_date_time = value\n end",
"def message_received_date_time=(value)\n @message_received_date_time = value\n end",
"def received_at\n\tTime.parse params['payment_date']\n end",
"def received_at\n return nil unless (temp_extended_received_at = read_attribute(:received_at))\n temp_received_at1 = encrypt_remove_pre_and_postfix(temp_extended_received_at, 'received_at', 5)\n temp_received_at2 = YAML::load(temp_received_at1)\n temp_received_at2 = temp_received_at2.to_time if temp_received_at2.class.name == 'Date'\n temp_received_at2\n end",
"def result\n DateTime.parse(value.sub(/^datetime-/, '')).utc\n end",
"def timestamp\n Time.parse(message.date.to_s).utc\n end",
"def posted_at\n @posted_at ||= Time.parse(JSON.load(self.data)['created_at'])\n end",
"def datetime\n if self.observed_on\n if self.time_observed_at\n Time.mktime(self.observed_on.year, \n self.observed_on.month, \n self.observed_on.day, \n self.time_observed_at.hour, \n self.time_observed_at.min, \n self.time_observed_at.sec, \n self.time_observed_at.zone)\n else\n Time.mktime(self.observed_on.year, \n self.observed_on.month, \n self.observed_on.day)\n end\n end\n end",
"def observed_utc_offset\n @offset.observed_utc_offset\n end",
"def created_date_time_utc\n return @created_date_time_utc\n end",
"def returned_date_time\n return @returned_date_time\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Sets the receivedDateTime property value. Specifies when the post was received. The DateTimeOffset type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z | def received_date_time=(value)
@received_date_time = value
end | [
"def received_date_time=(receivedDateTime)\n @received_date_time = receivedDateTime\n end",
"def message_received_date_time=(value)\n @message_received_date_time = value\n end",
"def sent_date_time=(sentDateTime)\n @sent_date_time = sentDateTime\n end",
"def received_date_time\n return @received_date_time\n end",
"def received_date_time\n return @received_date_time\n end",
"def created_date_time_utc=(value)\n @created_date_time_utc = value\n end",
"def sent_date_time=(value)\n @sent_date_time = value\n end",
"def received_at\n request_datetime\n end",
"def received_date\n @received_at.getlocal('-05:00').strftime('%Y-%m-%d-05:00')\n end",
"def stamp\n @options[:last_received_time] = Time.now\n self\n end",
"def received_at\n Time.parse(params['payment_date'])\n end",
"def sender_date_time=(value)\n @sender_date_time = value\n end",
"def received_at\n Time.parse params['payment_date']\n end",
"def created_date_time=(createdDateTime)\n @created_date_time = createdDateTime\n end",
"def received_at\n\tTime.parse params['payment_date']\n end",
"def acknowledged_date_time=(value)\n @acknowledged_date_time = value\n end",
"def received_at\n Time.now\n end",
"def message_received_date_time\n return @message_received_date_time\n end",
"def request_date_time=(value)\n @request_date_time = value\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Configure the pagination header manually | def set_pagination_header(name, options = {})
scope = instance_variable_get("@#{name}")
request_params = request.query_parameters
url_without_params = request.original_url.slice(0..(request.original_url.index("?")-1)) unless request_params.empty?
url_without_params ||= request.original_url
page = {}
page[:first] = 1 if scope.total_pages > 1 && !scope.first_page?
page[:last] = scope.total_pages if scope.total_pages > 1 && !scope.last_page?
page[:next] = scope.current_page + 1 unless scope.last_page?
page[:prev] = scope.current_page - 1 unless scope.first_page?
pagination_links = []
page.each do |k, v|
new_request_hash= request_params.merge({ page: v })
pagination_links << "<#{url_without_params}?#{new_request_hash.to_param}>; rel=\"#{k}\""
end
headers['Link'] = pagination_links.join(', ')
end | [
"def set_pagination_header(resource,resource_name)\n #print current page\n headers[\"x-page\"] = page\n #print records per page\n headers[\"x-per-page\"] = per_page\n #print total records\n headers[\"x-total\"] = resource.total_count\n #print next page url\n headers[\"next_page\"] = eval \"api_v1_#{resource_name}_url(request.query_parameters.merge(page: resource.next_page))\" if resource.next_page\n #print prev page url\n headers[\"prev_page\"] = eval \"api_v1_#{resource_name}_url(request.query_parameters.merge(page: resource.next_page))\" if resource.prev_page\n end",
"def set_pagination_headers\n headers[\"Access-Control-Expose-Headers\"] = \"Link, Page, Per-Page\"\n headers[\"Page\"] = current_page\n headers[\"Per-Page\"] = current_per_page\n end",
"def pagination_header(collection)\n ['<div class=\"pagination\">',\n if collection.total_pages < 2\n case collection.size\n when 0; _(\"No items found\")\n when 1; _(\"Displaying <b>1</b> item\")\n else; _(\"Displaying <b>all %d</b> items\") % [collection.size]\n end\n else\n _('Displaying items <b>%d–%d</b> of <b>%d</b> in total') %\n [ collection.offset + 1,\n collection.offset + collection.length,\n collection.total_entries ]\n end,\n '</div>' ].join ''\n end",
"def pagination_headers\n self.class.const_get(:PAGINATION_HEADERS)\n end",
"def headers(pagination, httpize = false)\n keys = httpize_keys(httpize)\n head = {\n keys[:page] => pagination.page.to_s,\n keys[:limit] => pagination.limit.to_s\n }\n head[keys[:total]] = pagination.total.to_s unless pagination.total.nil?\n head[keys[:pages]] = pagination.pages.to_s unless pagination.pages.nil?\n head\n end",
"def page_header\n params_to_title( true )\n end",
"def pagination(items_count, default_per_page: 20,\n maxium_per_page: 100,\n set_header: true)\n items_count = items_count.count if items_count.respond_to? :count\n\n @pagination_per_page = (params[:per_page] || default_per_page).to_i\n @pagination_per_page = maxium_per_page if @pagination_per_page > maxium_per_page\n @pagination_per_page = 1 if @pagination_per_page < 1\n\n items_count = 0 if items_count < 0\n pages_count = (items_count.to_f / @pagination_per_page).ceil\n pages_count = 1 if pages_count < 1\n\n @pagination_items_count = items_count\n @pagination_pages_count = pages_count\n\n @pagination_page = (params[:page] || 1).to_i\n @pagination_page = pages_count if @pagination_page > pages_count\n @pagination_page = 1 if @pagination_page < 1\n\n if current_page > 1\n @pagination_first_page_url = add_or_replace_uri_param(request.url, :page, 1)\n @pagination_prev_page_url = add_or_replace_uri_param(request.url, :page, (current_page > pages_count ? pages_count : current_page - 1))\n end\n\n if current_page < pages_count\n @pagination_next_page_url = add_or_replace_uri_param(request.url, :page, current_page + 1)\n @pagination_last_page_url = add_or_replace_uri_param(request.url, :page, pages_count)\n end\n\n if set_header\n link_headers ||= []\n\n if current_page > 1\n link_headers << \"<#{@pagination_first_page_url}>; rel=\\\"first\\\"\" if @pagination_first_page_url\n link_headers << \"<#{@pagination_prev_page_url}>; rel=\\\"prev\\\"\" if @pagination_prev_page_url\n end\n\n if current_page < pages_count\n link_headers << \"<#{@pagination_next_page_url}>; rel=\\\"next\\\"\" if @pagination_next_page_url\n link_headers << \"<#{@pagination_last_page_url}>; rel=\\\"last\\\"\" if @pagination_last_page_url\n end\n\n link_header = link_headers.join(', ')\n\n if self.respond_to?(:header)\n self.header('Link', link_header)\n self.header('X-Items-Count', items_count.to_s)\n self.header('X-Pages-Count', pages_count.to_s)\n end\n\n if defined?(response) && response.respond_to?(:headers)\n response.headers['Link'] = link_header\n response.headers['X-Items-Count'] = items_count.to_s\n response.headers['X-Pages-Count'] = pages_count.to_s\n end\n end\n end",
"def pagination=(count)\n settings.pagination = count\n end",
"def paginate(resp)\n present resp\n header 'X-Pagination', {\n next: params[:page] + 1,\n previous: params[:page] - 1,\n current: params[:page],\n per_page: params[:per_page],\n count: resp.to_a.count,\n total: total,\n pages: (total / [total, params[:per_page]].min).ceil\n }.to_json\n end",
"def pagination_param\n Kaminari.config.param_name\n end",
"def index\n @cust_iti_headers = CustItiHeader.paginate(:page => params[:page])\n end",
"def paginate!\n content_for :rails_metrics_footlinks do\n content_tag(:p, [previous_link, pagination_info, next_link].join(\" | \"),nil,false) <<\n content_tag(:p, \"Show per page: #{show_per_page([10, 25, 50, 100])}\",nil,false)\n end\n end",
"def index\n @dest_iti_headers = DestItiHeader.paginate(:page => params[:page])\n end",
"def pagination\n element_text 'Pagination'\n end",
"def paginate_with_headers?\n false\n end",
"def pagination\n {}.tap do |h|\n h[:next_page_params] = next_page_params if next_page?\n h[:prev_page_params] = prev_page_params if prev_page?\n end\n end",
"def add_page_numbering\n numbering_options = {\n at: [@pdf.bounds.right - 150, 0-@format[:footer_size]],\n width: 150,\n align: :right,\n start_count_at: 1,\n color: \"999999\",\n size: 8\n }\n @pdf.number_pages t('paging'), numbering_options\n end",
"def paginator=(_arg0); end",
"def pagination_method\n defined?(Kaminari) ? Kaminari.config.page_method_name : :page\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /character/geo_positions/1 GET /character/geo_positions/1.json | def show
@character_geo_position = Character::GeoPosition.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @character_geo_position }
end
end | [
"def new\n @character_geo_position = Character::GeoPosition.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @character_geo_position }\n end\n end",
"def index\n @fundamental_character_positions = Fundamental::CharacterPosition.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @fundamental_character_positions }\n end\n end",
"def show\n @fundamental_character_position = Fundamental::CharacterPosition.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fundamental_character_position }\n end\n end",
"def destroy\n @character_geo_position = Character::GeoPosition.find(params[:id])\n @character_geo_position.destroy\n\n respond_to do |format|\n format.html { redirect_to character_geo_positions_url }\n format.json { head :ok }\n end\n end",
"def show\n @map_position = MapPosition.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @map_position }\n end\n end",
"def index\n @character_locations = CharacterLocation.all\n end",
"def create\n @character_geo_position = Character::GeoPosition.new(params[:character_geo_position])\n\n respond_to do |format|\n if @character_geo_position.save\n format.html { redirect_to @character_geo_position, notice: 'Geo position was successfully created.' }\n format.json { render json: @character_geo_position, status: :created, location: @character_geo_position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @character_geo_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def index\n @positions = Position.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @positions }\n end\n end",
"def update\n @character_geo_position = Character::GeoPosition.find(params[:id])\n\n respond_to do |format|\n if @character_geo_position.update_attributes(params[:character_geo_position])\n format.html { redirect_to @character_geo_position, notice: 'Geo position was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @character_geo_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def show\n @geo_tag = GeoTag.find(params[:id])\n\n respond_to do |format|\n format.json { render :json => @geo_tag }\n end\n end",
"def get_geo_response\n get_data_from(geo_response)\n end",
"def locations\n customer = Customer.find_by(id: params[:id], company: current_company)\n locations = customer.locations\n render json: { locations: locations }\n end",
"def get_location\n as_json(get_results('/locations.json'))\n end",
"def get_city_latlng\n \n city = Place.find(params[:id])\n latlng = {lat: city.lat.to_f, lng: city.lng.to_f}\n respond_to do |format|\n format.json { render json: latlng }\n end\n \n end",
"def show\n @unko_position = UnkoPosition.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @unko_position }\n end\n end",
"def get_location\n JSON.parse(get_results('/locations.json'), {symbolize_names: true})\n end",
"def stops_by_position\n get '/gtfs/stops/geosearch/'\n end",
"def show\n @position = Position.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @position }\n end\n end",
"def show\n @geo_position = GeoPosition.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @geo_position }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /character/geo_positions/new GET /character/geo_positions/new.json | def new
@character_geo_position = Character::GeoPosition.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @character_geo_position }
end
end | [
"def new\n @map_position = MapPosition.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @map_position }\n end\n end",
"def create\n @character_geo_position = Character::GeoPosition.new(params[:character_geo_position])\n\n respond_to do |format|\n if @character_geo_position.save\n format.html { redirect_to @character_geo_position, notice: 'Geo position was successfully created.' }\n format.json { render json: @character_geo_position, status: :created, location: @character_geo_position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @character_geo_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @position = Position.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @position }\n end\n end",
"def new\n @geolocation = Geolocation.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @geolocation }\n end\n end",
"def new\n @geoloc = Geoloc.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @geoloc }\n end\n end",
"def new\n @position_mapper = PositionMapper.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @position_mapper }\n end\n end",
"def new\n @geo = Geo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @geo }\n end\n end",
"def new\n @position_type = PositionType.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @position_type }\n end\n end",
"def new\n @positionlevel = Positionlevel.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @positionlevel }\n end\n end",
"def new\n @geo_position = GeoPosition.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @geo_position }\n end\n end",
"def new\n @fundamental_character_position = Fundamental::CharacterPosition.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @fundamental_character_position }\n end\n end",
"def new\n @text_position = TextPosition.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @text_position }\n end\n end",
"def new\n @user_position = UserPosition.new\n @positions = Position.all\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @user_position }\n end\n end",
"def new\n @loc = current_user.locs.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @loc }\n end\n end",
"def new\n @points_map = PointsMap.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @points_map }\n end\n end",
"def new\n @position_number = PositionNumber.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @position_number }\n end\n end",
"def new\n \t@internship_position = InternshipPosition.new\n\n \trespond_to do |format|\n \t\tformat.html #new.html.erb\n \t\tformat.json { render json: @internship_position }\n \tend\n end",
"def new\n @crew_position = CrewPosition.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @crew_position }\n end\n end",
"def create\n @map_position = MapPosition.new(params[:map_position])\n\n respond_to do |format|\n if @map_position.save\n format.html { redirect_to @map_position, notice: 'Map position was successfully created.' }\n format.json { render json: @map_position, status: :created, location: @map_position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @map_position.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /character/geo_positions POST /character/geo_positions.json | def create
@character_geo_position = Character::GeoPosition.new(params[:character_geo_position])
respond_to do |format|
if @character_geo_position.save
format.html { redirect_to @character_geo_position, notice: 'Geo position was successfully created.' }
format.json { render json: @character_geo_position, status: :created, location: @character_geo_position }
else
format.html { render action: "new" }
format.json { render json: @character_geo_position.errors, status: :unprocessable_entity }
end
end
end | [
"def create\n @fundamental_character_position = Fundamental::CharacterPosition.new(params[:fundamental_character_position])\n\n respond_to do |format|\n if @fundamental_character_position.save\n format.html { redirect_to @fundamental_character_position, notice: 'Character position was successfully created.' }\n format.json { render json: @fundamental_character_position, status: :created, location: @fundamental_character_position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @fundamental_character_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @geo_position = GeoPosition.new(params[:geo_position])\n\n respond_to do |format|\n if @geo_position.save\n flash[:notice] = 'GeoPosition was successfully created.'\n format.html { redirect_to(@geo_position) }\n format.xml { render :xml => @geo_position, :status => :created, :location => @geo_position }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @geo_position.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @map_position = MapPosition.new(params[:map_position])\n\n respond_to do |format|\n if @map_position.save\n format.html { redirect_to @map_position, notice: 'Map position was successfully created.' }\n format.json { render json: @map_position, status: :created, location: @map_position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @map_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @map_position = MapPosition.new(map_position_params)\n\n respond_to do |format|\n if @map_position.save\n format.html { redirect_to @map_position, notice: 'Map position was successfully created.' }\n format.json { render :show, status: :created, location: @map_position }\n else\n format.html { render :new }\n format.json { render json: @map_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def correct_geo_coords\n coords = params.seek :geo, :geojson, :geometry, :coordinates\n if coords\n array = GeosHelper.geo_coords_to_array(coords)\n params[:geo][:geojson][:geometry][:coordinates] = array\n end\n end",
"def create\n position = Position.new(position_params)\n\n if position.save\n render json: position, status: :created\n else\n render json: position.errors, status: :unprocessable_entry\n end\n\n end",
"def create\n @position = Position.new(params[:position])\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to positions_path, notice: 'Position was successfully created.' }\n format.json { render json: positions_path, status: :created, location: @position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @character_geo_position = Character::GeoPosition.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @character_geo_position }\n end\n end",
"def create\n @position = Position.new(params[:position])\n respond_to do |format|\n if @position.save\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render json: @position, status: :created, location: @position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position = Position.new(params[:position])\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render json: @position, status: :created, location: @position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @character_geo_position = Character::GeoPosition.find(params[:id])\n\n respond_to do |format|\n if @character_geo_position.update_attributes(params[:character_geo_position])\n format.html { redirect_to @character_geo_position, notice: 'Geo position was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @character_geo_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @geo_attribute = GeoAttribute.new(geo_attribute_params)\n\n if @geo_attribute.save\n render :show, status: :created, location: @geo_attribute\n else\n render json: @geo_attribute.errors, status: :unprocessable_entity\n end\n end",
"def create\n @character_location = CharacterLocation.new(character_location_params)\n\n respond_to do |format|\n if @character_location.save\n format.html { redirect_to @character_location, notice: \"Character location was successfully created.\" }\n format.json { render :show, status: :created, location: @character_location }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @character_location.errors, status: :unprocessable_entity }\n end\n end\n end",
"def destroy\n @character_geo_position = Character::GeoPosition.find(params[:id])\n @character_geo_position.destroy\n\n respond_to do |format|\n format.html { redirect_to character_geo_positions_url }\n format.json { head :ok }\n end\n end",
"def create\n @text_position = TextPosition.new(params[:text_position])\n\n respond_to do |format|\n if @text_position.save\n format.html { redirect_to @text_position, notice: 'Text position was successfully created.' }\n format.json { render json: @text_position, status: :created, location: @text_position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @text_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @geoloc = Geoloc.new(params[:geoloc])\n\n respond_to do |format|\n if @geoloc.save\n format.html { redirect_to @geoloc, notice: 'Geoloc was successfully created.' }\n format.json { render json: @geoloc, status: :created, location: @geoloc }\n else\n format.html { render action: \"new\" }\n format.json { render json: @geoloc.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @crew_position = CrewPosition.new(params[:crew_position])\n\n respond_to do |format|\n if @crew_position.save\n format.html { redirect_to @crew_position, :notice => 'Crew position was successfully created.' }\n format.json { render :json => @crew_position, :status => :created, :location => @crew_position }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @crew_position.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n \t@internship_position = InternshipPosition.new(internship_position_params)\n\n \trespond_to do |format|\n \t\tif @internship_position.save\n \t\t\tformat.html { redirect_to @internship_position, notice: 'Position was successfully created'}\n \t\t\tformat.json { render json: @internship_position, status: :created, location: @internship_position }\n \t\telse\n \t\t\tformat.html { render action: \"new\" }\n \t\t\tformat.json { render json: @internship_position.errors, status: :unprocessable_entity }\n \t\tend\n \tend\n end",
"def create\n @user_position = UserPosition.new(params[:user_position])\n\n respond_to do |format|\n if @user_position.save\n format.html { redirect_to @user_position, notice: 'User position was successfully created.' }\n format.json { render json: @user_position, status: :created, location: @user_position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @user_position.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
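The create action in the record above renders either HTML or JSON depending on the request format. As a purely illustrative sketch of driving the JSON variant from Ruby, the snippet below posts to the /character/geo_positions.json route named in the query; the localhost host/port and the latitude/longitude attribute names are assumptions, since the controller never lists the model's attributes.

# Illustrative client sketch only: the route and the 201/422 outcomes come from the
# controller above, while the host, port and attribute names are assumed.
require 'net/http'
require 'json'
require 'uri'

uri = URI('http://localhost:3000/character/geo_positions.json')
payload = { character_geo_position: { latitude: 48.2082, longitude: 16.3738 } }

response = Net::HTTP.post(uri, payload.to_json, 'Content-Type' => 'application/json')

case response
when Net::HTTPCreated
  # Success branch: `render json: ..., status: :created`.
  puts JSON.parse(response.body)
else
  # Failure branch: the validation errors rendered with status :unprocessable_entity.
  puts "#{response.code}: #{response.body}"
end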
PUT /character/geo_positions/1 PUT /character/geo_positions/1.json | def update
@character_geo_position = Character::GeoPosition.find(params[:id])
respond_to do |format|
if @character_geo_position.update_attributes(params[:character_geo_position])
format.html { redirect_to @character_geo_position, notice: 'Geo position was successfully updated.' }
format.json { head :ok }
else
format.html { render action: "edit" }
format.json { render json: @character_geo_position.errors, status: :unprocessable_entity }
end
end
end | [
"def update\n @fundamental_character_position = Fundamental::CharacterPosition.find(params[:id])\n\n respond_to do |format|\n if @fundamental_character_position.update_attributes(params[:fundamental_character_position])\n format.html { redirect_to @fundamental_character_position, notice: 'Character position was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @fundamental_character_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @geo_position = GeoPosition.find(params[:id])\n\n respond_to do |format|\n if @geo_position.update_attributes(params[:geo_position])\n flash[:notice] = 'GeoPosition was successfully updated.'\n format.html { redirect_to(@geo_position) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @geo_position.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @character_geo_position = Character::GeoPosition.new(params[:character_geo_position])\n\n respond_to do |format|\n if @character_geo_position.save\n format.html { redirect_to @character_geo_position, notice: 'Geo position was successfully created.' }\n format.json { render json: @character_geo_position, status: :created, location: @character_geo_position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @character_geo_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n\n respond_to do |format|\n if @position.update_attributes(params[:position])\n format.html { redirect_to @position, notice: 'Position was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @character_location.update(character_location_params)\n format.html { redirect_to @character_location, notice: \"Character location was successfully updated.\" }\n format.json { render :show, status: :ok, location: @character_location }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @character_location.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @map_position.update(map_position_params)\n format.html { redirect_to @map_position, notice: 'Map position was successfully updated.' }\n format.json { render :show, status: :ok, location: @map_position }\n else\n format.html { render :edit }\n format.json { render json: @map_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @map_position = MapPosition.find(params[:id])\n\n respond_to do |format|\n if @map_position.update_attributes(params[:map_position])\n format.html { redirect_to @map_position, notice: 'Map position was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @map_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def change_position\n supplemental_material = SupplementalMaterial.find params[:id]\n supplemental_material.insert_at params[:newPos].to_i\n render json: { status: :ok }\n end",
"def update\n respond_to do |format|\n if @position.update(position_params)\n format.html { redirect_to @position, notice: 'Position was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def destroy\n @character_geo_position = Character::GeoPosition.find(params[:id])\n @character_geo_position.destroy\n\n respond_to do |format|\n format.html { redirect_to character_geo_positions_url }\n format.json { head :ok }\n end\n end",
"def update\n respond_to do |format|\n if @spatial.update(spatial_params)\n format.html { redirect_to @spatial, notice: 'Spatial was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @spatial.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if @geo_attribute.update(geo_attribute_params)\n render :show, status: :ok, location: @geo_attribute\n else\n render json: @geo_attribute.errors, status: :unprocessable_entity\n end\n end",
"def update\n @position = Position.find(params[:id])\n respond_to do |format|\n if @position.update_attributes(params[:position])\n format.html { redirect_to @position, notice: 'Position was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @position = Position.find(params[:id])\n\n respond_to do |format|\n if @position.update_attributes(params[:position].permit(:equipe_type_id, :name))\n format.html { redirect_to positions_path, notice: I18n.t(:general_update_success) }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @unko_position = UnkoPosition.find(params[:id])\n\n respond_to do |format|\n if @unko_position.update_attributes(params[:unko_position])\n format.html { redirect_to @unko_position, notice: 'Unko position was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @unko_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @position = Position.find(params[:id])\n\n respond_to do |format|\n if @position.update_attributes(params[:position])\n format.html { redirect_to @position, notice: 'Position was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def correct_geo_coords\n coords = params.seek :geo, :geojson, :geometry, :coordinates\n if coords\n array = GeosHelper.geo_coords_to_array(coords)\n params[:geo][:geojson][:geometry][:coordinates] = array\n end\n end",
"def update\n respond_to do |format|\n if @open_position.update(open_position_params)\n format.html { redirect_to @open_position, notice: 'Open position was successfully updated.' }\n format.json { render :show, status: :ok, location: @open_position }\n else\n format.html { render :edit }\n format.json { render json: @open_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if @character.update(character_params)\n render json: @character, status: 201, location: @character\n else\n render json: @character.errors, status: :unprocessable_entity\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /character/geo_positions/1 DELETE /character/geo_positions/1.json | def destroy
@character_geo_position = Character::GeoPosition.find(params[:id])
@character_geo_position.destroy
respond_to do |format|
format.html { redirect_to character_geo_positions_url }
format.json { head :ok }
end
end | [
"def destroy\n @geo = Geo.find(params[:id])\n @geo.destroy\n\n respond_to do |format|\n format.html { redirect_to geos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geolocation.destroy\n respond_to do |format|\n format.html { redirect_to geolocations_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geoloc = Geoloc.find(params[:id])\n @geoloc.destroy\n\n respond_to do |format|\n format.html { redirect_to geolocs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geo_position = GeoPosition.find(params[:id])\n @geo_position.destroy\n\n respond_to do |format|\n format.html { redirect_to(geo_positions_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @ogrgeojson.destroy\n respond_to do |format|\n format.html { redirect_to ogrgeojsons_url, notice: 'Ogrgeojson was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @georeference.destroy\n respond_to do |format|\n format.html { redirect_to georeferences_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geo.destroy\n respond_to do |format|\n format.html { redirect_to geos_url, notice: 'Geo was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @spatial.destroy\n respond_to do |format|\n format.html { redirect_to spatials_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @post_geo = current_user.post_geos.find(params[:id])\n @post_geo.destroy\n\n respond_to do |format|\n format.html { redirect_to [:client, :post_geos] }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geo_entry.destroy\n respond_to do |format|\n format.html { redirect_to geo_entries_url, notice: 'Geo entry was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n Position.delete(params[:id])\n\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @geo = Geo.find(params[:id])\n @geo.destroy\n\n respond_to do |format|\n format.html { redirect_to(geos_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @fundamental_character_position = Fundamental::CharacterPosition.find(params[:id])\n @fundamental_character_position.destroy\n\n respond_to do |format|\n format.html { redirect_to fundamental_character_positions_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @geo_datum.destroy\n respond_to do |format|\n format.html { redirect_to geo_data_url, notice: 'Geo datum was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geo_coordinate = GeoCoordinate.find(params[:id])\n @geo_coordinate.destroy\n\n respond_to do |format|\n format.html { redirect_to(geo_coordinates_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @georef = Georef.find(params[:id])\n @georef.destroy\n\n respond_to do |format|\n format.html { redirect_to georefs_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @character_location.destroy\n respond_to do |format|\n format.html { redirect_to character_locations_url, notice: \"Character location was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geojson_layer.destroy\n respond_to do |format|\n format.html { redirect_to @geojson_layer.organization }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geometry.destroy\n respond_to do |format|\n format.html { redirect_to geometries_url }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
We are going to avoid indexing README.md when there is also an index.md in the same directory, to keep behavior consistent with the display logic | def skip_index?(file)
if file.end_with? 'README.md'
dir = File.dirname file
File.exist? "#{dir}/index.md"
else
false
end
end | [
"def should_be_index?(readme); end",
"def should_be_index?(readme)\n return false unless readme\n\n !dir_has_index? File.dirname(readme.url)\n end",
"def readme\n \"#{app_path}/readme.md\"\n end",
"def show_readme\n readme \"README\"\n end",
"def help\n path = Dir[src_path('README')].first || Dir[src_path('README.md')].first || Dir[src_path('README.markdown')].first\n if path\n File.read path\n end\n end",
"def gen_main_index\n template = TemplatePage.new(RDoc::Page::INDEX)\n File.open(\"index.html\", \"w\") do |f|\n tStr = \"\"\n #File.open(main_url, 'r') do |g|\n # tStr = markup(g)\n #end\n values = {\n \"initial_page\" => tStr,\n 'title' => CGI.escapeHTML(@options.title),\n 'charset' => @options.charset,\n 'content' => File.read('files/README.html')\n }\n \n values['inline_source'] = true\n template.write_html_on(f, values)\n end\n end",
"def about\n require \"github/markup\"\n @readme = GitHub::Markup.render(\"README.md\", File.read(\"README.md\")).html_safe\n end",
"def copy_readme_and_fix_relative_links(readme_file, github_file_prefix, github_raw_file_prefix, module_name)\n # Find the relative path of the README so we update the URL prefix accordingly\n relative_readme_pathname = Pathname.new(readme_file).relative_path_from(Pathname.new(\"#{$SCRIPT_DIR}/..\"))\n path = relative_readme_pathname.dirname.to_s\n url_prefix = \"#{github_file_prefix}/#{path}\"\n url_raw_prefix = \"#{github_raw_file_prefix}/#{path}\"\n\n # Read README file\n text = File.read(readme_file)\n\n # Replace the level 1 heading with the module name in docc's required ``ModuleName`` format\n # Without this, docc will treat this as a regular article instead of a landing page.\n new_contents = text.gsub(/^# .*/, \"# ``#{module_name}``\")\n\n # Remove any GitHub badges ([![), they don't render well\n new_contents = new_contents.gsub(/^\\[\\!\\[.*/, '')\n\n # Remove extraneous level 1 headings, they cause issues\n new_contents = new_contents.gsub(/^===.*/, '')\n\n # Prepend markdown links with the `url_prefix` that don't start with\n # \"http://\", \"https://\", \"mailto:\", or \"#\"\n new_contents = new_contents.gsub(%r{\\]\\(((?!https\\://)(?!http\\://)(?!mailto\\:)[^#].*?)\\)}, \"](#{url_prefix}/\\\\1)\")\n\n # Prepend `<a/>` tag 'href' attributes with the `url_prefix` that don't start\n # with \"http://\", \"https://\", \"mailto:\", or \"#\"\n new_contents = new_contents.gsub(%r{<a\\s+(.+\\s)*?href=(\"|')((?!https\\://)(?!http\\://)(?!mailto\\:)[^#].*?)(\"|')}, \"<a \\\\href='#{url_prefix}/\\\\3'\")\n\n # Prepend `<img/>` tag 'src' attributes with the `url_prefix` that don't start\n # with \"http://\" or \"https://\"\n new_contents = new_contents.gsub(%r{<img\\s+(.+\\s)*?src=(\"|')((?!https\\://)(?!http\\://).*?)(\"|')}, \"<img \\\\1src='#{url_raw_prefix}/\\\\3'\")\n\n # Create temp file & write updated contents to it\n new_file = Tempfile.new('README.md')\n File.open(new_file.path, 'w') { |file| file.puts new_contents }\n\n new_file.path\nend",
"def repo_with_index(name, index_content)\n repo_with_file name, 'index.asciidoc', <<~ASCIIDOC\n = Title\n\n [[chapter]]\n == Chapter\n #{index_content}\n ASCIIDOC\n end",
"def show_readme\n readme \"README\" if behavior == :invoke\n end",
"def find_readme repo_dir\n files = Dir.entries(repo_dir).sort\n files.each { |f| return f if f.downcase.include? 'readme' }\n return ''\nend",
"def render_readme(content, extension)\n if %w(md mdown markdown).include?(extension.downcase)\n render_markdown(content)\n else\n content\n end\n end",
"def url_for_main\n options.readme ? url_for_file(options.readme) : url_for_index\n end",
"def remove_old_readme\n readme = File.join(@opts[:path], 'README.md')\n File.delete(readme) if File.exist?(readme)\n end",
"def find_readme_url(repo_url)\n html_url = git_url_to_gitraw_url(repo_url)\n file_names = ['README.md', 'README.MD', 'readme.md', 'readme.MD', 'README', 'readme']\n existing_files = file_names.select {|file| !is_404?(html_url + '/master/' + file) }\n if !existing_files.empty?\n return html_url + '/master/' + existing_files.first\n end\n nil\n end",
"def md_path(name)\n Rails.root.join(ManPages::MARKDOWN_PATH, name + \".md\")\nend",
"def find_readme\n\t\tfile = self.project_files.find {|file| file =~ /^README\\.(md|rdoc)$/ }\n\t\tif file\n\t\t\treturn Pathname( file )\n\t\telse\n\t\t\tself.prompt.warn \"No README found in the project files.\"\n\t\t\treturn DEFAULT_README_FILE\n\t\tend\n\tend",
"def markdown_files; end",
"def buildParentReadMeIndex\n x = 0\n fileOutArr = Array.new\n\n # Build list of reference pages\n fileOutArr.push \"### Fn Command Reference\\n\\n\"\n @cmdListArr.each do |command|\n # Add each command to output\n fileOutArr.push \"[\" + command + \"](ref/\" + @fileNameArr[x] + \".md\" + \") \\n\"\n x = x + 1 # Counter for syncing arrays\n end\n \n # Add Fn Version\n fileOutArr.push(\"\\n<sub>\" + @fnVersion + \"</sub>\")\n\n\n # Write REFLIST.md file to disk\n puts \"Writing: \" + \"REFLIST\" + \".md\"\n File.open(\"REFLIST.md\", \"w\") do |f|\n f.puts(fileOutArr)\n end\n\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
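A small usage sketch for the skip_index? document above: it stages a throwaway directory holding both files to show that only the README is skipped. The tmpdir layout exists only for this example.

# Demonstration only: copies skip_index? verbatim and exercises it against a
# temporary directory containing both README.md and index.md.
require 'tmpdir'
require 'fileutils'

def skip_index?(file)
  if file.end_with? 'README.md'
    dir = File.dirname file
    File.exist? "#{dir}/index.md"
  else
    false
  end
end

Dir.mktmpdir do |dir|
  FileUtils.touch("#{dir}/README.md")
  FileUtils.touch("#{dir}/index.md")

  skip_index?("#{dir}/README.md") # => true, index.md is present so the README is skipped
  skip_index?("#{dir}/index.md")  # => false, only README.md files are ever skipped
end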
Returns a list of repository names which match `regexp` | def repository_names
list_repos
.select { |repo| repo["name"] =~ regexp }
.map { |repo| repo["name"] }
end | [
"def find_aliases(regexp)\n aliases = find(regexp,:object=>:alias,:find_mode=>:partial)\n return [] if aliases==[nil]\n aliases\n end",
"def get_all(regexp)\n Kitchen::Collection.new(__getobj__.select { |i| i.name =~ regexp })\n end",
"def project_names\n repositories.map { |s| s.match(/[^\\/]*$/)[0] }\n end",
"def select_regexp_matching_images(re_hn)\n Docker::Image.all.select { |img|\n info_map = img.info\n info_map && info_map['RepoTags'] && info_map['RepoTags'].any? { |n| n.match re_hn }\n }\nend",
"def regexp_matches(regexp); end",
"def find_grep(repo_path, extensions, search_string)\n results = []\n Find.find(repo_path) do |path|\n next if FileTest.directory?(path)\n extensions.each do |ext|\n if (path.downcase.end_with?(ext))\n find_matching_lines path, search_string, results\n# puts path\n end\n end\n end\n return results\nend",
"def list(pattern = /.*/)\n if Gem::Specification.respond_to?(:each)\n Gem::Specification.select{|spec| spec.name =~ pattern }\n else\n Gem.source_index.gems.values.select{|spec| spec.name =~ pattern }\n end\n end",
"def search_repo(query)\n repos = load_and_cache_user_repos\n results = repos.select do |repo|\n repo['name'] =~ Regexp.new(query, 'i')\n end\n results += search_all_repos(query) if query =~ %r{\\/}\n results.uniq\n end",
"def find_projects(regex, match_attribute=:name)\n project_list = projects\n\n project_list.find_all do |project|\n project[match_attribute] =~ regex\n end\n end",
"def scan_file_svn(f, regexs)\n repos = [[], [], []]\n consts = {}\n begin\n File.open(f).each_line.map(&:chomp).each do |line|\n line.strip!\n if line =~ WWWAUTH # Fastest compare first\n repos[2] << line\n elsif line =~ ASFSVN # Find all ASF::SVN and also map if it uses a CONSTANT_DEF\n consts.each do |k,v|\n line.sub!(k, v)\n end\n if line =~ regexs[0]\n repos[0] << line\n elsif line =~ regexs[1]\n repos[1] << line\n end\n elsif line =~ CONSTANT_DEF\n consts[$~['matchconst']] = \"'#{$~['matchval']}'\"\n end\n end\n return repos\n rescue Exception => e\n return [[\"#{ISERR}Bogosity! #{e.message[0..255]}\", \"\\t#{e.backtrace.join(\"\\n\\t\")}\"],[]]\n end\nend",
"def get_all(regexp)\n Jamie::Config::Collection.new(\n __getobj__.find_all { |i| i.name =~ regexp }\n )\n end",
"def search(regexp)\n all_cards = get_all_cards\n matched_cards = all_cards.select { |card| card['name'] =~ regexp }\n pretty_print matched_cards\n end",
"def extract_patterns(regexp)\n case_insensitive = false\n root = Regexp::Parser.parse(regexp)\n return [] if root.expressions.empty?\n first = root.expressions.first\n if first.token == :options\n if first.options[:x] || first.options[:m]\n STDERR.puts \"SKIP -- Can't handle options.\"\n return []\n end\n if first.options[:i]\n case_insensitive = true\n end\n root.expressions.shift\n end\n\n extract_patterns_from_parse(root, case_insensitive)\nend",
"def find_log_matches(regexp, args={})\n logcontent = \"\"\n get_vespalog_helper(-> logfile { logcontent << read_file_ignore_error(logfile) }, args)\n\n # scan returns an array of matches\n logcontent.scan(regexp)\n end",
"def repos_which_match(repo_names)\n @repos.select { |repo| repo_names.include?(repo.name) }\n end",
"def find_zones(regexp)\n zones = find(regexp,:object=>:zone,:find_mode=>:partial)\n return [] if zones==[nil]\n zones\n end",
"def build_regexp(list)\n r = []\n list.each do |itm|\n r << \"#{ASFSVN.source}\\['#{itm}']\"\n end\n return Regexp.union(r)\nend",
"def services_by_regexp(regexp)\n matched = {}\n services_list.each do |name, status|\n matched.store name, status if name =~ regexp\n end\n matched\n end",
"def find_all_regex(sCOMMAND)\n array = Array.new()\n search =/#{sCOMMAND}/\n @commands.each do |command|\n if (command.commandName.match(search) )\n array.push(command)\n end\n\n end\n return array\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
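For the repository_names document above, here is a sketch of the minimal object it implies: a regexp reader plus a list_repos method returning hashes with a "name" key. Both of those names come from the document itself; the RepoFilter class and the sample repository data are invented for the example.

# Minimal harness around repository_names; list_repos stands in for whatever API
# call the original wraps, and the repository hashes are made-up sample data.
class RepoFilter
  attr_reader :regexp

  def initialize(regexp)
    @regexp = regexp
  end

  def list_repos
    [
      { "name" => "word-graph" },
      { "name" => "word-graph-demo" },
      { "name" => "dotfiles" }
    ]
  end

  def repository_names
    list_repos
      .select { |repo| repo["name"] =~ regexp }
      .map { |repo| repo["name"] }
  end
end

RepoFilter.new(/word-graph/).repository_names # => ["word-graph", "word-graph-demo"]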
POST /resume_responses POST /resume_responses.json | def create
@resume_response = ResumeResponse.new(params[:resume_response])
@resume = Resume.find(params[:id])
@resume_response.resume = @resume
@resume_response.sender = @current_user
@resume_response.recipient = @resume.user
respond_to do |format|
if @resume_response.save
        format.html { redirect_to home_path, notice: 'Resume response has been sent.' }
format.json { render json: @resume_response, status: :created, location: @resume_response }
else
format.html { render action: "new" }
format.json { render json: @resume_response.errors, status: :unprocessable_entity }
end
end
end | [
"def create\n # puts \"params: #{params}\"\n @resume = Resume.new(resume_params)\n header_build(@resume)\n education_build(@resume)\n work_experience_build(@resume)\n skills_build(@resume)\n if @resume.save\n render json: @resume, status: :created, location: api_v1_resumes_url(@resume)\n else\n render json: @resume.errors, status: :unprocessable_entity\n end\n end",
"def create\n @resume = current_user.resumes.new(params[:resume])\n\n respond_to do |format|\n if @resume.save\n format.html { redirect_to @resume, notice: 'Resume was successfully created.' }\n format.json { render json: @resume, status: :created, location: @resume }\n else\n format.html { render action: \"new\" }\n format.json { render json: @resume.errors, status: :unprocessable_entity }\n end\n end\n end",
"def upload_resume\n resume = params[:resume]\n\n if resume.nil?\n return render json: {success: false, message: \"Resume failed to upload\"}\n end\n \n resume_content = resume.read\n resume_old = @hacker.resume\n\n if resume_old.nil?\n @hacker.build_resume(resume: resume_content)\n else\n resume_old.resume = resume_content\n resume_old.save\n end\n @hacker.save\n\n return render json: {success: true, message: \"Resume uploaded succsessfully\"}\n end",
"def resume\n resource_id = params[:resource_id]\n head :no_content\n end",
"def cmd_resume argv\n setup argv\n uuid = @hash['uuid']\n response = @api.resume(uuid)\n msg response\n return response\n end",
"def create\n @applicantresume = Applicantresume.new(params[:applicantresume])\n\n respond_to do |format|\n if @applicantresume.save\n flash[:success] = \"Your application form was successfully sent to #{@applicantresume.user.name}. Expect that you may receive an email or text message from your employer anytime soon.\"\n format.html { redirect_to @applicantresume }\n format.json { render json: @applicantresume, status: :created, location: @applicantresume }\n else\n format.html { render action: \"new\" }\n format.json { render json: @applicantresume.errors, status: :unprocessable_entity }\n end\n end\n end",
"def resumes_matching\n\t\tresp = @resp\n\t\t# Prcessing keyword values for matching resume list\n\t\tresp = Resume.resumes_matching(resp, params)\n\t\trender :json => resp\n end",
"def create\n\t\t@resume = Resume.new(resume_params)\n\n\t\trespond_to do |format|\n\t\t\tif @resume.save\n\t\t\t\tformat.html { redirect_to resumes_url, notice: 'Resume entry was successfully created.' }\n\t\t\t\tformat.json { render :show, status: :created, location: @resume }\n\t\t\telse\n\t\t\t\tformat.html { render :new }\n\t\t\t\tformat.json { render json: @resume.errors, status: :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend",
"def resume_all\n self.class.post('/command/resumeAll')\n end",
"def create\n @resume = current_user.resumes.new(params[:resume])\n\n respond_to do |format|\n if @resume.save\n format.html { redirect_to(@resume, :notice => 'Resume was successfully created.') }\n format.xml { render :xml => @resume, :status => :created, :location => @resume }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @resume.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def answer_questions\n @project = Project.find(params[:id])\n @question_responses = []\n params[:question_responses].each do |question_id, response|\n @question_responses << QuestionResponse.create!(:question_id => question_id.to_i, :user => current_user, :response => response.to_i)\n end\n \n respond_to do |format|\n format.html { redirect_to @project, notice: 'Your answers have been saved. Thanks!' }\n format.json { render json: @question_responses, status: :created, location: @project }\n end\n end",
"def create\n @resumee = Resumee.new(resumee_params)\n\n respond_to do |format|\n if @resumee.save\n format.html { redirect_to @resumee, notice: 'Resumee was successfully created.' }\n format.json { render :show, status: :created, location: @resumee }\n else\n format.html { render :new }\n format.json { render json: @resumee.errors, status: :unprocessable_entity }\n end\n end\n end",
"def user_resumes\n user_id = params[:user_id]\n resumes = Resume.where(user_id: user_id)\n if resumes.empty?\n render json: [], status: :ok\n else\n render json: resumes, status: :ok\n end\n end",
"def create\n @responses = params[:responses].values.collect { |response| Response.new(response) }\n\n if @responses.all?(&:valid?)\n @responses.each(&:save!)\n\n Worker.new.async.notify_admin_of_response(@responses)\n render 'success'\n else\n render 'invalid'\n end\n end",
"def create_questionnaire_response(data)\n attrs = data.to_h.with_indifferent_access\n response = questionnaire_response_service.create(attrs)\n\n response.tap do |resp|\n if resp.response[:code] == SUCCESS_STATUS\n questionnaire_response.tap do |qr|\n qr.user_uuid = user.uuid\n qr.user_account = user.user_account\n qr.appointment_id = attrs.dig(:appointment, :id)\n qr.questionnaire_response_id = resp.resource.id\n qr.user = user\n qr.questionnaire_response_data = data\n\n qr.save\n end\n end\n end\n end",
"def resumes_offered\n\t @resumes = ReqMatch.where(:status => \"OFFERED\")\n\t\trender \"resumes/offered\"\n\tend",
"def update\n @resume_response = ResumeResponse.find(params[:id])\n\n respond_to do |format|\n if @resume_response.update_attributes(params[:resume_response])\n format.html { redirect_to @resume_response, notice: 'Resume response was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @resume_response.errors, status: :unprocessable_entity }\n end\n end\n end",
"def resume\n card_id = params[:card_id]\n subscription = UserSubscription.get_my_paused_subscription(current_user, params[:id])\n resumed_sub = subscription.resume_subscription(card_id) if subscription.present?\n\n if resumed_sub.present?\n result = { key: 'success', message: \"#{resumed_sub} has been resumed Successfully. please wait...\" }\n else\n result = { key: 'error', message: \"Sorry, you are not authorized for this subscription.\" }\n end\n\n render json: result.to_json\n end",
"def create\n @resume_entry = ResumeEntry.new(resume_entry_params)\n\n respond_to do |format|\n if @resume_entry.save\n format.html { redirect_to @resume_entry, notice: 'Resume entry was successfully created.' }\n format.json { render action: 'show', status: :created, location: @resume_entry }\n else\n format.html { render action: 'new' }\n format.json { render json: @resume_entry.errors, status: :unprocessable_entity }\n end\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PUT /resume_responses/1 PUT /resume_responses/1.json | def update
@resume_response = ResumeResponse.find(params[:id])
respond_to do |format|
if @resume_response.update_attributes(params[:resume_response])
format.html { redirect_to @resume_response, notice: 'Resume response was successfully updated.' }
format.json { head :no_content }
else
format.html { render action: "edit" }
format.json { render json: @resume_response.errors, status: :unprocessable_entity }
end
end
end | [
"def update\n @resume = Resume.find(params[:id])\n\n if @resume.update(resume_params)\n head :no_content\n else\n render json: @resume.errors, status: :unprocessable_entity\n end\n end",
"def update\n user = demo? ? demo_user : current_user\n @resume = user.resumes.find(params[:id])\n @resume.update_count += 1\n\n respond_to do |format|\n if @resume.update_attributes(params[:resume])\n format.html { \n redirect_to(html_resume_path(@resume), :notice => 'updated') \n }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @resume.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update_specific_resume\n id = params[:id]\n title = params[:title]\n revision = params[:revision]\n\n resume = Resume.find_by_id(id)\n if resume.nil?\n render json: {}, status: :not_found\n else\n resume.title = title\n resume.revision = revision\n save = resume.save\n if save\n render json: resume, status: :ok\n else\n msg = [\"Save the resume into the database?=#{!!save}\"]\n msg << resume.errors.messages.to_s unless resume.errors.messages.empty?\n Rails.logger.error(msg)\n render json: { errors: msg }, status: :bad_request\n end\n end\n end",
"def update\n @resume = @user.resumes.find(params[:id])\n respond_to do |format|\n if @resume.update_attributes(params[:resume])\n format.html { redirect_to @resume, notice: 'Резюме успешно обновлено' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @resume.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @user_resume.update(user_resume_params)\n format.html { redirect_to @user_resume, notice: 'User resume was successfully updated.' }\n format.json { render :show, status: :ok, location: @user_resume }\n else\n format.html { render :edit }\n format.json { render json: @user_resume.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @resume = Resume.find(params[:id])\n\n respond_to do |format|\n if @resume.update_attributes(params[:resume])\n flash[:notice] = 'Resume was successfully updated.'\n format.html { redirect_to(@resume) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @resume.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @resume_section = ResumeSection.find(params[:id])\n\n respond_to do |format|\n if @resume_section.update_attributes(params[:resume_section])\n format.html { redirect_to @resume_section, notice: 'Resume section was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @resume_section.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @resume_entry.update(resume_entry_params)\n format.html { redirect_to @resume_entry, notice: 'Resume entry was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @resume_entry.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @full_resume.update(full_resume_params)\n format.html { redirect_to @full_resume, notice: 'Full resume was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @full_resume.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @resume_doc = ResumeDoc.find(params[:id])\n\n respond_to do |format|\n if @resume_doc.update_attributes(params[:resume_doc])\n format.html { redirect_to @resume_doc, notice: 'Resume doc was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @resume_doc.errors, status: :unprocessable_entity }\n end\n end\n end",
"def upload_resume\n resume = params[:resume]\n\n if resume.nil?\n return render json: {success: false, message: \"Resume failed to upload\"}\n end\n \n resume_content = resume.read\n resume_old = @hacker.resume\n\n if resume_old.nil?\n @hacker.build_resume(resume: resume_content)\n else\n resume_old.resume = resume_content\n resume_old.save\n end\n @hacker.save\n\n return render json: {success: true, message: \"Resume uploaded succsessfully\"}\n end",
"def update\n @applicantresume = Applicantresume.find(params[:id])\n\n respond_to do |format|\n if @applicantresume.update_attributes(params[:applicantresume])\n format.html { redirect_to @applicantresume, notice: 'Applicantresume was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @applicantresume.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @resume_item.update(resume_item_params)\n format.html { redirect_to @resume_item, notice: 'Resume item was successfully updated.' }\n format.json { render :show, status: :ok, location: @resume_item }\n else\n format.html { render :edit }\n format.json { render json: @resume_item.errors, status: :unprocessable_entity }\n end\n end\n end",
"def resume\n resource_id = params[:resource_id]\n head :no_content\n end",
"def update\n @resume_template = ResumeTemplate.find(params[:id])\n\n respond_to do |format|\n if @resume_template.update_attributes(params[:resume_template])\n format.html { redirect_to @resume_template, notice: 'Resume template was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @resume_template.errors, status: :unprocessable_entity }\n end\n end\n end",
"def cmd_resume argv\n setup argv\n uuid = @hash['uuid']\n response = @api.resume(uuid)\n msg response\n return response\n end",
"def update\n respond_to do |format|\n if @resume_collection.update(resume_collection_params)\n format.html { redirect_to @resume_collection, notice: 'Resume collection was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @resume_collection.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @resume_response = ResumeResponse.new(params[:resume_response])\n @resume = Resume.find(params[:id])\n @resume_response.resume = @resume\n @resume_response.sender = @current_user\n @resume_response.recipient = @resume.user\n respond_to do |format|\n if @resume_response.save\n format.html { redirect_to home_path, notice: 'Отклик на резюме отправлен.' }\n format.json { render json: @resume_response, status: :created, location: @resume_response }\n else\n format.html { render action: \"new\" }\n format.json { render json: @resume_response.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n\t\trespond_to do |format|\n\t\t\tif @resume_entry.update(resume_entry_params)\n\t\t\t\tformat.html { redirect_to :edit_admin_resume_entry, notice: 'Resume entry was successfully updated.' }\n\t\t\t\tformat.json { render :show, status: :ok, location: @resume_entry }\n\t\t\telse\n\t\t\t\tformat.html { render :edit }\n\t\t\t\tformat.json { render json: @resume_entry.errors, status: :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /resume_responses/1 DELETE /resume_responses/1.json | def destroy
@resume_response = ResumeResponse.find(params[:id])
@resume_response.destroy
respond_to do |format|
format.html { redirect_to resume_responses_url }
format.json { head :no_content }
end
end | [
"def delete\n RestClient.delete \"#{@uri}/api/requests/request/#{@data['requestId']||@data['id']}\"\n puts ' Deleted request: '.red + \"#{@data['requestId']||@data['id']}\".light_blue\n end",
"def destroy\n @resume.destroy\n respond_to do |format|\n format.html { redirect_to resumes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @full_resume.destroy\n respond_to do |format|\n format.html { redirect_to full_resumes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @resume = Resume.find(params[:id])\n @resume.destroy\n\n respond_to do |format|\n format.html { redirect_to resumes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @resume = @user.resumes.find(params[:id])\n @resume.destroy\n\n respond_to do |format|\n format.html { redirect_to resumes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @resume_info.destroy\n respond_to do |format|\n format.html { redirect_to resume_infos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @resume_entry.destroy\n respond_to do |format|\n format.html { redirect_to resume_entries_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @create_resume = CreateResume.find(params[:id])\n @create_resume.destroy\n\n respond_to do |format|\n format.html { redirect_to create_resumes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @resume_section = ResumeSection.find(params[:id])\n @resume_section.destroy\n\n respond_to do |format|\n format.html { redirect_to resume_sections_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @applicantresume = Applicantresume.find(params[:id])\n @applicantresume.destroy\n\n respond_to do |format|\n format.html { redirect_to current_user }\n format.json { head :no_content }\n end\n end",
"def destroy\n @capture_resume.destroy\n respond_to do |format|\n format.html { redirect_to capture_resumes_url, notice: 'Capture resume was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @resume = Resume.find(params[:id])\n @resume.destroy\n\n respond_to do |format|\n format.html { redirect_to(resumes_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @recuest = Recuest.find(params[:id])\n @recuest.destroy\n\n respond_to do |format|\n format.html { redirect_to requests_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @resume_doc = ResumeDoc.find(params[:id])\n @resume_doc.destroy\n\n respond_to do |format|\n format.html { redirect_to resume_docs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @resume_item.destroy\n respond_to do |format|\n format.html { redirect_to resume_items_url, notice: 'Resume item was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @req = Req.find(params[:id])\n @req.destroy\n\n respond_to do |format|\n format.html { redirect_to reqs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @resume_exam.destroy\n respond_to do |format|\n format.html { redirect_to resume_exams_url, notice: 'Resume exam was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @resumelink.destroy\n respond_to do |format|\n format.html { redirect_to resumelinks_url, notice: 'Resumelink was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @resume_template = ResumeTemplate.find(params[:id])\n @resume_template.destroy\n\n respond_to do |format|\n format.html { redirect_to resume_templates_url }\n format.json { head :no_content }\n end\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Find the edge connecting the two specified nodes, if it exists. Note that our edges are undirected, so the order of the parameters `one` and `two` is not meaningful. | def find_edge(one, two)
edges.find do |e|
(e.one == one && e.two == two) || (e.two == one && e.one == two)
end
end | [
"def find_edge(node_1, node_2)\n @edges[node_1][node_2] if @edges[node_1]\n end",
"def find_or_add_edge(one, two)\n edge = find_edge(one, two)\n if edge\n edge.weight += 1\n edge\n else\n edges << Edge.new(one: one, two: two, weight: 1)\n edges.last\n end\n end",
"def edge a, b\n # works for both digraph and undirected graph\n @edges[a].each{|e| return e if e.connects? a, b}\n end",
"def edge a, b\n @edges[a] and @edges[a][b]\n end",
"def findEdge(vertex1, vertex2)\n visualedges = @visualGraph.visual_edges\n visualedges.each do |edge|\n if edge.v1.id == vertex1 && edge.v2.id == vertex2\n # edge from vertex1 to vertex2\n return edge\n elsif edge.v2.id == vertex1 && edge.v1.id == vertex2\n # edge from vertex2 to vertex1\n return edge\n end\n end\n return nil\n end",
"def add_edges( node_one, node_two, options = {} )\n\n if( node_one.class == Array )\n node_one.each do |no|\n add_edges( no, node_two, options )\n end\n else\n if( node_two.class == Array )\n node_two.each do |nt|\n add_edges( node_one, nt, options )\n end\n else\n edge = GraphViz::Edge::new( node_one, node_two, self )\n edge.index = @elements_order.size_of( \"edge\" )\n\n options.each do |xKey, xValue|\n edge[xKey.to_s] = xValue\n end\n\n @elements_order.push( {\n \"type\" => \"edge\",\n \"value\" => edge\n } )\n @loEdges.push( edge )\n\n return( edge )\n end\n end\n end",
"def get_edge vertex_a, vertex_b\n @edges.select{ |e| (e.vertices.include? vertex_a) && (e.vertices.include? vertex_b) }.first\n end",
"def edge_for(left, right)\n edges.detect { |edge| edge.left.equal?(left) && edge.right.equal?(right) }\n end",
"def edge?(node_1, node_2)\n node?(node_1) && @adj[node_1].key?(node_2)\n end",
"def get_edge(x, y)\n return nil unless edge?(x, y)\n w = @g.get_edge(index(x), index(y))\n Edge.new(x, y, w) if w\n end",
"def edge_with(head, tail)\n edges.find { |edge| edge.head == head && edge.tail == tail }\n end",
"def add_edge(node1, node2)\n if node1.graph == self && node2.graph == self\n node1.add_adjacent(node2)\n node2.add_adjacent(node1)\n else\n raise \"Error: #{node1.inspect} and #{node2.inspect} shoul belong to #{self.inspect}\"\n end\n end",
"def common_point(e1, e2)\n p1, p2 = @edges[e1]\n p3, p4 = @edges[e2]\n if p1 == p3 || p1 == p4\n p1\n elsif p2 == p3 || p2 == p4\n p2\n else\n nil\n end\n end",
"def edge?(v1,v2)\n e = @edges[v1]\n return false if e.nil?\n return e.include?(v2)\n end",
"def test_find_lonely_edge\n a = Node.create!\n b = Node.create!\n e = Default.create_edge(a,b)\n e = Default.find_edge(a,b)\n assert_equal e.ancestor, a\n assert_equal e.descendant, b\n end",
"def edge?(x, y)\n connected?(x, y)\n end",
"def common_point(e1, e2)\n p1, p2 = @points_of_edge[e1]\n p3, p4 = @points_of_edge[e2]\n if p1 == p3 || p1 == p4\n p1\n elsif p2 == p3 || p2 == p4\n p2\n else\n nil\n end\n end",
"def add_edge(x, y) # from x to y\n if @nodes.key?(x) and @nodes.key?(y)\n if @edges.key?(x)\n unless @edges[x].include?(y)\n @edges[x] << y\n end\n end\n if @back_edges.key?(y)\n unless @back_edges[y].include?(x)\n @back_edges[y] << x\n end\n end\n else\n raise RuntimeError.new \"#{x} and #{y} not both present\"\n end\n end",
"def bothE(*key)\n find_edges(key, :both)\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
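A usage sketch for the undirected find_edge document above, assuming a Struct-backed Edge whose one and two fields hold node ids; the document only shows that edges respond to one and two, so the Edge and WordGraph shapes here are illustrative.

# Sketch only: Edge and WordGraph are assumed shapes, not taken from the source.
Edge = Struct.new(:one, :two, :weight)

class WordGraph
  attr_reader :edges

  def initialize
    @edges = []
  end

  # Same undirected lookup as the document: argument order does not matter.
  def find_edge(one, two)
    edges.find do |e|
      (e.one == one && e.two == two) || (e.two == one && e.one == two)
    end
  end
end

graph = WordGraph.new
graph.edges << Edge.new(1, 2, 1)

graph.find_edge(1, 2) # => the edge added above
graph.find_edge(2, 1) # => the same edge, because both orderings are checked
graph.find_edge(2, 3) # => nil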
Return the maximum edge weight in the graph | def max_edge_weight
edges.map(&:weight).max
end | [
"def maximum_weight\n max = 0\n self.configuration.shipping_costs.each do |sc|\n max = sc[:weight_max] if sc[:weight_max] > max\n end\n return max.to_f\n end",
"def weight_max\r\n 251\r\n end",
"def weighted_max_score\n max_score * weight\n end",
"def get_edge_weight(source, destination)\n if contains_edge?(source, destination)\n @edges[source][destination]\n else\n Float::INFINITY\n end\n end",
"def maxweight(w)\n @weight = w if w>@weight\n end",
"def weight(g)\n edges.map { |e| g.w(e) }.reduce(0, :+)\n end",
"def maximum_weight\n Measured::Weight.new(150, :pounds)\n end",
"def edge_class\n WeightedDirectedEdge\n end",
"def max_degree\n self.vertices.map{|v| self.degree(v)}.max\n end",
"def w(edge)\n i, j = edge\n raise ArgumentError, \"Invalid edge: #{edge}\" if i.nil? || j.nil?\n raise \"Edge not found: #{edge}\" unless has_edge?(*edge)\n init_weights if @weight.nil?\n @weight[i - 1][j - 1]\n end",
"def get_edge(x, y)\n edge = get(x, y)\n edge[:weight] if edge\n end",
"def max_weight\n rsp = error_wrap{JSON.parse(open(\"#{api_url}?method=ems.get.max.weight\").read)}['rsp']\n if rsp['stat'] == Spree::EmsShipping::Constants::RESPONSE_STATE_OK\n return rsp['max_weight'].to_f\n else\n return rsp['err']\n end \n end",
"def max_weight\n Course::Assessment.find(@assessment_id).questions.pluck(:weight).max\n end",
"def node_depth_max()\n #This is a stub, used for indexing\n end",
"def currentWeight() weighins.last.weight end",
"def weights\n return @weights if @weights\n return @weights = [] if array.empty?\n\n lo = edges.first\n step = edges[1] - edges[0]\n\n max_index = ((@max - lo) / step).floor\n @weights = Array.new(max_index + 1, 0)\n\n array.each do |x|\n index = ((x - lo) / step).floor\n @weights[index] += 1\n end\n\n return @weights\n end",
"def max_in_degree\n return nil if to_a.empty?\n vertices.map { |v| in_degree(v)}.max\n end",
"def max_weights\n # find the max weigth\n temp = 0\n index = -1\n (0..2).each do |i|\n if @rps[i] > temp\n temp = @rps[i]\n index = i\n end\n end\n weights = []\n # Adds all the max weights\n (0..2).each {|i| weights << i if @rps[i] == @rps[index]}\n weights\n end",
"def max\n val = max_node(@root).data if @root\n val\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
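A short companion note for max_edge_weight: with the same assumed Edge shape it is simply Enumerable#max over the weights, and it returns nil when the graph has no edges yet. That empty-graph caveat is an observation about Array#max, not something the source states.

Edge = Struct.new(:one, :two, :weight) # same assumed shape as the previous sketch

edges = [Edge.new(1, 2, 3), Edge.new(2, 3, 7), Edge.new(1, 3, 2)]
edges.map(&:weight).max # => 7

[].map(&:weight).max # => nil, so an empty graph needs a guard or a default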
Add nodes and edges for a given gap. This function adds nodes and edges to the graph for a given sliding-window size. | def add_nodes_for_gap(gap)
analyzer = Frequency.call(
dataset: dataset,
ngrams: gap,
num_blocks: 1,
split_across: true,
progress: ->(p) { progress&.call((p.to_f / 100 * 33).to_i + 33) }
)
ngrams = analyzer.blocks[0]
ngrams.each do |(gram, _)|
gram_words = gram.split(' ')
stemmed_gram_words = gram_words.map(&:stem)
next if focal_word && !stemmed_gram_words.include?(focal_word_stem)
nodes = stemmed_gram_words.zip(gram_words).map do |w|
find_or_add_node(*w)
end
nodes.combination(2).each do |pair|
find_or_add_edge(pair[0].id, pair[1].id)
end
end
end | [
"def add_nodes_for_gap(gap)\n words.each_cons(gap).each_with_index do |gap_words, i|\n # Get the stemmed words to go with the un-stemmed words\n gap_words_stem = words_stem[i, gap]\n\n # Update progress meter\n if progress\n val = (i.to_f / (words.size - 1).to_f) * (66.0 / gaps.size)\n progress.call(val.to_i + 33)\n end\n\n # Cull based on focal word (stemmed) if present\n next if focal_word && !gap_words_stem.include?(focal_word_stem)\n\n # Find or create nodes for all of these words\n nodes = gap_words_stem.zip(gap_words).map do |w|\n find_or_add_node(*w)\n end\n\n nodes.combination(2).each do |pair|\n find_or_add_edge(pair[0].id, pair[1].id)\n end\n end\n end",
"def create_knight_tour_graph(board_size)\n new_graph = Graph.new()\n board_size.times do |x|\n board_size.times do |y|\n new_graph.add_vertex([x,y])\n end\n end\n knight_legal_moves(new_graph)\n new_graph\nend",
"def range_add_edge(item); end",
"def put_nodes\n\t\t\tx = 0\n\t\t\twhile x <=7\n\t\t\t\ty = 7 \n\t\t\t\twhile y >= 0\n\t\t\t\t\tadd_node( Node.new(pos = [x,y], board = self) )\n\t\t\t\t\ty -=1\n\t\t\t\tend\n\t\t\t\tx+=1\n\n\t\t\tend\n\t\tend",
"def rgt_gap\n gap = Gap.new\n gap.left_col_val = self.right_col_val\n if parent.children.last == self \n gap.right_col_val = parent.right_col_val \n else\n next_index = parent.children.index(self)+1\n gap.right_col_val = parent.children[next_index].left_col_val\n end \n return gap\nend",
"def add_edges_to_graph\n @board.each do |coordinate, cell|\n @board.each do |other_coordinate, other_cell|\n if adjacent?(@board[coordinate], @board[other_coordinate])\n add_edge(@board[coordinate], @board[other_coordinate])\n end\n end\n end\n end",
"def fill!\n adjust = 0\n gaps.each_with_index do |gap, index|\n match = index + adjust\n items = block_given? && yield(gap)\n if items != gap\n fill = rawgaps.at(match).fill(items)\n adjust += plug_gap!(match, fill) - 1\n end\n end\n self\n end",
"def add_to_neighbors(node)\n @neighbors << node\n end",
"def fillWindow(dest_ip, seqNum, data, wSize)\n\twindow = Array.new\n\n\tfor i in 0..wSize-1\n\t\tpacket = Packet.new\n\n\t\tpacket.dest_ip = dest_ip\n\t\tpacket.src_ip = $local_ip\n\t\tpacket.type = 1\n\t\tpacket.seqNum = seqNum + i\n\t\tpacket.ackNum = seqNum + i + 1\n\t\tpacket.data = data[seqNum + i]\n\n\t\twindow.push(packet)\n\tend\n\n\treturn window\nend",
"def add_nodes_to_graph(graph, nodes)\n nodes.each do |node|\n graph.add_node(node)\n end\nend",
"def add from, to\n @nodes << from unless @nodes.include?(from)\n @nodes << to unless @nodes.include?(to)\n @follow_links[from] ||= []\n @follow_links[from] << to\n @back_links[to] ||= []\n @back_links[to] << from\n end",
"def add_edges(node, neighbours)\n # we store edges and (for easier lookup) always use the lower index first\n neighbours.each do |n|\n v1 = [node, n].min\n v2 = [node, n].max\n @edges.push([v1, v2])\n end\n end",
"def insert_edge(new_edge_val, node_from_val, node_to_val)\n nodes = { node_from_val => nil, node_to_val => nil }\n @nodes.each do |node|\n next unless nodes.include?(node.value)\n nodes[node.value] = node\n break if nodes.all? { |_node_val, node_obj| node_obj }\n end\n nodes.each do |node_val, _node_obj|\n nodes[node_val] = nodes[node_val] || insert_node(node_val)\n end\n node_from = nodes[node_from_val]\n node_to = nodes[node_to_val]\n new_edge = Edge.new(new_edge_val, node_from, node_to)\n node_from.edges << new_edge\n node_to.edges << new_edge\n @edges << new_edge\n new_edge\n end",
"def gapfill(finishm_graph, probe_index1, probe_index2, options)\n start_onode = finishm_graph.velvet_oriented_node(probe_index1)\n end_onode_inward = finishm_graph.velvet_oriented_node(probe_index2)\n unless start_onode and end_onode_inward\n raise \"Unable to retrieve both probes from the graph for gap #{gap_number} (#{gap.coords}), fail\"\n end\n\n # The probe from finishm_graph points in the wrong direction for path finding\n end_onode = Bio::Velvet::Graph::OrientedNodeTrail::OrientedNode.new\n end_onode.node = end_onode_inward.node\n end_onode.first_side = end_onode_inward.starts_at_start? ? Bio::Velvet::Graph::OrientedNodeTrail::END_IS_FIRST : Bio::Velvet::Graph::OrientedNodeTrail::START_IS_FIRST\n\n adjusted_leash_length = finishm_graph.adjusted_leash_length(probe_index1, options[:graph_search_leash_length])\n log.debug \"Using adjusted leash length #{adjusted_leash_length }\" if log.debug?\n\n cartographer = Bio::AssemblyGraphAlgorithms::AcyclicConnectionFinder.new\n trails = cartographer.find_trails_between_nodes(\n finishm_graph.graph, start_onode, end_onode, adjusted_leash_length, {\n :recoherence_kmer => options[:recoherence_kmer],\n :sequences => finishm_graph.velvet_sequences,\n :max_explore_nodes => options[:max_explore_nodes],\n :max_gapfill_paths => options[:max_gapfill_paths],\n }\n )\n if trails.circular_paths_detected\n log.warn \"Circular path detected here, not attempting to gapfill\"\n end\n # Convert the trails into OrientedNodePaths\n trails = trails.collect do |trail|\n path = Bio::Velvet::Graph::OrientedNodeTrail.new\n path.trail = trail\n path\n end\n\n acon = Bio::AssemblyGraphAlgorithms::ContigPrinter::AnchoredConnection.new\n acon.start_probe_noded_read = finishm_graph.probe_node_reads[probe_index1]\n acon.end_probe_noded_read = finishm_graph.probe_node_reads[probe_index2]\n acon.start_probe_contig_offset = options[:contig_end_length]\n acon.end_probe_contig_offset = options[:contig_end_length]\n acon.paths = trails\n\n return acon\n end",
"def move_node!(tg,tga,i)\n #first just add node i in\n tga[0].each_index do |ii|\n if (i == tga[0][ii][0])\n tg[0].push(tga[0][ii])\n tga[0][ii] = nil\n break\n end\n end\n #then add the edges, only add edges for which both nodes exist in tg \n tga[1].each_index do |ii|\n if tg[0].index{|item| item[0] == tga[1][ii][0]} && tg[0].index{|item| item[0] == tga[1][ii][1]}\n tg[1].push(tga[1][ii])\n tga[1][ii] = nil\n end\n end\n tga[0].compact!\n tga[1].compact!\nend",
"def build_sketch_graph\n\t@temp_hop_record = Hash.new\n\t@processed_list = Hash.new\n\t$node_list.each do |n|\n\t\t#Nodes in sketched graph: queries, user_inputs\n\t\t#if n.getInstr.getFromUserInput or (n.isQuery? and n.isWriteQuery?) or (n.getInstr.instance_of?AttrAssign_instr and n.getInstr.getFuncname.index('!') == nil)\n\t\tif n.isQuery? #or n.getInstr.getFromUserInputor isTableAttrAssign(n) \n\t\t\tn.Tnode = TreeNode.new(n)\n\t\t\t$sketch_node_list.push(n.Tnode)\t\n\t\t\t@temp_hop_record[n] = Array.new\n\t\t\t@processed_list[n] = Array.new\n\t\t\t@temp_hop_record[n].push(n)\n\t\tend \n\tend\n\tfor i in 0...$node_list.length\n\t\tadded_edge = false\n\t\tno_changes = true\n\t\t$node_list.each do |n|\n\t\t\tif n.Tnode != nil\n\t\t\t\t@temp_hop_record[n].push(nil)\n\t\t\t\ttemp_node = @temp_hop_record[n].shift\n\t\t\t\tstep = 0\n\t\t\t\twhile temp_node != nil and step < 10000 do\n\t\t\t\t\tstep += 1\n\t\t\t\t\tno_changes = false\n\t\t\t\t\ttemp_node.getDataflowEdges.each do |e|\n\t\t\t\t\t\tif e.getToNode.Tnode != nil and e.getToNode != n\n\t\t\t\t\t\t\tif n.Tnode.hasChildren(e.getToNode.Tnode) == false and (n.getIndex < e.getToNode.getIndex)\n\t\t\t\t\t\t\t\tn.Tnode.addChildren(e.getToNode.Tnode, i)\n\t\t\t\t\t\t\t\t#puts \"\\tAdd edge: #{n.getIndex}:#{n.getInstr.toString} -> #{e.getToNode.getIndex}:#{e.getToNode.getInstr.toString}\"\n\t\t\t\t\t\t\t\tadded_edge = true\n\t\t\t\t\t\t\tend\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\tif e.getToNode != n \n\t\t\t\t\t\t\t\tif @processed_list[n].include?e\n\t\t\t\t\t\t\t\telsif e.getToNode.getIndex < e.getFromNode.getIndex #returnv\n\t\t\t\t\t\t\t\t\te.getToNode.getDataflowEdges.each do |e1|\n\t\t\t\t\t\t\t\t\t\tif e1.getToNode.getIndex > temp_node.getIndex\n\t\t\t\t\t\t\t\t\t\t\t@temp_hop_record[n].push(e1.getToNode)\n\t\t\t\t\t\t\t\t\t\t\t@processed_list[n].push(e1)\n\t\t\t\t\t\t\t\t\t\tend \n\t\t\t\t\t\t\t\t\tend\n\t\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\t\t@temp_hop_record[n].push(e.getToNode)\n\t\t\t\t\t\t\t\t\t@processed_list[n].push(e)\n\t\t\t\t\t\t\t\tend\n\t\t\t\t\t\t\tend\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\t\ttemp_node = @temp_hop_record[n].shift\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\tif added_edge\n\t\t\t#puts \"iteration #{i} ||\"\n\t\tend\n\t\tif no_changes\n\t\t\tbreak\n\t\tend\n\tend\n=begin\n\tgraph_write($graph_file, \"digraph sketch {\\n\")\n\t$node_list.each do |n|\n\t\tif n.Tnode != nil\n\t\t\tif n.isQuery?\n\t\t\t\tcolor = \"blue\"\n\t\t\telsif n.getInstr.instance_of?AttrAssign_instr\n\t\t\t\tcolor = \"darkorange\"\n\t\t\telse\n\t\t\t\tcolor = \"crimson\"\n\t\t\tend\n\t\t\tgraph_write($graph_file, \"\\tn#{n.getIndex} [label=<<i>#{n.getIndex}__#{n.getInstr.toString2}</i>> color=#{color}]; \\n\")\n\t\t\tn.Tnode.children.each do |c|\n\t\t\t\tgraph_write($graph_file, \"\\tn#{n.getIndex} -> n#{c.node.node.getIndex} [label=\\\"#{c.dist}\\\"]\\n\")\n\t\t\tend\n\t\tend\n\tend\n\tgraph_write($graph_file, \"}\")\n=end\nend",
"def fill_gaps(movie)\n sorted_visuals = movie.visual_sequence.sort\n gaps = 0\n #Iterate through visuals and determine gaps => \n #add generated blackness to fill in the gaps\n 0.upto(sorted_visuals.length - 1) { |i|\n unless i == (sorted_visuals.length - 1)\n clip_a = sorted_visuals[i]\n clip_b = sorted_visuals[i + 1]\n\n delta_x = clip_b.place.milliseconds - (clip_a.place.milliseconds + clip_a.length)\n unless delta_x == 0 or delta_x < 0 #ToDo!: Why there is even a possibility for negative values?\n gap_place = \"00:00:00:\" + (clip_a.place.milliseconds + clip_a.length).to_s\n gap_start = \"00:00:00:00\"\n gap_end = \"00:00:00:\" + delta_x.to_s\n \n gap = Visual.new(\"blackness\", \"nil\", gap_start, gap_end, gap_place, true, [], [])\n sorted_visuals << gap\n gaps += 1\n end\n end\n }\n movie.visual_sequence.visuals = sorted_visuals\n return gaps\n end",
"def build_edges(users, start_time, finish_time)\n users.each do |user|\n schedule = ModelFabric.get_class(SocialFramework.schedule_class).find_or_create_by(user_id: user.id)\n\n events = schedule.events_in_period(start_time, finish_time)\n i = 0\n\n @slots.each do |slot|\n if events.empty? or slot_empty?(slot, events[i])\n slot.add_edge(user)\n slot.attributes[:gained_weight] += user.attributes[:weight] if user.attributes[:weight] != :fixed\n end\n if not events.empty? and((slot.id + @slots_size).to_datetime >= events[i].finish.to_datetime)\n events.clear if events[i] == events.last\n i += 1\n end\n end\n end\n end",
"def genWindow(initNum, windowSize, destIP)\n i = 1\n seqNum = initNum\n while(i <= windowSize.to_i)\n packet = makePacket(destIP, $localIP, 1, seqNum, 0)\n $window.push(packet)\n seqNum += 1\n puts $window[i - 1].data\n\ti += 1\n end\n return seqNum\nend"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Find an edge and increment its weight, or add it if needed | def find_or_add_edge(one, two)
edge = find_edge(one, two)
if edge
edge.weight += 1
edge
else
edges << Edge.new(one: one, two: two, weight: 1)
edges.last
end
end | [
"def increase_weight(node, dw)\n node = find(node)\n node.update_weight(node.weight + dw) unless node.nil?\n end",
"def weight_node(node, weight)\n @edges.each do |source, dests|\n dests.each do |dest, _|\n if dest == node\n @edges[source][dest] = weight\n end\n end\n end\n end",
"def edge_inc(i, j)\n inode = (nodes[i] ||= Node.new(i))\n jnode = (nodes[j] ||= Node.new(j))\n inode.edge_inc(jnode)\n jnode.edge_inc(inode)\n end",
"def _edge_labeling_inc\n end",
"def hyperball_on_edge(edge)\n update_node = @counters.get_counter(edge[1])\n neighbor_node = @counters.get_counter(edge[0])\n\n update_node.merge(neighbor_node)\n end",
"def weight(g)\n edges.map { |e| g.w(e) }.reduce(0, :+)\n end",
"def add_edge(source, destination, weight)\n super(source, destination, weight) if weight > 0\n end",
"def add_edge(source, destination, weight)\n if @edges.key?(source)\n @edges[source][destination] = weight\n else\n @edges[source] = { destination => weight }\n end\n end",
"def addweight(w)\n @weight += w\n end",
"def increase_weight(v, u, dw)\n id1, id2 = v.id, u.id\n adj_table[id1].increase_weight(_create_node(id2), dw) if has_vertex?(id1)\n adj_table[id2].increase_weight(_create_node(id1), dw) if has_vertex?(id2)\n end",
"def path_weight_to(other)\n edge_class.shortest_path(self,other,:method => :djikstra).map{|edge| edge.weight.to_f}.sum\n end",
"def get_edge_weight(source, destination)\n if contains_edge?(source, destination)\n @edges[source][destination]\n else\n Float::INFINITY\n end\n end",
"def update_edge?(from, to ,weight)\n bool = false\n @edges.each() do |edge|\n if (edge.from() == from and edge.to() == to) or (edge.from() == to and edge.to() == from)\n edge.weight = weight\n bool = true\n end\n end\n return bool\n end",
"def relax(edge)\n return if @distance_to[edge.to] <= @distance_to[edge.from] + edge.weight\n\n @distance_to[edge.to] = @distance_to[edge.from] + edge.weight\n @path_to[edge.to] = edge.from\n\n # If the node is already in this priority queue, the only that happens is\n # that its distance is decreased.\n @pq.insert(edge.to, @distance_to[edge.to])\n end",
"def next_edge_index\n # starting at zero\n @next_edge_index ||= 0\n\n @next_edge_index += 1\n\n (@next_edge_index - 1)\n end",
"def set_w(edge, weight)\n if edge[0].nil? || edge[1].nil?\n raise ArgumentError, \"Invalid edge: #{edge}\"\n end\n unless weight.is_a?(Integer)\n raise TypeError, 'Edge weight must be integer'\n end\n init_weights if @weight.nil?\n i = edge[0] - 1\n j = edge[1] - 1\n raise \"Edge not found: #{edge}\" unless has_edge?(*edge)\n @weight[i] ||= []\n @weight[j] ||= []\n @weight[i][j] = weight\n @weight[j][i] = weight\n end",
"def w(edge)\n i, j = edge\n raise ArgumentError, \"Invalid edge: #{edge}\" if i.nil? || j.nil?\n raise \"Edge not found: #{edge}\" unless has_edge?(*edge)\n init_weights if @weight.nil?\n @weight[i - 1][j - 1]\n end",
"def path_weight_to(other)\n shortest_path_to(other,:method => :djikstra).map{|edge| edge.weight.to_f}.sum\n end",
"def upgrade( element, inc = 1 )\n new_weight = weight_of(element) + inc\n set_weight( element, new_weight )\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns an array of items which have a paid quantity greater than zero. That is, any items which have nonbonus components. | def candidate_items
sku_items.select {|i| i.paid_quantity > 0}
end | [
"def paid_quantity\n components.reject {|c| c.kind == 'bonus'}.map(&:quantity).sum\n end",
"def unpaid_items\n reis = registrant_expense_items.joins(:registrant).where(registrants: {deleted: false})\n reis.free.select{ |rei| !rei.registrant.reg_paid? } + reis.where(free: false)\n end",
"def total_items\n @items.reject { |item| item.quantity == 0 }\n end",
"def reject_zeros\n @items.reject { |i| i.quantity.to_i == 0 }\n end",
"def items_no_sale\r\n sale = Array.new\r\n\r\n if(self.items.size > 0)\r\n self.items.each {|val|\r\n if (!val.is_active )\r\n sale.push(val)\r\n end\r\n }\r\n end\r\n\r\n return sale\r\n end",
"def orders_with_items() orders.select {|o| o.line_items_unshipped_and_uncancelled.any? } end",
"def get_orders_missing_items\n\t\torders_missing_items = Array.new\n\t\tself.mws_orders.each do |o|\n\t\t\tif o.get_item_quantity_missing > 0 || o.get_item_quantity_ordered == 0\n\t\t\t\torders_missing_items << o\n\t\t\tend\n\t\tend\n\t\treturn orders_missing_items\n\tend",
"def get_eligible_quantities\n quantity_ids = Array.new\n Quantity.all.each do | quantity |\n # if any promotions already applied, consider number of items in basket\n # minus number of items in applied promotions\n if @using_promotions\n if quantity.item_quantity <= @purchasing_items_count[quantity.item_id].to_i -\n (@using_promotions_count[quantity.id].to_i *\n quantity.item_quantity)\n quantity_ids << quantity.id\n end\n # if no promotions applied consider only number of items in the basket\n else\n if quantity.item_quantity <= @purchasing_items_count[quantity.item_id].to_i\n quantity_ids << quantity.id\n end\n end\n end\n\n @quantities = Quantity.where(id: quantity_ids)\n end",
"def get_products_bought\n res = []\n\n @data.product_qty.each do |prd_id, qty|\n qty = qty.to_i\n if qty > 0\n res << [ qty, @data.products.with_id(prd_id) ]\n end\n end\n\n res\n end",
"def paid_items\n Item.where(:id => self.paid_selected_items.pluck(:item_id))\n end",
"def billable_donations\n line_items.reject{|d| d.amount.zero? || d.class == Fee}\n end",
"def people_not_paid\n all_paid? ? [] : people - paid_infos.keys\n end",
"def paid_products\n Product.joins(:customer).where.not(price: '0.0').where(customers: {id: id})\n end",
"def eligible_items(checkout_items)\n checkout_items.select do |item| \n self.applies_to?(item) && (!item.discounted || (item.discounted && item.discount_rule == self))\n end\n end",
"def unpaid_carts\n Cart.where(:paid => false, :user => self.id)\n end",
"def items_in_cart\n Item.where(:id => self.selected_items.where(:paid => nil).pluck(:item_id))\n end",
"def paid_total\n components.reject {|c| c.kind == 'bonus'}.reduce(SpookAndPuff::Money.zero){|a, c| a + c.total}\n end",
"def items_in_stock\n cart_items.none? { |item| item.item_quantity > item.product.quantity }\n end",
"def get_products_bought\n res = []\n mem_prd = @data.products.with_type(@data.mem.mem_type)\n raise \"Can't find product #{@data.mem.mem_type.inspect}\" unless mem_prd\n res << [ 1, mem_prd ]\n\n @data.product_qty.each do |prd_id, qty|\n if qty > 0\n res << [ qty, @data.products.with_id(prd_id) ]\n end\n end\n\n res\n end"
] | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |