branch_name (stringclasses, 15 values) | target (stringlengths, 26 to 10.3M) | directory_id (stringlengths, 40 to 40) | languages (sequencelengths, 1 to 9) | num_files (int64, 1 to 1.47k) | repo_language (stringclasses, 34 values) | repo_name (stringlengths, 6 to 91) | revision_id (stringlengths, 40 to 40) | snapshot_id (stringlengths, 40 to 40) | input (stringclasses, 1 value) |
---|---|---|---|---|---|---|---|---|---|
refs/heads/master | <file_sep>#if TOOLS
using Godot;
using System;
[Tool]
public class GdOpenTdb : EditorPlugin
{
public override void _EnterTree()
{
base._EnterTree();
var path = (GetScript() as Resource).ResourcePath.GetBaseDir();
var script = ResourceLoader.Load(path + "/OpenTdbHTTP.cs") as Script;
//var iconTex = ResourceLoader.Load(path + "/icon.png") as Texture;
AddCustomType("OpenTdb", "HTTPRequest", script, null);
}
public override void _ExitTree()
{
base._ExitTree();
RemoveCustomType("OpenTdb");
}
}
#endif
<file_sep># Godot OpenTdb Plugin
WIP
* Add Plugin to Project Settings
* Add OpenTdb as a Child Node to any Node
* Then connect to it from a script, as in the examples below
Example Usage (GDScript):
```
extends Node2D
func _ready():
    $OpenTdb.connect("QuestionsLoaded", self, "_on_questions_loaded")
    $OpenTdb.LoadTriviaQuestions(10)
    pass
func _on_questions_loaded(questions_arr):
    print("Questions loaded in GDScript!")
    for q in questions_arr:
        print(q)
```
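For C# scripts the usage is similar; here is a minimal sketch (the signal and method names come from `OpenTdbHTTP.cs` below, while the node path and the handler are assumptions):

```
using Godot;

public class TriviaDemo : Node2D
{
    public override void _Ready()
    {
        // Assumes the OpenTdb node was added as a child named "OpenTdb"
        var tdb = GetNode<OpenTdbHTTP>("OpenTdb");
        tdb.Connect("QuestionsLoaded", this, nameof(OnQuestionsLoaded));
        tdb.LoadTriviaQuestions(10);
    }

    private void OnQuestionsLoaded(Godot.Collections.Array questions)
    {
        GD.Print("Questions loaded in C#!");
        foreach (var q in questions)
        {
            GD.Print(q);
        }
    }
}
```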
<file_sep>using Godot;
using System;
using Godot.Collections;
using Array = Godot.Collections.Array;
using System.Net;
public class OpenTdbHTTP : HTTPRequest
{
public const String OpenTDB_BASE_URL = "https://opentdb.com/";
[Export] public EEncoding apiEncoding;
[Export] public EType apiType;
[Export] public EDifficulty apiDifficulty;
[Signal]
public delegate void QuestionsLoaded(Array<Question> questions);
[Signal]
public delegate void QuestionsLoadError(String errorMessage);
public enum EEncoding
{
Default,
UrlLegacy,
Url3986,
Base64
}
public enum EDifficulty
{
All,
Easy,
Medium,
Hard
}
public enum EType
{
All,
TrueFalse,
MultipleChoice
}
public static class Encoding
{
public static String Default { get { return null; } }
public static String UrlLegacy { get { return "urlLegacy"; } }
public static String Url3986 { get { return "url3986"; } }
public static String Base64 { get { return "base64"; } }
public static String GetFromEnum(EEncoding e)
{
switch (e)
{
case EEncoding.UrlLegacy:
return UrlLegacy;
case EEncoding.Url3986:
return Url3986;
case EEncoding.Base64:
return Base64;
case EEncoding.Default:
return Default;
default:
return null;
}
}
}
public static class Difficulty
{
public static String Easy { get { return "easy"; } }
public static String Medium { get { return "medium"; } }
public static String Hard { get { return "hard"; } }
public static String GetFromEnum(EDifficulty d)
{
switch (d)
{
case EDifficulty.Medium:
return Medium;
case EDifficulty.Hard:
return Hard;
case EDifficulty.Easy:
return Easy;
case EDifficulty.All:
return null;
default:
return null;
}
}
}
public static class Type
{
public static String MultipleChoice { get { return "multiple"; } }
public static String TrueFalse { get { return "boolean"; } }
public static String GetFromEnum(EType t)
{
switch (t)
{
case EType.MultipleChoice:
return MultipleChoice;
case EType.TrueFalse:
return TrueFalse;
case EType.All:
return null;
default:
return null;
}
}
}
public override void _Ready()
{
this.Connect("request_completed", this, "OnTriviaRequestComplete");
}
public void LoadTriviaQuestions(int amount)
{
FetchTriviaQuestions(amount, null, Difficulty.GetFromEnum(apiDifficulty), Type.GetFromEnum(apiType), Encoding.GetFromEnum(apiEncoding) );
}
private void FetchTriviaQuestions(int amount, String category = null, String difficulty = null, String type = null, String encoding = null)
{
string[] customHeaders = null;
var validateSsl = true;
String baseUrl = OpenTDB_BASE_URL + "api.php";
String url = baseUrl + "?amount=" + amount;
if (difficulty != null)
{
url += "&difficulty=" + difficulty;
}
if (category != null)
{
url += "&category=" + category;
}
if (type != null)
{
url += "&type=" + type;
}
if (encoding != null)
{
url += "&encode=" + encoding;
}
GD.Print("[GdOpenTdb] Fetching from URL: " + url);
var error = this.Request(url, customHeaders, validateSsl, HTTPClient.Method.Get);
if (error != Error.Ok)
{
OnTriviaRequestError(error);
}
}
private void OnTriviaRequestComplete(Result result, int response_code, string[] headers, byte[] body)
{
GD.Print("[GdOpenTdb] Loading Questions success!");
var jsonStr = System.Text.Encoding.UTF8.GetString(body);
JSONParseResult dict = JSON.Parse(jsonStr);
if (dict.Error != 0)
{
GD.Print("Error: "+dict.Error+"/", dict.ErrorLine);
}
else
{
Dictionary parsed = dict.Result as Dictionary;
Godot.Collections.Array results = parsed["results"] as Godot.Collections.Array;
Array<Question> ParsedQuestions = new Array<Question>();
foreach (Dictionary r in results)
{
ParsedQuestions.Add(CreateFromJsonResult(r));
}
EmitSignal(nameof(QuestionsLoaded), ParsedQuestions);
}
}
// {category:Geography, correct_answer:False, difficulty:easy, incorrect_answers:[True], question:Greenland is covered with grass and Iceland covered with ice., type:boolean}
// {category:Entertainment: Video Games, correct_answer:The Hotshot, difficulty:medium, incorrect_answers:[The Discard, The Elephant, The Mohawk], question:In WarioWare: Smooth Moves, which one of these is NOT a Form?, type:multiple}
private Question CreateFromJsonResult(Dictionary res)
{
Question newQ = new Question();
newQ.category = res["category"] as String;
newQ.questionString = WebUtility.HtmlDecode(res["question"] as String);
newQ.typeString = res["type"] as String;
newQ.difficultyString = res["difficulty"] as String;
newQ.questionDifficulty = Question.GetDifficultyFromString(res["difficulty"] as String);
newQ.questionType = Question.GetTypeFromString(res["type"] as String);
Array wrongAnswers = res["incorrect_answers"] as Array;
Array<String> waS = new Array<string>();
foreach (var wa in wrongAnswers)
{
waS.Add(WebUtility.HtmlDecode(wa as String));
}
newQ.wrongAnswers = waS;
newQ.correctAnswer = WebUtility.HtmlDecode(res["correct_answer"] as String);
return newQ;
}
private void OnTriviaRequestError(Error e)
{
String ErrorMsg = "[GdOpenTdb] Error loading Questions from OpenTDB. Error: " + e;
GD.PrintErr(ErrorMsg);
EmitSignal(nameof(QuestionsLoadError), ErrorMsg);
}
}
<file_sep>using Godot;
using Godot.Collections;
using System;
using Array = Godot.Collections.Array;
public class Question : Godot.Object
{
public enum Type
{
TRUE_FALSE,
MULTIPLE_CHOICE
}
public enum Difficulty
{
EASY,
MEDIUM,
HARD
}
public String questionString { set; get; } = "Ques";
public String category { get; set; } = "Cat";
public bool isRightQuestion { set; get; } = false;
public string typeString { set; get; } = "boolean";
public string difficultyString { set; get; } = "easy";
public Type questionType { set; get; } = Type.TRUE_FALSE;
public Difficulty questionDifficulty { set; get; } = Difficulty.EASY;
public String correctAnswer { set; get; }
public Array<String> wrongAnswers { set; get; }
public static Type GetTypeFromString(String typeString)
{
if (typeString.Equals(OpenTdbHTTP.Type.TrueFalse))
{
return Type.TRUE_FALSE;
}
if (typeString.Equals(OpenTdbHTTP.Type.MultipleChoice))
{
return Type.MULTIPLE_CHOICE;
}
return Type.MULTIPLE_CHOICE;
}
public static Difficulty GetDifficultyFromString(String difficultyString)
{
if (difficultyString.Equals(OpenTdbHTTP.Difficulty.Easy))
{
return Difficulty.EASY;
}
if (difficultyString.Equals(OpenTdbHTTP.Difficulty.Medium))
{
return Difficulty.MEDIUM;
}
if (difficultyString.Equals(OpenTdbHTTP.Difficulty.Hard))
{
return Difficulty.HARD;
}
return Difficulty.HARD;
}
public override string ToString()
{
return "Question: [" + questionString + "] in Category [" + category + "]";
}
}
| 302fdf24cb769be6acae2ab55c9a8e02acd6f3ef | [
"Markdown",
"C#"
] | 4 | C# | dAmihl/GdOpenTdb | 7904ed45574a68a4b4325439eabfe4d91796dcb6 | 4b597d3f7ff8673d8e1ac4457f5a2fe94cb26a22 | |
refs/heads/master | <repo_name>sriharshakappala/incident-alerter<file_sep>/runner.rb
require './services/runner_service'
runner = RunnerService.instance()
runner.add_project('Project A')
runner.add_project('Project B')
runner.add_employee('Employee 1', '<EMAIL>', '111')
runner.add_employee('Employee 2', '<EMAIL>', '222')
runner.add_employee('Employee 3', '<EMAIL>', '333')
runner.assign_project('Project A', 'Employee 1')
runner.assign_project('Project A', 'Employee 2')
runner.assign_project('Project B', 'Employee 2')
runner.assign_project('Project B', 'Employee 3')
runner.set_level('Project A', 'Employee 1', 0)
runner.set_level('Project A', 'Employee 2', 1)
runner.set_level('Project A', 'Employee 3', 0)
runner.unset_level('Project A', 0)
runner.set_level('Project A', 'Employee 2', 3)
runner.set_level('Project A', 'Employee 3', 0)
binding.pry
pp runner.inspect
<file_sep>/models/employee.rb
require 'securerandom'
class Employee
attr_accessor :name, :email_id, :phone_number
def initialize name, email_id, phone_number
@employee_id = employee_id
@name = name
@email_id = email_id
@phone_number = phone_number
end
def employee_id
SecureRandom.hex(10)
end
end
<file_sep>/services/runner_service.rb
require 'singleton'
require 'pry'
require './models/project'
require './models/employee'
class RunnerService
include Singleton
attr_accessor :projects, :employees
def initialize
@projects = []
@employees = []
end
def add_project name
begin
project = Project.new(name)
projects << project
response = {"status": "success"}
    rescue => e
      response = {"status": "error", "message": e.message}
    end
pp response
end
def add_employee name, email_id, phone_number
begin
employee = Employee.new(name, email_id, phone_number)
employees << employee
response = {"status": "success"}
    rescue => e
      response = {"status": "error", "message": e.message}
    end
pp response
end
def assign_project project_name, employee_name
project = get_project(project_name)
employee = get_employee(employee_name)
if project && employee
project.employees << employee
response = {"status": "success"}
end
pp response
end
def set_level project_name, employee_name, level
project = get_project(project_name)
employee = get_employee(employee_name)
if project && employee
if project.escalation_matrix.values.map(&:name).include? employee_name
response = {"status": "Employee already in escalation matrix"}
elsif project.escalation_matrix[level].nil?
project.escalation_matrix[level] = employee
response = {"status": "success"}
else
response = {"status": "error", "message": "This level is already assigned for the project"}
end
end
pp response
end
def unset_level project_name, level
project = get_project(project_name)
project.escalation_matrix.delete(level)
response = {"status": "success"}
pp response
end
private
def get_project project_name
return projects.find { |p| p.name == project_name }
end
def get_employee employee_name
return employees.find { |e| e.name == employee_name }
end
end
<file_sep>/models/incident.rb
require 'securerandom'
class Incident
attr_accessor :name, :assigned_to
def initialize name, assigned_to
@name = name
@assigned_to = assigned_to
end
  def notify
end
end
<file_sep>/models/project.rb
require 'securerandom'
class Project
  attr_accessor :name, :employees, :escalation_matrix, :incidents
def initialize name
@id = project_id
@name = name
@employees = []
@escalation_matrix = {}
@incidents = []
end
def project_id
SecureRandom.hex(10)
end
  def create_incident name
    incident = Incident.new(name, escalation_matrix)
    incidents << incident
    incident
  end
end
| 7ce3b993ad0b2e095a5adbebb9b0fd98c03657ed | [
"Ruby"
] | 5 | Ruby | sriharshakappala/incident-alerter | fc4044d863f9fa2402dafabec8a8ec87feebac36 | 24cf7102534057a4af7410e74d10c0b5aee8ede9 | |
refs/heads/master | <repo_name>octo888/test-df<file_sep>/lib/components/form-field/form-field.component.d.ts
import { OnInit } from '@angular/core';
import { FormGroup } from '@angular/forms';
export declare class FormFieldComponent implements OnInit {
field: any;
data: any;
form: FormGroup;
constructor();
ngOnInit(): void;
}
<file_sep>/lib/components/form-field/components/fields/array-field/array-field.component.d.ts
import { OnInit } from '@angular/core';
import { FormArray, FormGroup } from '@angular/forms';
import { Field } from '../../../../../models/field.model';
import { NiDynamicFormService } from '../../../../../services/ni-dynamic-form.service';
export declare class ArrayFieldComponent implements OnInit {
formService: NiDynamicFormService;
form: FormGroup;
field: Field;
data: any;
parentForm: FormGroup;
formArr: FormArray;
parentData: any;
arrayModel: any;
private initialGroupSchema;
constructor(formService: NiDynamicFormService);
ngOnInit(): void;
onAdd(): void;
onCopy(item: Field[], control: any): void;
onRemove(idx: number): void;
}
<file_sep>/lib/dynamic-form.component.d.ts
import { EventEmitter, OnChanges, OnInit, SimpleChanges } from '@angular/core';
import { FormGroup } from '@angular/forms';
import { Field } from './models/field.model';
import { NiDynamicFormService } from './services/ni-dynamic-form.service';
import { Subscription } from 'rxjs';
import { ActionService } from './services/action.service';
export declare class DynamicFormComponent implements OnInit, OnChanges {
private formService;
private actionService;
formModel: Array<Field>;
isInline: boolean;
data: any;
form: FormGroup;
fieldsets: Array<any>;
fieldChangedEvent: EventEmitter<{}>;
actionEvent: EventEmitter<{}>;
viewModel: Array<any>;
eventSub: Subscription;
private previousModelLen;
constructor(formService: NiDynamicFormService, actionService: ActionService);
ngOnInit(): void;
ngOnChanges(changes: SimpleChanges): void;
initSub(): void;
getField(key: any, formModel: any): any;
setViewData(): void;
mapFieldSets(fieldsets: any, formModel: any): any[];
}
<file_sep>/lib/components/form-field/components/bootstrap-fields/array-field/array-field.component.d.ts
import { OnInit } from '@angular/core';
import { Field } from '../../../../../models/field.model';
import { ArrayFieldComponent } from '../../fields/array-field/array-field.component';
import { NiDynamicFormService } from '../../../../../services/ni-dynamic-form.service';
export declare class ArrayBsFieldComponent extends ArrayFieldComponent implements OnInit {
constructor(formService: NiDynamicFormService);
onCopy(item: Field[], control: any): void;
onAdd(): void;
onRemove(idx: number): void;
}
<file_sep>/lib/models/field.model.d.ts
import { Validators } from '@angular/forms';
export declare class Field {
uid: string;
type: string;
key: string;
value: any;
label: string;
placeholder: string;
required: boolean;
disabled: boolean;
order: number;
hidden: boolean;
displayMode: string;
colSize: string;
cssClass: string;
renderType: string;
icon: string;
iconUrl: string;
dataUrl: string;
routeTemplate: string;
routeParameters: Array<any>;
routeQueryParameters: Array<any>;
selectedValueFieldName: any;
valueFieldName: string;
keyFieldName: string;
dataLookupKey: string;
dataDependentFields: Array<string>;
group: Array<any>;
internalFields: Array<Field>;
fieldsets: any[];
validations: Validators[];
validators: any;
mapOptions: Function;
loading: boolean;
constructor(options?: {
uid?: string;
value?: any;
key?: string;
label?: string;
placeholder?: string;
type?: string;
required?: boolean;
disabled?: boolean;
order?: number;
hidden?: boolean;
options?: Array<any>;
displayMode?: string;
colSize?: string;
cssClass?: string;
renderType?: string;
icon?: string;
iconUrl?: string;
dataUrl?: string;
routeTemplate?: string;
routeParameters?: Array<any>;
routeQueryParameters?: Array<any>;
selectedValueFieldName?: any;
valueFieldName?: string;
keyFieldName?: string;
dataLookupKey?: string;
dataDependentFields?: Array<string>;
group?: Array<any>;
validations?: Validators[];
validators?: any;
internalFields?: Array<Field>;
fieldsets?: any[];
mapOptions?: Function;
loading?: boolean;
});
private _options;
options: Array<any>;
generateGuid(): string;
}
<file_sep>/lib/components/form-field/components/material-fields/material-fields-declarations.d.ts
export declare const MAT_FIELD_COMPONENTS_DECLARATIONS: any[];
<file_sep>/lib/components/form-field/components/fields/switcher-list-field/switcher-list-field.component.d.ts
import { EventEmitter, OnInit } from '@angular/core';
export declare class SwitcherListFieldComponent implements OnInit {
field: any;
fieldChange: EventEmitter<{}>;
selectedAll: boolean;
selectedValues: Array<string>;
constructor();
ngOnInit(): void;
onSelectAll(): void;
onSelect(option: any): void;
isSelected(key: any): boolean;
setValue(value: any): void;
}
<file_sep>/lib/components/validation-message/validation-message.component.d.ts
import { OnInit } from '@angular/core';
import { FormGroup } from '@angular/forms';
import { Field } from '../../models/field.model';
import { LibService } from '../../services/lib-config.service';
export declare class ValidationMessageComponent implements OnInit {
private libService;
form: FormGroup;
field: Field;
isBootstrapLayout: boolean;
validatorsKeys: any;
constructor(libService: LibService);
ngOnInit(): void;
}
<file_sep>/lib/services/common-http.service.d.ts
import { Observable } from 'rxjs';
import { HttpClient } from '@angular/common/http';
export declare class CommonHttpService {
private http;
constructor(http: HttpClient);
getReq(params: {
url: any;
options?: any;
config?: any;
}): Observable<Object>;
}
<file_sep>/lib/components/form-field/components/fields/upload-file/checkbox-modal/checkbox-modal.component.d.ts
import { OnInit, EventEmitter } from '@angular/core';
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap';
import { FormGroup } from '@angular/forms';
export declare class CheckboxModalComponent implements OnInit {
activeModal: NgbActiveModal;
selectedData: any;
modalData: any;
key: string;
onSubmit: EventEmitter<{}>;
editForm: FormGroup;
testForm: FormGroup;
data: any;
options: any;
constructor(activeModal: NgbActiveModal);
ngOnInit(): void;
onModalSubmit(): void;
closeModal(): void;
private initForm;
private toSameStructure;
setCaption(): "Add Tags" | "Add Visibilities";
}
<file_sep>/lib/components/form-field/components/material-fields/switcher-list-field/switcher-list-field.component.d.ts
import { OnInit } from '@angular/core';
import { SwitcherListFieldComponent } from '../../fields/switcher-list-field/switcher-list-field.component';
export declare class SwitcherListMatFieldComponent extends SwitcherListFieldComponent implements OnInit {
constructor();
ngOnInit(): void;
onSelectAll(): void;
onSelect(option: any): void;
isSelected(key: any): boolean;
setValue(value: any): void;
}
<file_sep>/lib/constants/validation.constants.d.ts
export declare const ValidationKeys: {
REQUIRED: string;
REGEXP: string;
};
<file_sep>/lib/components/form-field/components/fields/upload-file/upload-file.component.d.ts
import { EventEmitter, OnDestroy, OnInit } from '@angular/core';
import { NgbModal } from '@ng-bootstrap/ng-bootstrap';
import { Subscription } from 'rxjs';
import { HttpClient } from '@angular/common/http';
export declare class UploadFileComponent implements OnInit, OnDestroy {
private modalService;
private commonHttpService;
field: any;
form: any;
apiUrl: any;
filesData: Array<any>;
hideLabel: boolean;
uploadConfig: any;
onChangeFiles: EventEmitter<any>;
uploadFiles: any;
currentFiles: Array<any>;
errorUploads: Array<any>;
errorMsg: string;
plainMode: boolean;
submitUrl: string;
uploadUrl: string;
imgPrw: any;
selectedFile: any;
modalRef: any;
modalData: any;
progressSub: Subscription;
submitResSub: Subscription;
htmlId: string;
constructor(modalService: NgbModal, commonHttpService: HttpClient);
ngOnInit(): void;
ngOnDestroy(): void;
initSettings(): void;
fileUpload(event: EventTarget): void;
upload(event: EventTarget): void;
clearUploadedFiles(): void;
previewImage(file: any): void;
openCheckboxModal(selectedData: any, id: any, key: any): void;
patchForm(files: any): void;
openModal(content: any, id: any): void;
closeModal(): void;
onDeleteFile(): void;
onSubmitDescription(): void;
private setTagsAndVisibilities;
private setSelectedFile;
getAllowedExts(): string;
private generateGuid;
}
<file_sep>/lib/components/form-field/components/material-fields/radio-field/radio-field.component.d.ts
import { OnInit } from '@angular/core';
import { RadioFieldComponent } from '../../fields/radio-field/radio-field.component';
export declare class RadioMatFieldComponent extends RadioFieldComponent implements OnInit {
constructor();
ngOnInit(): void;
}
<file_sep>/lib/components/form-field/components/fields/plain-field/plain-field.component.d.ts
import { OnInit, OnChanges } from '@angular/core';
import { FormGroup } from '@angular/forms';
export declare class PlainFieldComponent implements OnInit, OnChanges {
field: any;
data: any;
form: FormGroup;
fieldValue: any;
showValue: boolean;
constructor();
ngOnInit(): void;
ngOnChanges(): void;
getUrl(dataUrl: any, dataUrlTokens: any): any;
transformUrl(dataUrl: any, dataUrlTokens: any, data: any): any;
getDate(value: any): any;
}
<file_sep>/lib/components/form-field/components/fields/date-field/date-field.component.d.ts
import { EventEmitter, OnInit } from '@angular/core';
import { IMyDateModel, IMyDpOptions, MyDatePicker } from 'mydatepicker';
export declare class DateFieldComponent implements OnInit {
field: any;
fieldChange: EventEmitter<{}>;
mydp: MyDatePicker;
myDatePickerOptions: IMyDpOptions;
selectedDate: any;
constructor();
ngOnInit(): void;
onDateChanged(event: IMyDateModel): void;
onToggleSelector(event: any): void;
clearDate(): void;
}
<file_sep>/lib/models/event.model.d.ts
import { FormGroup } from '@angular/forms';
import { Field } from './field.model';
export declare class ValueChangedEventModel {
form: FormGroup;
field: Field;
value: any;
constructor(form: any, field: any, value: any);
}
<file_sep>/lib/components/form-field/components/material-fields/textarea-field/textarea-field.component.d.ts
import { OnInit } from '@angular/core';
import { TextareaFieldComponent } from '../../fields/textarea-field/textarea-field.component';
export declare class TextareaMatFieldComponent extends TextareaFieldComponent implements OnInit {
constructor();
}
<file_sep>/lib/components/form-field/components/material-fields/switcher-field/switcher-field.component.d.ts
import { OnInit } from '@angular/core';
import { SwitcherFieldComponent } from '../../fields/switcher-field/switcher-field.component';
export declare class SwitcherMatFieldComponent extends SwitcherFieldComponent implements OnInit {
color: string;
constructor();
ngOnInit(): void;
}
<file_sep>/lib/components/form-field/components/fields/input-field/input-field.component.d.ts
import { AfterViewInit, OnInit } from '@angular/core';
import { FormGroup } from '@angular/forms';
export declare class InputFieldComponent implements OnInit, AfterViewInit {
field: any;
form: FormGroup;
defaultPlaceholder: string;
constructor();
ngOnInit(): void;
ngAfterViewInit(): void;
setPlaceholder(): any;
}
<file_sep>/lib/components/form-field/components/material-fields/checkbox-field/checkbox-field.component.d.ts
import { OnInit } from '@angular/core';
import { CheckboxFieldComponent } from '../../fields/checkbox-field/checkbox-field.component';
export declare class CheckboxMatFieldComponent extends CheckboxFieldComponent implements OnInit {
constructor();
ngOnInit(): void;
}
<file_sep>/lib/dynamic-form.module.d.ts
import { InjectionToken, ModuleWithProviders } from '@angular/core';
export declare const LibConfigService: InjectionToken<FormConfig>;
export declare const CommonHttpServiceToken: InjectionToken<any>;
export interface FormConfig {
uiMode: string;
component?: any;
}
export declare class DynamicFormModule {
static forRoot(config?: FormConfig): ModuleWithProviders;
}
<file_sep>/lib/constants/uploader-config.constant.d.ts
export declare const UploaderConfig: {
allowDownload: boolean;
allowedExtensions: any;
editDescription: boolean;
editTags: boolean;
editVisibility: boolean;
preview: boolean;
singleFile: boolean;
tagsLookup: any[];
uploadUrl: string;
visibilityLookup: any[];
};
<file_sep>/lib/components/form-field/components/material-fields/date-field/date-field.component.d.ts
import { OnInit } from '@angular/core';
import { IMyDateModel } from 'mydatepicker';
import { DateFieldComponent } from '../../fields/date-field/date-field.component';
export declare class DateMatFieldComponent extends DateFieldComponent implements OnInit {
constructor();
ngOnInit(): void;
onDateChanged(event: IMyDateModel): void;
onToggleSelector(event: any): void;
clearDate(): void;
toggle(p: any): void;
}
<file_sep>/lib/components/field-label/field-label.component.d.ts
import { OnInit } from '@angular/core';
import { Field } from '../../models/field.model';
import { LibService } from '../../services/lib-config.service';
export declare class FieldLabelComponent implements OnInit {
private libService;
field: Field;
class: Field;
isBootstrapLayout: boolean;
constructor(libService: LibService);
ngOnInit(): void;
}
<file_sep>/lib/services/ni-dynamic-form.service.d.ts
import { AbstractControl, FormArray, FormControl, FormGroup } from '@angular/forms';
import { Field } from '../models/field.model';
export declare class NiDynamicFormService {
constructor();
buildForm(fields: any): FormGroup;
mapFieldSets(fieldsets: any, formModel: any): any;
createControl(field: any): any;
getControl(field: any): FormControl;
setFieldValue(model: any, data: any): any;
isNotEmptyValue(value: any): boolean;
setControlType(field: any, value: any): any;
setValidation(field: any): {
validations: any[];
required: any;
validators: any;
};
addGroupToFormArray(formArr: any, group: Field[]): any;
copyFormControl(control: AbstractControl): FormGroup | FormArray | FormControl;
isEmptyObject(obj: any): boolean;
generateGuid(): string;
arrayToObject(array: any[], keyField: string): void;
}
<file_sep>/lib/components/form-field/components/bootstrap-fields/bs-fields-declarations.d.ts
export declare const BS_FIELD_COMPONENTS_DECLARATIONS: any[];
<file_sep>/lib/components/form-field/components/material-fields/input-field/input-field.component.d.ts
import { AfterViewInit, OnInit } from '@angular/core';
import { InputFieldComponent } from '../../fields/input-field/input-field.component';
export declare class InputMatFieldComponent extends InputFieldComponent implements OnInit, AfterViewInit {
constructor();
ngOnInit(): void;
ngAfterViewInit(): void;
setPlaceholder(): any;
}
<file_sep>/lib/components/form-field/components/fields/select-field/select-field.component.d.ts
import { ChangeDetectorRef, OnInit } from '@angular/core';
import { LibService } from '../../../../../services/lib-config.service';
import { NiDynamicFormService } from '../../../../../services/ni-dynamic-form.service';
import { ActionService } from '../../../../../services/action.service';
export declare class SelectFieldComponent implements OnInit {
private cdr;
private libService;
private actionService;
private formService;
field: any;
form: any;
page: number;
perPage: number;
timeout: any;
isBootstrapLayout: boolean;
isMultiple: boolean;
loading: boolean;
constructor(cdr: ChangeDetectorRef, libService: LibService, actionService: ActionService, formService: NiDynamicFormService);
ngOnInit(): void;
emitAction(action: any): void;
onOpen(): void;
onScrollToEnd(event: any): void;
onSearch(event: any): void;
}
<file_sep>/lib/constants/lib-config.constant.d.ts
export declare const UI_MODE: {
MATERIAL: string;
BOOTSTRAP: string;
};
<file_sep>/lib/services/action.service.d.ts
import { Subject } from 'rxjs';
export declare class ActionService {
emitActionEventSubject: Subject<{}>;
constructor();
}
<file_sep>/lib/pipes/pipes.d.ts
import { DomSanitizer, SafeHtml } from '@angular/platform-browser';
import { PipeTransform } from '@angular/core';
export declare class SafeUrlPipe implements PipeTransform {
private sanitizer;
constructor(sanitizer: DomSanitizer);
transform(url: any): import("@angular/platform-browser").SafeResourceUrl;
}
export declare class NoSanitizePipe implements PipeTransform {
private domSanitizer;
constructor(domSanitizer: DomSanitizer);
transform(html: string): SafeHtml;
}
<file_sep>/lib/components/form-field/dynamic-field.directive.d.ts
import { ComponentFactoryResolver, OnInit, ViewContainerRef } from '@angular/core';
import { FormGroup } from '@angular/forms';
import { SelectFieldComponent } from './components/fields/select-field/select-field.component';
import { CheckboxFieldComponent } from './components/fields/checkbox-field/checkbox-field.component';
import { ArrayBsFieldComponent } from './components/bootstrap-fields/array-field/array-field.component';
import { CheckboxBsFieldComponent } from './components/bootstrap-fields/checkbox-field/checkbox-field.component';
import { UploadFileComponent } from './components/fields/upload-file/upload-file.component';
import { LibService } from '../../services/lib-config.service';
export declare const COMPONENTS: {
[x: string]: {
[x: string]: typeof SelectFieldComponent | typeof UploadFileComponent | typeof ArrayBsFieldComponent | typeof CheckboxBsFieldComponent;
};
};
export declare class DynamicFieldDirective implements OnInit {
private resolver;
private container;
private libServ;
field: any;
data: any;
form: FormGroup;
private component;
constructor(resolver: ComponentFactoryResolver, container: ViewContainerRef, libServ: LibService);
ngOnInit(): void;
getComponentByType(type: any): typeof CheckboxFieldComponent | typeof SelectFieldComponent | typeof ArrayBsFieldComponent;
}
<file_sep>/lib/services/lib-config.service.d.ts
import { FormConfig } from '../dynamic-form.module';
import { Observable } from 'rxjs';
export declare class LibService {
private config;
constructor(config: FormConfig);
getConfig(): Observable<FormConfig>;
}
<file_sep>/lib/dynamic-form.service.d.ts
export declare class DynamicFormService {
constructor();
}
<file_sep>/lib/components/form-field/components/fields/fields-declarations.d.ts
export declare const FIELD_COMPONENTS_DECLARATIONS: any[];
<file_sep>/lib/constants/field-types.constant.d.ts
export declare const FieldTypes: {
INPUT: string;
NUMBER: string;
PASSWORD: string;
TEXTAREA: string;
RADIO: string;
SELECT: string;
MULTI_SELECT: string;
CHECKBOX: string;
SWITCHER: string;
INLINE_SWITCHER: string;
DATE: string;
PLAIN: string;
HTML: string;
INTERNAL: string;
FORM_ARRAY: string;
FORM_GROUP: string;
FILE_UPLOADER: string;
HIDDEN_INPUT: string;
};
export declare const OptionsFieldTypes: string[];
export declare const InternalFieldTypes: string[];
<file_sep>/public_api.d.ts
export * from './lib/dynamic-form.service';
export * from './lib/dynamic-form.component';
export * from './lib/dynamic-form.module';
export * from './lib/services/ni-dynamic-form.service';
export * from './lib/services/common-http.service';
export * from './lib/models/field.model';
export * from './lib/constants/lib-config.constant';
| 8f9b36a0013094ed752e86c96b892d30b0ee993e | [
"TypeScript"
] | 38 | TypeScript | octo888/test-df | a6b8f5144fe8357d97a995afca1634cc7c6ea4d6 | c0695004176e13d3175963496355c4288f587d78 | |
refs/heads/master | <repo_name>Maryville-SP2018-ISYS-120-1W/review-expressions-operators-and-assignment-aminnerath<file_sep>/ExpressionsAndAssignmentReview.playground/Contents.swift
/* Overview
Your assignment is to create your own programming problem using variables, operators, and assignment. Think about the Operators playground you completed. This assignment is like you are producing a page for that assignment. You will work on your playground page in a playground cloned from GitHub. It should include *at least* the following:
- three arithmetic operators
- one compound operator
- three variables
- one type conversion
It's not very exciting to have a bunch of random numbers in code. When writing this problem you should be creating a story around it. This need not be tremendously complex, just a simple story that motivates the values being used and gives them a context.
For example 40 / 5 isn't interesting, but writing a tree planting app that given a species of tree and how much space you have will estimate how many trees you can plant is moreso. (e.g., I have 40 sq ft and want to plant walnut trees. They need 5 sq ft of growing space around the trunk, how many can I fit?).
You should write your story bits in comments (e.g., between /* and */, or lines starting with // ) around where the code should go.
*/
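/* Illustrative sketch of the tree-planting example from the overview above.
 The numbers come from the prompt; this snippet is separate from the budgeting story below. */
let plotArea = 40.0 // square feet available
let spacePerTree = 5.0 // square feet each walnut tree needs
let treeCount = Int(plotArea / spacePerTree) // type conversion from Double to Int
print("You can fit \(treeCount) walnut trees")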
//Jeff and Andrea have separate accounts and they keep a cash account at home. One account has 100 dollars in it and another has 200. At home they keep 500 dollars. Create a budget of how much they have to pay bills with.
var bank1:Double=100
var bank2:Double=200
var cash:Double=500
let total=bank1+bank2+cash
//The best advice is to pay bills first then plan trip. Print this advice.
print("Pay bills first,then plan trip")
//The bills to be paid are a credit card and a student loan. The credit card payment is 50 and the student loan is 100.
// print the total bill amount
var creditPayment=50
var studentLoan=100
var bills=creditPayment+studentLoan
print(bills)
//Since you should always pay more than the minimum due, you sometimes need a reminder. Print why and a reminder
print("Pay more down to avoid interest")
bills=(creditPayment+studentLoan)*2
print(bills)
//what does this amount mean? print this answer
Print("This is how much to budget out to pay everything that is due!")
Print(total
// Be sarcastic and print a statement
Print("Look at all the cash")
Var paybill:Double=total-bills
Print(paybill)
//How do you think Jeff and Andrea feel after paying bills? Print your answer
Print("Ouch!That sure went fast")
//Andrea is signed up for cash back rewards. However only one account has this option. Take the lowest account multiple by 2 and add 50.
Print("Cash back rewards came. Add to bank1")
bank1=(100*2)+50
// Sometimes things come up unexpectedly in a budget. Today the kids needed lunch. The lunch is 20. Adjust accordingly and print the reason
print("Kids need lunch.")
var lunch=20
bank1=Double((100*2)+50-lunch) // type conversion from Int to Double
print (paybill)
// What do you think was the largest influence in the budget? Print your answer
print("It pays to do cash back rewards")
| d32bb129ef9409ba60055022ce99a0ecb37a9a46 | [
"Swift"
] | 1 | Swift | Maryville-SP2018-ISYS-120-1W/review-expressions-operators-and-assignment-aminnerath | 1177a5b1b7f0e5e1be2db5f1e811d97ad9d6f5f4 | 9b3662c1101f2766d69611f1fed2be7aa7a3407c | |
refs/heads/master | <file_sep>class InvalidLookupCombination(Exception):
def __init__(self, lookup, lookups, value, *args, **kwargs):
message = (
"Lookup: \"{}\" has non-string return value, must be only lookup "
"present (not {}) in \"{}\""
).format(lookup.raw, len(lookups), value)
super(InvalidLookupCombination, self).__init__(message,
*args,
**kwargs)
class UnknownLookupType(Exception):
def __init__(self, lookup, *args, **kwargs):
message = "Unknown lookup type: \"{}\"".format(lookup.type)
super(UnknownLookupType, self).__init__(message, *args, **kwargs)
class UnresolvedVariables(Exception):
def __init__(self, blueprint, *args, **kwargs):
message = "Blueprint: \"%s\" hasn't resolved it's variables" % (
blueprint.name)
super(UnresolvedVariables, self).__init__(message, *args, **kwargs)
class UnresolvedVariable(Exception):
def __init__(self, blueprint, variable, *args, **kwargs):
message = (
"Variable \"%s\" in blueprint \"%s\" hasn't been resolved"
) % (variable.name, blueprint.name)
super(UnresolvedVariable, self).__init__(message, *args, **kwargs)
class MissingVariable(Exception):
def __init__(self, blueprint, variable_name, *args, **kwargs):
message = "Variable \"%s\" in blueprint \"%s\" is missing" % (
variable_name, blueprint.name)
super(MissingVariable, self).__init__(message, *args, **kwargs)
class StackDoesNotExist(Exception):
def __init__(self, stack_name, *args, **kwargs):
message = "Stack: \"%s\" does not exist in outputs" % (stack_name,)
super(StackDoesNotExist, self).__init__(message, *args, **kwargs)
class MissingParameterException(Exception):
def __init__(self, parameters, *args, **kwargs):
self.parameters = parameters
message = "Missing required cloudformation parameters: %s" % (
", ".join(parameters),
)
super(MissingParameterException, self).__init__(message, *args,
**kwargs)
class MissingLocalParameterException(Exception):
def __init__(self, parameter, *args, **kwargs):
self.parameter = parameter
message = "Missing required local parameter: %s" % parameter
super(MissingLocalParameterException, self).__init__(message, *args,
**kwargs)
class OutputDoesNotExist(Exception):
def __init__(self, stack_name, output, *args, **kwargs):
self.stack_name = stack_name
self.output = output
message = "Output %s does not exist on stack %s" % (output,
stack_name)
super(OutputDoesNotExist, self).__init__(message, *args, **kwargs)
class MissingEnvironment(Exception):
def __init__(self, key, *args, **kwargs):
self.key = key
message = "Environment missing key %s." % (key,)
super(MissingEnvironment, self).__init__(message, *args, **kwargs)
class ImproperlyConfigured(Exception):
def __init__(self, cls, error, *args, **kwargs):
message = "Class \"%s\" is improperly configured: %s" % (
cls,
error,
)
super(ImproperlyConfigured, self).__init__(message, *args, **kwargs)
class StackDidNotChange(Exception):
"""Exception raised when there are no changes to be made by the
provider.
"""
class CancelExecution(Exception):
"""Exception raised when we want to cancel executing the plan."""
<file_sep>import unittest
from mock import MagicMock
from troposphere import (
Base64,
Ref,
)
from stacker.blueprints.base import (
Blueprint,
CFNParameter,
build_parameter,
get_local_parameters,
)
from stacker.blueprints.variables.types import (
CFNString,
EC2AvailabilityZoneNameList,
)
from stacker.exceptions import (
InvalidLookupCombination,
MissingLocalParameterException,
MissingVariable,
UnresolvedVariables,
)
from stacker.variables import Variable
from ..factories import mock_lookup
class TestLocalParameters(unittest.TestCase):
def test_default_parameter(self):
parameter_def = {"Param1": {"default": 0}}
parameters = {}
local = get_local_parameters(parameter_def, parameters)
self.assertEquals(local["Param1"], 0)
def test_missing_required(self):
parameter_def = {"Param1": {"default": 0}, "Param2": {}}
parameters = {}
with self.assertRaises(MissingLocalParameterException) as cm:
get_local_parameters(parameter_def, parameters)
self.assertEquals("Param2", cm.exception.parameter)
def test_supplied_parameter(self):
parameter_def = {"Param1": {"default": 0}, "Param2": {}}
parameters = {"Param1": 1, "Param2": 2}
local = get_local_parameters(parameter_def, parameters)
self.assertEquals(parameters, local)
class TestBuildParameter(unittest.TestCase):
def test_base_parameter(self):
p = build_parameter("BasicParam", {"type": "String"})
p.validate()
self.assertEquals(p.Type, "String")
class TestVariables(unittest.TestCase):
def test_defined_variables(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"default": "default", "type": str},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
self.assertEqual(
blueprint.defined_variables(),
blueprint.VARIABLES,
)
def test_defined_variables_subclass(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"default": 0, "type": int},
"Param2": {"default": 0, "type": int},
}
class TestBlueprintSublcass(TestBlueprint):
def defined_variables(self):
variables = super(TestBlueprintSublcass,
self).defined_variables()
variables["Param2"]["default"] = 1
variables["Param3"] = {"default": 1, "type": int}
return variables
blueprint = TestBlueprintSublcass(name="test", context=MagicMock())
variables = blueprint.defined_variables()
self.assertEqual(len(variables.keys()), 3)
self.assertEqual(variables["Param2"]["default"], 1)
def test_get_variables_unresolved_variables(self):
class TestBlueprint(Blueprint):
pass
blueprint = TestBlueprint(name="test", context=MagicMock())
with self.assertRaises(UnresolvedVariables):
blueprint.get_variables()
def test_resolve_variables(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"default": 0, "type": int},
"Param2": {"type": str},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
variables = [
Variable("Param1", 1),
Variable("Param2", "${other-stack::Output}"),
Variable("Param3", 3),
]
resolved_lookups = {
mock_lookup("other-stack::Output"): "Test Output",
}
for var in variables:
var.replace(resolved_lookups)
blueprint.resolve_variables(variables)
self.assertEqual(blueprint.resolved_variables["Param1"], 1)
self.assertEqual(blueprint.resolved_variables["Param2"], "Test Output")
self.assertIsNone(blueprint.resolved_variables.get("Param3"))
def test_resolve_variables_lookup_returns_non_string(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"type": list},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
variables = [Variable("Param1", "${custom non-string-return-val}")]
lookup = mock_lookup("non-string-return-val", "custom",
"custom non-string-return-val")
resolved_lookups = {
lookup: ["something"],
}
for var in variables:
var.replace(resolved_lookups)
blueprint.resolve_variables(variables)
self.assertEqual(blueprint.resolved_variables["Param1"], ["something"])
def test_resolve_variables_lookup_returns_troposphere_obj(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"type": Base64},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
variables = [Variable("Param1", "${custom non-string-return-val}")]
lookup = mock_lookup("non-string-return-val", "custom",
"custom non-string-return-val")
resolved_lookups = {
lookup: Base64("test"),
}
for var in variables:
var.replace(resolved_lookups)
blueprint.resolve_variables(variables)
self.assertEqual(blueprint.resolved_variables["Param1"].data,
Base64("test").data)
def test_resolve_variables_lookup_returns_non_string_invalid_combo(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"type": list},
}
variables = [
Variable(
"Param1",
"${custom non-string-return-val},${some-stack::Output}",
)
]
lookup = mock_lookup("non-string-return-val", "custom",
"custom non-string-return-val")
resolved_lookups = {
lookup: ["something"],
}
with self.assertRaises(InvalidLookupCombination):
for var in variables:
var.replace(resolved_lookups)
def test_get_variables(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"type": int},
"Param2": {"type": str},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
variables = [Variable("Param1", 1), Variable("Param2", "Test Output")]
blueprint.resolve_variables(variables)
variables = blueprint.get_variables()
self.assertEqual(variables["Param1"], 1)
self.assertEqual(variables["Param2"], "Test Output")
def test_resolve_variables_missing_variable(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"type": int},
"Param2": {"type": str},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
variables = [Variable("Param1", 1)]
with self.assertRaises(MissingVariable):
blueprint.resolve_variables(variables)
def test_resolve_variables_incorrect_type(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"type": int},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
variables = [Variable("Param1", "Something")]
with self.assertRaises(ValueError):
blueprint.resolve_variables(variables)
def test_get_variables_default_value(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"type": int, "default": 1},
"Param2": {"type": str},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
variables = [Variable("Param2", "Test Output")]
blueprint.resolve_variables(variables)
variables = blueprint.get_variables()
self.assertEqual(variables["Param1"], 1)
self.assertEqual(variables["Param2"], "Test Output")
def test_resolve_variables_convert_type(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"type": int},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
variables = [Variable("Param1", "1")]
blueprint.resolve_variables(variables)
variables = blueprint.get_variables()
self.assertTrue(isinstance(variables["Param1"], int))
def test_resolve_variables_cfn_type(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"type": CFNString},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
variables = [Variable("Param1", "Value")]
blueprint.resolve_variables(variables)
variables = blueprint.get_variables()
self.assertTrue(isinstance(variables["Param1"], CFNParameter))
def test_resolve_variables_cfn_type_list(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"type": EC2AvailabilityZoneNameList},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
variables = [Variable("Param1", ["us-east-1", "us-west-2"])]
blueprint.resolve_variables(variables)
variables = blueprint.get_variables()
self.assertTrue(isinstance(variables["Param1"], CFNParameter))
self.assertEqual(variables["Param1"].value, ["us-east-1", "us-west-2"])
self.assertEqual(variables["Param1"].ref.data, Ref("Param1").data)
parameters = blueprint.get_cfn_parameters()
self.assertEqual(parameters["Param1"], ["us-east-1", "us-west-2"])
def test_resolve_variables_cfn_type_list_invalid_value(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"type": EC2AvailabilityZoneNameList},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
variables = [Variable("Param1", {"main": "us-east-1"})]
with self.assertRaises(ValueError):
blueprint.resolve_variables(variables)
variables = blueprint.get_variables()
def test_get_parameters_cfn_type_list(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"type": EC2AvailabilityZoneNameList},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
parameters = blueprint._get_parameters()
self.assertTrue("Param1" in parameters)
parameter = parameters["Param1"]
self.assertEqual(parameter["type"],
"List<AWS::EC2::AvailabilityZone::Name>")
def test_get_parameters_cfn_type(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"type": CFNString},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
parameters = blueprint._get_parameters()
self.assertTrue("Param1" in parameters)
parameter = parameters["Param1"]
self.assertEqual(parameter["type"], "String")
def test_required_parameters_cfn_type(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"type": CFNString},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
blueprint.setup_parameters()
self.assertEqual(blueprint.required_parameters[0][0], "Param1")
def test_get_cfn_parameters(self):
class TestBlueprint(Blueprint):
VARIABLES = {
"Param1": {"type": int},
"Param2": {"type": CFNString},
}
blueprint = TestBlueprint(name="test", context=MagicMock())
variables = [Variable("Param1", "1"), Variable("Param2", "Value")]
blueprint.resolve_variables(variables)
variables = blueprint.get_variables()
self.assertEqual(len(variables), 2)
parameters = blueprint.get_cfn_parameters()
self.assertEqual(len(parameters.keys()), 1)
self.assertEqual(parameters["Param2"], "Value")
| 46bb235fbf35db2c2e089c52e8068acdd35c6a1a | [
"Python"
] | 2 | Python | hrenod/stacker | 8dd778a435de543c926c7df0f152613d2b5188d8 | 8a0db54af0f1fbbc0d5cedd4bf32134632a514ad | |
refs/heads/master | <file_sep># cmix
## my cmus configuration file
This only works with cmus with patches applied
Grab the configurations...
git clone https://github.com/tonyfischetti/cmix.git ~/cmus
Make sure your music is in a directory called `music` in your
home directory (`$HOME/music`) because that's what the playlists
assume
Oh, and don't forget to
sudo apt build-dep cmus
before attempting to compile cmus
Use the patch(es) in ./goodies/ against cmus
(tested against commit 600fa4bbde)
And then compile the patched version of cmusfm in goodies
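To apply the patch(es) and build the patched cmus, something like the following should work (a sketch only; the patch file names and the location of your cmus checkout are assumptions):

    cd /path/to/cmus            # your clone of the cmus sources
    git checkout 600fa4bbde     # the commit the patches were tested against
    git apply ~/cmus/goodies/*.patch
    ./configure && make && sudo make install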
<file_sep>#!/bin/bash
rm -fr cache
rm -f autosave
rm -f search-history
rm -f command-history
rm -f lib.pl
| 8e04eff715d6c9f963bed98993cbdfad2312ba96 | [
"Markdown",
"Shell"
] | 2 | Markdown | tonyfischetti/cmix | 37a8b05f38826b81c123c95a064c31bb8305bbc4 | 50d95a337493c7a03463c8f3486b34f746191cef | |
refs/heads/master | <file_sep>const canvas = document.getElementById('tela');
const ctx = canvas.getContext('2d');
var dt = 0;
var t0 = 0;
var x = 100;
/* Vector contains RGB colors */
var colors = [
"rgba(255, 0, 0, 0.5)",
"rgba(0, 255, 0, 0.5)",
"rgba(0, 0, 255, 0.5)",
"rgba(0, 255, 255, 0.5)",
"rgba(255, 255, 0, 0.5)"
];
/*Random number in range*/
function randomNumber(min,max){
return Math.floor(Math.random() * (max-min) + min );
}
/* Create N aps with random attributes */
function createAps(quant){
var aps = [];
for(var i=0;i<quant;i++){
var ap = {
x: randomNumber(50,450),
y: randomNumber(50,450),
raio: randomNumber(50,100),
color: 0,
xPlus: -10,
yPlus: -30,
xMinus: -10,
yMinus: 15,
xText: -8,
yText: 14
};
for(var j=0;j<aps.length;j++){
if( dist(ap.x,ap.y,aps[j].x,aps[j].y) < 80 ){
ap.x = randomNumber(50,450);
ap.y = randomNumber(50,450);
j=-1;
}
}
aps.push(ap);
}
return aps;
}
/* Draw all aps in vector */
function drawAps(aps){
aps.forEach(function(ap){
//Draw circle
ctx.beginPath();
ctx.arc(ap.x, ap.y, ap.raio, 0, 2 * Math.PI);
ctx.closePath();
ctx.fillStyle = colors[ap.color];
ctx.fill();
// Draw transparent rectangles that act as click targets
ctx.fillStyle = 'rgba(225,225,225,0)';
ctx.fillRect(ap.x+ap.xPlus, ap.y+ap.yPlus , 20, 20);
ctx.fillRect(ap.x+ap.xMinus, ap.y+ap.yMinus , 20, 20);
// Draw Plus and Minus
ctx.fillStyle = "black";
ctx.fillRect(ap.x+ap.xPlus, ap.y+ap.yPlus+5 , 20, 10);
ctx.fillRect(ap.x+ap.xPlus+5, ap.y+ap.yPlus , 10, 20);
ctx.fillRect(ap.x+ap.xMinus, ap.y+ap.yMinus+5 , 20, 10);
// Draw number
ctx.font = "25px Arial";
ctx.fillText(ap.color+1, ap.x+ap.xText, ap.y+ap.yText);
});
}
function dist(X1,Y1,X2,Y2){
var d = Math.sqrt( Math.pow(X1 - X2,2) + Math.pow(Y1 - Y2,2) );
return d;
}
function insideRect(x,y,clientX,clientY){
if(clientX > x+20 || clientX < x)
return false;
if(clientY > y+20 || clientY < y)
return false;
return true;
}
/*Main Code*/
requestAnimationFrame(passo);
var aps = createAps(3);
function passo(t){
dt = (t - t0)/1000;
ctx.clearRect(0,0,canvas.width,canvas.height);
drawAps(aps);
t0 = t;
requestAnimationFrame(passo);
}
canvas.addEventListener("mousedown",function(e){
aps.forEach(function(ap){
if( insideRect(ap.x+ap.xPlus,ap.y+ap.yPlus,e.clientX,e.clientY) ){
if(ap.color == colors.length-1){
ap.color = 0;
}
else ap.color++;
}
else if( insideRect(ap.x+ap.xMinus,ap.y+ap.yMinus,e.clientX,e.clientY) ){
if(ap.color == 0){
ap.color = colors.length-1;
}
else ap.color--;
}
});
}
) | cb06c8680cd371f2c89804bdd14a5cb7c123b22d | [
"JavaScript"
] | 1 | JavaScript | Rodrigo947/ggjam2020 | 6ceb5454028fd045868ffe7247d420d4dac85394 | 8c0255b1e0d3e8c1749eeaed1232f60065b7c6e4 | |
refs/heads/master | <file_sep>package com.example.myapplication
import android.content.Intent
import android.os.Bundle
import android.widget.Button
import android.widget.EditText
import androidx.appcompat.app.AppCompatActivity
class MainActivity : AppCompatActivity() {
    // Button for navigating to the next activity
var nextButton: Button? = null
    // Text input field on the main activity
var editText: EditText? = null
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
        // Initialize the navigation button
nextButton = findViewById(R.id.main_layout_next_button)
        // Set the button click listener
nextButton?.setOnClickListener {
moveToNextScreen()
}
        // Initialize the input field
editText = findViewById(R.id.main_layout_edit_text)
}
    // Get the text from the input field
fun getStringFromEditText(): String? {
return editText?.text?.toString()
}
    // Navigate to the next screen
fun moveToNextScreen() {
        // Create the intent for the transition
val intent = Intent(this, SecondActivity::class.java)
        // Put the extra data into the intent
intent.putExtra(TRANSMITTED_STRING, getStringFromEditText())
        // Start the second activity
startActivity(intent)
}
companion object {
const val TRANSMITTED_STRING = "transmittedString"
}
}<file_sep>package com.example.myapplication
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.widget.Button
import android.widget.EditText
class SecondActivity : AppCompatActivity() {
var backButton: Button? = null
var editText: EditText? = null
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_second)
backButton = findViewById(R.id.second_layout_back_button)
backButton?.setOnClickListener {
onBackPressed()
}
editText = findViewById(R.id.second_layout_edit_text)
editText?.setText(getTransmittedDataFromIntent() ?: DEFAULT_STRING_FOR_EDIT_TEXT)
}
private fun getTransmittedDataFromIntent(): String? {
return intent.getStringExtra(TRANSMITTED_STRING)
}
companion object {
const val TRANSMITTED_STRING = "transmittedString"
const val DEFAULT_STRING_FOR_EDIT_TEXT = "default text"
}
} | ec698e7b1ab8f8ffe50420ac6da8fab14a994c61 | [
"Kotlin"
] | 2 | Kotlin | frANTicBee666/android_architecture_lecture_2_ws_3 | 588f70cb5d16d9c1ee3e55cde94dce005d26709d | 9d0b91b8120eebc16e39ce79f37693f85aedfa78 | |
refs/heads/master | <repo_name>hreiten/coffee-chooser<file_sep>/requirements.txt
python-vlc==3.0.102
<file_sep>/README.md
# CoffeeChooser
The CoffeeChooser helps you choose who's going to make the next pot of coffee.
Works with python 2.7+, but python 3.x.x is recommended.
Install and run instructions given for MacOS and terminal.
### Install:
```
git clone https://github.com/reitenhalvor/coffee-chooser
cd coffee-chooser # cd to right directory
python -m venv ./venv # make a virtual environment
source venv/bin/activate # activate the virtual environment
sudo pip install -r requirements.txt # install required packages
```
### Instructions:
Run the following in terminal to run the script.
```
source venv/bin/activate # source your venv if not already done
python coffee-chooser.py # run the script
```
Follow the instructions that's printed to screen.
<file_sep>/coffee-chooser.py
# -*- coding: utf-8 -*-
from random import randint, seed
from datetime import datetime
from time import sleep
from vlc import MediaPlayer
import sys, os, platform
# set seed to ensure complete randomness in the drawing process
seed(datetime.now())
# set global variables
macOS = platform.system() == 'Darwin'
work_dir = os.path.dirname(os.path.realpath(__file__))
resources_folder = work_dir + '/resources/'
num_participants = -1
# print the rules of the Coffee Chooser
print("\n\n****** The Amazing Coffee Chooser ******")
print("================== RULES ==================")
print("1. The coffee must be served within 30 minutes.")
print("2. If the script is used only to collect already made coffee, "
"the coffee maker can be excluded from the selection pool of potential collectors.")
print("3. No whining from Karsten.")
print("4. If it's made insufficient quantities of coffee, then more coffee must be made.")
print("5. Whoever participating submits to following these rules to the best of their ability.")
print("Violation of the rules may result in immediate evictions from the bro circle.")
print("============================================\n")
def make_vlc_player(songname):
''' Returns a VLC Media Player object '''
player = MediaPlayer(resources_folder + songname)
return player
def validate_participant(n_participants, new_participant, participants):
''' Check that a new participant is valid '''
if new_participant in participants:
return False
# check if the number (participant) meets constraints
if new_participant > 4 * n_participants or new_participant < 1:
return False
return True
def add_participants():
''' Method to iteratively add participants '''
participants = []
counter = 1
# user enters how many participants to play
global num_participants
while num_participants <= 0:
num = input("Enter the number of participants: ")
try:
num_participants = int(num)
if num_participants <= 0:
raise Exception()
except:
print("Invalid input, try again...")
print("Valid participant numbers are from 1 to %i\n" %
(num_participants * 4))
# user enters the individual participants
while True:
# check if number of participants is met
if counter == num_participants + 1:
break
num = input("Add participant: ")
try:
num = int(num)
if validate_participant(num_participants, num, participants):
participants.append(num)
print("%i added as Participant no. %i." % (num, counter))
counter += 1
else:
raise Exception()
except:
print("Invalid input, try again...")
print("All participants are now added!")
return participants
def speak(content):
''' Function to say stuff in the terminal '''
# the 'say' command only works on mac os
if macOS:
# Fiona (SC); Nora (NOR); Al (US); Anna (DE); Danielter (DK)
person = 'Daniel'
os.system("say -v " + person + " '" + str(content) + "'")
else:
pass
def draw_numbers(participants):
''' Iteratively draws numbers until a winner has been picked '''
while True:
rand_num = randint(1, 4 * len(participants))
if rand_num in participants:
print("We have a winner!")
speak("We have a winner!")
sleep(1)
speak("And the winner is...")
sleep(2)
speak("Number %i!" % rand_num)
print("Number %i is the lucky winner! Go get some fu***** coffee, participant #%i!!! ☕️☕️️️" % (
rand_num, (1 + participants.index(rand_num))))
break
else:
print("Drawn number: %i" % rand_num)
speak(rand_num)
sleep(5)
def run():
''' The main function that runs the entire program '''
# create media instances
player_theme = make_vlc_player('millionaire_theme.mp3')
player_win = make_vlc_player('millionaire_win.mp3')
# start theme music
player_theme.play()
# add participants to the game
participants = add_participants() # user adds participants
print("\n*****The Coffee Chooser will now begin*****")
print("Chosen participants are: %s" %
", ".join([str(x) for x in participants]))
for time_left in reversed(range(0, 10)):
sys.stdout.write(
'Beginning to draw numbers in %i seconds\r' % time_left)
sys.stdout.flush()
if time_left == 0:
print('\nLet the games begin!\n')
break
sleep(1)
# start the game of drawing numbers
draw_numbers(participants)
player_win.play() # start winner music
player_theme.stop() # stop theme music
sleep(25) # let the song play for 25 seconds
player_win.stop() # then stop
if __name__ == '__main__':
run()
| ee9a890acf6fb72ed2b23b1e5d339f7a9f1ecbcb | [
"Markdown",
"Python",
"Text"
] | 3 | Text | hreiten/coffee-chooser | e21b53c1fd9c95b6c880a4aa8c7cdf615478e058 | 3c3322c44bb48c6584af1661dc22d4b11ed2e06b | |
refs/heads/master | <repo_name>TFDuesing/packer-templates<file_sep>/README.markdown
packer-templates
================
This is a collection of Packer templates, kickstart files, and shell scripts that can be used to build Vagrant boxes for VirtualBox and VMware providers, starting from canonical Linux distribution ISOs.
Prerequisites
-------------
* [Packer](http://www.packer.io/)
* [VirtualBox](https://www.virtualbox.org) and/or [VMWare Fusion](http://www.vmware.com/products/fusion)
* [Vagrant](http://www.vagrantup.com)
Usage
-----
$ git clone https://github.com/TFDuesing/packer-templates.git
$ cd packer-templates/<desired template directory>
$ packer build template.json
Once the build is complete, Packer will output one or more Vagrant .box files in the current working directory. You can add a box to your Vagrant installation with `vagrant box add --name <name-of-box> </path/to/.box file>`.
Helpful Documentation
---------------------
* [`packer build`](http://www.packer.io/docs/command-line/build.html)
* [`vagrant box`](http://docs.vagrantup.com/v2/boxes.html)
<file_sep>/Fedora-20/scripts/30virtualbox.sh
#!/usr/bin/env bash
# Check which Packer builder type is being used
if [ $PACKER_BUILDER_TYPE = "virtualbox-iso" ]; then
# Required for the VirtualBox Guest Additions for Linux
# http://www.virtualbox.org/manual/ch04.html
yum install --assumeyes bzip2 dkms gcc kernel-devel make
# Required to prevent VBoxLinuxAdditions.run from throwing (harmless) errors
yum install --assumeyes which
# Install the VirtualBox Guest Additions
mkdir /mnt/cdrom
mount /root/VBoxGuestAdditions.iso /mnt/cdrom
sh /mnt/cdrom/VBoxLinuxAdditions.run
# Cleanup
umount /mnt/cdrom
rmdir /mnt/cdrom
rm --force /root/VBoxGuestAdditions.iso
# Fix permissions and SELinux context for Packer’s virtualbox_version_file
chmod 0644 /etc/virtualbox-version
chcon system_u:object_r:etc_t:s0 /etc/virtualbox-version
fi
<file_sep>/Fedora-20/scripts/50cleanup.sh
#!/usr/bin/env bash
# Remove unnecessary packages (and their dependencies)
# which can’t be removed until after the installation process
yum --assumeyes autoremove authconfig firewalld linux-firmware
# Clean up old yum repo data & logs
yum clean all
yum history new
rm --recursive --force /var/lib/yum/yumdb/*
rm --recursive --force /var/lib/yum/history/*
truncate --no-create --size=0 /var/log/yum.log
# Remove random-seed, so it’s not the same in every image
rm --force /var/lib/random-seed
# Change any incorrect SELinux context labels
fixfiles -R -a restore
# Force the filesystem to reclaim space from deleted files
dd if=/dev/zero of=/var/tmp/zeros bs=1M
rm --force /var/tmp/zeros
<file_sep>/Fedora-23/scripts/30cleanup.sh
#!/usr/bin/env sh
# Remove UUID from non-loopback interface configuration files
sed --in-place '/^UUID/d' /etc/sysconfig/network-scripts/ifcfg-eth*
# Remove random-seed, so it’s not the same in every image
rm --force /var/lib/systemd/random-seed
# Truncate machine-id, for the same reasons random-seed is removed
# Note: this image will not boot if /etc/machine-id is not present, but systemd
# will generate a new machine ID if /etc/machine-id is present but empty
truncate --size=0 /etc/machine-id
# Remove SSH host keys
rm --force /etc/ssh/ssh_host_*
# Clean up dnf repo data, keys & logs
dnf clean all
rpm -e gpg-pubkey
rm --recursive --force /var/lib/dnf/history/*
rm --recursive --force /var/lib/dnf/yumdb/*
truncate --no-create --size=0 /var/log/dnf.*
# Force the filesystem to reclaim space from deleted files
dd if=/dev/zero of=/var/tmp/zeros bs=1M
rm --force /var/tmp/zeros
<file_sep>/Fedora-23/README.markdown
Fedora-23
=========
This Packer template can be used to build minimal Fedora 23 base boxes for Vagrant’s VirtualBox provider, starting from Fedora 23 Server’s netinstall ISO for x86_64 architectures.
Vagrant base boxes built from this template can be downloaded from [Atlas][1] directly, or by running `vagrant init TFDuesing/Fedora-23`.
[1]:https://atlas.hashicorp.com/TFDuesing/boxes/Fedora-23
<file_sep>/Fedora-20/README.markdown
Fedora-20
=========
This Packer template can be used to build minimal Fedora 20 base boxes for Vagrant’s VirtualBox and VMware providers, starting from Fedora’s “Install DVD” ISO for x86_64 architectures.
Vagrant base boxes built from this template can be downloaded from [Atlas][1] directly, or by running `vagrant init TFDuesing/Fedora-20`.
[1]:https://atlas.hashicorp.com/TFDuesing/boxes/Fedora-20
<file_sep>/Fedora-21/scripts/30cleanup.sh
#!/usr/bin/env sh
# Remove UUID from non-loopback interface configuration files
sed --in-place '/^UUID/d' /etc/sysconfig/network-scripts/ifcfg-eth*
# Remove random-seed, so it’s not the same in every image
rm --force /var/lib/systemd/random-seed
# Truncate machine-id, for the same reasons random-seed is removed
# Note: this image will not boot if /etc/machine-id is not present, but systemd
# will generate a new machine ID if /etc/machine-id is present but empty
truncate --size=0 /etc/machine-id
# Clean up old yum repo data & logs
yum clean all
yum history new
rm --recursive --force /var/lib/yum/yumdb/*
rm --recursive --force /var/lib/yum/history/*
truncate --no-create --size=0 /var/log/yum.log
# Force the filesystem to reclaim space from deleted files
dd if=/dev/zero of=/var/tmp/zeros bs=1M
rm --force /var/tmp/zeros
<file_sep>/Fedora-20/scripts/10extlinux.sh
#!/usr/bin/env bash
# Set the timeout for the EXTLINUX bootloader to 1/10 second
sed --in-place 's/^timeout 50$/timeout 1/' /boot/extlinux/extlinux.conf
<file_sep>/Fedora-23/scripts/10vmware.sh
#!/usr/bin/env sh
# Check which Packer builder type is being used
if [ $PACKER_BUILDER_TYPE = "vmware-iso" ]; then
# Required for VMware Tools for Linux Guests
# http://kb.vmware.com/kb/1018414
# http://pubs.vmware.com/fusion-7/topic/com.vmware.fusion.help.doc/GUID-08BB9465-D40A-4E16-9E15-8C016CC8166F.html
dnf install --assumeyes tar net-tools gcc kernel-devel
# Install VMware Tools
mkdir /mnt/cdrom
mount /root/linux.iso /mnt/cdrom
tar -zxpf /mnt/cdrom/VMwareTools-*.tar.gz -C /tmp
/tmp/vmware-tools-distrib/vmware-install.pl --default
# Disable ThinPrint service
systemctl disable vmware-tools-thinprint.service
# Cleanup
dnf remove --assumeyes tar net-tools gcc kernel-devel
umount /mnt/cdrom
rmdir /mnt/cdrom
rm --force /root/linux.iso
rm --force --recursive /tmp/vmware*
# Prevent dnf from updating the kernel
# in order to preserve the VMware kernel extensions
echo "exclude=kernel-core" >> /etc/dnf/dnf.conf
fi
<file_sep>/Fedora-21/README.markdown
Fedora-21
=========
This Packer template can be used to build minimal Fedora 21 base boxes for Vagrant’s VirtualBox and VMware providers, starting from Fedora 21 Server’s netinstall ISO for x86_64 architectures.
Vagrant base boxes built from this template can be downloaded from [Atlas][1] directly, or by running `vagrant init TFDuesing/Fedora-21`.
[1]:https://atlas.hashicorp.com/TFDuesing/boxes/Fedora-21
<file_sep>/Fedora-22/scripts/10virtualbox.sh
#!/usr/bin/env sh
# Check which Packer builder type is being used
if [ $PACKER_BUILDER_TYPE = "virtualbox-iso" ]; then
# Required for the VirtualBox Guest Additions for Linux
# http://www.virtualbox.org/manual/ch04.html
dnf install --assumeyes bzip2 dkms kernel-devel
# Install the VirtualBox Guest Additions
mkdir /mnt/cdrom
mount /root/VBoxGuestAdditions.iso /mnt/cdrom
sh /mnt/cdrom/VBoxLinuxAdditions.run
# Disable VirtualBox X11 Guest Additions
systemctl disable vboxadd-x11.service
# Cleanup
dnf remove --assumeyes bzip2 dkms kernel-devel
umount /mnt/cdrom
rmdir /mnt/cdrom
rm --force /root/VBoxGuestAdditions.iso
# Prevent dnf from updating the kernel
# in order to preserve the VirtualBox kernel extensions
echo "exclude=kernel-core" >> /etc/dnf/dnf.conf
# Fix permissions and SELinux context for Packer’s virtualbox_version_file
chmod 0644 /root/virtualbox-version
chcon system_u:object_r:admin_home_t:s0 /root/virtualbox-version
fi
<file_sep>/Fedora-22/README.markdown
Fedora-22
=========
This Packer template can be used to build minimal Fedora 22 base boxes for Vagrant’s VirtualBox and VMware providers, starting from Fedora 22 Server’s netinstall ISO for x86_64 architectures.
Vagrant base boxes built from this template can be downloaded from [Atlas][1] directly, or by running `vagrant init TFDuesing/Fedora-22`.
[1]:https://atlas.hashicorp.com/TFDuesing/boxes/Fedora-22
<file_sep>/Fedora-20/scripts/20network.sh
#!/usr/bin/env bash
# Remove MAC address and UUID from non-loopback interface configuration files
sed --in-place '/^HWADDR/d' /etc/sysconfig/network-scripts/ifcfg-eth*
sed --in-place '/^UUID/d' /etc/sysconfig/network-scripts/ifcfg-eth*
# Tell udev to disable the assignment of fixed network interface names
# http://www.freedesktop.org/wiki/Software/systemd/PredictableNetworkInterfaceNames/
ln --symbolic /dev/null /etc/udev/rules.d/80-net-name-slot.rules
<file_sep>/Fedora-20/scripts/40vagrant.sh
#!/usr/bin/env bash
# Document box build time
echo 'Built by Packer at' $(date '+%H:%M %Z on %B %-d, %Y') \
> /etc/vagrant-box-build-time
chmod 0644 /etc/vagrant-box-build-time
chcon system_u:object_r:etc_t:s0 /etc/vagrant-box-build-time
# Create vagrant user, required by Vagrant box specs
# http://docs.vagrantup.com/v2/virtualbox/boxes.html
useradd --user-group vagrant
echo 'vagrant' | /usr/bin/passwd --stdin vagrant
# Give vagrant user permission to sudo
echo 'Defaults:vagrant !requiretty' > /etc/sudoers.d/vagrant
echo '%vagrant ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers.d/vagrant
chmod 0440 /etc/sudoers.d/vagrant
chcon system_u:object_r:etc_t:s0 /etc/sudoers.d/vagrant
# Install Vagrant’s “insecure” public key
mkdir --parents /home/vagrant/.ssh/
curl --location --remote-name \
https://raw.githubusercontent.com/mitchellh/vagrant/master/keys/vagrant.pub
mv vagrant.pub /home/vagrant/.ssh/authorized_keys
chown --recursive vagrant:vagrant /home/vagrant/.ssh/
chmod 0700 /home/vagrant/.ssh/
chmod 0600 /home/vagrant/.ssh/authorized_keys
chcon --recursive unconfined_u:object_r:ssh_home_t:s0 /home/vagrant/.ssh/
# Delete and lock root password
passwd --delete root
passwd --lock root
<file_sep>/Fedora-20/scripts/30vmware.sh
#!/usr/bin/env bash
# Check which Packer builder type is being used
if [ $PACKER_BUILDER_TYPE = "vmware-iso" ]; then
# Required for VMware Tools for Linux Guests
# http://kb.vmware.com/kb/1018414
# http://pubs.vmware.com/fusion-6/topic/com.vmware.fusion.help.doc/GUID-08BB9465-D40A-4E16-9E15-8C016CC8166F.html
yum install --assumeyes tar perl net-tools make gcc kernel-devel
# Install the VirtualBox Guest Additions
mkdir /mnt/cdrom
mount /root/linux.iso /mnt/cdrom
tar -zxf /mnt/cdrom/VMwareTools-*.tar.gz -C /tmp
/tmp/vmware-tools-distrib/vmware-install.pl --default
# Cleanup
umount /mnt/cdrom
rmdir /mnt/cdrom
rm --force /root/linux.iso
rm --force --recursive /tmp/vmware*
fi
| 548e81db9372fed090d4bb1769c28578f435b1f3 | [
"Markdown",
"Shell"
] | 15 | Markdown | TFDuesing/packer-templates | c91b0e6f27896320c53bc82b5649057fd556d28e | d8b79408a90cdb1a98a78a6a527bb405111ecb9f | |
refs/heads/master | <repo_name>soumyaparambil/movie-rating<file_sep>/Dockerfile
FROM python:latest
WORKDIR /app
COPY . /app
# Install prerequisites
#RUN apt-get update && apt-get install -y \
#curl
RUN apt-get update && apt-get install curl -y && apt-get install jq -y
CMD /bin/bash
<file_sep>/README.md
# movie-rating
This project gets the movie rating from Rotten Tomatoes from an external API http://www.omdbapi.com/ by the movie title. The project is dockerized.
# How to run from docker hub
1. Pull the image from dockerhub `docker pull soumyaparambil/movie-ratings:1.0-shell`
2. Run the docker container `docker run -it soumyaparambil/movie-ratings:1.0-shell`
3. Once the docker container is up, run the script `./movie-ratings.sh <movie_name>`
# Building locally and running ( if need to fork and make changes )
1. Create a docker image with the below command `docker build --tag=<docker-image-name> .`
2. Run the docker container `docker run -it <docker-image-name>`
3. Once the docker container is up, run the script `./movie-ratings.sh <movie_name>`
# Test Cases covered
1. Checks for empty string on movie names
2. Checks movie names with more than one words
3. Checks for valid movie names
<file_sep>/movie-ratings.sh
#!/bin/sh
MOVIE_NAME=$1
API_KEY=30a0de #### apikey is hardcoded now. This can be generalized based on the requirement.
MOVIE_RATINGS_BY="Rotten Tomatoes" #### This can generalized based on the requirement later.
if [ -z "$MOVIE_NAME" ]
then
echo "No argument supplied. Usage is ./list-movies.sh MOVIE_NAME"
exit 0
fi
urlencode() {
python -c 'import urllib.parse, sys; print(urllib.parse.quote(sys.argv[1], sys.argv[2]))' \
"$1" "$urlencode_safe"
}
ENC_MOVIE_NAME=$(urlencode "$MOVIE_NAME")
JSON_RESULT=$(curl -s "http://www.omdbapi.com/?t=$ENC_MOVIE_NAME&apikey=$API_KEY")
RESPONSE=$(echo $JSON_RESULT | jq -r '.Response')
if [ $RESPONSE != "True" ]
then
echo "No movie found by the name '$MOVIE_NAME'"
exit 0
fi
#RATING=$(curl -s "http://www.omdbapi.com/?t=$ENC_MOVIE_NAME&apikey=30a0de" | jq -r '.Ratings' | jq -r '.map(select(.Source=="Rotten Tomatoes")) | .[] .Value')
RATING=$( echo $JSON_RESULT | jq -r '.Ratings' | jq -r 'map(select(.Source=="Rotten Tomatoes")) | .[] .Value')
if [ -z "$RATING" ]
then
echo "No Rotten Tomatoes rating exist for the movie '$MOVIE_NAME'"
else
echo "Rotten Tomatoes rating for the movie '$MOVIE_NAME' is $RATING"
fi
| dda950bbc6d231eafee88330d1c346232f5f2b90 | [
"Markdown",
"Dockerfile",
"Shell"
] | 3 | Dockerfile | soumyaparambil/movie-rating | 73a1e01483c460653608d442b2a7b0f123bc376c | 0381327d35e40bb9ed2db4e0aea75f7c5bf7f404 | |
refs/heads/main | <repo_name>SilviaRdgz/Talent-Pool<file_sep>/Client/src/components/CRUD/notes.jsx
import React, { useState, useEffect } from "react";
import { Link } from 'react-router-dom';
import PortfolioService from "../../services/PortfolioService";
const PortfolioDetails = (props) => {
const [details, setDetails] = useState({});
const getSinglePortfolio = () => {
const { id } = props.match.params;
const service = new PortfolioService();
service
.getOnePortfolio(id)
.then((responseFromApi) => setDetails(responseFromApi))
.catch((error) => {
console.error(error);
});
};
useEffect(getSinglePortfolio, [props.match.params]);
const deletePortfolio = async () => {
const { id } = props.match.params;
const service = new PortfolioService();
const response = await service.removePortfolio(id);
response.message && props.history.push("/talent-dashboard");
};
const ownershipCheck = (portfolio) => {
console.log(portfolio)
console.log(props.loggedInUser._id)
if (props.loggedInUser && portfolio.owner === props.loggedInUser._id) {
return (
<div>
<Link to={{
pathname: '/edit-portfolio',
state: {portfolio}
}}>
<button className='btn'>Edit Portfolio</button>
</Link>
<button className='btn' onClick={() => deletePortfolio()}>Delete</button>
</div>
);
}
};
return details.length > 0 &&
(
<div>
<section className="hero is-link is-fullheight is-fullheight-with-navbar">
<div className="hero-body">
<div className="container">Hello! I am
<h1 className="title is-1">
{details[0].name}
</h1>
<h2 className="subtitle is-3">
{details[0].role}
</h2>
{ownershipCheck(details[0])}
</div>
</div>
</section>
<section className="section" id="about">
<div className="section-heading">
<h3 className="title is-2">About Me</h3>
<div className="container">
<p>{details[0].about}</p>
</div>
</div>
<br/>
<br/>
<div className="columns has-same-height is-gapless">
<div className="column">
<div className="card">
<div className="card-content">
<h3 className="title is-4">Profile</h3>
<div className="content">
<table className="table-profile">
<tr>
<th colspan="1"></th>
<th colspan="2"></th>
</tr>
<tr>
<td>Email:</td>
<td>{details[0].email}</td>
</tr>
</table>
</div>
<br/>
<div className="buttons has-addons is-centered">
<span style={{marginRight:'3%'}}>
<button href="#" className="button is-primary" >Github</button>
</span>
<span>
<button href="#" className="button is-primary">LinkedIn</button>
</span>
</div>
</div>
</div>
</div>
<div className="column">
<div className="card" style={{marginRight:'2%', marginLeft: '2%'}}>
<div className="card-image">
<figure className="image is-4by5">
<img src={details[0].imageUrl} alt="Placeholder image"/>
</figure>
</div>
</div>
</div>
<div className="column">
<div className="card">
<div className="card-content skills-content">
<h3 className="title is-4">Skills</h3>
<div className="content">
<article className="media">
<div className="media-content">
<div className="content">
<p>
<strong>JavaScript:</strong>
<br/>
<progress className="progress is-primary" value="90" max="100"></progress>
</p>
</div>
</div>
</article>
<article className="media">
<div className="media-content">
<div className="content">
<p>
<strong>Vue.js:</strong>
<br/>
<progress className="progress is-primary" value="90" max="100"></progress>
</p>
</div>
</div>
</article>
<article className="media">
<div className="media-content">
<div className="content">
<p>
<strong>Node.js:</strong>
<br/>
<progress className="progress is-primary" value="75" max="100"></progress>
</p>
</div>
</div>
</article>
<article className="media">
<div className="media-content">
<div className="content">
<p>
<strong>HTML5/CSS3</strong>
<br/>
<progress className="progress is-primary" value="95" max="100"></progress>
</p>
</div>
</div>
</article>
<article className="media">
<div className="media-content">
<div className="content">
<p>
<strong>Databases</strong>
<br/>
<progress className="progress is-primary" value="66" max="100"></progress>
</p>
</div>
</div>
</article>
</div>
</div>
</div>
</div>
</div>
</section>
<br/>
<div className="tags custom-tags">
<span className="tag is-light">Node.js</span><span className="tag is-light">Express.js</span><span className="tag is-light">VueJS</span><span
className="tag is-light">JavaScript</span><span className="tag is-light">HTML5</span><span className="tag is-light">Canvas</span><span
className="tag is-light">CSS3</span><span className="tag is-light">Bulma</span><span className="tag is-light">Bootstrap</span><span
className="tag is-light">jQuery</span><span className="tag is-light">Pug</span><span className="tag is-light">Stylus</span><span
className="tag is-light">SASS/SCSS</span><span className="tag is-light">Webpack</span><span className="tag is-light">Git</span><span
className="tag is-light">ASP.NET Web Forms</span><span className="tag is-light">MSSQL</span><span className="tag is-light">MongoDB</span><span
className="tag is-light">Apache Cordova</span><span className="tag is-light">Chrome Extensions</span>
</div>
<section className="section" id="services">
<div className="section-heading">
<h3 className="title is-2">Services</h3>
</div>
<br/>
<div className="container">
<div className="columns">
<div className="column">
<div className="box">
<div className="content">
<h4 className="title is-5">Front End Web Development</h4>
Develop Front End using latest standards with HTML5/CSS3 with added funtionality using JavaScript and
Vue.js.
</div>
</div>
</div>
<div className="column">
<div className="box">
<div className="content">
<h4 className="title is-5">Back End Web Development</h4>
Develop Back End application/service using Node.js or ASP .NET and SQL server or Mongo DB databases.
</div>
</div>
</div>
</div>
</div>
</section>
<section className="section" id="resume">
<div className="section-heading">
<h3 className="title is-2">Resume</h3>
<a href="#">
<span className="icon">
<i className="fas fa-file-alt"></i>
</span>
<span className="button is-primary is-large">Download PDF</span>
</a>
</div>
</section>
<section className="section" id="contact">
<div className="container">
<div className="section-heading">
<h3 className="title is-2">Get in touch</h3>
</div>
<br/>
<div className="columns">
<div className="column is-6 is-offset-3">
<div className="box">
<div className="field">
<label className="label">Name</label>
<div className="control">
<input className="input" type="text" />
</div>
</div>
<div className="field">
<label className="label">Email</label>
<div className="control has-icons-left">
<input className="input" type="email" value=""/>
<span className="icon is-small is-left">
<i className="fas fa-envelope"></i>
</span>
</div>
</div>
<div className="field">
<label className="label">Message</label>
<div className="control">
<textarea className="textarea" ></textarea>
</div>
</div>
<div className="field is-grouped has-text-centered">
<div className="control">
<button className="button is-primary is-medium">Submit</button>
</div>
</div>
</div>
</div>
</div>
</div>
</section>
</div>
);
};
export default PortfolioDetails;<file_sep>/Server/models/portfolio-model.js
const mongoose = require("mongoose");
const Schema = mongoose.Schema;
const portfolioSchema = new Schema({
imageUrl: String,
name: String,
about: String,
role: String,
email: String,
owner: { type: Schema.Types.ObjectId, ref: "User" },
});
const Portfolio = mongoose.model("Portfolio", portfolioSchema);
module.exports = Portfolio;
<file_sep>/Client/src/components/PortfolioList/PortfolioList.jsx
import React, {Component} from 'react';
import axios from 'axios';
import { Link } from 'react-router-dom';
class PortfolioList extends Component {
state = {
listOfPortfolios: [],
};
getAllPortfolios = () => {
axios.get('http://localhost:5000/api/portfolios')
.then((responseFromApi) =>{
console.log(responseFromApi.data)
this.setState({
listOfPortfolios: responseFromApi.data,
});
}).catch((err) => console.log(err));
}
componentDidMount() {
this.getAllPortfolios()
}
render() {
return <div>
<h1 className='intro'>Dive into our Talent</h1>
<div className="container">
<div className="section">
{this.state.listOfPortfolios.map((portfolioItem) => {
return (
<Link to={`/recruitment/${portfolioItem._id}`}>
<div id="app" className="columns">
<div v-for="card in cardData" className="column is-half" key={portfolioItem._id}>
<div className="card large" >
<div className="card-image">
<figure className="image is-5by4">
<img src={portfolioItem.imageUrl} alt="profile-photo"/>
</figure>
</div>
<div className="card-content">
<div className="media">
<div className="media-left">
</div>
<div className="media-content">
<p className="title is-4 no-padding">{portfolioItem.name}</p>
<p className="subtitle is-6">{portfolioItem.role}</p>
</div>
</div>
<div className="content">
{portfolioItem.about}
<div className="background-icon"><span className="icon-twitter"></span></div>
</div>
</div>
</div>
</div>
</div>
</Link>
)
})}
</div>
</div>
</div>
}
};
export default PortfolioList;
<file_sep>/Client/src/components/CRUD/EditPortfolio.jsx
import React, { Component, useHistory } from "react";
import UploadService from '../../services/UploadService.js'
import PortfolioService from "../../services/PortfolioService";
const initialState = {
name: "",
role: '',
about: "",
email: '',
imageUrl: "",
};
class EditPortfolio extends Component {
state = {
name: this.props.location.state.portfolio.name,
about: this.props.location.state.portfolio.about,
role: this.props.location.state.portfolio.role,
email: this.props.location.state.portfolio.email,
imageUrl: '',
disableSubmitButton: false
}
// HANDLE INPUT CHANGES
handleInputChanges = (event) => {
const { name, value } = event.target;
this.setState({ ...this.state, [name]: value });
};
// HANDLE FORM SUBMISSION
handleFormSubmit = (event) => {
event.preventDefault();
const service = new PortfolioService();
const { name, about, role, email, imageUrl } = this.state;
const { _id, imageUrl: existingImageUrl } = this.props.location.state.portfolio;
let data = {name, about, role, email}
if(imageUrl) {
data.imageUrl = imageUrl;
} else {
data.imageUrl = existingImageUrl;
}
service.updatePortfolio(_id, data)
.then((response) => {
console.log("response", response)
this.setState(initialState);
})
.catch(err => console.error(err))
};
// HANDLE FILE UPLOAD
handleFileUpload = (event) => {
const file = event.target.files[0]
this.setState({...this.state, disableSubmitButton: true})
const uploadData = new FormData();
uploadData.append('imageUrl', event.target.files[0]);
const service = new UploadService();
service
.handleUpload(uploadData)
.then(response => {
this.setState({...this.state, imageUrl: response.secure_url, disableSubmitButton: false });
})
.catch(err => {
console.log('Error while uploading the file: ', err);
});
};
render() {
return this.props.location.state && (
<div>
<div className="hero-body">
<div className="container">
<div className="columns is-centered"></div>
<form className="box" onSubmit={this.handleFormSubmit}>
<div className="field">
<label className="label">Full Name</label>
<div className="control">
<input
className="input"
type="text"
placeholder="Text input"
name="name"
value={this.state.name}
onChange={(e) => this.handleInputChanges(e)}
/>
</div>
</div>
<div className="field">
<label className="label">Role</label>
<div className="control">
<input
className="input"
type="text"
placeholder="Text input"
name="role"
value={this.state.role}
onChange={(e) => this.handleInputChanges(e)}
/>
</div>
</div>
<div className="field">
<label className="label">Email</label>
<div className="control has-icons-left has-icons-right">
<input
className="input is-info"
type="email"
placeholder="@"
name="email"
value={this.state.email}
onChange={(e) => this.handleInputChanges(e)}
/>
<span className="icon is-small is-left">
<i className="fas fa-envelope"></i>
</span>
<span className="icon is-small is-right">
<i className="fas fa-exclamation-triangle"></i>
</span>
</div>
</div>
<div className="field">
<label className="label">About me</label>
<div className="control">
<textarea
className="textarea"
placeholder="Textarea"
name="about"
value={this.state.about}
onChange={(e) => this.handleInputChanges(e)}
></textarea>
</div>
</div>
<br/>
<div className="file">
<label className="file-label">
<input
className="file-input"
type="file"
name="imageUrl"
onChange={(e) => this.handleFileUpload(e)}
/>
<span className="file-cta">
<span className="file-icon">
<i className="fas fa-upload"></i>
</span>
<span className="file-label">
Choose a profile photo…
</span>
</span>
</label>
</div>
<br/>
<div className="field is-grouped">
<div className="control">
{this.state.disableSubmitButton ? (
<button className="button is-link" title="Disabled button" disabled>Submit</button>
) : (
<button className="button is-link">Submit</button>
)
}
</div>
<div className="control">
<button className="button is-link is-light">Cancel</button>
</div>
</div>
</form>
</div>
</div>
</div>
);
}
}
export default EditPortfolio;<file_sep>/Client/src/components/TalentDashboard/TalentDashboard.jsx
import React, { useState, useEffect } from "react";
import { Link } from "react-router-dom";
import AuthService from "../../services/AuthService";
const TalentDashboard = ({ loggedInUser}) => {
const [onlineUser, setOnlineUser] = useState(null);
const service = new AuthService();
useEffect(() => {
setOnlineUser(loggedInUser);
}, [loggedInUser]);
return !onlineUser ? (
<div>
<div className="hero-body">
<div className="container has-text-centered">
<div className="column is-6 is-offset-3">
<h1 className="intro">
Welcome !
</h1>
<br/>
<h2 className="content">
and congratulations on completing your training! 🥳 Now, let's help you finding a job. To connect with talent scouters, please upload your resume. We will make sure you get the spotlight.
</h2>
<br/>
<Link to='/login'><button className='button is-primary'>Login to add resume</button></Link>
</div>
</div>
</div>
</div>
) : (
<div className="hero-body">
<div className="container has-text-centered">
<div className="column is-6 is-offset-3">
<h1 className="intro"> My dashboard
</h1>
<br/>
<div>
<Link to='/add' className='content' style={{textDecoration: 'underline'}}>Add Resume</Link>
</div>
<br/>
<div>
<Link to='/my-portfolio' className='content' style={{textDecoration: 'underline'}}>View my resume</Link>
</div>
<br/>
<div>
<Link to='/portfolios' className='content' style={{textDecoration: 'underline'}}>View Talent Pool</Link>
</div>
<br/>
</div>
</div>
</div>
)
}
export default TalentDashboard;
<file_sep>/Client/src/components/Home/Home.jsx
import React, { Component } from 'react';
import { Link } from 'react-router-dom';
import LogoMotion from '../../images/TMU-logo-V02.png'
import AuthService from '../../services/AuthService.js';
class Home extends Component {
state = { loggedInUser: null };
service = new AuthService();
componentWillReceiveProps(nextProps) {
this.setState({ ...this.state, loggedInUser: nextProps['userInSession'] });
}
render() {
if (this.state.loggedInUser) {
return (
<div>
<div className="hero-body">
<div className="container has-text-centered">
<div className="columns is-vcentered">
<div className="column is-5">
<figure className="image is-4by3">
<img src={LogoMotion} alt="Description"/>
</figure>
</div>
<div className="column is-6 is-offset-1">
<h1 className="intro-title">
TALENT POOL
</h1>
<h2 className="headline">
Defining the next tech generation.
</h2>
<br/>
<p className="has-text-centered">
<Link to='/talent-dashboard' className='btn' style={{marginLeft:'1%'}}>I'm Talent</Link>
<Link to='/portfolios' className='btn'>I'm Recruiting </Link>
</p>
</div>
</div>
</div>
</div>
</div>
);
} else {
return (
<div>
<div className="hero-body">
<div className="container has-text-centered">
<div className="columns is-vcentered">
<div className="column is-5">
<figure className="image is-4by3">
<img src={LogoMotion} alt="Description"/>
</figure>
</div>
<div className="column is-6 is-offset-1">
<h1 className="intro">
TALENT POOL
</h1>
<h2 className="headline">
Defining the next tech generation.
</h2>
<br/>
<p className="has-text-centered">
<Link to='/talent-dashboard' className='btn' style={{marginLeft:'1%'}}>I'm Talent</Link>
<Link to='/portfolios' className='btn'>I'm Recruiting </Link>
</p>
</div>
</div>
</div>
</div>
</div>
);
}
}
}
export default Home;
<file_sep>/Client/src/components/Auth/SignUp.jsx
import React, { Component } from "react";
import AuthService from "../../services/AuthService";
import { Link } from "react-router-dom";
class SignUp extends Component {
state = { email: "", password: "", errorMessage: "" };
service = new AuthService();
handleFormSubmit = (event) => {
event.preventDefault();
const { email, password } = this.state;
this.service
.signup(email, password)
.then((response) => {
this.setState({ email: "", password: "" });
this.props.setUser(response);
})
.catch((error) => {
if (error.response.data) {
const { message } = error.response.data;
this.setState({ ...this.state, errorMessage: message });
}
console.log(error);
});
};
handleChange = (event) => {
const { name, value } = event.target;
this.setState({ ...this.state, [name]: value });
};
render() {
return (
<div >
<div className="hero-body">
<div className="container">
<div className="columns is-centered">
<div className="column is-5-tablet is-4-desktop is-3-widescreen">
{this.state.errorMessage && <span>{this.state.errorMessage}</span>}
<form className="box" onSubmit={this.handleFormSubmit}>
<div className="field">
<label htmlFor="" className="label">Email</label>
<div className="control has-icons-left">
<input
type="email"
name='email'
placeholder="e.g. <EMAIL>"
className="input"
value={this.state.email}
onChange={(e) => this.handleChange(e)}
required/>
<span className="icon is-small is-left">
<i className="fa fa-envelope"></i>
</span>
</div>
</div>
<div className="field">
<label htmlFor="" className="label">Password</label>
<div className="control has-icons-left">
<input
type="password"
name="password"
value={this.state.password}
onChange={(e) => this.handleChange(e)}
placeholder="*******"
className="input"
required />
<span className="icon is-small is-left">
<i className="fa fa-lock"></i>
</span>
</div>
</div>
<div className="field">
<input type="submit" value="Sign up" className="button is-success"/>
</div>
<h2>
Already have account?
<Link to={"/login"}> Login</Link>
</h2>
</form>
</div>
</div>
</div>
</div>
</div>
);
}
}
export default SignUp;
<file_sep>/README.md
# Talent Pool 🏊♀️
A React App as final project of the Web Development course at Ironhack, 2021.
TechMeUp (TMU) is a tech fund which supported my tech training. Inspired by their mission and their trust, I wanted to give them something back and, in reciprocitty, support their growth.
Talent Pool is site to give extra exposure to the talent funded by TMU, making them more visible to tech recruiters.
## Technologies used:
* HTML
* CSS
* Javascript
* Node.js
* React
* MongoDB
* Mongoose
* Express
* Bulma
* Heroku
### HOW IT LOOKS:
https://user-images.githubusercontent.com/81960826/121230601-ba83a400-c88f-11eb-9b25-9e689e46f87a.mp4
### LINK TO TechMeUp:
https://www.techmeup.nl/
| 7af56ea47c16fd15e76a15ea713de1e7938c5aab | [
"JavaScript",
"Markdown"
] | 8 | JavaScript | SilviaRdgz/Talent-Pool | 0376a31b7a620e905c2a47c9d65ed52d2cd1e0b5 | df0932df0815027762ba256cfda6cafde27a7a4f | |
refs/heads/main | <file_sep>#[derive(Debug)]
pub enum Token {
Bracket(Bracket),
Literal(Literal),
Keyword(Keyword),
Operator(Operator),
Separator(Separator),
}
#[derive(Debug)]
pub enum Bracket {
LeftRound,
RightRound,
LeftSquare,
RightSquare,
LeftCurly,
RightCurly,
}
#[derive(Debug)]
pub enum Literal {
Identifier(String),
Integer(i64),
Float(f64),
Bool(bool),
Char(char),
String(String),
}
#[derive(Debug)]
pub enum Keyword {
If,
Else,
Elif,
Loop,
Break,
Function,
Return,
Import,
As,
And,
Or,
}
#[derive(Debug)]
pub enum Operator {
Plus,
Minus,
Multiply,
Divide,
Assign,
Greater,
GreaterOrEqual,
Less,
LessOrEqual,
Equal,
NotEqual,
Not,
}
#[derive(Debug)]
pub enum Separator {
Dot,
Comma,
Colon,
Semicolon,
}
pub fn lexer(input: String) -> Result<Vec<Token>, String> {
let mut result = Vec::new();
let mut iter = input.chars().peekable();
while let Some(c) = iter.next() {
match c {
' ' => { continue; },
'(' => {
result.push(Token::Bracket(Bracket::LeftRound));
},
')' => {
result.push(Token::Bracket(Bracket::RightRound));
},
'[' => {
result.push(Token::Bracket(Bracket::LeftSquare));
},
']' => {
result.push(Token::Bracket(Bracket::RightSquare));
},
'{' => {
result.push(Token::Bracket(Bracket::LeftCurly));
},
'}' => {
result.push(Token::Bracket(Bracket::RightCurly));
},
'.' => {
result.push(Token::Separator(Separator::Dot));
},
',' => {
result.push(Token::Separator(Separator::Comma));
},
':' => {
result.push(Token::Separator(Separator::Colon));
},
';' => {
result.push(Token::Separator(Separator::Semicolon));
},
'\'' => {
let c = match iter.next() {
Some(c) => c,
None => return Err(format!("Unexpected character: {}", ' ')),
};
match iter.peek() {
Some(&'\'') => result.push(Token::Literal(Literal::Char(c))),
_ => return Err(format!("Unexpected character: {}", iter.peek().unwrap_or(&' '))),
};
iter.next();
},
'"' => {
let mut s = String::new();
while let Some(c) = iter.next() {
match c {
'"' => {
result.push(Token::Literal(Literal::String(s)));
break;
},
_ => s.push(c),
};
};
},
'+' => {
result.push(Token::Operator(Operator::Plus));
},
'-' => {
result.push(Token::Operator(Operator::Minus));
},
'*' => {
result.push(Token::Operator(Operator::Multiply));
},
'/' => {
result.push(Token::Operator(Operator::Divide));
},
'=' => {
match iter.peek() {
Some('=') => {
result.push(Token::Operator(Operator::Equal));
iter.next();
},
_ => result.push(Token::Operator(Operator::Assign)),
};
},
'>' => {
match iter.peek() {
Some('=') => {
result.push(Token::Operator(Operator::GreaterOrEqual));
iter.next();
},
_ => result.push(Token::Operator(Operator::Greater)),
};
},
'<' => {
match iter.peek() {
Some('=') => {
result.push(Token::Operator(Operator::LessOrEqual));
iter.next();
},
_ => result.push(Token::Operator(Operator::Less)),
};
},
'!' => {
match iter.peek() {
Some('=') => {
result.push(Token::Operator(Operator::NotEqual));
iter.next();
},
_ => result.push(Token::Operator(Operator::Not)),
};
},
'0'..='9' => {
let mut n = String::new();
let mut is_float = false;
n.push(c);
while let Some(c) = iter.peek() {
match *c {
c if c.is_digit(10) => {
n.push(c);
iter.next();
},
'.' => {
is_float = true;
n.push('.');
iter.next();
},
_ => break,
};
};
match is_float {
true => {
result.push(Token::Literal(Literal::Float(n
.parse::<f64>()
.expect("Could not parse number")
)));
},
false => {
result.push(Token::Literal(Literal::Integer(n
.parse::<i64>()
.expect("Could not parse number")
)));
},
};
},
'a'..='z' | 'A'..='Z' | '_' => {
let mut s = String::new();
s.push(c);
while let Some(c) = iter.peek() {
match *c {
c if c.is_alphabetic() || c == '_' => {
s.push(c);
iter.next();
},
_ => break,
};
};
match s.as_str() {
"if" => result.push(Token::Keyword(Keyword::If)),
"else" => result.push(Token::Keyword(Keyword::Else)),
"elif" => result.push(Token::Keyword(Keyword::Elif)),
"loop" => result.push(Token::Keyword(Keyword::Loop)),
"break" => result.push(Token::Keyword(Keyword::Break)),
"function" => result.push(Token::Keyword(Keyword::Function)),
"return" => result.push(Token::Keyword(Keyword::Return)),
"import" => result.push(Token::Keyword(Keyword::Import)),
"as" => result.push(Token::Keyword(Keyword::As)),
"and" => result.push(Token::Keyword(Keyword::And)),
"or" => result.push(Token::Keyword(Keyword::Or)),
"true" => result.push(Token::Literal(Literal::Bool(true))),
"false" => result.push(Token::Literal(Literal::Bool(false))),
_ => result.push(Token::Literal(Literal::Identifier(s))),
};
},
_ => return Err(format!("Unexpected character: {}", c)),
};
};
Ok(result)
}
<file_sep>
mod lexer;
use crate::lexer::*;
use std::{io, io::prelude::*};
fn main() {
loop {
let mut input = String::new();
print!(">>> ");
io::stdout()
.flush()
.expect("Failed to flush!");
io::stdin()
.read_line(&mut input)
.expect("Failed to read line!");
input = input.trim().to_string();
let tokens = match lexer(input) {
Ok(value) => value,
Err(err) => { println!("{}", err); continue; },
};
println!("{:?}", tokens);
}
}
/*
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
use std::env;
use std::process;
fn main() {
let input = match read_file(env::args().collect()) {
Ok(value) => value,
Err(err) => {
println!("{}", err);
process::exit(0x0);
},
};
let result = match lexer (input.trim().to_string()) {
Ok(value) => value,
Err(err) => {
println!("{}", err);
process::exit(0x0);
},
};
println!("{:?}", result);
}
fn read_file(args: Vec<String>) -> Result<String, String> {
if args.len() != 2 { return Err(format!("No path to file was given!")); }
let path = Path::new(&args[1]);
let mut file = match File::open(&path) {
Ok(file) => file,
Err(err) => return Err(format!("Could not open file {} : {}", path.display(), err)),
};
let mut s = String::new();
match file.read_to_string(&mut s) {
Err(err) => return Err(format!("Could not read file {} : {}", path.display(), err)),
Ok(_) => (),
}
Ok(s)
}
*/ | cd0ad8be6bad89829bd3a8d1a6ca9f295b6da564 | [
"Rust"
] | 2 | Rust | candiel-z/interpreter | 33b2a42e947df257ebd5309259e1ad3125af5d4d | 3ab8aeecad961f7b21010446bcbf4cb9aa29ae5f | |
refs/heads/master | <file_sep>http://stackoverflow.com/questions/14176180/is-there-a-more-basic-tutorial-for-the-c-unit-testing-framework-check/15046864#comment63521263_15046864
https://dl.dropbox.com/u/1987095/test-check.zip
Had to add -pthread and -lm -lrt to get test to build
<file_sep>#include <stdio.h>
#include "implementation.h"
int main(void) {
int a =3, b = 2;
printf("sum of %d + %d = %d\n", a, b, sum(a,b));
return 0;
}
| a54b6e0ac37708c259e67016a56dd34e86316284 | [
"Markdown",
"C"
] | 2 | Markdown | JeffHoover/sum2ints-check-example | 5753e76829788a787ce652490b137955a908e396 | 2b406a1f65d9f83c6b85922fa3722c24e98eec81 | |
refs/heads/master | <file_sep>// needed to support persistence of data via YAML.
const yaml = require('js-yaml');
const fs = require('fs');
// database is let instead of const to allow us to modify it in test.js
let database = {
users: {},
articles: {},
comments: {},
nextCommentId: 1,
nextArticleId: 1
};
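// Illustrative sketch (not part of the original code) of what a single record in each
// collection above ends up looking like once the handlers below have run; the values
// here are made up — only the field names come from the code in this file:
//
//   database.users['alice']  = { username: 'alice', articleIds: [1], commentIds: [1] }
//   database.articles[1]     = { id: 1, title: 'Hello', url: 'http://example.com',
//                                username: 'alice', commentIds: [1],
//                                upvotedBy: [], downvotedBy: [] }
//   database.comments[1]     = { id: 1, body: 'Nice post', articleId: 1,
//                                username: 'alice', upvotedBy: [], downvotedBy: [] }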
// object for each of the routes the app uses
const routes = {
'/users': {
'POST': getOrCreateUser
},
'/users/:username': {
'GET': getUser
},
'/articles': {
'GET': getArticles,
'POST': createArticle
},
'/articles/:id': {
'GET': getArticle,
'PUT': updateArticle,
'DELETE': deleteArticle
},
'/articles/:id/upvote': {
'PUT': upvoteArticle
},
'/articles/:id/downvote': {
'PUT': downvoteArticle
},
'/comments': {
'POST': createComment
},
'/comments/:id': {
'PUT' : updateComment,
'DELETE': deleteComment
},
'/comments/:id/upvote': {
'PUT' : upvoteComment
},
'/comments/:id/downvote': {
'PUT' : downvoteComment
}
};
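// Example of how an incoming request maps onto the table above (illustrative only —
// 'alice' and the id 7 are made-up values; the real dispatch happens in the
// requestHandler / getRequestRoute code at the bottom of this file):
//
//   // PUT /comments/7/upvote with body { username: 'alice' } effectively resolves to:
//   routes['/comments/:id/upvote']['PUT']('/comments/7/upvote', { body: { username: 'alice' } });
//   // which calls upvoteComment and, assuming comment 7 and user 'alice' exist,
//   // returns { status: 200, body: { comment: ... } }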
/* ------------- The Comment functionality - added by SL -------------- */
function createComment (url,request) {
// Using short-circuit logic, assign request.body.comment to
// requestComment if there's a request.body
const requestComment = request.body && request.body.comment;
// define an object to hold the response
const response = {};
// if requestComment, articleId, username is not undefined and the user exists in
// the users object on the database
if (requestComment && requestComment.body && requestComment.articleId && database.articles[requestComment.articleId] && requestComment.username
&& database.users[requestComment.username]) {
// create a comment object to save to the comments object / database
const comment = {
id: database.nextCommentId++, // set id to an increment of database.nextCommentId
body: requestComment.body, // define the rest of the properties of the comment
articleId: requestComment.articleId,
username: requestComment.username,
upvotedBy: [], // no one can upvote the comment as it doesn't exist yet
downvotedBy: [] // as above, that's why it is blank
};
// save the above comments object into the database comments object
database.comments[comment.id] = comment;
// Link the comment to the user by saving the incremented comments id
// to the commentIds array that each user has in the users database object
database.users[comment.username].commentIds.push(comment.id);
// link the comment to the article in the same way as above
database.articles[comment.articleId].commentIds.push(comment.id)
// next, set up the response to send back to the user.
    // In this case, send back the comment that was saved as
    // the body
response.body = {comment: comment};
// because everything went well, set the status to 201
response.status = 201;
} else {
// there was an issue so send back the code 400
response.status = 400;
}
// return back the response
return response;
};
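// Illustrative example of the payload createComment expects and what it hands back
// (the values are invented; only the field names and status codes come from the code above):
//
//   createComment('/comments', {
//     body: { comment: { body: 'Great read!', articleId: 1, username: 'alice' } }
//   });
//   // => { status: 201, body: { comment: { id: ..., body: 'Great read!', articleId: 1,
//   //      username: 'alice', upvotedBy: [], downvotedBy: [] } } }
//   // assuming article 1 and user 'alice' already exist in the database object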
function updateComment(url,request) {
  // get the id of the comment from the URL by splitting on the forward slash;
  // this gives a 3-element array, from which we take the last element.
const id = Number(url.split('/')[2]);
// assign the actual comment by using the id above to select it
// from the comments object (in the database object)
// assign it as savedComment
const savedComment = database.comments[id];
// use short circuit evaluation to assign the comment text to
// the variable requestComment (if request.body is set)
const requestComment = request.body && request.body.comment;
// define an object to return
const response = {};
// check that we have a valid id and comment
// if not this is a bad request and return 400
if (!id || !requestComment) {
response.status = 400;
// check if the id supplied matches a saved comment in the database
// object. If not found, return a 404.
} else if (!savedComment) {
response.status = 404;
} else {
// else everything is good, use short circuit evaluation to assign
// the new comment from the request, or just set it back to
// the original
savedComment.body = requestComment.body || savedComment.body;
// set up the response object and code ready to be returned
response.body = {comment: savedComment};
response.status = 200;
}
// return the response
return response;
}
function deleteComment(url, request) {
// gets the id in the same way as updateComment
const id = Number(url.split('/').filter(segment => segment)[1]);
// assigns the saved comment to the variable savedComment
const savedComment = database.comments[id];
// gets the response ready by initialising it
const response = {};
// if there is a comment at that id
if (savedComment) {
// set it to null - I.e. delete it from the
// database comments object
database.comments[id] = null;
// next we want to remove the comments id from the article object.
// First, get all the comment ids on the article this comment was attached to
const articleCommentIds = database.articles[savedComment.articleId].commentIds;
// Next, remove the comment id from the articles object
articleCommentIds.splice(articleCommentIds.indexOf(id),1);
// now get the commentIds array from the user object for the user that created this comment
const userCommentIds = database.users[savedComment.username].commentIds;
// alter the array by splicing it from the indexOf our comment for 1 element only
userCommentIds.splice(userCommentIds.indexOf(id), 1);
// set the the status code
response.status = 204;
} else {
// something went wrong, set the status code
response.status = 404;
}
// return the status code
return response;
}
function upvoteComment(url, request) {
// get the id from the URL as done in other functions
const id = Number(url.split('/').filter(segment => segment)[1]);
// Short circuit evaluation to assign the username similar to before
const username = request.body && request.body.username;
// get and assign the comment using the id we extracted above
let savedComment = database.comments[id];
// set up our response object
const response = {};
// if the id of the comment was a valid one in the comments object
// and the username sent in the request exists in the users database object
if (savedComment && database.users[username]) {
// run the upvote helper function passing it the comment and
// username of the person upvoting it. Update the savedComment variable
// with the new altered comment.
savedComment = upvote(savedComment, username);
// set the body of the response to the altered comment
// after the upvote has been processed
response.body = {comment: savedComment};
// set the status code to ok
response.status = 200;
} else {
// there must be an issue, set the status code accordingly
response.status = 400;
}
// return the response
return response;
}
function downvoteComment(url, request) {
// get the id from the URL as done in other functions
const id = Number(url.split('/').filter(segment => segment)[1]);
// Short circuit evaluation to assign the username similar to before
const username = request.body && request.body.username;
// get and assign the comment using the id we extracted
let savedComment = database.comments[id];
// set up our response object
const response = {};
// if the id of the comment was a valid one in the comments object
// and the username sent in the request exists in the users database object
if (savedComment && database.users[username]) {
// run the downvote helper function passing it the comment and
// username of the person downvoting it. Update the savedComment variable
// with the new altered comment.
savedComment = downvote(savedComment, username);
// set the body of the response to the altered comment
// after the downvote has been processed
response.body = {comment: savedComment};
// set the status code to ok
response.status = 200;
} else {
// there must be an issue, set the status code accordingly
response.status = 400;
}
// return the response
return response;
}
// function to load from a YAML file and populate the database object
// defined at the top
function loadDatabase () {
try {
// load the database file using the YAML module and fs to access the file system
const yamldb = yaml.safeLoad(fs.readFileSync('db.yaml', 'utf8'));
// set the database object equal to what we loaded from the file
database = yamldb;
} catch (e) {
// log any errors
console.log(e);
}
}
// function to save the database object to a file
function saveDatabase () {
// use the yaml module to dump the database into a format ready for
// writing to a file
let yamldb = yaml.safeDump(database);
try {
    // use fs to write the database to the database file; fs.writeFile is
    // asynchronous, so errors surface in its callback rather than in the
    // surrounding try/catch
    fs.writeFile('db.yaml', yamldb, 'utf8', function (err) {
      if (err) { console.log(err); } else { console.log('File written!'); }
    });
} catch (e) {
// log any errors
console.log(e);
}
}
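// Rough sketch of what db.yaml might hold after one user, one article and one comment
// have been created (made-up values — the top-level keys mirror the database object
// defined at the top of this file; the exact YAML formatting comes from js-yaml's safeDump):
//
//   users:
//     alice:
//       username: alice
//       articleIds: [1]
//       commentIds: [1]
//   articles:
//     1: { id: 1, title: Hello, url: 'http://example.com', username: alice, ... }
//   comments:
//     1: { id: 1, body: Nice post, articleId: 1, username: alice, ... }
//   nextCommentId: 2
//   nextArticleId: 2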
/* ------------------------------------------------------------------ */
function getUser(url, request) {
const username = url.split('/').filter(segment => segment)[1];
const user = database.users[username];
const response = {};
if (user) {
const userArticles = user.articleIds.map(
articleId => database.articles[articleId]);
const userComments = user.commentIds.map(
commentId => database.comments[commentId]);
response.body = {
user: user,
userArticles: userArticles,
userComments: userComments
};
response.status = 200;
} else if (username) {
response.status = 404;
} else {
response.status = 400;
}
return response;
}
function getOrCreateUser(url, request) {
const username = request.body && request.body.username;
const response = {};
if (database.users[username]) {
response.body = {user: database.users[username]};
response.status = 200;
} else if (username) {
const user = {
username: username,
articleIds: [],
commentIds: []
};
database.users[username] = user;
response.body = {user: user};
response.status = 201;
} else {
response.status = 400;
}
return response;
}
function getArticles(url, request) {
const response = {};
response.status = 200;
response.body = {
articles: Object.keys(database.articles)
.map(articleId => database.articles[articleId])
.filter(article => article)
.sort((article1, article2) => article2.id - article1.id)
};
return response;
}
function getArticle(url, request) {
const id = Number(url.split('/').filter(segment => segment)[1]);
const article = database.articles[id];
const response = {};
if (article) {
article.comments = article.commentIds.map(
commentId => database.comments[commentId]);
response.body = {article: article};
response.status = 200;
} else if (id) {
response.status = 404;
} else {
response.status = 400;
}
return response;
}
function createArticle(url, request) {
  // see short circuit evaluation https://mzl.la/2OKhoKJ
  // basically with (false) && (something), the something never
  // gets evaluated because the first part of the AND is false.
  // in this case, if request.body is not set, request.body.article
  // definitely won't be set! requestArticle is set to the 2nd
  // half as that's where the short circuit ends. If request.body
  // was undefined, then because it is an AND, the short-circuit will
  // be on the first half -- so requestArticle will be set to undefined.
const requestArticle = request.body && request.body.article;
// define a response object that we'll be returning later
const response = {};
  // check we have a request article and that its required properties are set
if (requestArticle && requestArticle.title && requestArticle.url &&
requestArticle.username && database.users[requestArticle.username]) {
    // create an article object to save to the articles object / database
const article = {
id: database.nextArticleId++, // set id to an increment of database.nextArticleId
title: requestArticle.title, // define the rest of the properties of the article
url: requestArticle.url,
username: requestArticle.username,
      commentIds: [], // can't have any comments yet - the article doesn't even exist
      upvotedBy: [], // no one can upvote the article for the same reason as above
      downvotedBy: [] // ditto, as above
};
// save the article object into the articles object
database.articles[article.id] = article;
// Link the article to the user by saving the incremented article id
// to the articleIds array that each user has in the users database object
database.users[article.username].articleIds.push(article.id);
// next set up the response to send back to the user
// in this case, send back the article that was saved as
// the body
response.body = {article: article};
// because everything went well, set the status to 201
response.status = 201;
} else {
// there was an issue so send back the code 400
response.status = 400;
}
// return back the response
return response;
}
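// Quick illustration of the validation above (made-up values, not part of the original code):
//
//   createArticle('/articles', { body: { article: { title: 'Hi', url: 'http://x.example', username: 'alice' } } })
//   // => 201 with the saved article, provided user 'alice' already exists
//   createArticle('/articles', { body: { article: { title: 'Hi', username: 'alice' } } })
//   // => 400, because the url field is missing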
function updateArticle(url, request) {
// get the id of the article to update by parsing the URL string
// split the URL string on the forward slash. The result is an array of three elements
// then filter the array to remove the blank first element
// assign id to the second element of the resulting filtered array
// in this case, the number of the article.
const id = Number(url.split('/').filter(segment => segment)[1]);
// get the saved article that is saved in the database object
// using the id we extracted in the last step
const savedArticle = database.articles[id];
// using short-circuit evaluation assign the article text to the
// variable requestArticle
const requestArticle = request.body && request.body.article;
// define the response object we'll be sending back
const response = {};
// if id or requestArticle are not set
// send the 400 status code
if (!id || !requestArticle) {
response.status = 400;
// else if the article is not found, send back a 404 error
} else if (!savedArticle) {
response.status = 404;
} else {
// else everything is OK
// now, if the request contains a title use short circuit evaluation to set it
// if the request doesn't contain a title, set it back to the original, for example.
// same with the URL. This stops the user deleting one of the inputs while editing the other.
    // Unlike before, the short circuit is an OR statement, so if the first half is false it'll try the next
    // until it short circuits. If it was an AND it'd short circuit on the first item being false. That's
    // because false && (anything) will always be false, so there's no point in even trying the next item.
savedArticle.title = requestArticle.title || savedArticle.title;
savedArticle.url = requestArticle.url || savedArticle.url;
// Note the above is actually saving the title and URL to the database object because we set savedArticle
// equal to one of the saved articles with the line 'const savedArticle = database.articles[id];'
// set the response body and status code ready to be returned back
response.body = {article: savedArticle};
response.status = 200;
}
//return the response
return response;
}
function deleteArticle(url, request) {
// gets the id in the same way as updateArticle
const id = Number(url.split('/').filter(segment => segment)[1]);
// assigns the saved article to a variable
const savedArticle = database.articles[id];
// gets the response ready by initialising it
const response = {};
// if there is an article at that id
if (savedArticle) {
// set it to null - i.e. delete it from the
// database object
database.articles[id] = null;
// next we remove all the comments for that article
savedArticle.commentIds.forEach(commentId => {
// for each comment on that article,
const comment = database.comments[commentId];
// delete it from the comments object
database.comments[commentId] = null;
// next we need to remove the comment ids from the users object
// specifically the commentIds array
// assign a variable to hold the commentIds array
const userCommentIds = database.users[comment.username].commentIds;
      // next, use indexOf to get the index of this comment's id in the array,
      // then use splice to remove the 1 element starting at that index,
      // i.e. remove the comment id from the user's commentIds
      // (note: we look up commentId here, not the article id)
      userCommentIds.splice(userCommentIds.indexOf(commentId), 1);
});
// get the array of all articles from the users object
const userArticleIds = database.users[savedArticle.username].articleIds;
// use splice to alter the array. Use indexOf to the get the index of the article
// given the article's id, then use splice to remove 1 element starting at that point
// i.e. delete the article from the users object
userArticleIds.splice(userArticleIds.indexOf(id), 1);
    // set the status code
response.status = 204;
} else {
// something went wrong, set the status code
response.status = 400;
}
  // return the response
return response;
}
function upvoteArticle(url, request) {
// get the id from the url as done in other functions
const id = Number(url.split('/').filter(segment => segment)[1]);
  // Short-circuit evaluation to assign the username, similar to before
const username = request.body && request.body.username;
// get and assign the article using the id we extracted
let savedArticle = database.articles[id];
// set up our response object
const response = {};
// if the id of the article was a valid one in the articles object
// and the username sent in the request exists in the users database object
if (savedArticle && database.users[username]) {
// run the upvote helper function passing it the article and
// username of the person upvoting it
savedArticle = upvote(savedArticle, username);
// set the body of the response to the altered article
// after the upvote has been processed
response.body = {article: savedArticle};
// set the status code to ok
response.status = 200;
} else {
// there must be an issue, set the status code accordingly
response.status = 400;
}
// return the response
return response;
}
function downvoteArticle(url, request) {
  // get the id from the URL string as before
const id = Number(url.split('/').filter(segment => segment)[1]);
// using short circuit eval, assign the username from the request
const username = request.body && request.body.username;
  // use let (because we'll be reassigning it later) to point savedArticle at the
  // saved article inside the database object's articles collection.
let savedArticle = database.articles[id];
// initialise the response object
const response = {};
// check if there is actually a saved article at that id
// and that the users in the request is in the users object
if (savedArticle && database.users[username]) {
// if so, run the downvote helper function passing the savedArticle and
// username. Then assigning the new updated article to savedArticle
savedArticle = downvote(savedArticle, username);
// assign the response body and status code ready to be returned
response.body = {article: savedArticle};
response.status = 200;
} else {
response.status = 400;
}
// return the response
return response;
}
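// Worked example (illustrative values): upvote({upvotedBy: [], downvotedBy: ['sam']}, 'sam')
// first clears the stale downvote, then records the upvote, returning
// {upvotedBy: ['sam'], downvotedBy: []}.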
function upvote(item, username) {
// if the person has already downvoted the article, deal with that first
if (item.downvotedBy.includes(username)) {
// remove the downvote
item.downvotedBy.splice(item.downvotedBy.indexOf(username), 1);
}
// if they haven't already upvoted, add them to the upvotedBy array
if (!item.upvotedBy.includes(username)) {
item.upvotedBy.push(username);
}
  // return the item (either an article or a comment object)
return item;
}
// see comments for the upvote function
function downvote(item, username) {
if (item.upvotedBy.includes(username)) {
item.upvotedBy.splice(item.upvotedBy.indexOf(username), 1);
}
if (!item.downvotedBy.includes(username)) {
item.downvotedBy.push(username);
}
return item;
}
// Write all code above this line.
const http = require('http');
const url = require('url');
const port = process.env.PORT || 4000;
const isTestMode = process.env.IS_TEST_MODE;
const requestHandler = (request, response) => {
const url = request.url;
const method = request.method;
const route = getRequestRoute(url);
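  // Handle CORS preflight requests up front: reply 200 with the allowed origins, methods and headers
  // (cached for 24 hours) and end the response without touching the routes.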
if (method === 'OPTIONS') {
var headers = {};
headers["Access-Control-Allow-Origin"] = "*";
headers["Access-Control-Allow-Methods"] = "POST, GET, PUT, DELETE, OPTIONS";
headers["Access-Control-Allow-Credentials"] = false;
headers["Access-Control-Max-Age"] = '86400'; // 24 hours
headers["Access-Control-Allow-Headers"] = "X-Requested-With, X-HTTP-Method-Override, Content-Type, Accept";
response.writeHead(200, headers);
return response.end();
}
response.setHeader('Access-Control-Allow-Origin', '*');
response.setHeader('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS');
response.setHeader(
'Access-Control-Allow-Headers', 'X-Requested-With,content-type');
if (!routes[route] || !routes[route][method]) {
response.statusCode = 400;
return response.end();
}
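  // GET and DELETE requests carry no body, so their handlers are called with the URL only;
  // other methods buffer the request body, JSON-parse it and pass it along as {body: ...}.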
if (method === 'GET' || method === 'DELETE') {
const methodResponse = routes[route][method].call(null, url);
!isTestMode && (typeof saveDatabase === 'function') && saveDatabase();
response.statusCode = methodResponse.status;
response.end(JSON.stringify(methodResponse.body) || '');
} else {
let body = [];
request.on('data', (chunk) => {
body.push(chunk);
}).on('end', () => {
body = JSON.parse(Buffer.concat(body).toString());
const jsonRequest = {body: body};
const methodResponse = routes[route][method].call(null, url, jsonRequest);
!isTestMode && (typeof saveDatabase === 'function') && saveDatabase();
response.statusCode = methodResponse.status;
response.end(JSON.stringify(methodResponse.body) || '');
});
}
};
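// Maps a request URL onto one of the route keys, for example (illustrative URLs):
//   '/articles'          -> '/articles'
//   '/articles/1'        -> '/articles/:id'
//   '/articles/1/upvote' -> '/articles/:id/upvote'
//   '/users/alice'       -> '/users/:username'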
const getRequestRoute = (url) => {
const pathSegments = url.split('/').filter(segment => segment);
if (pathSegments.length === 1) {
return `/${pathSegments[0]}`;
} else if (pathSegments[2] === 'upvote' || pathSegments[2] === 'downvote') {
return `/${pathSegments[0]}/:id/${pathSegments[2]}`;
} else if (pathSegments[0] === 'users') {
return `/${pathSegments[0]}/:username`;
} else {
return `/${pathSegments[0]}/:id`;
}
}
if (typeof loadDatabase === 'function' && !isTestMode) {
const savedDatabase = loadDatabase();
if (savedDatabase) {
    for (const key in database) {
database[key] = savedDatabase[key] || database[key];
}
}
}
const server = http.createServer(requestHandler);
server.listen(port, (err) => {
if (err) {
    return console.log('Server did not start successfully: ', err);
}
console.log(`Server is listening on ${port}`);
}); | 393ffc7551dc0bc703a84c6ad2f0264f8630f42f | [
"JavaScript"
] | 1 | JavaScript | slatham/project-3-the-scoop | 375c3c8007041043b8daf7bf7138878bdeca6211 | 3e2bab6e67a5420d4f7bd037ec7faee518a10b89 | |
refs/heads/master | <repo_name>goooooouwa/answer-for-test4<file_sep>/main/main.js
module.exports = function main(a) {
  // Write your code here
if(a == "123") {
return 6;
} else if(a == "102") {
return 3;
}
};
| 8a5af98e6c5ff940e30e6d16beea294c03dafaa6 | [
"JavaScript"
] | 1 | JavaScript | goooooouwa/answer-for-test4 | 557077a438ef69c55b8cb929dbc29d7ff7d8880b | e72c9ae820fbd4d67bd411b5d9270007b651af1b | |
refs/heads/master | <repo_name>BeyondTheClouds/VMPlaceS<file_sep>/generate.py
#!/usr/bin/python
# This script generates a specific deployment file for the injection simulator.
# It assumes that the platform will be a cluster.
# Usage: python generate.py scheduling_policy nb_nodes [additional arguments, depending on the policy]
# Example: python generate.py centralized 100000 > deploy.xml
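# Argument layouts for the other policies (taken from the branches below):
#   python generate.py example|without nb_nodes > deploy.xml
#   python generate.py hierarchical nb_nodes nb_servicenodes > deploy.xml
#   python generate.py distributed nb_nodes nb_cpu total_cpu_cap ram port > dvms_deploy.xml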
import sys, random
## centralized scheduling
largv=len(sys.argv)
nb_nodes = int(sys.argv[2])
if (sys.argv[1] == 'centralized'):
sys.stderr.write("generate deployment file for entropy");
sys.stdout.write("<?xml version='1.0'?>\n"
"<!DOCTYPE platform SYSTEM \"http://simgrid.gforge.inria.fr/simgrid/simgrid.dtd\">\n"
"<platform version=\"4\">\n"
" <process host=\"node%d\" function=\"injector.Injector\"> </process>\n"
" <process host=\"node%d\" function=\"simulation.CentralizedResolver\"> </process>\n"
"</platform>" % (nb_nodes +1, nb_nodes));
elif (sys.argv[1] == 'example'):
sys.stderr.write("generate deployment file for the simple example");
sys.stdout.write("<?xml version='1.0'?>\n"
"<!DOCTYPE platform SYSTEM \"http://simgrid.gforge.inria.fr/simgrid/simgrid.dtd\">\n"
"<platform version=\"4\">\n"
" <process host=\"node%d\" function=\"injector.Injector\"> </process>\n"
" <process host=\"node%d\" function=\"simulation.ExampleResolver\"> </process>\n"
"</platform>" % (nb_nodes +1, nb_nodes));
elif (sys.argv[1] == 'without'):
sys.stderr.write("generate deployment file for entropy");
sys.stdout.write("<?xml version='1.0'?>\n"
"<!DOCTYPE platform SYSTEM \"http://simgrid.gforge.inria.fr/simgrid/simgrid.dtd\">\n"
"<platform version=\"4\">\n"
" <process host=\"node%d\" function=\"injector.Injector\"> </process>\n"
"</platform>" % (nb_nodes +1));
elif (sys.argv[1] == 'hierarchical'):
nb_servicenodes = int(sys.argv[3])
sys.stderr.write("generate deployment file for snooze");
sys.stdout.write("<?xml version='1.0'?>\n"
"<!DOCTYPE platform SYSTEM \"http://simgrid.gforge.inria.fr/simgrid/simgrid.dtd\">\n"
"<platform version=\"4\">\n"
" <process host=\"node%d\" function=\"injector.Injector\"> </process>\n"
" <process host=\"node%d\" function=\"simulation.HierarchicalResolver\"> </process>\n"
% (nb_nodes + nb_servicenodes, nb_nodes + nb_servicenodes))
# for i in range(0, nb_nodes):
# line = " <process host=\"node%d\" function=\"scheduling.hierarchical.snooze.LocalController\">\
#<argument value=\"node%d\" /><argument value=\"localController-%d\" />\
#</process>\n" % (i, i, i)
# sys.stdout.write(line)
for i in range(nb_nodes, nb_nodes+nb_servicenodes):
line = " <process host=\"node%d\" function=\"scheduling.hierarchical.snooze.GroupManager\">\
<argument value=\"node%d\" /><argument value=\"groupManager-%d\" />\
</process>\n" % (i, i, i)
sys.stdout.write(line)
sys.stdout.write("</platform>")
elif (sys.argv[1] == 'distributed'):
nb_nodes = int(sys.argv[2])
nb_cpu = int(sys.argv[3])
total_cpu_cap = int(sys.argv[4])
ram = int(sys.argv[5])
port_orig = int(sys.argv[6])
port = port_orig
sys.stdout.write("<?xml version='1.0'?>\n"
"<!DOCTYPE platform SYSTEM \"http://simgrid.gforge.inria.fr/simgrid/simgrid.dtd\">\n"
"<platform version=\"4\">\n"
" <process host=\"node%d\" function=\"injector.Injector\"> </process>\n" % (nb_nodes))
for i in range(0, nb_nodes - 1):
line = " <process host=\"node%d\" function=\"simulation.DistributedResolver\">\n \
<argument value=\"node%d\" /><argument value=\"%d\" /><argument value=\"%d\" /><argument value=\"%d\" /><argument value=\"%d\" />\n \
<argument value=\"node%d\" /><argument value=\"%d\" />\n \
</process>\n" % (i, i, nb_cpu, total_cpu_cap, ram, port, i+1, port+1)
port+=1
sys.stdout.write(line)
# link the last agent to the first
line = " <process host=\"node%d\" function=\"simulation.DistributedResolver\">\n \
<argument value=\"node%d\" /><argument value=\"%d\" /><argument value=\"%d\" /><argument value=\"%d\" /><argument value=\"%d\" />\n \
<argument value=\"node%d\" /><argument value=\"%d\" />\n \
</process>\n" % (nb_nodes-1, nb_nodes-1, nb_cpu, total_cpu_cap, ram, port, 0, port_orig)
sys.stdout.write(line)
sys.stdout.write("</platform>")
else:
print("Usage: python generate.py scheduling_policy nb_nodes or python generate.py distributed nb_nodes nb_cpu total_cpu_cap ram port > dvms_deploy.xml")
sys.exit(1)
<file_sep>/src/main/java/scheduling/hierarchical/snooze/Test.java
package scheduling.hierarchical.snooze;
import configuration.SimulatorProperties;
import org.simgrid.msg.*;
import org.simgrid.msg.Process;
import scheduling.hierarchical.snooze.msg.SnoozeMsg;
import scheduling.hierarchical.snooze.msg.TestFailGLMsg;
import scheduling.hierarchical.snooze.msg.TestFailGMMsg;
import simulation.SimulatorManager;
import java.util.ArrayList;
import java.util.concurrent.ConcurrentHashMap;
/**
* Created by sudholt on 20/07/2014.
*/
public class Test extends Process {
static String name;
static Host host;
static String inbox;
public boolean testsToBeTerminated = false;
static Multicast multicast;
static GroupLeader gl;
static ConcurrentHashMap<String, GroupManager> gmsCreated = new ConcurrentHashMap<>();
static ConcurrentHashMap<String, GroupManager> gmsJoined = new ConcurrentHashMap<>();
static ConcurrentHashMap<String, LocalController> lcsCreated = new ConcurrentHashMap<>();
static ConcurrentHashMap<String, LCJoined> lcsJoined = new ConcurrentHashMap<>();
static int noGMJoins = 0;
static int noLCJoins = 0;
static String gm = "";
static SnoozeMsg m = null;
public Test(Host host, String name) {
super(host, name);
this.host = host;
this.name = name;
this.inbox = "test";
}
@Override
public void main(String[] strings) throws MsgException {
try {
// procAddLCs();
// procAddGMs();
// procFailGLs();
// procFailGMs();
while (!testsToBeTerminated && !SimulatorManager.isEndOfInjection()) {
dispInfo();
sleep(1000*SnoozeProperties.getInfoPeriodicity());
}
} catch (HostFailureException e) {
testsToBeTerminated = true;
Logger.err("[Test.main] HostFailureException");
}
}
void procAddGMs() throws HostNotFoundException {
new Process(host, host.getName() + "-addGMs") {
public void main(String[] args) throws HostFailureException, HostNotFoundException {
try {
int lcNo = SimulatorProperties.getNbOfHostingNodes(); // no. of statically allocated LCs
int gmNo = SimulatorProperties.getNbOfServiceNodes(); // no. of statically allocated GMs
// for (int i = 0; i < SimulatorProperties.getNbOfServiceNodes()/2 && !testsToBeTerminated; i++) {
for (int i = 0; i < SimulatorProperties.getNbOfServiceNodes() && !testsToBeTerminated; i++) {
sleep(250);
String[] gmArgs = new String[]{"node" + (gmNo + lcNo), "dynGroupManager-" + (gmNo + lcNo)};
GroupManager gm =
new GroupManager(Host.getByName("node" + (gmNo + lcNo)), "dynGroupManager-" + (gmNo + lcNo), gmArgs);
gm.start();
Logger.debug("[Test.addLCs] Dyn. GM added: " + gmArgs[1]);
gmNo++;
}
} catch (HostFailureException e) {
testsToBeTerminated = true;
Logger.err("[Test.procAddGMs] HostFailureException");
}
}
}.start();
}
void procAddLCs() throws HostNotFoundException {
new Process(host, host.getName() + "-addLCs") {
public void main(String[] args) throws HostFailureException, HostNotFoundException {
try {
sleep(6000);
int lcNo = 0; // no. of statically allocated LCs
for (int i = 0; i < SimulatorProperties.getNbOfHostingNodes() && !testsToBeTerminated; i++) {
String[] lcArgs = new String[]{"node" + lcNo, "dynLocalController-" + lcNo};
LocalController lc =
new LocalController(Host.getByName("node" + lcNo), "dynLocalController-" + lcNo, lcArgs);
lc.start();
Logger.info("[Test.addLCs] Dyn. LC added: " + lcArgs[1]);
lcNo++;
sleep(2000);
}
} catch (HostFailureException e) {
testsToBeTerminated = true;
Logger.err("[Test.procAddLCs] HostFailureException");
}
}
}.start();
}
void procFailGLs() throws HostNotFoundException {
new Process(host, host.getName() + "-terminateGMs") {
public void main(String[] args) throws HostFailureException {
try {
sleep(4000);
for (int i = 0; i < SimulatorProperties.getNbOfServiceNodes() / 2 && !testsToBeTerminated; i++) {
if (multicast.gmInfo.size() < 3) {
Logger.debug("[Test.failGLs] #GMs: " + multicast.gmInfo.size());
sleep(3000);
continue;
}
m = new TestFailGLMsg(name, AUX.glInbox(multicast.glHostname), null, null);
m.send();
Logger.imp("[Test.failGLs] GL failure: " + Test.gl.getHost().getName());
sleep(1537);
// break;
}
} catch (HostFailureException e) {
testsToBeTerminated = true;
Logger.err("[Test.procFailGLs] HostFailureException");
}
}
}.start();
}
void procFailGMs() throws HostNotFoundException {
new Process(host, host.getName() + "-terminateGMs") {
public void main(String[] args) throws HostFailureException {
try {
sleep(5000);
for (int i = 0; i < SimulatorProperties.getNbOfServiceNodes() / 2 && !testsToBeTerminated; i++) {
if (multicast.gmInfo.size() < 3) {
Logger.info("[Test.failGMs] #GMs: " + multicast.gmInfo.size());
sleep(1777);
continue;
}
gm = new ArrayList<String>(multicast.gmInfo.keySet()).get(0);
m = new TestFailGMMsg(name, AUX.gmInbox(gm), null, null);
m.send();
Logger.imp("[Test.failGMs] Term. GM: " + gm + ", #GMs: " + multicast.gmInfo.size());
sleep(1777);
// break;
}
} catch (HostFailureException e) {
testsToBeTerminated = true;
Logger.err("[Test.procFailGMs] HostFailureException");
}
}
}.start();
}
void dispInfo() {
int i = 0, al = 0, gmal = 0;
Logger.imp("\n\n[Test.dispInfo] MUL.GL: " + multicast.glHostname +
", #MUL.gmInfo: " + multicast.gmInfo.size() +
", #MUL.lcInfo: " + multicast.lcInfo.size() + ", #Test.gmsCreated " + Test.gmsCreated.size());
Logger.imp(" ----");
for (String gm : multicast.gmInfo.keySet()) {
int mulLCs = 0, testLCsCreated = 0, testLCsJoined = 0;
for (String lc : multicast.lcInfo.keySet()) {
if (multicast.lcInfo.get(lc).gmHost.equals(gm)) {
mulLCs++;
if (Test.lcsCreated.containsKey(lc)) testLCsCreated++;
if (Test.lcsJoined.containsKey(lc)) testLCsJoined += Test.getNoGMsJoinedLC(lc);
}
}
String gmLeader = "";
int gml = 0;
for (String gmn : Test.gmsCreated.keySet()) {
GroupManager gmo = Test.gmsCreated.get(gmn);
if (gmn.equals(gm)) {
gmLeader = gmo.glHostname;
gml = gmo.lcInfo.size();
gmal += gml;
}
}
Logger.imp(" MUL.GM: " + gm + ", MUL.GM.#LCs: " + mulLCs
+ ", Test.GM.#LCs join/create: " + testLCsJoined + "/" + testLCsCreated
+ ", Test.GMLeader: " + gmLeader);
// Logger.imp(" GM.#LCs: " + gml + ", Test.GMLeader: " + gmLeader);
i++;
al += mulLCs;
}
Logger.imp(" ----");
if (gl != null)
Logger.imp(" Test.GL: " + gl.host.getName()
+ ", Test.GL.#GM: " + gl.gmInfo.size() + ", MUL.GM.#LCs: " + al + ", Test.GM.#LCs: " + gmal);
Logger.imp(" No. GM joins: " + noGMJoins + ", No. LC joins: " + noLCJoins + "\n");
}
static class LCJoined {
LocalController lco;
ArrayList<String> gms;
}
static void removeJoinedLC(String lc, String gm, String m) {
if (lcsJoined.containsKey(lc)) {
LCJoined tlj = lcsJoined.get(lc);
if (tlj.gms.contains(gm)) {
tlj.gms.remove(gm);
Logger.debug(m + " removeJoinedLC: LC: " + lc + ", GM: " + gm);
if (tlj.gms.size() == 0) {
lcsJoined.remove(lc);
Logger.debug(m + ", removeJoinedLC: Last GM removed LC: " + lc + ", GM: " + gm);
}
}
}
else Logger.debug(m + ", removeJoinedLC: No LC: " + lc + ", GM: " + gm);
}
static void putJoinedLC(String lc, LocalController lco, String gm, String m) {
if (lcsJoined.containsKey(lc)) {
LCJoined tlj = lcsJoined.get(lc);
if (!tlj.gms.contains(gm)) {
tlj.gms.add(gm);
Logger.debug(m + " putJoinedLC: GM added LC: " + lc + ", GM: " + gm + ", LCO: " + lco);
} else Logger.err(m + " putJoinedLC: Double LC: " + lc + ", GM: " + gm + ", LCO: " + lco);
} else {
LCJoined tlj = new LCJoined();
tlj.lco = lco;
ArrayList<String> al = new ArrayList<>();
al.add(gm);
tlj.gms = al;
lcsJoined.put(lc, tlj);
Logger.debug(m + " putJoinedLC: New LC: " + lc + ", GM: " + gm + ", LCO: " + lco);
}
}
static int getNoGMsJoinedLC(String lc) {
if (!lcsJoined.containsKey(lc)) return 0;
else return lcsJoined.get(lc).gms.size();
}
}
<file_sep>/src/main/java/scheduling/hierarchical/snooze/LocalController.java
package scheduling.hierarchical.snooze;
/**
* Created by sudholt on 25/05/2014.
*/
import configuration.XHost;
import org.simgrid.msg.*;
import org.simgrid.msg.Process;
import scheduling.hierarchical.snooze.msg.LCAssMsg;
import scheduling.hierarchical.snooze.msg.LCChargeMsg;
import scheduling.hierarchical.snooze.msg.NewLCMsg;
import scheduling.hierarchical.snooze.msg.SnoozeMsg;
import simulation.SimulatorManager;
public class LocalController extends Process {
private String name;
XHost host; //@ Make private
private boolean thisLCToBeStopped = false;
private String gmHostname = "";
private double gmTimestamp;
private int procCharge = 0;
private String inbox, joinMBox;
private String lcCharge; // GM mbox
private boolean joining = true;
public LocalController (Host host, String name, String[] args) throws HostNotFoundException {
super(host, name, args);
}
public void init(XHost host, String name) {
this.host = host;
this.name = name;
this.inbox = AUX.lcInbox(host.getName());
this.joinMBox = inbox + "-join";
}
@Override
public void main(String[] args) {
int n=0;
try {
// Let LCs wait for GM initialization
// sleep(3000);
Test.lcsCreated.remove(this);
Logger.debug("Start LC " + args[0] + ", " + args[1]);
init(SimulatorManager.getXHostByName(args[0]), args[1]);
Test.lcsCreated.put(this.host.getName(), this);
join();
procSendLCChargeToHandleDeadGM();
while (!stopThisLC()) {
try {
SnoozeMsg m = (SnoozeMsg) Task.receive(inbox, AUX.durationToEnd());
handle(m);
// if (Task.listen(inbox)) handle((SnoozeMsg) Task.receive());
gmDead();
if (SnoozeProperties.shouldISleep()) sleep(AUX.DefaultComputeInterval);
} catch (HostFailureException e) {
thisLCToBeStopped = true;
Logger.exc("[LC.main] HostFailureException");
break;
} catch (TimeoutException e) {
gmDead();
} catch (Exception e) {
String cause = e.getClass().getName();
Logger.err("[LC.main] PROBLEM? Exception: " + host.getName() + ": " + cause);
e.printStackTrace();
Logger.err("[LC.main] PROBLEM? Exception, " + host.getName() + ": " + e.getClass().getName());
}
}
thisLCToBeStopped = true;
} catch (HostFailureException e) {
Logger.exc("[LC.main] HostFailureException");
thisLCToBeStopped = true;
}
gmHostname = "";
Logger.debug("[LC.main] LC stopped");
}
boolean stopThisLC() { return thisLCToBeStopped || SimulatorManager.isEndOfInjection(); }
void handle(SnoozeMsg m) throws HostFailureException {
// Logger.debug("[LC.handle] LCIn: " + m);
String cs = m.getClass().getSimpleName();
switch (cs) {
case "TermGMMsg" : handleTermGM(m); break;
case "SnoozeMsg" :
Logger.err("[GM(SnoozeMsg)] Unknown message" + m + " on " + host);
break;
}
}
/**
* Stop LC activity and rejoin
*/
void handleTermGM(SnoozeMsg m) throws HostFailureException {
// TODO: stop LC activity
Logger.err("[LC(TermGM)] GM DEAD, LC rejoins: " + m);
join();
}
/**
* GM dead: rejoin
*/
void gmDead() throws HostFailureException {
if (AUX.timeDiff(gmTimestamp) < AUX.HeartbeatTimeout || joining) return;
Logger.err("[LC.gmDead] GM dead: " + gmHostname + ", " + gmTimestamp);
gmHostname = "";
join();
}
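    /**
     * Join protocol, as implemented by the helpers below: (1) getGL: join the multicast group and wait
     * for a GL heartbeat, (2) getGM: ask the GL for a GM assignment, (3) joinGM: register with the
     * assigned GM, (4) joinFinalize: switch from the GL multicast group to the GM group. Each step is
     * retried until it succeeds or this LC is stopped.
     */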
void join() throws HostFailureException {
joining = true;
Logger.info("[LC.join] Entry: " + gmHostname + ", TS: " + gmTimestamp);
String gl, gm;
boolean success = false;
do {
try {
gl = getGL();
if (gl.isEmpty()) continue;
int i = 0;
do {
gm = getGM(gl);
if (!gm.isEmpty()) success = joinGM(gm);
// if (gm.isEmpty()) continue;
// success = joinGM(gm);
i++;
} while (!success && i < 3);
if (!success) continue;
i = 0;
do {
success = joinFinalize(gm);
i++;
} while (!success && i < 3);
if (!success) continue;
} catch (HostFailureException e) {
throw e;
} catch(Exception e) {
Logger.err("[LC.join] Exception");
e.printStackTrace();
success = false;
}
} while (!success && !stopThisLC());
if (!stopThisLC()) {
joining = false;
Test.noLCJoins++;
// Test.lcsJoined.remove(this.host.getName());
Test.removeJoinedLC(this.host.getName(), gmHostname, "[LC.join]"); // Should be superfluous
// Test.lcsJoined.put(this.host.getName(), this);
Test.putJoinedLC(this.host.getName(), this, gmHostname, "[LC.join]");
Logger.imp("[LC.join] Finished, GM: " + gmHostname + ", TS: " + gmTimestamp);
}
}
    /**
     * Join the GL multicast group and wait for a GL heartbeat to learn the current group leader
     */
String getGL() throws HostFailureException {
try {
boolean success = false;
SnoozeMsg m = null;
String gl = "";
// Join GL multicast group
m = new NewLCMsg(null, AUX.multicast + "-newLC", host.getName(), joinMBox);
m.send();
// Logger.debug("[LC.getGL] 1 Request sent: " + m);
// Wait for GL beat
int i = 0;
do {
m = (SnoozeMsg) Task.receive(inbox, AUX.durationToEnd());
// m = (SnoozeMsg) Task.receive(inbox, AUX.HeartbeatTimeout);
i++;
Logger.info("[LC.getGL] Round " + i + ": " + m);
gl = (String) m.getOrigin();
success = m.getClass().getSimpleName().equals("RBeatGLMsg") && !m.getOrigin().isEmpty();
} while (!success && !stopThisLC());
return gl;
// Logger.info("[LC.getGL] 1 Got GL: " + m);
} catch (TimeoutException e) {
Logger.exc("[LC.getGL] PROBLEM? Exception " + host.getName() + ": " + e.getClass().getName());
e.printStackTrace();
return "";
} catch (HostFailureException e) {
throw e;
} catch (Exception e) {
Logger.exc("[LC.getGL] PROBLEM? Exception " + host.getName() + ": " + e.getClass().getName());
e.printStackTrace();
return "";
}
}
    /**
     * Ask the group leader for a GM assignment and wait for its reply
     */
String getGM(String gl) throws HostFailureException {
try {
// Send GM assignment request
SnoozeMsg m = new LCAssMsg(host.getName(), AUX.glInbox(gl) + "-lcAssign", host.getName(), joinMBox);
m.send();
Logger.info("[LC.getGM] Assignment message sent: " + m);
// Wait for GM assignment
m = (SnoozeMsg) Task.receive(joinMBox, 5*AUX.MessageReceptionTimeout);
if (!m.getClass().getSimpleName().equals("LCAssMsg")) return "";
String gm = (String) m.getMessage();
if (gm.isEmpty()) {
Logger.err("[LC.getGM] Empty GM: " + m);
return "";
}
Logger.imp("[LC.getGM] GM assigned: " + m);
return gm;
} catch (TimeoutException e) {
Logger.exc("[LC.getGM] Exception " + host.getName() + ": " + e.getClass().getName());
e.printStackTrace();
return "";
} catch (HostFailureException e) {
throw e;
} catch (Exception e) {
Logger.exc("[LC.getGM] PROBLEM? Exception " + host.getName() + ": " + e.getClass().getName());
e.printStackTrace();
return "";
}
}
    /**
     * Send the integration request to the assigned GM and wait for its acknowledgement
     */
boolean joinGM(String gm) throws HostFailureException {
try {
// GM integration request
SnoozeMsg m = new NewLCMsg(host.getName(), AUX.gmInbox(gm) + "-newLC", name, joinMBox);
m.send();
Logger.info("[LC.joinGM] Integration message sent: " + m);
m = (SnoozeMsg) Task.receive(joinMBox, 5*AUX.MessageReceptionTimeout);
if (!m.getClass().getSimpleName().equals("NewLCMsg")) {
Logger.err("[LC.joinGM] No NewLC msg.: " + m);
return false;
}
Logger.imp("[LC.joinGM] Integrated by GM: " + m);
return true;
} catch (TimeoutException e) {
Logger.exc("[LC.joinGM] Exception " + host.getName() + ": " + e.getClass().getName());
e.printStackTrace();
return false;
} catch (HostFailureException e) {
throw e;
} catch (Exception e) {
Logger.exc("[LC.joinGM] PROBLEM? Exception " + host.getName() + ": " + e.getClass().getName());
e.printStackTrace();
return false;
}
}
    /**
     * Switch from the GL multicast group to the GM group and record the assigned GM
     */
boolean joinFinalize(String gm) throws HostFailureException {
try {
// Leave GL multicast, join GM multicast group
SnoozeMsg m = new NewLCMsg(gm, AUX.multicast + "-newLC", host.getName(), joinMBox);
m.send();
Logger.info("[LC.joinFinalize] GL->GM multicast: " + m);
m = (SnoozeMsg) Task.receive(joinMBox, 5 * AUX.MessageReceptionTimeout);
if (!m.getClass().getSimpleName().equals("NewLCMsg")) return false;
gm = (String) m.getMessage();
if (gm.isEmpty()) {
Logger.err("[LC.joinFinalize] 4 Empty GM: " + m);
return false;
}
Logger.info("[LC.tryJoin] Ok GL->GM multicast: " + m);
gmHostname = gm;
gmTimestamp = Msg.getClock();
Logger.info("[LC.joinFinalize] Finished, GM: " + gm + ", " + gmTimestamp);
return true;
} catch (HostFailureException e) {
throw e;
} catch (Exception e) {
Logger.exc("[LC.joinFinalize] PROBLEM? Exception " + host.getName() + ": " + e.getClass().getName());
e.printStackTrace();
return false;
}
}
void handleGMBeats(SnoozeMsg m) {
String gm = (String) m.getOrigin();
if (gmHostname.isEmpty()) {
Logger.err("[LC.handleGMBeats] No GM: " + host.getName());
}
if (!gmHostname.equals(gm)) {
Logger.err("[LC.handleGMBeats] Multiple GMs: " + host.getName() + ", " + gmHostname + ", " + gm);
} else {
gmTimestamp = Msg.getClock();
Logger.info("[LC.handleGMBeats] " + host.getName() + ", " + gmHostname + ", TS: " + gmTimestamp);
}
}
/**
* Send LC beats to GM
*/
void procSendLCChargeToHandleDeadGM() {
try {
final XHost h = host;
new Process(host.getSGHost(), host.getSGHost().getName() + "-lcCharge") {
public void main(String[] args) {
int chargeCounter = 0;
while (!stopThisLC()) {
chargeCounter++;
try {
if (chargeCounter%4 == 0) {
LCChargeMsg.LCCharge lc = new LCChargeMsg.LCCharge(h.getCPUDemand(), h.getMemDemand(), Msg.getClock());
LCChargeMsg m = new LCChargeMsg(lc, AUX.gmInbox(gmHostname), h.getName(), null);
m.send();
Logger.info("[LC.procSendLCChargeToGM] Charge sent: " + m);
}
gmDead();
sleep(AUX.HeartbeatInterval*1000/4);
} catch (HostFailureException e) {
Logger.exc("[LC.procSendLCChargeToGM] HostFailureException");
thisLCToBeStopped = true;
break;
} catch (Exception e) { e.printStackTrace(); }
}
}
}.start();
} catch (Exception e) { e.printStackTrace(); }
}
void totalHostCapacity() {
HostCapacity hc = new HostCapacity(host.getCPUCapacity(), host.getMemSize());
}
void startVM() {
}
void shutdownVM() {
}
void migrateVM() {
}
}
<file_sep>/src/main/java/scheduling/hierarchical/snooze/msg/RBeatGMMsg.java
package scheduling.hierarchical.snooze.msg;
import scheduling.hierarchical.snooze.GroupManager;
/**
* Created by sudholt on 14/07/2014.
*/
public class RBeatGMMsg extends SnoozeMsg {
    /**
     * GM heartbeat message; carries the sending GroupManager as its payload
     * @param gm       GroupManager sending the beat
     * @param sendBox  Target mbox
     * @param origin   GM host
     * @param replyBox null
     */
public RBeatGMMsg(GroupManager gm, String sendBox, String origin, String replyBox) {
super(gm, sendBox, origin, replyBox);
}
}
<file_sep>/visu/template/energy_script.jinja2
#!/usr/bin/Rscript
{% set colors = ["#E04836", "#5696BC", "#F39D41", "#8D5924", "#2F5168"] %}
{% set algos_size = algos|count %}
{% set tuple_size = x_axis|count %}
{% set group_by_nodes_count = group_by_nodes|count %}
pdf(width=10, height=7)
# Import the date
{% set i = 0 %}
names <- c({% for name in names %}{% if i > 0 %}, {% endif %}"{{ name }}"{% set i = i+1 %}{% endfor %})
data <- read.table("{{source}}", header=T,sep=",")
attach(data)
# Compute the range of the series
g_range <- range(0{% for algo in algos %},{{ algo }} {% endfor %})
{% set pch_start = 20 %}
{% set lty_start = 3 %}
{% set cex = 1.5 %}
{% set lwd = 1 %}
{% set legend_cex = 1 %}
{% set legend_lwd = 1.5 %}
{% set title_enabled = False %}
# Plot the first serie
{% set count = 0 %}
plot({{algos[0]}}, type="o", col="{{ colors[0] }}", ylim=g_range, pch={{pch_start+count}}, cex={{cex}}, lwd={{lwd}}, lty={{lty_start+count}},
axes=TRUE, ann=FALSE)
# Generate x-axis
{% set count = 0 %}
#axis(1, at=1:{{tuple_size}}, lab=c({% for server_count,vm_count in x_axis %}{% if count > 0 %},{% endif %}"{{server_count}} nodes\n{{vm_count}} vms"{% set count = count +1 %}{% endfor %}))
print(g_range[2])
# Generate x-axis
axis(2)
# Create box around plot
box()
# Graph other series of data
{% set count = 0 %}
{% for algo in algos %}{% if count > 0 %}lines({{ algo }}, type="o", pch={{pch_start+count}}, cex={{cex}}, lwd={{lwd}}, lty={{lty_start+count}}, col="{{ colors[count] }}"){% endif %}{% set count = count +1 %}
{% endfor %}
{% if title_enabled %}
# Create a title with a red, bold/italic font
title(main="{{title|capitalize}}", font.main=4)
{% endif %}
# Label the x and y axes with dark green text
title(xlab="{{x_label}}", col.lab=rgb(0,0,0))
title(ylab="{{y_label}}", col.lab=rgb(0,0,0))
#abline(h=3600,col="black",lty=2)
# text(1.5, (3600+g_range[2]/40), "3600 s", col = "black")
# Create a legend at (1, g_range[2]) that is slightly smaller
# (cex) and uses the same line colors and points used by
# the actual plots
{% set count = 0 %}
legend(1, g_range[2], names, cex={{legend_cex}}, lwd={{legend_lwd}},
col=c({% for algo in algos %}{% if count > 0 %},{% endif %}"{{ colors[count] }}"{% set count = count +1 %}{% endfor %}), pch={{pch_start}}:{{pch_start + algos|length}}, lty={{lty_start}}:{{lty_start + algos|length}});
<file_sep>/visu/generate_data.py
#!/usr/bin/python
from __future__ import division
from pkg_resources import WorkingSet, DistributionNotFound
import sys
working_set = WorkingSet()
# Printing all installed modules
# print tuple(working_set)
# Detecting if module is installed
dependency_found = True
try:
dep = working_set.require('Jinja2')
except DistributionNotFound:
dependency_found = False
pass
if not dependency_found:
try:
# Installing it (anyone knows a better way?)
from setuptools.command.easy_install import main as install
install(['Jinja2'])
print("run again as normal user to process results")
except DistributionNotFound:
print("run this script as sudo to install a missing template engine")
pass
sys.exit(0)
import csv
import subprocess
import time
import os
import json
import jinja2
################################################################################
# Constant and parameters
################################################################################
# duration = 3600
max_duration = 86400
################################################################################
# Functions of the script
################################################################################
def execute_cmd(args):
print "%s" % args
# return "%s" % args
out, err = subprocess.Popen(args,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()
if not err == "":
print err
return out
def render_template(template_file_path, vars, output_file_path):
templateLoader = jinja2.FileSystemLoader(searchpath=".")
templateEnv = jinja2.Environment(loader=templateLoader)
TEMPLATE_FILE = template_file_path
template = templateEnv.get_template(TEMPLATE_FILE)
templateVars = vars
outputText = template.render(templateVars)
with open(output_file_path, "w") as text_file:
text_file.write(outputText)
################################################################################
# Clean data and scripts folders
################################################################################
execute_cmd(["rm", "-r", "data"])
execute_cmd(["mkdir", "data"])
execute_cmd(["rm", "-r", "scripts"])
execute_cmd(["mkdir", "scripts"])
################################################################################
# Detect algorithms used in experiments
################################################################################
algos = []
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json") and 'true' not in dirname:
with open("%s/%s" % (dirname, filename), 'r') as f:
print("loading %s/%s" % (dirname, filename))
header_line = f.readline()
print header_line
header_data = json.loads(header_line)
data = header_data["data"]
algo = data["algorithm"]
if "hierarchical" in algo:
algo = "%s_%s" % (algo, data["algorithm_details"]["lcsRatio"])
if not algo in algos:
algos += [algo]
print algos
################################################################################
# Detect (server_count, vm_count) combination used in experiments
################################################################################
nodes_tuples = []
vms_tuples = []
nodes_vms_tuples = []
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
compute_node_count = data["server_count"]
service_node_count = data["service_node_count"]
node_count = compute_node_count + service_node_count
if not compute_node_count in nodes_tuples:
nodes_tuples += [compute_node_count]
if not data["vm_count"] in vms_tuples:
vms_tuples += [data["vm_count"]]
# nodes_vms_tuple = "%s-%s" % (data["server_count"], data["vm_count"])
# if not nodes_vms_tuple in nodes_vms_tuples:
# nodes_vms_tuples += [nodes_vms_tuple]
# Order the tuples
nodes_tuples = sorted(nodes_tuples)
vms_tuples = sorted(vms_tuples)
nodes_vms_tuples = [str(tuple2[0]) + "-" + str(tuple2[1]) for tuple2 in zip(nodes_tuples, vms_tuples)]
# nodes_vms_tuples = sorted(nodes_vms_tuples)
print nodes_tuples
print vms_tuples
print nodes_vms_tuples
################################################################################
# Fill data maps with computed metrics
################################################################################
map_simulation_count = {}
map_compute_time = {}
map_compute_time_per_service_node = {}
map_violation_time = {}
map_migration_time = {}
map_total_time = {}
map_reconfigure_failure_count = {}
map_reconfigure_success_count = {}
map_reconfigure_noreconf_count = {}
map_migration_count = {}
map_success_psize = {}
map_avg_psize = {}
map_migration_avg_duration = {}
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
algo = data["algorithm"]
if "hierarchical" in algo:
algo = "%s_%s" % (algo, data["algorithm_details"]["lcsRatio"])
compute_node_count = data["server_count"]
service_node_count = data["service_node_count"]
if "distributed" in algo:
service_node_count = compute_node_count
node_count = compute_node_count + service_node_count
nodes_vms_tuple = "%s-%s" % (algo, compute_node_count)
compute_time = 0
violation_time = 0
migrate_time = 0
reconfigure_time = 0
reconfigure_failure_count = 0
reconfigure_success_count = 0
reconfigure_noreconf_count = 0
migration_count = 0
success_psize = 0
servers_involved = 0
for line in f.readlines():
try:
data = json.loads(line)
if float(data["time"]) > max_duration:
continue
# print(data)
if data["event"] == "trace_event" and data["value"] == "violation":
violation_time += data["duration"]
if data["event"] == "trace_event" and data["value"] == "reconfigure":
reconfigure_time += data["duration"]
if data["event"] == "trace_event" and data["value"] == "compute":
compute_time += data["duration"]
if data["data"]["state"] == "NO_RECONFIGURATION_NEEDED":
reconfigure_noreconf_count += 1
elif data["data"]["state"] == "SUCCESS":
reconfigure_success_count += 1
servers_involved += data["data"]["psize"]
success_psize += data["data"]["psize"]
else:
reconfigure_failure_count += 1
if data["event"] == "trace_event" and data["state_name"] == "SERVICE" and data[
"value"] == "migrate":
migrate_time += data["duration"]
migration_count += 1
except:
pass
try:
avg_psize = servers_involved / reconfigure_success_count
except:
avg_psize = 0
try:
avg_migration_duration = migrate_time / migration_count
except:
avg_migration_duration = 0
if not map_simulation_count.has_key(nodes_vms_tuple):
map_simulation_count[nodes_vms_tuple] = 0
map_simulation_count[nodes_vms_tuple] += 1
map_compute_time[nodes_vms_tuple] = compute_time
map_compute_time_per_service_node[
nodes_vms_tuple] = compute_time / service_node_count if service_node_count > 0 else -1
map_violation_time[nodes_vms_tuple] = violation_time
map_migration_time[nodes_vms_tuple] = migrate_time
map_total_time[nodes_vms_tuple] = reconfigure_time
map_reconfigure_noreconf_count[nodes_vms_tuple] = reconfigure_noreconf_count
map_reconfigure_failure_count[nodes_vms_tuple] = reconfigure_failure_count
map_reconfigure_success_count[nodes_vms_tuple] = reconfigure_success_count
map_migration_count[nodes_vms_tuple] = migration_count
map_success_psize[nodes_vms_tuple] = (success_psize / reconfigure_success_count
if reconfigure_success_count > 0 else -1
)
map_avg_psize[nodes_vms_tuple] = avg_psize
map_migration_avg_duration[nodes_vms_tuple] = avg_migration_duration
################################################################################
# Group statistics by simulation kind
################################################################################
for key in map_simulation_count:
simulation_count = map_simulation_count[key]
map_compute_time[key] /= simulation_count
map_violation_time[key] /= simulation_count
map_migration_time[key] /= simulation_count
map_total_time[key] /= simulation_count
map_reconfigure_failure_count[key] /= simulation_count
map_reconfigure_success_count[key] /= simulation_count
map_migration_count[key] /= simulation_count
map_avg_psize[key] /= simulation_count
map_migration_avg_duration[key] /= simulation_count
################################################################################
# Generate CSV files from data maps
################################################################################
print map_compute_time
print map_violation_time
print map_migration_time
print map_total_time
print map_reconfigure_failure_count
print map_reconfigure_success_count
print map_migration_count
print map_avg_psize
print map_migration_avg_duration
render_template("template/matrix_data.jinja2",
{"algos": algos, "server_counts": nodes_tuples, "data": map_compute_time}, "data/compute_time.csv")
render_template("template/matrix_data.jinja2",
{"algos": algos, "server_counts": nodes_tuples, "data": map_compute_time_per_service_node},
"data/compute_time_per_service_node.csv")
render_template("template/matrix_data.jinja2",
{"algos": algos, "server_counts": nodes_tuples, "data": map_violation_time}, "data/violation_time.csv")
render_template("template/matrix_data.jinja2",
{"algos": algos, "server_counts": nodes_tuples, "data": map_migration_time}, "data/migration_time.csv")
render_template("template/matrix_data.jinja2", {"algos": algos, "server_counts": nodes_tuples, "data": map_total_time},
"data/total_time.csv")
render_template("template/matrix_data.jinja2",
{"algos": algos, "server_counts": nodes_tuples, "data": map_reconfigure_failure_count},
"data/reconfigure_failure_count.csv")
render_template("template/matrix_data.jinja2",
{"algos": algos, "server_counts": nodes_tuples, "data": map_reconfigure_success_count},
"data/reconfigure_success_count.csv")
render_template("template/matrix_data.jinja2",
{"algos": algos, "server_counts": nodes_tuples, "data": map_reconfigure_noreconf_count},
"data/reconfigure_noreconf_count.csv")
render_template("template/matrix_data.jinja2",
{"algos": algos, "server_counts": nodes_tuples, "data": map_migration_count},
"data/migration_count.csv")
render_template("template/matrix_data.jinja2",
{"algos": algos, "server_counts": nodes_tuples, "data": map_success_psize}, "data/success_psize.csv")
render_template("template/matrix_data.jinja2",
{"algos": algos, "server_counts": nodes_tuples, "data": map_migration_avg_duration},
"data/migration_avg_duration.csv")
# Add distributed class algorithmes to the group_by_nodes
group_by_nodes = ["distributed"]
# Add hierarchical class algorithmes to the group_by_nodes
for each in algos:
if "hierarchical" in each:
group_by_nodes += [each]
not_group_by_nodes = list(set(algos) - set(group_by_nodes))
print("group_by_nodes -> %s" % (group_by_nodes))
print("not_group_by_nodes -> %s" % (not_group_by_nodes))
render_template("template/matrix_script.jinja2",
{"source": "data/compute_time.csv", "x_label": "Infrastructure sizes", "y_label": "Time (s)",
"algos": algos, "x_axis": zip(nodes_tuples, vms_tuples), "group_by_nodes": group_by_nodes,
"not_group_by_nodes": not_group_by_nodes, "title": "cumulated computation time"},
"scripts/compute_time.r")
render_template("template/matrix_script.jinja2",
{"source": "data/compute_time_per_service_node.csv", "x_label": "Infrastructure sizes",
"y_label": "Time (s)", "algos": algos, "x_axis": zip(nodes_tuples, vms_tuples),
"group_by_nodes": group_by_nodes, "not_group_by_nodes": not_group_by_nodes,
"title": "computation time per service node"}, "scripts/compute_time_per_service_node.r")
render_template("template/matrix_script.jinja2",
{"source": "data/violation_time.csv", "x_label": "Infrastructure sizes", "y_label": "Time (s)",
"algos": algos, "x_axis": zip(nodes_tuples, vms_tuples), "group_by_nodes": [],
"not_group_by_nodes": [], "title": "cumulated violation time"}, "scripts/violation_time.r")
render_template("template/matrix_script.jinja2",
{"source": "data/migration_time.csv", "x_label": "Infrastructure sizes", "y_label": "Time (s)",
"algos": algos, "x_axis": zip(nodes_tuples, vms_tuples), "group_by_nodes": [],
"not_group_by_nodes": [], "title": "cumulated migration time"}, "scripts/migration_time.r")
render_template("template/matrix_script.jinja2",
{"source": "data/total_time.csv", "x_label": "Infrastructure sizes", "y_label": "Time (s)",
"algos": algos, "x_axis": zip(nodes_tuples, vms_tuples), "group_by_nodes": group_by_nodes,
"not_group_by_nodes": not_group_by_nodes, "title": "cumulated reconfiguration time"},
"scripts/total_time.r")
render_template("template/matrix_script.jinja2",
{"source": "data/reconfigure_failure_count.csv", "x_label": "Infrastructure sizes", "y_label": "Count",
"algos": algos, "x_axis": zip(nodes_tuples, vms_tuples), "group_by_nodes": [],
"not_group_by_nodes": [], "title": "failed reconfiguration count"},
"scripts/reconfigure_failure_count.r")
render_template("template/matrix_script.jinja2",
{"source": "data/reconfigure_success_count.csv", "x_label": "Infrastructure sizes", "y_label": "Count",
"algos": algos, "x_axis": zip(nodes_tuples, vms_tuples), "group_by_nodes": [],
"not_group_by_nodes": [], "title": "successful reconfiguration count"},
"scripts/reconfigure_success_count.r")
render_template("template/matrix_script.jinja2",
{"source": "data/reconfigure_noreconf_count.csv", "x_label": "Infrastructure sizes", "y_label": "Count",
"algos": algos, "x_axis": zip(nodes_tuples, vms_tuples), "group_by_nodes": [],
"not_group_by_nodes": [], "title": "no reconfiguration count"}, "scripts/reconfigure_noreconf_count.r")
render_template("template/matrix_script.jinja2",
{"source": "data/migration_count.csv", "x_label": "Infrastructure sizes", "y_label": "Count",
"algos": algos, "x_axis": zip(nodes_tuples, vms_tuples), "group_by_nodes": [],
"not_group_by_nodes": [], "title": "migration count"}, "scripts/migration_count.r")
render_template("template/matrix_script.jinja2",
{"source": "data/success_psize.csv", "x_label": "Infrastructure sizes", "y_label": "Count",
"algos": algos, "x_axis": zip(nodes_tuples, vms_tuples), "group_by_nodes": [],
"not_group_by_nodes": [], "title": "average number of nodes involved in a successul computation"},
"scripts/success_psize.r")
render_template("template/matrix_script.jinja2",
{"source": "data/migration_avg_duration.csv", "x_label": "Infrastructure sizes", "y_label": "Time (s)",
"algos": algos, "x_axis": zip(nodes_tuples, vms_tuples), "group_by_nodes": [],
"not_group_by_nodes": [], "title": "average duration of migration"},
"scripts/migration_avg_duration.r")
<file_sep>/visu/template/repartition_script.jinja2
#!/usr/bin/Rscript
data <- read.table("repartition/data/{{algo}}-{{node_count}}/repartition_{{metric}}.csv", header=T,sep=",")
colnames(data) <- c("type", "value")
attach(data)
dev.new(width=8, height=6)
n <- length(data$value)
plot( 100*(1:n)/n ~ sort(data$value), xlab="{{legend}}", ylab='Cumulated Distribution (percentage)')
lines(100*(1:n)/n ~ sort(data$value))
title(main="{{algo}} {{node_count}} nodes \n(Cumulated Frequency of {{metric}} distribution)")
<file_sep>/src/main/java/scheduling/distributed/dvms2/TimeoutCheckerProcess.java
package scheduling.distributed.dvms2;
import configuration.XHost;
import org.simgrid.msg.HostFailureException;
import org.simgrid.msg.Process;
import simulation.SimulatorManager;
public class TimeoutCheckerProcess extends Process {
public TimeoutCheckerActor timeoutActor;
public TimeoutCheckerProcess(XHost xhost, String name, int port, SGNodeRef ref, DVMSProcess process) {
super(xhost.getSGHost(), String.format("%s-checkout-checker", name, port));
this.timeoutActor = new TimeoutCheckerActor(ref, xhost, process);
}
public class TimeoutCheckerActor extends SGActor {
SGNodeRef ref;
DVMSProcess process;
XHost xhost;
public TimeoutCheckerActor(SGNodeRef ref, XHost xhost, DVMSProcess process) {
super(ref);
this.ref = ref;
this.xhost = xhost;
this.process = process;
}
public void doCheckTimeout() throws HostFailureException {
send(ref, "checkTimeout");
waitFor(1);
}
public void receive(Object message, SGNodeRef sender, SGNodeRef returnCanal) {
}
}
public void main(String args[]) {
try {
while (!SimulatorManager.isEndOfInjection()) {
timeoutActor.doCheckTimeout();
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
<file_sep>/config/snooze.properties
/**
* This file is the main configuration file for the Snooze scheduling management (hierarchical)
*
**/
/**
* Main properties
**/
// The identifier of the algorithm variant
// If empty, the id is calculated from the parameters (host no., service host no., ...)
snooze.alg-variant =
// The periodicity for sending the heartbeat (in seconds)
// Default: 2
snooze.hb-periodicity = 3
// The timeout before considering a daemon dead (GL, GM or LC) (in seconds)
// Default: 5
snooze.hb-timeout = 6
// Periodic scheduling
// Default: true
snooze.scheduling-periodic = false
// The periodicity to invoke the scheduling algorithm (in seconds)
// Default: 30 seconds
snooze.scheduling-periodicity = 120
// The information level : 4:err,exc; 3:imp(ortant), 2:info; 1:debug
// Default: 2
snooze.info-level = 1
// Periodicity of the display of information, statistics etc. (in seconds)
// Default: 5s
// TODO add a property to enable/disable test
snooze.info-periodicity = 1
// If you want to simulate costs of local computations, please set the following property to true
// Default: false
snooze.simulate-localcomputation = false
// If you want to simulate a specific fault injection.
// Default: false
snooze.faultmode = true
snooze.glcrash-period = 900
snooze.gmcrash-period = 300<file_sep>/src/main/java/scheduling/hierarchical/snooze/ThreadPool.java
package scheduling.hierarchical.snooze;
import org.simgrid.msg.Host;
import org.simgrid.msg.HostNotFoundException;
import org.simgrid.msg.MsgException;
import org.simgrid.msg.Process;
import scheduling.hierarchical.snooze.msg.SnoozeMsg;
import simulation.SimulatorManager;
import java.lang.reflect.Constructor;
/**
* Created by sudholt on 31/07/2014.
*/
public class ThreadPool {
private final Process[] workers;
private final int numThreads;
private String runClass;
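    /**
     * Creates numThreads worker processes on the current host. Each worker repeatedly instantiates
     * runClass via reflection (passing the owner to its constructor) and runs the resulting Runnable
     * until the end of the injection.
     */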
ThreadPool(Object owner, String runClass, int numThreads) {
this.numThreads = numThreads;
workers = new Process[numThreads];
int i = 0;
for (Process w: workers) {
i++;
w = new Worker(Host.currentHost(), "PoolProcess-" + i, owner, runClass);
try {
w.start();
Logger.debug("[ThreadPool] Worker created: " + i + ", " + Host.currentHost()
+ ", " + owner.getClass().getSimpleName() + ", " + runClass);
} catch (Exception e) {
Logger.exc("[ThreadPool] HostNoFound");
// e.printStackTrace();
}
}
}
private class Worker extends Process {
Host host;
String name;
SnoozeMsg m;
String mbox;
Object owner;
String runClassName;
Worker(Host host, String name, Object owner, String runClass) {
super(host, name);
this.host = host;
this.name = name;
this.owner = owner;
this.runClassName = runClass;
}
@Override
public void main(String[] strings) throws MsgException {
while (!SimulatorManager.isEndOfInjection()) {
try {
Class runClass = Class.forName(runClassName);
Constructor<?> constructor = runClass.getDeclaredConstructors()[0];
constructor.setAccessible(true);
Runnable r = (Runnable) constructor.newInstance(owner);
Logger.debug("[ThreadPool.Worker.main] : " + r);
r.run();
} catch (Exception e) {
e.printStackTrace();
}
// TODO it would be better to have a GL notification to inform available GMS instead of an active loop
}
}
}
}<file_sep>/visu/generate_energy.py
#!/usr/bin/python
from __future__ import division, print_function
from pkg_resources import WorkingSet , DistributionNotFound
import sys
import re
import pprint
pp = pprint.PrettyPrinter(indent=4).pprint
working_set = WorkingSet()
# Detecting if module is installed
dependency_found = True
try:
dep = working_set.require('Jinja2')
except DistributionNotFound:
dependency_found = False
if not dependency_found:
try:
# Installing it (anyone knows a better way?)
from setuptools.command.easy_install import main as install
install(['Jinja2'])
print("run again as normal user to process results")
except DistributionNotFound:
print("run this script as sudo to install a missing template engine")
sys.exit(0)
import csv
import subprocess
import time
import os
import json
import jinja2
################################################################################
# Constant and parameters
################################################################################
max_duration = 86400
################################################################################
# Functions of the script
################################################################################
def execute_cmd(args):
print("Running '%s'" % " ".join(args))
out, err = subprocess.Popen(args,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()
if not err == "":
print(err)
return out
def render_template(template_file_path, vars, output_file_path):
templateLoader = jinja2.FileSystemLoader( searchpath="." )
templateEnv = jinja2.Environment( loader=templateLoader )
TEMPLATE_FILE = template_file_path
template = templateEnv.get_template( TEMPLATE_FILE )
templateVars = vars
outputText = template.render( templateVars )
with open(output_file_path, "w") as text_file:
text_file.write(outputText)
################################################################################
# Clean data and scripts folders
################################################################################
execute_cmd(["rm", "-rf", "energy"])
execute_cmd(["mkdir", "energy"])
execute_cmd(["rm", "-rf", "energy/scripts"])
execute_cmd(["mkdir", "energy/scripts"])
execute_cmd(["mkdir", "-p", "data"])
################################################################################
# Fill data maps with computed metrics
################################################################################
algos = []
for dir in os.listdir('./events'):
algos.append(dir)
print('Algos: ', end='')
pp(algos)
nodes_tuples = []
vms_tuples = []
map_energy = {}
map_energy_per_second = {}
map_energy_per_service_node = {}
for dirname, dirnames, filenames in os.walk('./events'):
# print(path to all subdirectories first.)
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
print("Reading " + os.path.join(dirname, filename))
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
algo = os.path.basename(dirname)
compute_node_count = data["server_count"]
nodes_vms_tuple = "%s-%s" % (algo, compute_node_count)
if not compute_node_count in nodes_tuples:
nodes_tuples += [compute_node_count]
if not data['vm_count'] in vms_tuples:
vms_tuples += [data['vm_count']]
energy = {}
l = 1
for line in f.readlines():
try:
data = json.loads(line)
if float(data["time"]) > max_duration:
continue
event_time = int(float(data["time"]))
if data["event"] == "trace_event" and data["value"] == "ENERGY":
if not energy.has_key(data["origin"]):
energy[data["origin"]] = 0
energy[data["origin"]] += data["data"]["value"]
if not map_energy_per_second.has_key(event_time):
map_energy_per_second[event_time] = {}
for a in algos:
map_energy_per_second[event_time][a] = 0
map_energy_per_second[event_time][algo] += data["data"]["value"]
l += 1
except Exception as ex:
print(ex)
print(str(l) + ' ' + line)
map_energy[nodes_vms_tuple] = energy
#map_avg_energy_per_service_node[nodes_vms_tuple] = reduce(lambda x, y: x + y, map_energy[nodes_vms_tuple]) / len(map_energy)
################################################################################
# Generate CSV files from data maps
################################################################################
#pp(map_energy_per_second)
# Make the column names comply with R, also make pretty names
regex = re.compile(r'(\w+)\-(\w+)')
r_algos = map(lambda n: regex.sub(r'\1\2', n), algos)
names = map(lambda n: regex.sub(r'\1 (\2)', n), algos)
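# For example (illustrative value): an algo named "hierarchical-4" becomes the R column name
# "hierarchical4" and the legend label "hierarchical (4)".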
render_template("template/energy_data.jinja2", {"algos": r_algos, "data": map_energy_per_second}, "data/energy.csv")
group_by_nodes = []
not_group_by_nodes = []
render_template("template/energy_script.jinja2",
{
"source": "data/energy.csv",
"x_label": "Time",
"y_label": "Joules",
"algos": r_algos,
"names": names,
"x_axis": zip(nodes_tuples, vms_tuples),
"group_by_nodes": group_by_nodes,
"not_group_by_nodes": not_group_by_nodes,
"title": "cumulated computation time"
},
"scripts/energy.r")
<file_sep>/visu/template/cloud_script.jinja2
#!/usr/bin/Rscript
algo1_matrix_data_smp_det_time <- read.table("clouds/data/{{algo1}}-{{node_count}}/violations_smp_det.csv", header=T,sep=",")
colnames(algo1_matrix_data_smp_det_time) <- c("algo1_smp_det_time", "algo1_smp_det_duration")
attach(algo1_matrix_data_smp_det_time)
algo1_matrix_data_smp_hid_time <- read.table("clouds/data/{{algo1}}-{{node_count}}/violations_smp_hid.csv", header=T,sep=",")
colnames(algo1_matrix_data_smp_hid_time) <- c("algo1_smp_hid_time", "algo1_smp_hid_duration")
attach(algo1_matrix_data_smp_hid_time)
algo1_matrix_data_out_det_time <- read.table("clouds/data/{{algo1}}-{{node_count}}/violations_out_det.csv", header=T,sep=",")
colnames(algo1_matrix_data_out_det_time) <- c("algo1_out_det_time", "algo1_out_det_duration")
attach(algo1_matrix_data_out_det_time)
algo1_matrix_data_out_hid_time <- read.table("clouds/data/{{algo1}}-{{node_count}}/violations_out_hid.csv", header=T,sep=",")
colnames(algo1_matrix_data_out_hid_time) <- c("algo1_out_hid_time", "algo1_out_hid_duration")
attach(algo1_matrix_data_out_hid_time)
algo2_matrix_data_smp_det_time <- read.table("clouds/data/{{algo2}}-{{node_count}}/violations_smp_det.csv", header=T,sep=",")
colnames(algo2_matrix_data_smp_det_time) <- c("algo2_smp_det_time", "algo2_smp_det_duration")
attach(algo2_matrix_data_smp_det_time)
algo2_matrix_data_smp_hid_time <- read.table("clouds/data/{{algo2}}-{{node_count}}/violations_smp_hid.csv", header=T,sep=",")
colnames(algo2_matrix_data_smp_hid_time) <- c("algo2_smp_hid_time", "algo2_smp_hid_duration")
attach(algo2_matrix_data_smp_hid_time)
algo2_matrix_data_out_det_time <- read.table("clouds/data/{{algo2}}-{{node_count}}/violations_out_det.csv", header=T,sep=",")
colnames(algo2_matrix_data_out_det_time) <- c("algo2_out_det_time", "algo2_out_det_duration")
attach(algo2_matrix_data_out_det_time)
algo2_matrix_data_out_hid_time <- read.table("clouds/data/{{algo2}}-{{node_count}}/violations_out_hid.csv", header=T,sep=",")
colnames(algo2_matrix_data_out_hid_time) <- c("algo2_out_hid_time", "algo2_out_hid_duration")
attach(algo2_matrix_data_out_hid_time)
dev.new(width=8, height=6)
g_range <- range(algo1_smp_det_duration, algo1_smp_hid_duration, algo1_out_det_duration, algo1_out_hid_duration, algo2_smp_det_duration, algo2_smp_hid_duration, algo2_out_det_duration, algo2_out_hid_duration)
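# Scatter plot of violation durations over time: {{algo1}} points in orange shades, {{algo2}} points in blue shades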
plot( algo1_smp_det_time, algo1_smp_det_duration, pch=7 , col=rgb(255, 153, 51 , 200,maxColorValue=255), ylim=g_range*1.35,
xlim=c(0, {{duration}}), xlab="Time (s)", ylab="Duration of the violation (s)")
#title(main="Duration of each violation")
points( algo1_smp_hid_time, algo1_smp_hid_duration, pch=0 , col=rgb(255, 153, 51 , 200,maxColorValue=255))
points( algo1_out_det_time, algo1_out_det_duration, pch=13, col=rgb(255, 178, 102, 200,maxColorValue=255))
points( algo1_out_hid_time, algo1_out_hid_duration, pch=1 , col=rgb(255, 178, 102, 200,maxColorValue=255))
points( algo2_smp_det_time, algo2_smp_det_duration, pch=7 , col=rgb(0 , 76 , 153, 200,maxColorValue=255))
points( algo2_smp_hid_time, algo2_smp_hid_duration, pch=0 , col=rgb(0 , 76 , 153, 200,maxColorValue=255))
points( algo2_out_det_time, algo2_out_det_duration, pch=13, col=rgb(0 , 128, 255, 200,maxColorValue=255))
points( algo2_out_hid_time, algo2_out_hid_duration, pch=1 , col=rgb(0 , 128, 255, 200,maxColorValue=255))
legend("topleft", c(
"{{algo1 | capitalize}} violation detected",
"{{algo1 | capitalize}} violation hidden",
"{{algo1 | capitalize}} violation out detected",
"{{algo1 | capitalize}} violation out hidden",
"{{algo2 | capitalize}} violation detected",
"{{algo2 | capitalize}} violation hidden",
"{{algo2 | capitalize}} violation out detected",
"{{algo2 | capitalize}} violation out hidden"
), ncol=2,
col=c(
rgb(255, 153, 51 , 255,maxColorValue=255),
rgb(255, 153, 51 , 255,maxColorValue=255),
rgb(255, 178, 102, 255,maxColorValue=255),
rgb(255, 178, 102, 255,maxColorValue=255),
rgb(0 , 76 , 153, 255,maxColorValue=255),
rgb(0 , 76 , 153, 255,maxColorValue=255),
rgb(0 , 128, 255, 255,maxColorValue=255),
rgb(0 , 128, 255, 255,maxColorValue=255)
), pch=c(7, 0, 13, 1, 7, 0, 13, 1))
<file_sep>/src/main/java/test/TestB.java
package test;
import org.simgrid.msg.*;
import org.simgrid.msg.Process;
import java.util.Random;
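/**
 * Simple test process: it repeatedly executes a task of random size on its host,
 * sleeps for 500 simulated seconds, and exits after a final 800-second sleep.
 */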
public class TestB extends Process {
private static final int ITERATIONS = 50;
private Random rand;
public TestB(Host host, String name, String[] args) throws HostNotFoundException {
super(host, name, args);
rand = new Random();
}
@Override
public void main(String[] strings) throws MsgException {
for(int i = 0; i < ITERATIONS; i++) {
Task t = new Task("task-" + i, getHost().getSpeed() * rand.nextInt(500), 0);
System.out.println("A: about to execute");
t.execute();
System.out.println(String.format("B: about to go to sleep (%d)", i));
waitFor(500);
System.out.println("B: woke up");
}
System.out.println("B: last sleep");
waitFor(800);
System.out.println("B: I'm dying...");
}
}
<file_sep>/src/main/java/scheduling/hierarchical/snooze/msg/GLElecStopGMMsg.java
package scheduling.hierarchical.snooze.msg;
/**
* Created by sudholt on 20/07/2014.
*/
public class GLElecStopGMMsg extends SnoozeMsg {
public GLElecStopGMMsg(String name, String sendBox, String origin, String replyBox) {
super(name, sendBox, origin, replyBox);
}
}
<file_sep>/src/main/java/configuration/XVM.java
/**
* Copyright 2012-2013-2014. The SimGrid Team. All rights reserved.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the license (GNU LGPL) which comes with this package.
*
* This class is an extension of the usual VM of the Simgrid MSG abstraction
* Note that the extension is done by aggregation instead of inheritance. This makes it possible to create/destroy the SimGrid VM
* while manipulating the same XVM object at the Java level.
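* For instance, migrate() and start() below delegate to the inner MSG VM while also updating the XVM bookkeeping
* (migration counter, current host, load daemon).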
*
* @author: <EMAIL>
*/
package configuration;
import org.simgrid.msg.HostFailureException;
import org.simgrid.msg.HostNotFoundException;
import org.simgrid.msg.Msg;
import org.simgrid.msg.VM;
public class XVM {
/**
* The MSG VM to extend (extension by aggregation)
*/
private VM vm;
/**
* The dirty page intensity of the VM (currently determined by the class of the VM, see the configureHostsAndVMs method).
* Expressed as a percentage of the netBW (i.e. an integer between 0 and 100)
* @see simulation.SimulatorManager
*/
private int dpIntensity;
/**
* The bandwidth network capability of the VM (expressed as MBytes).
*/
private int netBW;
/**
* The ramsize of the VM
*/
private int ramsize;
/**
* The current load of the VM (i.e. how the CPU is loaded).
* Please note that for the moment, one VM can only have one vcpu.
* Hence, this value represents the current load of the vcpu (i.e. between 0 and 100%)
*/
private double currentLoadDemand;
/**
* The number of times the load has been changed during the simulation.
* This metric is relevant to check whether one particular VM is more affected than the others
*/
private int NbOfLoadChanges;
/**
* The number of times the VM has been migrated during the simulation.
* This metric is relevant to check whether one particular VM is more affected than the others
*/
private int NbOfMigrations;
/**
* The daemon that runs inside the VM in order to simulate the load.
*/
private Daemon daemon;
/**
* the XHost (i.e the physical machine) of the VM.
* Similarly to the VM abstraction, the MSG Host abstraction has been extended in order to save/manipulate
* additional states in an easier way.
*/
private XHost host;
/**
* Temporary fix due to a simgrid issue
* See https://gforge.inria.fr/tracker/index.php?func=detail&aid=17636&group_id=12&atid=165
*/
private boolean isMigrating; //Temporary fix to prevent migrating the same VM twice
private boolean isSuspended;
/**
* Constructor
* @param host the XHost (i.e. the PM where the VM is currently running)
* @param name the name of the vm (as it is listed by virsh list for instance)
* @param nbCores the number of cores of the VM; please note that right now the injector is able to correctly manage only
* one core VM.
* @param ramsize the size of the RAM (rigid parameter, once it has been assigned this value should not evolve).
* @param netBW the bandwidth of the NIC (expressed in MBytes per second, for instance for a 1Gb/s ethernet NIC, you
* should mention 125 MBytes).
* @param diskPath the path to the disk image (not used for the moment)
* @param diskSize the size of the disk image (not used for the moment)
* @param migNetBW the network bandwidth available for performing the migration (i.e. rigid value, this is the
*                 maximum value that the migration can expect). In the first version of KVM, the bandwidth for the
*                 migration was limited to 32MBytes. Although kvm now uses the whole bandwidth that can be offered
*                 by the Host NIC, users can define a dedicated value for one VM by using the virsh migrate_set_speed
*                 command.
* @param dpIntensity the dirty page intensity, i.e. the refresh rate of the memory as described in the cloudcom
*                    2013 paper (Adding a Live Migration Model into SimGrid: One More Step Toward the Simulation of
*                    Infrastructure-as-a-Service Concerns).
*                    The parameter is expressed as a percentage of the network bandwidth.
*
*/
public XVM(XHost host, String name,
int nbCores, int ramsize, int netBW, String diskPath, int diskSize, int migNetBW, int dpIntensity){
// TODO, why should we reduce the migNetBW ? (i.e. interest of multiplying the value by 0.9)
this.vm = new VM (host.getSGHost(), name, ramsize, netBW, dpIntensity);
//this.vm = new VM (host.getSGHost(), name, nbCores, ramsize, netBW, diskPath, diskSize, (int)(migNetBW*0.9), dpIntensity);
this.currentLoadDemand = 0;
this.netBW = netBW ;
this.dpIntensity = dpIntensity;
this.ramsize = ramsize;
this.daemon = new Daemon(this.vm, 100);
this.host = host;
this.NbOfLoadChanges = 0;
this.NbOfMigrations = 0;
this.isMigrating = false;
isSuspended = false;
}
/* Delegation method from MSG VM */
/**
* @return the name of the VM
*/
public String getName() {
return this.vm.getName();
}
/**
* @return the number of core of the VM
*/
public double getCoreNumber() {
return this.vm.getCoreNumber();
}
/**
* Change the load of the VM; please note that the load of the VM is set to 0 when it starts.
* TODO, check whether it makes sense to set the load to a minimal load.
* @param expectedLoad expressed as a percentage (i.e. between 0 and 100)
*/
public void setLoad(double expectedLoad){
if (expectedLoad >0) {
this.vm.setBound(this.vm.getSpeed()*expectedLoad/100);
daemon.resume();
}
else if (NbOfLoadChanges > 0){
daemon.suspend();
}
currentLoadDemand = expectedLoad ;
NbOfLoadChanges++;
}
// TODO this is ugly
public double getLoad(){
return this.currentLoadDemand;
}
/**
* @return the daemon process (i.e MSG Process) in charge of simulating the load of the VM
*/
public Daemon getDaemon(){
return this.daemon;
}
/**
* @return the number of times the load has been changed since the beginning of the simulation
*/
public int getNbOfLoadChanges() {
return NbOfLoadChanges;
}
/**
* @return the number of times the VM has been migrated since the beginning of the simulation
*/
public int getNbOfMigrations() {
return NbOfMigrations;
}
/**
* Overrides the start method in order to also start the daemon that runs inside the VM.
*/
public void start(){
this.vm.start();
try {
daemon.start();
} catch (Exception e) {
e.printStackTrace();
}
this.setLoad(currentLoadDemand);
}
public void shutdown() {
this.vm.shutdown();
}
public boolean isRunning() {
return this.vm.isRunning() == 1;
}
/**
* Migrate a VM from one XHost to another one.
* @param host the host where to migrate the VM
*/
public void migrate(XHost host) throws HostFailureException, DoubleMigrationException {
if (!this.isMigrating) {
this.isMigrating = true;
//Msg.info("Start migration of VM " + this.getName() + " to " + host.getName());
//Msg.info(" currentLoadDemand:" + this.currentLoadDemand + "/ramSize:" + this.ramsize + "/dpIntensity:" + this.dpIntensity + "/remaining:" + this.daemon.getRemaining());
try {
this.vm.migrate(host.getSGHost());
this.NbOfMigrations++;
this.host = host;
this.setLoad(this.currentLoadDemand); //TODO temporary fixed (setBound is not correctly propagated to the new node at the surf level)
//The dummy cpu action is not bounded.
Msg.info("End of migration of VM " + this.getName() + " to node " + host.getName());
} catch (Exception e){
e.printStackTrace();
Msg.info("Something strange occurs during the migration");
Msg.info("TODO Adrien, migrate should return 0 or -1, -2, ... according to whether the migration succeeded or not.");
throw new HostFailureException();
// TODO Adrien, migrate should return 0 or -1, -2, ... according to whether the migration succeeded or not.
// This value can then be used at a higher level to check whether the reconfiguration plan has been aborted or not.
}
} else {
Msg.info("You are trying to migrate " + vm.getName() + " twice... it is impossible ! Byebye");
//throw new DoubleMigrationException();
System.exit(12);
}
this.isMigrating = false;
}
/**
* TODO Adrien - Error management & documentation
* @return 0 if success, 1 if should be postponed, -1 if failure, -2 if already suspended
*/
public int suspend() {
// Todo check if 0 means false & if CPU load should be 0 when vm is suspended
if (this.isMigrating()) {
Msg.info("VM " + vm.getName() + " is migrating");
return 1;
}
else {
Msg.info("VM " + vm.getName() + " is not migrating");
if (this.vm.isSuspended() == 0) {
try {
// Msg.info("Start suspension of VM " + this.getName() + " on " + this.host.getName());
//Msg.info(" currentLoadDemand:" + this.currentLoadDemand + "/ramSize:" + this.ramsize + "/dpIntensity:" + this.dpIntensity + "/remaining:" + this.daemon.getRemaining());
this.vm.suspend();
// VM is suspended - we suspend the daemon simulating CPU demand
//Msg.info("End of suspension of VM " + this.getName() + " on " + this.host.getName());
isSuspended = true;
return 0;
} catch (Exception e) {
e.printStackTrace();
Msg.info("An error occurred during the suspension");
// todo throw exc ?
return -1;
}
} else {
Msg.info("You are trying to suspend "+ this.getName() +" that is an already suspended VM.");
System.exit(-1);
return -2;
}
}
}
/**
* TODO Adrien - improve implementation & documentation
* @return 0 if success, -1 if failure, 1 if already running
*/
public int resume() {
if (isSuspended) {
try {
//Msg.info("Start resuming VM " + this.getName() + " on " + this.host.getName());
this.vm.resume();
// VM is resumed - we resume the daemon simulating CPU demand
this.daemon.resume();
//Msg.info(" currentLoadDemand:" + this.currentLoadDemand + "/ramSize:" + this.ramsize + "/dpIntensity:" + this.dpIntensity + "/remaining:" + this.daemon.getRemaining());
//Msg.info("End of resuming of VM " + this.getName() + " on " + this.host.getName());
isSuspended = false;
return 0;
} catch (Exception e) {
e.printStackTrace();
Msg.info("An error occurred during resuming");
// todo throw exc ?
return -1;
}
} else {
Msg.info("You are trying to resume an already running VM " + this.getName());
return 1;
}
}
/**
* @return the size of the RAM in MBytes
*/
public int getMemSize(){
return this.ramsize;
}
/**
* @return true if the VM is currently migrating, false otherwise
*/
public boolean isMigrating(){
return isMigrating;
}
/**
* @return the current load of the VM
*/
public double getCPUDemand() {
return this.currentLoadDemand;
}
/**
* @return the current location of the VM (i.e. its XHost)
*/
public XHost getLocation() {
return this.host;
}
/**
* @return the network bandwidth capability of the VM (in MBytes per second)
*/
public long getNetBW() {
return this.netBW;
}
public String toString() {
return String.format("XVM [name=%s, currentLoad=%.2f, dpIntensity=%d, isMigrating=%b, isRunning=%b]",
getName(),
currentLoadDemand,
dpIntensity,
isMigrating,
isRunning());
}
}
<file_sep>/visu/generate_clouds.py
#!/usr/bin/python
from __future__ import division
from pkg_resources import WorkingSet , DistributionNotFound
working_set = WorkingSet()
import sys
import itertools
# Printing all installed modules
#print tuple(working_set)
# Detecting if module is installed
dependency_found = True
try:
dep = working_set.require('Jinja2')
except DistributionNotFound:
dependency_found = False
pass
if not dependency_found:
try:
# Installing it (anyone knows a better way?)
from setuptools.command.easy_install import main as install
install(['Jinja2'])
print("run again as normal user to process results")
except DistributionNotFound:
print("run this script as sudo to install a missing template engine")
pass
sys.exit(0)
import csv
import subprocess
import time
import os
import json
import jinja2
import traceback
################################################################################
# Constant and parameters
################################################################################
max_duration = 86400
################################################################################
# Functions of the script
################################################################################
def execute_cmd(args):
print "%s" % args
# return "%s" % args
out, err = subprocess.Popen(args,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()
if not err == "":
print err
return out
def render_template(template_file_path, vars, output_file_path):
templateLoader = jinja2.FileSystemLoader( searchpath="." )
templateEnv = jinja2.Environment( loader=templateLoader )
TEMPLATE_FILE = template_file_path
template = templateEnv.get_template( TEMPLATE_FILE )
templateVars = vars
outputText = template.render( templateVars )
with open(output_file_path, "w") as text_file:
text_file.write(outputText)
################################################################################
# Clean data and scripts folders
################################################################################
execute_cmd(["rm", "-r", "clouds"])
execute_cmd(["mkdir", "clouds"])
################################################################################
# Detect algorithms used in experiments
################################################################################
algos = []
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
algo = data["algorithm"]
if not algo in algos:
algos += [algo]
print algos
################################################################################
# Detect (server_count, vm_count) combination used in experiments
################################################################################
nodes_tuples = []
vms_tuples = []
nodes_vms_tuples = []
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
compute_node_count = data["server_count"]
service_node_count = data["service_node_count"]
node_count = compute_node_count + service_node_count
if not compute_node_count in nodes_tuples:
nodes_tuples += [compute_node_count]
if not data["vm_count"] in vms_tuples:
vms_tuples += [data["vm_count"]]
# nodes_vms_tuple = "%s-%s" % (data["server_count"], data["vm_count"])
# if not nodes_vms_tuple in nodes_vms_tuples:
# nodes_vms_tuples += [nodes_vms_tuple]
# Order the tuples
nodes_tuples = sorted(nodes_tuples)
vms_tuples = sorted(vms_tuples)
nodes_vms_tuples = [str(tuple2[0])+"-"+str(tuple2[1]) for tuple2 in zip(nodes_tuples, vms_tuples)]
# nodes_vms_tuples = sorted(nodes_vms_tuples)
print nodes_tuples
print vms_tuples
print nodes_vms_tuples
################################################################################
# Fill data maps with computed metrics
################################################################################
def export_csv_data(algo, node_count, violations_smp_detected, violations_smp_hidden, violations_out_detected, violations_out_hidden):
folder_name = "clouds/data/%s-%d" % (algo, node_count)
execute_cmd(["mkdir", "-p", folder_name])
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": violations_smp_detected, "labels": ["smp_det_time", "smp_det_duration", "node", "type"]}, "%s/violations_smp_det.csv" % (folder_name))
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": violations_smp_hidden, "labels": ["smp_hid_time", "smp_hid_duration", "node", "type"]}, "%s/violations_smp_hid.csv" % (folder_name))
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": violations_out_detected, "labels": ["out_det_time", "out_det_duration", "node", "type"]}, "%s/violations_out_det.csv" % (folder_name))
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": violations_out_hidden, "labels": ["out_hid_time", "out_hid_duration", "node", "type"]}, "%s/violations_out_hid.csv" % (folder_name))
map_algos_size = {}
# variable that is used to detect "violation-out", "violation-normal" and "violation-sched":
# it will store the last line about "violations-out" or "violation-det", to detect if the next
# "violation" has been already processed!
last_line = None
max_plot_time = 0
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
algo = data["algorithm"]
compute_node_count = data["server_count"]
service_node_count = data["service_node_count"]
node_count = compute_node_count + service_node_count
nodes_vms_tuple = "%s-%s" % (data["algorithm"], compute_node_count)
if not map_algos_size.has_key(compute_node_count):
map_algos_size[compute_node_count] = []
map_algos_size[compute_node_count] += [algo]
_violations_det_per_node = {}
_violations_out_per_node = {}
_violations_smp_per_node = {}
for line in f.readlines():
try:
data = json.loads(line)
if float(data["time"]) > max_duration - 50:
continue
if float(data["time"]) > max_plot_time:
max_plot_time = float(data["time"])
if data["event"] == "trace_event" and data["value"] == "violation-det":
current_violation_det = (float(data["time"]), float(data["duration"]), data["origin"], "det")
if not _violations_det_per_node.has_key(data["origin"]):
_violations_det_per_node[data["origin"]] = []
_violations_det_per_node[data["origin"]] += [current_violation_det]
if data["event"] == "trace_event" and data["value"] == "violation-out":
current_violation_out = (float(data["time"]), float(data["duration"]), data["origin"], "out")
if not _violations_out_per_node.has_key(data["origin"]):
_violations_out_per_node[data["origin"]] = []
_violations_out_per_node[data["origin"]] += [current_violation_out]
if data["event"] == "trace_event" and data["value"] == "violation":
current_violation_smp = (float(data["time"]), float(data["duration"]), data["origin"], "smp")
if not _violations_smp_per_node.has_key(data["origin"]):
_violations_smp_per_node[data["origin"]] = []
_violations_smp_per_node[data["origin"]] += [current_violation_smp]
except Exception as e:
# print traceback.format_exc()
pass
f.seek(0)
nodes = set(_violations_smp_per_node.keys() + _violations_out_per_node.keys())
violations_smp_detected = []
violations_smp_hidden = []
violations_out_detected = []
violations_out_hidden = []
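# A "smp"/"out" violation is counted as detected when a "violation-det" event on the same node
# ends at (almost) the same time (end-time difference below 0.01 s); otherwise it is counted as hidden.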
for node in nodes:
try:
current_violation_det = _violations_det_per_node[node] if _violations_det_per_node.has_key(node) else []
current_violation_out = _violations_out_per_node[node] if _violations_out_per_node.has_key(node) else []
current_violation_smp = _violations_smp_per_node[node] if _violations_smp_per_node.has_key(node) else []
product = itertools.product(current_violation_smp, current_violation_det)
product_filtered = [element for element in product if abs(element[0][0] + element[0][1] - element[1][0] - element[1][1]) < 0.01]
violations_smp_per_node_detected = set([element[0] for element in product_filtered])
violations_smp_per_node_hidden = set([element for element in current_violation_smp if element not in violations_smp_per_node_detected])
if len(violations_smp_per_node_detected) + len(violations_smp_per_node_hidden) != len(current_violation_smp):
print("%s + %s = %s" % (violations_smp_per_node_detected, violations_smp_per_node_hidden, current_violation_smp))
product = itertools.product(current_violation_out, current_violation_det)
product_filtered = [element for element in product if abs(element[0][0] + element[0][1] - element[1][0] - element[1][1]) < 0.01]
violations_out_per_node_detected = set([element[0] for element in product_filtered])
violations_out_per_node_hidden = set([element for element in current_violation_out if element not in violations_out_per_node_detected])
if len(violations_out_per_node_detected) + len(violations_out_per_node_hidden) != len(current_violation_out):
print("%s + %s = %s" % (violations_out_per_node_detected, violations_out_per_node_hidden, current_violation_out))
violations_smp_detected += violations_smp_per_node_detected
violations_smp_hidden += violations_smp_per_node_hidden
violations_out_detected += violations_out_per_node_detected
violations_out_hidden += violations_out_per_node_hidden
except:
pass
violation_total_time = 0
for violation in violations_smp_detected:
violation_total_time += violation[1]
for violation in violations_smp_hidden:
violation_total_time += violation[1]
for violation in violations_out_detected:
violation_total_time += violation[1]
for violation in violations_out_hidden:
violation_total_time += violation[1]
print("%s@%d => %d" % (algo, compute_node_count, violation_total_time))
export_csv_data(algo, compute_node_count, violations_smp_detected, violations_smp_hidden, violations_out_detected, violations_out_hidden)
################################################################################
# Find simulation matching and prepare R scripts
################################################################################
def export_clouds_data(algo1, algo2, node_count):
print("%s and %s with %s" % (algo1, algo2, node_count))
folder_name = "clouds/scripts/%d-%s-%s" % (node_count, algo1, algo2)
execute_cmd(["mkdir", "-p", folder_name])
render_template("template/cloud_script.jinja2", {"algo1": algo1, "algo2": algo2, "node_count": node_count, "duration": max_plot_time}, "%s/compare.r" % (folder_name))
pass
for key in map_algos_size:
algos = map_algos_size[key]
node_count = key
if len(algos) < 1:
continue
elif len(algos) == 1:
algos += [algos[0]]
for element in itertools.combinations(algos, min(2, len(algos))):
export_clouds_data(element[0], element[1], node_count)
################################################################################
# Clean results folder
################################################################################
execute_cmd(["rm", "-r", "clouds/results"])
execute_cmd(["mkdir", "-p", "clouds/results"])
################################################################################
# Generate clouds figures
################################################################################
for key in map_algos_size:
algos = map_algos_size[key]
node_count = key
for element in itertools.combinations(algos, min(2, len(algos))):
script_folder_name = "clouds/scripts/%d-%s-%s" % (node_count, element[0], element[1])
out_file_path = "clouds/results/%d-%s-%s.pdf" % (node_count, element[0], element[1])
execute_cmd(["/usr/bin/env", "Rscript", "%s/compare.r" % (script_folder_name)])
execute_cmd(["mv", "Rplots.pdf", out_file_path])
################################################################################
# Prepare R scripts for one simulation
################################################################################
def export_clouds_single_data(algo, node_count):
print("%s with %s" % (algo, node_count))
folder_name = "clouds/scripts/%d-%s" % (node_count, algo)
execute_cmd(["mkdir", "-p", folder_name])
render_template("template/cloud_single_script.jinja2", {"algo": algo, "node_count": node_count, "duration": max_plot_time}, "%s/compare.r" % (folder_name))
pass
for key in map_algos_size:
algos = map_algos_size[key]
node_count = key
for algo in algos:
export_clouds_single_data(algo, node_count)
################################################################################
# Generate clouds figures
################################################################################
for key in map_algos_size:
algos = map_algos_size[key]
node_count = key
for algo in algos:
script_folder_name = "clouds/scripts/%d-%s" % (node_count, algo)
out_file_path = "clouds/results/%d-%s.pdf" % (node_count, algo)
execute_cmd(["/usr/bin/env", "Rscript", "%s/compare.r" % (script_folder_name)])
execute_cmd(["mv", "Rplots.pdf", out_file_path])
<file_sep>/src/main/java/migration/Migrator.java
package migration;
import configuration.XHost;
import org.simgrid.msg.*;
import org.simgrid.msg.Process;
import configuration.XVM;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Random;
public class Migrator extends Process {
public static boolean isEnd = false;
public Migrator(Host host, String name, String[] args) {
super(host, name, args);
}
@Override
public void main(String[] args) throws MsgException {
int load = 90;
int dpIntensity = 90;
// Prepare the hosts
Host host1 = null;
Host host2 = null;
Host host3 = null;
Host host4 = null;
Host host5 = null;
Host host6 = null;
Host host7 = null;
Host host8 = null;
try {
host1 = Host.getByName("node0");
host2 = Host.getByName("node1");
host3 = Host.getByName("node2");
host4 = Host.getByName("node3");
host5 = Host.getByName("node4");
host6 = Host.getByName("node5");
host7 = Host.getByName("node6");
host8 = Host.getByName("node7");
} catch (HostNotFoundException e) {
Msg.critical(e.getMessage());
e.printStackTrace();
System.exit(1);
}
// Start a VM
XVM[] xvms = new XVM[78];
for(int i = 0 ; i < xvms.length; i++) {
Host host = Host.getByName("node" + (i%4));
XHost xhost = new XHost(host, 32*1024, 8, 800, 1250, null);
xvms[i] = new XVM(
xhost, // destination
"vm-" + i, // name
1, // # of VCPU
1 * 1024, // RAM
125, // BW
null, // disk path
-1, // disk size
125, // migration BW
dpIntensity); // dirty page rate
xvms[i].start();
xvms[i].setLoad(load);
Msg.info(xvms[i].getName() + " started on host " + host.getName());
}
// Migrate the VM
double host1Before = host1.getConsumedEnergy();
double host2Before = host2.getConsumedEnergy();
double start = Msg.getClock();
int i = 0;
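// Spread the VMs over the four destination hosts (host5..host8), roughly one quarter each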
for(XVM vm: xvms) {
if(i < xvms.length / 4)
asyncMigrate(vm, host5);
else if (i < xvms.length * 2 / 4)
asyncMigrate(vm, host6);
else if (i < xvms.length * 3/ 4)
asyncMigrate(vm, host7);
else
asyncMigrate(vm, host8);
i++;
}
double duration = Msg.getClock() - start;
double host1After = host1.getConsumedEnergy();
double host2After = host2.getConsumedEnergy();
waitFor(10);
isEnd = true;
host1.off();
host2.off();
host5.off();
waitFor(1000);
double watt1 = (host1After - host1Before) / duration;
double watt2 = (host2After - host2Before) / duration;
Msg.info(String.format("End of migration\nConsumed energy:\nHost 1: %.2f\nHost 2: %.2f", watt1, watt2));
try {
File out = new File("migration_energy.dat");
boolean empty = !out.exists();
FileWriter writer = new FileWriter(out, true);
if(empty)
writer.write("# Load\tdpIntensity\ttime\thost 1\thost 2\n");
Msg.info(String.format("%f %f %f", duration, host1Before, host1After));
writer.write(String.format("%d\t%d\t\t%.2f\t%.2f\t%.2f\n", load, dpIntensity, duration, watt1, watt2));
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
exit();
}
public static boolean isEnd() {
return isEnd;
}
int nbThreads = 0;
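/**
 * Launches the migration of the given VM in a dedicated SimGrid process so that several migrations can run concurrently.
 */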
private void asyncMigrate(XVM vm, Host destHost) {
try {
String[] args = new String[2];
args[0] = vm.getName();
args[1] = destHost.getName();
Random rand = new Random();
Msg.info("Nbre Thread Inc: "+ (++nbThreads));
new Process(Host.currentHost(),"Migrate-"+rand.nextDouble(),args) {
public void main(String[] args) throws HostFailureException {
Host destHost = null;
VM vm = null;
try {
vm = VM.getVMByName(args[0]);
destHost = Host.getByName(args[1]);
} catch (Exception e) {
e.printStackTrace();
System.err.println("You are trying to migrate from/to a non existing node");
}
if(destHost != null){
vm.migrate(destHost);
//waitFor(10.0);
}
Msg.info("End of migration of VM " + args[0] + " to " + args[1]);
Msg.info("Nbre Thread Dec: "+ (--nbThreads));
}
}.start();
} catch (Exception e) {
e.printStackTrace();
}
}
}
<file_sep>/src/main/java/scheduling/hierarchical/snooze/AUX.java
package scheduling.hierarchical.snooze;
import configuration.SimulatorProperties;
import org.simgrid.msg.Host;
import org.simgrid.msg.Msg;
/**
* Created by sudholt on 06/07/2014.
*/
public class AUX {
static final String epInbox = "epInbox"; // EP mbox
static final String multicast = "multicast"; // GL/GM multicast mbox
static Host multicastHost = null;
static final String glElection = "glElection"; // HeartbeatGroup mbox
static final long DefaultComputeInterval = 1;
static final long HeartbeatInterval = SnoozeProperties.getHeartBeatPeriodicity();
static final long HeartbeatTimeout = SnoozeProperties.getHeartBeatTimeout();
static final double DeadTimeout = 600;
// static final long DeadTimeout = SnoozeProperties.getHeartBeatPeriodicity()/3;
static final double MessageReceptionTimeout = 0.2;
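// Worker pool sizes: roughly one worker per 10 managed nodes, with a minimum of 1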
static final int glLCPoolSize = Math.max(SimulatorProperties.getNbOfHostingNodes()/10, 1);
static final int glGMPoolSize = Math.max((SimulatorProperties.getNbOfServiceNodes()-1)/10, 1);
static final int gmLCPoolSize =
Math.max(SimulatorProperties.getNbOfHostingNodes()/(SimulatorProperties.getNbOfServiceNodes()-1)/10, 1);
// static final long PoolingTimeout = SimulatorProperties.getDuration(); // Timeout for worker tasks
// constants for variants of Snooze alg.
static final boolean GLElectionForEachNewGM = false;
static final boolean GLElectionStopGM = true;
public static final GroupLeader.AssignmentAlg assignmentAlg = GroupLeader.AssignmentAlg.BESTFIT;
// public static final GroupLeader.AssignmentAlg assignmentAlg = GroupLeader.AssignmentAlg.ROUNDROBIN;
static String glInbox(String glHost) { return glHost + "-glInbox"; }
static String gmInbox(String gmHost) { return gmHost + "-gmInbox"; }
static String lcInbox(String lcHost) { return lcHost + "-lcInbox"; }
static double timeDiff(double oldTime) {
return Msg.getClock()-oldTime;
}
static double durationToEnd() { return SimulatorProperties.getDuration() - Msg.getClock() + 0.01; }
}
<file_sep>/src/main/java/scheduling/distributed/dvms2/DVMSProcess.java
package scheduling.distributed.dvms2;
import org.simgrid.msg.*;
import org.simgrid.msg.Process;
import scheduling.distributed.dvms2.dvms.dvms2.DvmsActor;
import scheduling.distributed.dvms2.dvms.dvms3.LocalityBasedScheduler;
import simulation.SimulatorManager;
import java.net.UnknownHostException;
//Represents a server running on a worker node
//Currently, this server can only process one request at a time -> less concurrent access to the node object
public class DVMSProcess extends Process {
private SGActor dvms;
Long id;
String name;
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//Constructor
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
public DVMSProcess(Host host, String name, String hostname, int port, SGNodeRef entropyActorRef, SGNodeRef snoozerActorRef) throws UnknownHostException {
super(host, String.format("%s", hostname, port));
this.name = String.format("%s", hostname, port);
this.id = nameToId(hostname);
if(DvmsProperties.isLocalityBasedScheduler()) {
this.dvms = new LocalityBasedScheduler(new SGNodeRef(String.format("%s", hostname, port), id), this, entropyActorRef, snoozerActorRef);
} else {
this.dvms = new DvmsActor(new SGNodeRef(String.format("%s", hostname, port), id), this, entropyActorRef, snoozerActorRef);
}
}
public SGNodeRef self() {
return this.dvms.self();
}
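/**
 * Extracts the numeric id from a host name of the form "nodeNN"; returns -1 if parsing fails.
 */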
public static Long nameToId(String name) {
Long result = -1L;
try {
result = Long.parseLong(name.substring(4, name.length()));
} catch(Exception e) {
e.printStackTrace();
}
return result;
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//Other methods
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
String mBox = "";
@Override
public void main(String[] args) throws MsgException {
mBox = this.name;
while(!SimulatorManager.isEndOfInjection()){
try{
MsgForSG req=(MsgForSG) Task.receive(mBox);
Long reqId = nameToId(req.getSender().getHost().getName());
dvms.receive(req.getMessage(), new SGNodeRef(req.getOrigin(), reqId), new SGNodeRef(req.getReplyBox(), -1L));
} catch (Exception e) {
Msg.info(String.format("Failure on %s", mBox));
e.printStackTrace();
}
}
Msg.info("End of server");
}
}
<file_sep>/src/main/java/scheduling/hierarchical/snooze/msg/GMElecMsg.java
package scheduling.hierarchical.snooze.msg;
/**
* Created by sudholt on 09/07/2014.
*/
public class GMElecMsg extends SnoozeMsg {
public GMElecMsg(String name, String sendBox, String origin, String replyBox) {
super(name, sendBox, origin, replyBox);
}
}
<file_sep>/run_example.sh
#! /bin/bash
PID_FILE=/tmp/vmplaces.pid
source xprc
function do_abort() {
echo Killing java process with PID `cat $PID_FILE`
kill -9 `cat $PID_FILE`
rm -f $PID_FILE
exit 2
}
trap do_abort SIGINT
error=0
function run() {
# Positional arguments (inferred from the call in the Main section below)
n_nodes=$1
algo=$2
implem=$3
turn_off=$4
name="$algo-$n_nodes"
echo '----------------------------------------'
echo "Running $algo $implem with $n_nodes compute and $n_service service nodes turning off hosts: $turn_off, load.mean=$mean, load.std=$std"
echo "Command: java $VM_OPTIONS $SIM_ARGS simulation.Main $PROGRAM_ARGUMENTS"
echo "Command: PROGRAM_ARGUMENTS $PROGRAM_ARGUMENTS"
echo '----------------------------------------'
java $VM_OPTIONS simulation.Main $PROGRAM_ARGUMENTS &
pid=$!
echo $pid > $PID_FILE
wait $pid
ret=$?
echo java returned $ret
if [ $ret -ne 0 ] && [ $ret -ne 134 ]
then
error=1
exit $ret
fi
mkdir -p visu/events/$name
cp events.json visu/events/$name/
}
#######################################
# Main #
#######################################
# Number of hosting nodes
nodes='64'
abort=0
rm -rf logs/ffd
{
run $nodes example scheduling.simple.ExampleReconfigurationPlanner false
} 2>&1 | tee run_all.log
if [ $error -eq 0 ]
then
visu/energy_plot.py run_all.log energy.dat
fi
<file_sep>/src/main/java/scheduling/hierarchical/snooze/msg/LCChargeMsg.java
package scheduling.hierarchical.snooze.msg;
/**
* Created by sudholt on 04/07/2014.
*/
public class LCChargeMsg extends SnoozeMsg {
public LCChargeMsg(LCCharge lc, String sendBox, String origin, String replyBox) {
super(lc, sendBox, origin, replyBox);
}
public static class LCCharge {
private double procCharge;
private int memUsed;
private double timestamp;
public LCCharge(double proc, int mem, double ts) {
this.setProcCharge(proc); this.setMemUsed(mem); this.setTimestamp(ts);
}
public double getProcCharge() {
return procCharge;
}
public void setProcCharge(double procCharge) {
this.procCharge = procCharge;
}
public int getMemUsed() {
return memUsed;
}
public void setMemUsed(int memUsed) {
this.memUsed = memUsed;
}
public double getTimestamp() {
return timestamp;
}
public void setTimestamp(double timestamp) {
this.timestamp = timestamp;
}
}
}
<file_sep>/src/main/java/scheduling/SchedulerBuilder.java
package scheduling;
import configuration.SimulatorProperties;
import configuration.XHost;
import org.simgrid.msg.Msg;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Collection;
/**
* @author <NAME>
* Singleton used to build the scheduler given in simulator.properties.
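* Typical usage (sketch): {@code Scheduler scheduler = SchedulerBuilder.getInstance().build(xHosts);}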
*/
public enum SchedulerBuilder {
/**
* Builder instance.
*/
INSTANCE;
/**
* Scheduler class to instantiate.
*/
private Class<?> schedulerClass = null;
/**
* Gets the scheduler class to instantiate later.
*/
SchedulerBuilder() {
try {
schedulerClass = Class.forName(SimulatorProperties.getImplementation());
} catch (ClassNotFoundException e) {
Msg.critical("Scheduler class not found. Check the value simulator.implementation in the simulator properties file.");
System.err.println(e);
System.exit(-1);
}
}
/**
* Gets the builder instance.
* @return instance
*/
public static SchedulerBuilder getInstance() {
return INSTANCE;
}
/**
* Instantiates the scheduler.
* @param xHosts xHosts
* @return instantiated scheduler
*/
public Scheduler build(Collection<XHost> xHosts) {
Constructor<?> schedulerConstructor;
try {
schedulerConstructor = schedulerClass.getConstructor(Collection.class);
return (Scheduler) schedulerConstructor.newInstance(xHosts);
} catch (Exception e) {
handleExceptions(e);
}
return null;
}
/**
* Instantiates the scheduler.
* @param xHosts xHosts
* @param id id
* @return instantiated scheduler
*/
public Scheduler build(Collection<XHost> xHosts, Integer id) {
Constructor<?> schedulerConstructor;
try {
schedulerClass = Class.forName(SimulatorProperties.getImplementation());
schedulerConstructor = schedulerClass.getConstructor(Collection.class, Integer.class);
return (Scheduler) schedulerConstructor.newInstance(xHosts, id);
} catch (Exception e) {
handleExceptions(e);
}
return null;
}
/**
* Handles builder methods exceptions. This will stop the program.
* @param e thrown exception
*/
private void handleExceptions(Exception e) {
System.err.println(e);
if (e instanceof NoSuchMethodException) {
Msg.critical("Scheduler constructor not found. This should never happen!!");
} else if (e instanceof InstantiationException) {
Msg.critical("Scheduler instantiation issue");
} else if (e instanceof IllegalAccessException) {
Msg.critical("Scheduler constructor could not be accessed");
} else if (e instanceof InvocationTargetException) {
Msg.critical("Invocation target exception while instantiating the scheduler");
} else {
Msg.critical("Unhandled exception");
}
System.exit(-1);
}
}
<file_sep>/visu/generate_detailed_graphs.py
#!/usr/bin/python
from __future__ import division
from pkg_resources import WorkingSet , DistributionNotFound
working_set = WorkingSet()
from numpy import array
import sys
import itertools
# Printing all installed modules
#print tuple(working_set)
# Detecting if module is installed
dependency_found = True
try:
dep = working_set.require('Jinja2')
except DistributionNotFound:
dependency_found = False
pass
if not dependency_found:
try:
# Installing it (anyone knows a better way?)
from setuptools.command.easy_install import main as install
install(['Jinja2'])
print("run again as normal user to process results")
except DistributionNotFound:
print("run this script as sudo to install a missing template engine")
pass
sys.exit(0)
import csv
import subprocess
import time
import os
import json
import jinja2
import traceback
################################################################################
# Constant and parameters
################################################################################
duration = 3600
################################################################################
# Functions of the script
################################################################################
def execute_cmd(args):
print "%s" % args
# return "%s" % args
out, err = subprocess.Popen(args,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()
if not err == "":
print err
return out
def render_template(template_file_path, vars, output_file_path):
templateLoader = jinja2.FileSystemLoader( searchpath="." )
templateEnv = jinja2.Environment( loader=templateLoader )
TEMPLATE_FILE = template_file_path
template = templateEnv.get_template( TEMPLATE_FILE )
templateVars = vars
outputText = template.render( templateVars )
with open(output_file_path, "w") as text_file:
text_file.write(outputText)
################################################################################
# Clean data and scripts folders
################################################################################
execute_cmd(["rm", "-r", "detailed"])
execute_cmd(["mkdir", "detailed"])
################################################################################
# Detect algorithms used in experiments
################################################################################
algos = []
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
algo = data["algorithm"]
if not algo in algos:
algos += [algo]
# print algos
################################################################################
# Detect (server_count, vm_count) combination used in experiments
################################################################################
nodes_tuples = []
vms_tuples = []
nodes_vms_tuples = []
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
compute_node_count = data["server_count"]
service_node_count = data["service_node_count"]
node_count = compute_node_count + service_node_count
if not compute_node_count in nodes_tuples:
nodes_tuples += [compute_node_count]
if not data["vm_count"] in vms_tuples:
vms_tuples += [data["vm_count"]]
# nodes_vms_tuple = "%s-%s" % (data["server_count"], data["vm_count"])
# if not nodes_vms_tuple in nodes_vms_tuples:
# nodes_vms_tuples += [nodes_vms_tuple]
# Order the tuples
nodes_tuples = sorted(nodes_tuples)
vms_tuples = sorted(vms_tuples)
nodes_vms_tuples = [str(tuple2[0])+"-"+str(tuple2[1]) for tuple2 in zip(nodes_tuples, vms_tuples)]
# nodes_vms_tuples = sorted(nodes_vms_tuples)
# print nodes_tuples
# print vms_tuples
# print nodes_vms_tuples
################################################################################
# Fill data maps with computed metrics
################################################################################
def export_csv_data(algo, node_count, computes, migrations, migrations_count, violations_count, violations, reconfigurations):
folder_name = "detailed/data/%s-%d" % (algo, node_count)
execute_cmd(["mkdir", "-p", folder_name])
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": computes, "labels": ["type", "value"]}, "%s/detailed_computations.csv" % (folder_name))
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": migrations_count, "labels": ["type", "value"]}, "%s/detailed_migrations_count.csv" % (folder_name))
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": migrations, "labels": ["type", "value"]}, "%s/detailed_migrations.csv" % (folder_name))
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": violations_count, "labels": ["type", "value"]}, "%s/detailed_violations_count.csv" % (folder_name))
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": violations, "labels": ["type", "value"]}, "%s/detailed_violations.csv" % (folder_name))
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": reconfigurations, "labels": ["type", "value"]}, "%s/detailed_reconfigurations.csv" % (folder_name))
map_algos_size = {}
metrics = ["migrations_count", "migrations", "computations", "violations_count", "violations", "reconfigurations"]
# variable that is used to detect "violation-out", "violation-normal" and "violation-sched":
# it will store the last line about "violations-out" or "violation-det", to detect if the next
# "violation" has been already processed!
last_line = None
algos_map = {}
for algo in algos:
algos_map[algo] = {}
metrics_map = {}
for metric in metrics:
metrics_map[metric] = {}
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
algo = data["algorithm"]
compute_node_count = data["server_count"]
service_node_count = data["service_node_count"]
node_count = compute_node_count + service_node_count
nodes_vms_tuple = "%s-%s" % (data["algorithm"], compute_node_count)
if not map_algos_size.has_key(compute_node_count):
map_algos_size[compute_node_count] = []
map_algos_size[compute_node_count] += [algo]
computes = []
migrations_count = 0
migrations = []
violations_count = 0
violations = []
reconfigurations = []
for line in f.readlines():
try:
data = json.loads(line)
if float(data["time"]) > duration:
continue
if data["event"] == "trace_event" and data["value"] == "migrate":
migration_duration = data["duration"]
migrations += [migration_duration]
migrations_count += 1
if data["event"] == "trace_event" and data["value"] == "compute":
compute_duration = data["duration"]
compute_result = data["data"]["state"]
computes += [compute_duration]
if data["event"] == "trace_event" and data["value"] == "violation":
violation_duration = data["duration"]
violations += [violation_duration]
violations_count += 1
if data["event"] == "trace_event" and data["value"] == "reconfigure":
reconfiguration_duration = data["duration"]
reconfigurations += [reconfiguration_duration]
except Exception as e:
# print traceback.format_exc()
pass
algos_map[algo][compute_node_count] = {
"migrations_count": [migrations_count],
"migrations": migrations,
"reconfigurations": reconfigurations,
"computations": computes,
"violations_count": [violations_count],
"violations": violations
}
for metric in metrics:
if not metrics_map[metric].has_key(compute_node_count):
metrics_map[metric][compute_node_count] = {}
metrics_map["migrations_count"][compute_node_count][algo] = [migrations_count]
metrics_map["migrations"][compute_node_count][algo] = migrations
metrics_map["computations"][compute_node_count][algo] = computes
metrics_map["violations_count"][compute_node_count][algo] = [violations_count]
metrics_map["violations"][compute_node_count][algo] = violations
metrics_map["reconfigurations"][compute_node_count][algo] = reconfigurations
f.seek(0)
# export_csv_data(algo, compute_node_count, computes, migrations, violations, reconfigurations)
################################################################################
# Clean results folder
################################################################################
execute_cmd(["rm", "-r", "detailed/results"])
execute_cmd(["mkdir", "-p", "detailed/results"])
################################################################################
# Generate mean and std for each metric and algorithm
################################################################################
node_numbers = nodes_tuples
legends = {
"migrations_count": "migrations count",
"migrations": "migration time (s)",
"computations": "computation time (s)",
"violations_count": "violations count",
"violations": "violation time (s)",
"reconfigurations": "reconfigurations time (s)"
}
metrics_data = {}
for metric in metrics:
metrics_data[metric] = {}
for node_number in node_numbers:
metrics_data[metric][node_number] = {}
for algo in algos:
metrics_data[metric][node_number][algo] = [0.0, 0.0]
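# Compute mean and standard deviation of each metric for every (node count, algorithm) pair, formatted for the templates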
for metric in metrics:
for algo in algos:
for node_number in node_numbers:
nums = array(metrics_map[metric][node_number][algo])
std = nums.std()
mean = nums.mean()
metrics_data[metric][node_number][algo] = ["%6s" % "{0:0.2f}".format(mean), "%6s" % "{0:0.2f}".format(std)]
algos_data = {}
for algo in algos:
algos_data[algo] = {}
for node_number in node_numbers:
algos_data[algo][node_number] = {}
for metric in metrics:
algos_data[algo][node_number][metric] = [0.0, 0.0]
for algo in algos:
for node_number in node_numbers:
for metric in metrics:
nums = array(algos_map[algo][node_number][metric])
std = nums.std()
mean = nums.mean()
algos_data[algo][node_number][metric] = ["%6s" % "{0:0.2f}".format(mean), "%6s" % "{0:0.2f}".format(std)]
# print(metrics_data)
# print(algos_data)
################################################################################
# Prepare R scripts for each simulation
################################################################################
def export_detailed_single_metric(metrics_data, algos, node_numbers, metric, legend):
print("%s with %s" % (metric, node_count))
folder_name = "detailed/latex/%s" % (metric)
execute_cmd(["mkdir", "-p", folder_name])
render_template("template/detailed_metric_script.jinja2", {"algos": algos, "node_numbers": sorted(node_numbers), "x_axis": zip(nodes_tuples, vms_tuples), "data": metrics_data, "metric": metric, "algos": algos, "legend": legend}, "%s/detailed_%s.r" % (folder_name, metric))
def export_detailed_single_algo(algos_data, metrics, node_numbers, algo, legend):
print("%s with %s" % (algo, node_count))
folder_name = "detailed/latex/%s" % (algo)
execute_cmd(["mkdir", "-p", folder_name])
render_template("template/detailed_algo_script.jinja2", {"algo": algo, "node_numbers": sorted(node_numbers), "x_axis": zip(nodes_tuples, vms_tuples), "data": algos_data, "metrics": metrics, "metrics": metrics, "legend": legend}, "%s/detailed_%s.r" % (folder_name, algo))
legend = "TOTO"
for metric in metrics:
export_detailed_single_metric(metrics_data, algos, node_numbers, metric, legend)
for algo in algos:
export_detailed_single_algo(algos_data, metrics, node_numbers, algo, legend)
<file_sep>/src/main/java/scheduling/hierarchical/snooze/msg/TestFailGMMsg.java
package scheduling.hierarchical.snooze.msg;
/**
* Created by sudholt on 20/07/2014.
*/
public class TestFailGMMsg extends SnoozeMsg {
public TestFailGMMsg(String name, String sendBox, String origin, String replyBox) {
super(name, sendBox, origin, replyBox);
}
}
<file_sep>/src/main/java/migration/XVM.java
/* Copyright (c) 2014. The SimGrid Team.
* All rights reserved. */
/* This program is free software; you can redistribute it and/or modify it
* under the terms of the license (GNU LGPL) which comes with this package. */
package migration;
import org.simgrid.msg.Msg;
import org.simgrid.msg.VM;
import org.simgrid.msg.Host;
import org.simgrid.msg.HostNotFoundException;
import org.simgrid.msg.HostFailureException;
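/**
 * Lightweight VM extension used by the migration test: adds a load-generating daemon
 * and keeps track of the current CPU load bound.
 */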
public class XVM extends VM {
private int dpIntensity;
private int netBW;
private int ramsize;
private int currentLoad;
private Daemon daemon;
public XVM(Host host, String name,
int nbCores, int ramsize, int netBW, String diskPath, int diskSize, int migNetBW, int dpIntensity){
super(host, name, ramsize, netBW, dpIntensity);
this.currentLoad = 0;
this.netBW = netBW ;
this. dpIntensity = dpIntensity ;
this.ramsize= ramsize;
this.daemon = new Daemon(this, 100);
}
public void setLoad(int load){
if (load >0) {
this.setBound(this.getSpeed()*load/100);
// this.getDaemon().setLoad(load);
daemon.resume();
} else{
daemon.suspend();
}
currentLoad = load ;
}
public void start(){
super.start();
try {
daemon.start();
} catch (Exception e) {
e.printStackTrace();
}
this.setLoad(0);
}
public Daemon getDaemon(){
return this.daemon;
}
public int getLoad(){
System.out.println("Remaining comp:" + this.daemon.getRemaining());
return this.currentLoad;
}
public void migrate(Host host) throws HostFailureException {
Msg.info("Start migration of VM " + this.getName() + " to " + host.getName());
Msg.info(" currentLoad:" + this.currentLoad + "/ramSize:" + this.ramsize + "/dpIntensity:" + this.dpIntensity
+ "/remaining:" + String.format(java.util.Locale.US, "%.2E",this.daemon.getRemaining()));
try{
super.migrate(host);
} catch (Exception e){
Msg.info("Something wrong during the live migration of VM "+this.getName());
throw new HostFailureException();
}
this.setLoad(this.currentLoad); //Fixed the fact that setBound is not propagated to the new node.
Msg.info("End of migration of VM " + this.getName() + " to node " + host.getName());
}
}
<file_sep>/src/main/java/scheduling/hierarchical/snooze/Multicast.java
package scheduling.hierarchical.snooze;
import org.simgrid.msg.*;
import org.simgrid.msg.Process;
import scheduling.hierarchical.snooze.msg.*;
import simulation.SimulatorManager;
import java.util.ArrayList;
import java.util.concurrent.ConcurrentHashMap;
/**
* Created by sudholt on 13/07/2014.
*/
public class Multicast extends Process {
private String name;
private Host host;
private String inbox;
String glHostname; //@ Make private
private double glTimestamp;
private boolean glDead = false;
private double lastPromotionOrElection;
private boolean electingOrPromoting = false;
private ThreadPool newLCPool;
private ThreadPool newGMPool;
ConcurrentHashMap<String, GMInfo> gmInfo = new ConcurrentHashMap<>(); //@ Make private
ConcurrentHashMap<String, LCInfo> lcInfo = new ConcurrentHashMap<>(); //@ Make private
public Multicast(Host host, String name) {
super(host, name);
this.host = host;
this.name = name;
this.inbox = AUX.multicast;
this.glHostname = "";
AUX.multicastHost = host;
}
@Override
public void main(String[] strings) {
int n = 1;
Test.multicast = this;
newLCPool = new ThreadPool(this, RunNewLC.class.getName(), AUX.glLCPoolSize);
newGMPool = new ThreadPool(this, RunNewGM.class.getName(), AUX.glGMPoolSize);
Logger.debug("[MUL.main] noLCWorkers: " + AUX.glLCPoolSize);
procRelayGLBeats();
procRelayGMBeats();
while (!SimulatorManager.isEndOfInjection()) {
try {
SnoozeMsg m = (SnoozeMsg) Task.receive(inbox, AUX.durationToEnd());
handle(m);
glDead();
gmDead();
if(SnoozeProperties.shouldISleep()) sleep(AUX.DefaultComputeInterval);
} catch (HostFailureException e) {
Logger.exc("[MUL.main] HostFailureException");
} catch (TimeoutException e) {
glDead();
gmDead();
} catch (Exception e) {
String cause = e.getClass().getName();
Logger.err("[MUL.main] PROBLEM? Exception: " + host.getName() + ": " + cause);
e.printStackTrace();
}
}
}
/**
* Receive and relay GL heartbeats
* @param m
*/
public void handle(SnoozeMsg m) {
// Logger.debug("New message :" + m);
String cs = m.getClass().getSimpleName();
switch (cs) {
case "GLElecMsg": handleGLElec(m); break;
// case "NewGMMsg" : handleNewGM(m); break;
case "TermGMMsg": handleTermGM(m); break;
case "SnoozeMsg":
Logger.err("[MUL(SnoozeMsg)] Unknown message" + m);
break;
}
}
void handleGLElec(SnoozeMsg m) {
// Logger.debug("[MUL(GLElecMsg)] " + m);
if (AUX.timeDiff(lastPromotionOrElection) > AUX.HeartbeatTimeout || lastPromotionOrElection == 0
|| AUX.GLElectionForEachNewGM) {
// No recent leaderElection
leaderElection();
} else {
// Leader recently elected
Logger.info("[MUL(GLElecMsg)] GL election on-going or recent: " + m);
}
}
void handleTermGM(SnoozeMsg m) {
ArrayList<String> orphanLCs = new ArrayList<String>();
String gm = (String) m.getMessage();
Logger.debug("[MUL(TermGM)] GM, gmInfo: " + gm + ", " + gmInfo.get(gm));
gmInfo.remove(gm);
// for (String lc: lcInfo.keySet()) {
// if (lcInfo.get(lc).equals(gm)) orphanLCs.add(lc);
// }
}
/**
* GL dead
*/
void glDead() {
if (!glDead) return;
Logger.err("[MUL.glDead] GL dead, trigger leader election: " + glHostname);
leaderElection();
}
/**
* GM dead
*/
void gmDead() {
ArrayList<String> deadGMs = new ArrayList<String>();
ArrayList<String> orphanLCs = new ArrayList<String>();
for (String gm: gmInfo.keySet()) {
GMInfo gi = gmInfo.get(gm);
if (gi == null || AUX.timeDiff(gmInfo.get(gm).timestamp) <= AUX.HeartbeatTimeout
|| gi.joining) {
// Logger.err("[MUL.gmDead] GM: " + gm + " TS: " + gi.timestamp);
continue;
}
deadGMs.add(gm);
// Identify LCs of dead GMs
for (String lc: lcInfo.keySet()) {
if (lcInfo.get(lc).gmHost.equals(gm)) orphanLCs.add(lc);
}
}
// Remove dead GMs and associated LCs
for (String gm: deadGMs) {
Logger.imp("[MUL.gmDead] GM removed: " + gm + ", " + gmInfo.get(gm).timestamp);
gmInfo.remove(gm);
// leaderElection();
}
for (String lc: orphanLCs) {
lcInfo.remove(lc);
Logger.imp("[MUL.gmDead] LC removed: " + lc);
}
}
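/**
 * Ask GM 'gm' to promote itself to Group Leader and wait for its confirmation.
 * On success, notify the previous GL (if any and different) of its termination
 * and record the new leader.
 */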
boolean gmPromotion(String gm) {
SnoozeMsg m;
String elecMBox = AUX.gmInbox(gm) + "-MulticastElection";
boolean success = false;
// Send GL creation request to GM
m = new GMElecMsg(null, AUX.gmInbox(gm), null, elecMBox);
m.send();
// Logger.info("[MUL.leaderElection] GM notified: " + m);
boolean msgReceived = false;
try {
m = (SnoozeMsg) Task.receive(elecMBox, AUX.MessageReceptionTimeout);
// Logger.debug("[MUL.leaderElection] Msg.received for GM: " + gm + ", " + m);
} catch (Exception e) {
e.printStackTrace();
}
success =
m.getClass().getSimpleName().equals("GLElecStopGMMsg") && gm.equals((String) m.getMessage());
// Logger.info("[MUL.leaderElection] GM->GL: " + m);
if (success) {
String newLeader = (String) m.getMessage();
if (!glHostname.isEmpty() && !newLeader.equals(glHostname)) {
m = new TermGLMsg(name, AUX.glInbox(glHostname), host.getName(), null);
Logger.debug("[MUL.leaderElection] GL termination message: " + m);
m.send();
}
glHostname = newLeader;
glTimestamp = Msg.getClock();
glDead = false;
m = new GLElecStopGMMsg(name, AUX.gmInbox(gm), null, null);
m.send();
Logger.imp("[MUL.leaderElection] New leader elected: " + m);
} else Logger.err("[MUL.leaderElection] GM promotion failed: " + gm);
return success;
}
/**
* Election of a new GL: promote a GM if possible, create new GL instance
*/
void leaderElection() {
if (gmInfo.isEmpty()) {
// Ex-nihilo GL creation
GroupLeader gl = new GroupLeader(Host.currentHost(), "groupLeader");
try {
gl.start();
} catch (Exception e) {
e.printStackTrace();
}
Test.gl = gl;
// Deployment on the Multicast node! Where should it be deployed?
glHostname = gl.getHost().getName(); // optimization
glTimestamp = Msg.getClock();
glDead = false;
Logger.err("[MUL.leaderElection] New leader ex-nihilo on: " + glHostname);
} else {
SnoozeMsg m = null;
// Leader election: select GM, send promotion message
ArrayList<String> gms = new ArrayList<String>(gmInfo.keySet());
int i = 0;
boolean success = false;
String oldGL = "";
String gm = "";
do {
Logger.debug("[MUL.leaderElection] Round: " + i);
gm = gms.get(i % gms.size());
success = gmPromotion(gm);
i++;
} while (i<10 && !success);
if (!success) {
Logger.err("MUL(GLElec)] Leader election failed 10 times");
return;
} else lastPromotionOrElection = Msg.getClock();
Logger.imp("[MUL.leaderElection] Finished: " + glHostname + ", " + m);
}
}
/**
* Relay GL beats to EP, GMs and joining LCs
*/
void relayGLBeats(SnoozeMsg m) {
// Get timestamp
String gl = (String) m.getOrigin();
if ((glHostname == "" || glDead) && !gl.isEmpty()) {
glHostname = gl;
glDead = false;
Logger.err("[MUL.relayGLBeats] GL initialized: " + glHostname);
}
if (!glHostname.equals(gl)) {
Logger.err("[MUL.relayGLBeats] Multiple GLs: " + glHostname + ", " + gl);
return;
}
glTimestamp = (double) m.getMessage();
// Relay GL beat to EP, GMs and LCs
int i = 0;
if (!glHostname.isEmpty()) {
new RBeatGLMsg(glTimestamp, AUX.epInbox, glHostname, null).send();
Logger.info("[MUL.relayGLbeat] Beat relayed to: " + AUX.epInbox);
for (String gm : gmInfo.keySet()) {
m = new RBeatGLMsg(glTimestamp, AUX.gmInbox(gm)+"-glBeats", glHostname, null);
// m.send();
try {
GroupManager g = Test.gmsCreated.get(gm);
g.glBeats(m);
} catch (NullPointerException e) {
Logger.exc("[MUL.relayGLBeats] NullPointer, GM: " + gm);
gmInfo.remove(gm);
} catch (Exception e) {
e.printStackTrace();
}
Logger.info("[MUL.relayGLbeats] Beat relayed to GM: " + m);
}
for (String lc : lcInfo.keySet()) {
LCInfo lv = lcInfo.get(lc);
if (lv.joining) {
m = new RBeatGLMsg(glTimestamp, AUX.lcInbox(lv.lcHost), glHostname, null);
m.send();
i++;
// Logger.debug("[MUL.relayGLBeats] To LC: " + m);
}
}
} else Logger.err("[MUL] No GL");
Logger.imp("[MUL.relayGLBeats] GL beat received/relayed: " + glHostname + ", " + glTimestamp
+ ", #GMs: " + gmInfo.size() + ", # join. LCs: " + i);
}
/**
* Relay GM beats to LCs
*/
void relayGMBeats(GroupManager g, double ts) {
String gm = g.host.getName();
RBeatGMMsg m = new RBeatGMMsg(g, AUX.glInbox(glHostname)+"-gmPeriodic", gm, null);
// m.send();
// Test.gl.handleGMInfo(m);
Logger.imp("[MUL.relayGMBeats] " + m);
// Send to LCs
int i = 0;
for (String lc: g.lcInfo.keySet()) {
LCInfo lci = lcInfo.get(lc);
if (lci != null) {
String gmLc = lci.gmHost;
// Logger.debug("[MUL.relayGMBeats] LC: " + lc + ", GM: " + gm);
if (gm.equals(gmLc)) {
GMInfo gmi = gmInfo.get(gm);
m = new RBeatGMMsg(g, AUX.lcInbox(lc) + "-gmBeats", gm, null);
LocalController lco = null;
try {
lco = Test.lcsJoined.get(lc).lco;
lco.handleGMBeats(m);
} catch (NullPointerException e) {
Logger.exc("[MUL.relayGMBeats] NullPointer, LC: " + lc + ", " + m);
lcInfo.remove(lc);
}
i++;
Logger.info("[MUL.relayGMBeats] To LC: " + lc + ", "+ lco + ", " + m);
}
}
}
Logger.imp("[MUL.relayGMBeats] GL beat received/relayed: " + gm + ", " + Msg.getClock()
+ ", #LCs: " + i);
}
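/**
 * Worker of the newGM thread pool: registers a newly announced GM and either
 * relays the current GL beat to it or, if no recent leader is in place,
 * tries to promote it to Group Leader.
 */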
public class RunNewGM implements Runnable {
public RunNewGM() {};
@Override
public void run() {
try {
SnoozeMsg m;
m = (SnoozeMsg) Task.receive(inbox + "-newGM", AUX.durationToEnd());
// m = (SnoozeMsg) Task.receive(inbox + "-newGM", AUX.PoolingTimeout);
Logger.info("[MUL.RunNewGM] " + m);
String gm = ((GroupManager) m.getMessage()).host.getName();
gmInfo.put(gm, new GMInfo(AUX.gmInbox(gm), Msg.getClock(), true));
Logger.imp("[MUL(RunNewGM)] GM added: " + gm + ", " + m + ", " + lastPromotionOrElection);
if (!glHostname.isEmpty() && (lastPromotionOrElection == 0.0
|| AUX.timeDiff(lastPromotionOrElection) <= AUX.HeartbeatTimeout)) {
m = new RBeatGLMsg(glTimestamp, AUX.gmInbox(gm) + "-glBeats", glHostname, null);
m.send();
Logger.imp("[MUL(RunNewGM)] No promotion: " + m);
return;
}
boolean success = false;
if (!electingOrPromoting) {
electingOrPromoting = true;
success = gmPromotion(gm);
}
if (!success) Logger.err("[MUL(RunNewGM)] GM Promotion FAILED: " + gm);
else {
lastPromotionOrElection = Msg.getClock();
Logger.imp("[MUL(RunNewGM)] GM Promotion succeeded: " + gm);
}
} catch (TimeoutException e) {
Logger.exc("[MUL.RunNewGM] PROBLEM? Timeout Exception");
} catch (HostFailureException e) {
Logger.err("[MUL.RunNewGM] HostFailure Exception should never happen!: " + host.getName());
} catch (Exception e) {
Logger.exc("[MUL.RunNewGM] Exception");
e.printStackTrace();
}
}
}
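/**
 * Worker of the newLC thread pool: registers a joining LC. A first message
 * (empty payload) records the LC as joining; a second one, carrying the name
 * of its GM, completes the integration.
 */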
public class RunNewLC implements Runnable {
public RunNewLC() {};
@Override
public void run() {
NewLCMsg m;
try {
m = (NewLCMsg) Task.receive(inbox + "-newLC", AUX.durationToEnd());
// m = (NewLCMsg) Task.receive(inbox + "-newLC", AUX.PoolingTimeout);
Logger.info("[MUL.RunNewLC] " + m);
if (m.getMessage() == null) {
// Add LC
lcInfo.put(m.getOrigin(), new LCInfo(m.getOrigin(), "", Msg.getClock(), true));
Logger.info("[MUL.RunNewLC] LC temp. joined: " + m);
} else {
// End LC join phase
String lc = m.getOrigin();
String gm = (String) m.getMessage();
lcInfo.put(lc, new LCInfo(lc, gm, Msg.getClock(), false));
m = new NewLCMsg(gm, m.getReplyBox(), null, null);
m.send();
Logger.imp("[MUL.RunNewLC] LC integrated: " + m);
}
} catch (TimeoutException e) {
Logger.exc("[MUL.RunNewLC] PROBLEM? Timeout Exception");
} catch (HostFailureException e) {
Logger.err("[MUL.RunNewLC] HostFailure Exception should never happen!: " + host.getName());
} catch (Exception e) {
Logger.exc("[MUL.RunNewLC] Exception");
e.printStackTrace();
}
}
}
/**
* Relays GL beats
*/
void procRelayGLBeats() {
try {
new Process(host, host.getName() + "-relayGLBeats") {
public void main(String[] args) {
while (!SimulatorManager.isEndOfInjection()) {
try {
gmDead();
// Get incoming message from GL
SnoozeMsg m = (SnoozeMsg) Task.receive(inbox + "-relayGLBeats", AUX.durationToEnd());
Logger.info("[MUL.procRelayGLBeats] " + m);
relayGLBeats(m);
if(SnoozeProperties.shouldISleep())
sleep(AUX.DefaultComputeInterval);
} catch (HostFailureException e) {
Logger.err("[MUL.main] HostFailure Exc. should never happen!: " + host.getName());
break;
} catch (Exception e) {
Logger.exc("[MUL.procNewLC] Exception");
e.printStackTrace();
}
}
}
}.start();
} catch (Exception e) { e.printStackTrace(); }
}
/**
* Relays GM beats
*/
void procRelayGMBeats() {
try {
new Process(host, host.getName() + "-relayGMBeats") {
public void main(String[] args) {
while (!SimulatorManager.isEndOfInjection()) {
try {
glDead();
SnoozeMsg m = (SnoozeMsg) Task.receive(inbox + "-relayGMBeats", AUX.durationToEnd());
Logger.info("[MUL.procRelayGMBeats] " + m);
String gm = m.getOrigin();
double ts = (double) Msg.getClock();
if (gmInfo.containsKey(gm)) {
GMInfo gi = gmInfo.get(gm);
gmInfo.put(gm, new GMInfo(gi.replyBox, ts, gi.joining));
}
else Logger.err("[MUL.procRelayGMBeats] Unknown GM: " + m);
relayGMBeats(((GroupManager) m.getMessage()), ts);
if(SnoozeProperties.shouldISleep())
sleep(AUX.DefaultComputeInterval);
} catch (HostFailureException e) {
Logger.err("[MUL.procRelayGMBeats] HostFailure Exc. should never happen!: " + host.getName());
} catch (Exception e) {
Logger.exc("[MUL.procRelayGMBeats] Exception");
e.printStackTrace();
}
}
}
}.start();
} catch (Exception e) { e.printStackTrace(); }
}
class GMInfo {
String replyBox;
double timestamp;
boolean joining;
GMInfo(String rb, double ts, boolean joining) {
this.replyBox = rb; this.timestamp = ts; this.joining = joining;
}
}
class LCInfo {
String lcHost;
String gmHost;
double timestamp;
boolean joining;
LCInfo(String lc, String gm, double ts, boolean joining) {
this.lcHost = lc; this.gmHost = gm; this.timestamp = ts; this.joining = joining;
}
}
}
<file_sep>/src/main/java/scheduling/hierarchical/snooze/msg/NewGMMsg.java
package scheduling.hierarchical.snooze.msg;
import scheduling.hierarchical.snooze.GroupManager;
/**
* Created by sudholt on 29/06/2014.
*/
public class NewGMMsg extends SnoozeMsg {
public NewGMMsg(GroupManager gm, String sendBox, String origin, String replyBox) {
super(gm, sendBox, origin, replyBox);
}
}
<file_sep>/src/main/java/scheduling/distributed/dvms2/TimeoutSnoozerProcess.java
package scheduling.distributed.dvms2;
import org.simgrid.msg.*;
import org.simgrid.msg.Process;
import scheduling.distributed.dvms2.dvms.timeout.TimeoutSnoozerActor;
import simulation.SimulatorManager;
import java.net.UnknownHostException;
/**
* Created by jonathan on 24/11/14.
*/
public class TimeoutSnoozerProcess extends Process {
private SGActor timeoutSnoozerActor;
private String name;
private long id;
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//Constructor
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
public TimeoutSnoozerProcess(Host host, String name, String hostname, int port) throws UnknownHostException {
super(host, String.format("%s-timeoutsnoozer", hostname, port));
this.name = String.format("%s-timeoutsnoozer", hostname, port);
this.id = nameToId(hostname);
this.timeoutSnoozerActor = new TimeoutSnoozerActor(new SGNodeRef(this.name, id), host);
}
public SGNodeRef self() {
return this.timeoutSnoozerActor.self();
}
public static Long nameToId(String name) {
Long result = -1L;
try {
result = Long.parseLong(name.substring(4, name.length()));
} catch(Exception e) {
e.printStackTrace();
}
return result;
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//Other methods
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
String mBox = "";
@Override
public void main(String[] args) throws MsgException {
mBox = this.name;
while(!SimulatorManager.isEndOfInjection()){
try{
MsgForSG req=(MsgForSG) Task.receive(mBox);
Long reqId = nameToId(req.getSender().getHost().getName());
timeoutSnoozerActor.receive(req.getMessage(), new SGNodeRef(req.getOrigin(), reqId), new SGNodeRef(req.getReplyBox(), -1L));
} catch (Exception e) {
e.printStackTrace();
}
}
Msg.info("End of server");
}
}
<file_sep>/Dockerfile
FROM ubuntu:20.04
MAINTAINER Badock
ARG DEBIAN_FRONTEND=noninteractive
# Enable APT over HTTPS
RUN apt update
RUN apt install -y apt-transport-https
# Download dependencies
# RUN echo "deb https://dl.bintray.com/sbt/debian /" | tee -a /etc/apt/sources.list.d/sbt.list
# RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2EE0EA64E40A89B84B2DF73499E82A75642AC823
RUN apt update
RUN apt install -y simgrid wget
# Install openjdk
RUN apt install -y openjdk-8-jdk
# # Install scala
# RUN apt install -y scala
# Install sbt
#RUN curl -L -o sbt.deb http://dl.bintray.com/sbt/debian/sbt-0.13.15.deb
RUN wget http://dl.bintray.com/sbt/debian/sbt-0.13.15.deb --no-check-certificate -O sbt.deb
RUN dpkg -i sbt.deb
RUN apt update
RUN apt install -y sbt
# Clone projects
# VMPlaceS
RUN apt install -y git
RUN git clone -b master https://github.com/BeyondTheClouds/VMPlaceS.git
# Change the working directory
WORKDIR /VMPlaceS
# Download the jar provided by the Simgrid project
RUN wget http://gforge.inria.fr/frs/download.php/file/37149/simgrid-3_17.jar --no-check-certificate -O lib/simgrid.jar
# Compile the project and create a "fatjar"
RUN sbt clean
RUN sbt update
RUN sbt assembly
# Run the example
RUN java -Xmx4G -d64 -cp target/simulation.jar simulation.SimpleMain --algo=example --duration 1800 --nb_hosts=10 --nb_vms=93 --load_mean=60.0 2>&1 | grep "MSG_main finished; Terminating the simulation..."
<file_sep>/ALGORITHMS.md
# How to change the placement algorithm to test
_This feature is available for centralized algorithms only._
Two centralized algorithms are already implemented:
- BtrPlace
- Entropy
You can find them in the `scheduling.centralized` java package.
If you want to test another one, start at step 1. Otherwise, go directly to step 2.
## 1- Implement the algorithm
### 1.1- Create the architecture
Inside the `centralized` package, create a new package named after the algorithm you want to implement.
### 1.2- Write the code
In this new package, create a class named after your algorithm. It must extend `AbstractScheduler`, which implements the `Scheduler` interface; a minimal sketch is shown below.
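The following skeleton only illustrates the two methods required by the `Scheduler` interface. The package and class names (`myalgo`, `MyAlgo`) are placeholders, and it assumes `AbstractScheduler` exposes a no-argument constructor; adapt it to the actual base class.

```java
package scheduling.centralized.myalgo;

import configuration.XHost;
import scheduling.AbstractScheduler;
import scheduling.Scheduler.ComputingResult;
import scheduling.Scheduler.SchedulerResult;

import java.util.Collection;

public class MyAlgo extends AbstractScheduler {

    // Compute a reconfiguration plan for the current view of the infrastructure.
    @Override
    public ComputingResult computeReconfigurationPlan() {
        // Place the VMs on the hosts here, then return SUCCESS (with duration,
        // number of migrations and plan cost) or NO_RECONFIGURATION_NEEDED /
        // PLACEMENT_FAILED / RECONFIGURATION_FAILED.
        return new ComputingResult();
    }

    // Check the given hosts and apply the plan (migrations, powering hosts on/off).
    @Override
    public SchedulerResult checkAndReconfigure(Collection<XHost> hostsToCheck) {
        SchedulerResult result = new SchedulerResult();
        result.state = SchedulerResult.State.NO_RECONFIGURATION_NEEDED;
        result.duration = 0;
        return result;
    }
}
```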
## 2- Choose the algorithm to run
Open the `config/simulator.properties` file and find the `simulator.implementation` property. Set it to the fully qualified name of the class implementing the algorithm you want to test, for example `simulator.implementation = scheduling.centralized.entropy2.Entropy2RP`.<file_sep>/visu/template/load_script.jinja2
#!/usr/bin/Rscript
data <- read.table("loads/data/{{algo}}-{{node_count}}/load.csv", header=T,sep=",")
attach(data)
data2 <- setNames(aggregate(load ~ round(time), data, mean, na.rm=TRUE), c("time", "load"))
detach(data)
attach(data2)
# Second graph
# Calculate violation times for dvms and entropy
g_range <- range(0, 101)
plot(load ~ time, type="o", col="blue", ylim=g_range,
axes=FALSE, ann=FALSE, pch="")
# Make x axis using experiments names
axis(1, c(0, 1000, 2000, 3000, 3600))
axis(2, c(0, 20, 40, 60, 80, 100), c("0%", "20%", "40%", "60%", "80%", "100%"))
# Y axis labels are displayed as percentages (0% to 100%)
#load_node0_50.csv
# Create box around plot
box()
# Graph entropy with red dashed line and square points
abline(h=70,col="black",lty=2)
text(1.1, (70+g_range[2]/40), " 70%", col = "black")
# Create a title with a red, bold/italic font
title(main="Load of the cluster\nConsolidation rate= 10VMs/PM, Average VM load = 70%", col.main="red", font.main=4)
# Label the x and y axes with dark green text
title(xlab="Time (s)", col.lab=rgb(0,0.5,0))
title(ylab="Load (%)", col.lab=rgb(0,0.5,0))
# Create a legend at (1, g_range[2]) that is slightly smaller
# (cex) and uses the same line colors and points used by
# the actual plots
legend(1, g_range[2], c("load"), cex=0.8,
col=c("blue"), pch="", lty=1:2);<file_sep>/visu/generate_hidden_violation.py
#!/usr/bin/python
from __future__ import division
from pkg_resources import WorkingSet, DistributionNotFound
working_set = WorkingSet()
import itertools
import sys
# Printing all installed modules
#print tuple(working_set)
# Detecting if module is installed
dependency_found = True
try:
dep = working_set.require('Jinja2')
except DistributionNotFound:
dependency_found = False
pass
if not dependency_found:
try:
# Installing it (anyone knows a better way?)
from setuptools.command.easy_install import main as install
install(['Jinja2'])
print("run again as normal user to process results")
except DistributionNotFound:
print("run this script as sudo to install a missing template engine")
pass
sys.exit(0)
import csv
import subprocess
import time
import os
import json
import jinja2
import traceback
################################################################################
# Constant and parameters
################################################################################
duration = 3600
################################################################################
# Functions of the script
################################################################################
def execute_cmd(args):
print "%s" % args
# return "%s" % args
out, err = subprocess.Popen(args,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()
if not err == "":
print err
return out
def render_template(template_file_path, vars, output_file_path):
templateLoader = jinja2.FileSystemLoader( searchpath="." )
templateEnv = jinja2.Environment( loader=templateLoader )
TEMPLATE_FILE = template_file_path
template = templateEnv.get_template( TEMPLATE_FILE )
templateVars = vars
outputText = template.render( templateVars )
with open(output_file_path, "w") as text_file:
text_file.write(outputText)
################################################################################
# Clean data and scripts folders
################################################################################
execute_cmd(["rm", "-r", "clouds"])
execute_cmd(["mkdir", "clouds"])
################################################################################
# Detect algorithms used in experiments
################################################################################
algos = []
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
algo = data["algorithm"]
if not algo in algos:
algos += [algo]
print algos
################################################################################
# Detect (server_count, vm_count) combination used in experiments
################################################################################
nodes_tuples = []
vms_tuples = []
nodes_vms_tuples = []
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
compute_node_count = data["server_count"]
service_node_count = data["service_node_count"]
node_count = compute_node_count + service_node_count
if not compute_node_count in nodes_tuples:
nodes_tuples += [compute_node_count]
if not data["vm_count"] in vms_tuples:
vms_tuples += [data["vm_count"]]
# nodes_vms_tuple = "%s-%s" % (data["server_count"], data["vm_count"])
# if not nodes_vms_tuple in nodes_vms_tuples:
# nodes_vms_tuples += [nodes_vms_tuple]
# Order the tuples
nodes_tuples = sorted(nodes_tuples)
vms_tuples = sorted(vms_tuples)
nodes_vms_tuples = [str(tuple2[0])+"-"+str(tuple2[1]) for tuple2 in zip(nodes_tuples, vms_tuples)]
# nodes_vms_tuples = sorted(nodes_vms_tuples)
print nodes_tuples
print vms_tuples
print nodes_vms_tuples
################################################################################
# Fill data maps with computed metrics
################################################################################
def export_csv_data(algo, node_count, violations_smp_detected, violations_smp_hidden, violations_out_detected, violations_out_hidden):
folder_name = "clouds/data/%s-%d" % (algo, node_count)
execute_cmd(["mkdir", "-p", folder_name])
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": violations_smp_detected, "labels": ["smp_det_time", "smp_det_duration", "node", "type"]}, "%s/violations_smp_det.csv" % (folder_name))
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": violations_smp_hidden, "labels": ["smp_hid_time", "smp_hid_duration", "node", "type"]}, "%s/violations_smp_hid.csv" % (folder_name))
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": violations_out_detected, "labels": ["out_det_time", "out_det_duration", "node", "type"]}, "%s/violations_out_det.csv" % (folder_name))
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": violations_out_hidden, "labels": ["out_hid_time", "out_hid_duration", "node", "type"]}, "%s/violations_out_hid.csv" % (folder_name))
map_algos_size = {}
map_hidden_violation_count = {}
map_detected_violation_count = {}
map_detected_violation_ratio = {}
# variable that is used to detect "violation-out", "violation-normal" and "violation-sched":
# it will store the last line about "violations-out" or "violation-det", to detect if the next
# "violation" has been already processed!
last_line = None
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
algo = data["algorithm"]
compute_node_count = data["server_count"]
service_node_count = data["service_node_count"]
node_count = compute_node_count + service_node_count
nodes_vms_tuple = "%s-%s" % (data["algorithm"], compute_node_count)
if not map_algos_size.has_key(compute_node_count):
map_algos_size[compute_node_count] = []
map_algos_size[compute_node_count] += [algo]
_violations_det_per_node = {}
_violations_out_per_node = {}
_violations_smp_per_node = {}
for line in f.readlines():
try:
data = json.loads(line)
if float(data["time"]) > duration:
continue
if data["event"] == "trace_event" and data["value"] == "violation-det":
current_violation_det = (float(data["time"]), float(data["duration"]), data["origin"], "det")
if not _violations_det_per_node.has_key(data["origin"]):
_violations_det_per_node[data["origin"]] = []
_violations_det_per_node[data["origin"]] += [current_violation_det]
if data["event"] == "trace_event" and data["value"] == "violation-out":
current_violation_out = (float(data["time"]), float(data["duration"]), data["origin"], "out")
if not _violations_out_per_node.has_key(data["origin"]):
_violations_out_per_node[data["origin"]] = []
_violations_out_per_node[data["origin"]] += [current_violation_out]
if data["event"] == "trace_event" and data["value"] == "violation":
current_violation_smp = (float(data["time"]), float(data["duration"]), data["origin"], "smp")
if not _violations_smp_per_node.has_key(data["origin"]):
_violations_smp_per_node[data["origin"]] = []
_violations_smp_per_node[data["origin"]] += [current_violation_smp]
except Exception as e:
# print traceback.format_exc()
pass
f.seek(0)
nodes = set(_violations_smp_per_node.keys() + _violations_out_per_node.keys())
violations_smp_detected = []
violations_smp_hidden = []
violations_out_detected = []
violations_out_hidden = []
for node in nodes:
try:
current_violation_det = _violations_det_per_node[node] if _violations_det_per_node.has_key(node) else []
current_violation_out = _violations_out_per_node[node] if _violations_out_per_node.has_key(node) else []
current_violation_smp = _violations_smp_per_node[node] if _violations_smp_per_node.has_key(node) else []
product = itertools.product(current_violation_smp, current_violation_det)
product_filtered = [element for element in product if abs(element[0][0] + element[0][1] - element[1][0] - element[1][1]) < 0.01]
violations_smp_per_node_detected = set([element[0] for element in product_filtered])
violations_smp_per_node_hidden = set([element for element in current_violation_smp if element not in violations_smp_per_node_detected])
if len(violations_smp_per_node_detected) + len(violations_smp_per_node_hidden) != len(current_violation_smp):
print("%s + %s = %s" % (violations_smp_per_node_detected, violations_smp_per_node_hidden, current_violation_smp))
product = itertools.product(current_violation_out, current_violation_det)
product_filtered = [element for element in product if abs(element[0][0] + element[0][1] - element[1][0] - element[1][1]) < 0.01]
violations_out_per_node_detected = set([element[0] for element in product_filtered])
violations_out_per_node_hidden = set([element for element in current_violation_out if element not in violations_out_per_node_detected])
if len(violations_out_per_node_detected) + len(violations_out_per_node_hidden) != len(current_violation_out):
print("%s + %s = %s" % (violations_out_per_node_detected, violations_out_per_node_hidden, current_violation_out))
violations_smp_detected += violations_smp_per_node_detected
violations_smp_hidden += violations_smp_per_node_hidden
violations_out_detected += violations_out_per_node_detected
violations_out_hidden += violations_out_per_node_hidden
except:
pass
hidden_violation_count = len(violations_smp_hidden) + len(violations_out_hidden)
detected_violation_count = len(violations_smp_detected) + len(violations_out_detected)
map_hidden_violation_count[nodes_vms_tuple] = hidden_violation_count
map_detected_violation_count[nodes_vms_tuple] = detected_violation_count
map_detected_violation_ratio[nodes_vms_tuple] = detected_violation_count / (detected_violation_count + hidden_violation_count)
# print("%s@%d => %d" % (algo, compute_node_count, violation_total_time))
# export_csv_data(algo, compute_node_count, violations_smp_detected, violations_smp_hidden, violations_out_detected, violations_out_hidden)
################################################################################
# Generate CSV files from data maps
################################################################################
print map_hidden_violation_count
print map_detected_violation_count
print map_detected_violation_ratio
render_template("template/matrix_data.jinja2", {"algos": algos, "server_counts": nodes_tuples, "data": map_hidden_violation_count }, "data/hidden_violation_count.csv")
render_template("template/matrix_data.jinja2", {"algos": algos, "server_counts": nodes_tuples, "data": map_detected_violation_count }, "data/detected_violation_count.csv")
render_template("template/matrix_data.jinja2", {"algos": algos, "server_counts": nodes_tuples, "data": map_detected_violation_ratio }, "data/detected_violation_ratio.csv")
group_by_nodes = ["distributed", "hierarchical"]
not_group_by_nodes = list(set(algos) - set(group_by_nodes))
print("group_by_nodes -> %s" %(group_by_nodes))
print("not_group_by_nodes -> %s" %(not_group_by_nodes))
render_template("template/matrix_script.jinja2", {"source": "data/hidden_violation_count.csv", "x_label": "Configuration", "y_label": "count", "algos": algos, "x_axis": zip(nodes_tuples, vms_tuples), "group_by_nodes": [], "not_group_by_nodes": [], "title": "hidden_violation_count"}, "scripts/hidden_violation_count.r")
render_template("template/matrix_script.jinja2", {"source": "data/detected_violation_count.csv", "x_label": "Configuration", "y_label": "count", "algos": algos, "x_axis": zip(nodes_tuples, vms_tuples), "group_by_nodes": [], "not_group_by_nodes": [], "title": "detected_violation_count"}, "scripts/detected_violation_count.r")
render_template("template/matrix_script.jinja2", {"source": "data/detected_violation_ratio.csv", "x_label": "Configuration", "y_label": "percentage", "algos": algos, "x_axis": zip(nodes_tuples, vms_tuples), "group_by_nodes": [], "not_group_by_nodes": [], "title": "detected_violation_ratio"}, "scripts/detected_violation_ratio.r")
<file_sep>/visu/generate_loads.py
#!/usr/bin/python
from __future__ import division
from pkg_resources import WorkingSet, DistributionNotFound
working_set = WorkingSet()
import itertools
import sys
# Printing all installed modules
#print tuple(working_set)
# Detecting if module is installed
dependency_found = True
try:
dep = working_set.require('Jinja2')
except DistributionNotFound:
dependency_found = False
pass
if not dependency_found:
try:
# Installing it (anyone knows a better way?)
from setuptools.command.easy_install import main as install
install(['Jinja2'])
print("run again as normal user to process results")
except DistributionNotFound:
print("run this script as sudo to install a missing template engine")
pass
sys.exit(0)
import csv
import subprocess
import time
import os
import json
import jinja2
################################################################################
# Constant and parameters
################################################################################
duration = 3600
################################################################################
# Functions of the script
################################################################################
def execute_cmd(args):
print "%s" % args
# return "%s" % args
out, err = subprocess.Popen(args,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()
if not err == "":
print err
return out
def render_template(template_file_path, vars, output_file_path):
templateLoader = jinja2.FileSystemLoader( searchpath="." )
templateEnv = jinja2.Environment( loader=templateLoader )
TEMPLATE_FILE = template_file_path
template = templateEnv.get_template( TEMPLATE_FILE )
templateVars = vars
outputText = template.render( templateVars )
with open(output_file_path, "w") as text_file:
text_file.write(outputText)
################################################################################
# Clean data and scripts folders
################################################################################
execute_cmd(["rm", "-r", "loads"])
execute_cmd(["mkdir", "loads"])
################################################################################
# Detect algorithms used in experiments
################################################################################
algos = []
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
algo = data["algorithm"]
if not algo in algos:
algos += [algo]
print algos
################################################################################
# Detect (server_count, vm_count) combination used in experiments
################################################################################
nodes_tuples = []
vms_tuples = []
nodes_vms_tuples = []
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
compute_node_count = data["server_count"]
service_node_count = data["service_node_count"]
node_count = compute_node_count + service_node_count
if not compute_node_count in nodes_tuples:
nodes_tuples += [compute_node_count]
if not data["vm_count"] in vms_tuples:
vms_tuples += [data["vm_count"]]
# nodes_vms_tuple = "%s-%s" % (data["server_count"], data["vm_count"])
# if not nodes_vms_tuple in nodes_vms_tuples:
# nodes_vms_tuples += [nodes_vms_tuple]
# Order the tuples
nodes_tuples = sorted(nodes_tuples)
vms_tuples = sorted(vms_tuples)
nodes_vms_tuples = [str(tuple2[0])+"-"+str(tuple2[1]) for tuple2 in zip(nodes_tuples, vms_tuples)]
# nodes_vms_tuples = sorted(nodes_vms_tuples)
print nodes_tuples
print vms_tuples
print nodes_vms_tuples
################################################################################
# Fill data maps with computed metrics
################################################################################
def export_csv_data(algo, node_count, loads):
folder_name = "loads/data/%s-%d" % (algo, node_count)
execute_cmd(["mkdir", "-p", folder_name])
render_template("template/load_data.jinja2", {"algo": algo, "loads": loads, "labels": ["time", "load"]}, "%s/load.csv" % (folder_name))
simulations = []
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
algo = data["algorithm"]
compute_node_count = data["server_count"]
service_node_count = data["service_node_count"]
node_count = compute_node_count + service_node_count
nodes_vms_tuple = "%s-%s" % (data["algorithm"], compute_node_count)
service_node_name = "node%d" % (node_count)
simulations += [(algo, compute_node_count)]
loads = []
for line in f.readlines():
try:
data = json.loads(line)
if float(data["time"]) > 1801:
continue
# print(data)
if data["event"] == "trace_event" and data["state_name"] == "VARIABLE" and data["value"] == "LOAD" and data["origin"] == service_node_name:
loads += [(data["time"], data["data"]["value"])]
except:
pass
export_csv_data(algo, compute_node_count, loads)
################################################################################
# Find simulation matching and prepare R scripts
################################################################################
def export_loads_data(algo, node_count):
folder_name = "loads/scripts/%d-%s" % (node_count, algo)
execute_cmd(["mkdir", "-p", folder_name])
render_template("template/load_script.jinja2", {"algo": algo, "node_count": node_count}, "%s/compare.r" % (folder_name))
pass
for simulation in simulations:
algo = simulation[0]
node_count = simulation[1]
export_loads_data(algo, node_count)
################################################################################
# Clean results folder
################################################################################
execute_cmd(["rm", "-r", "loads/results"])
execute_cmd(["mkdir", "-p", "loads/results"])
################################################################################
# Generate loads figures
################################################################################
for simulation in simulations:
algo = simulation[0]
node_count = simulation[1]
export_loads_data(algo, node_count)
script_folder_name = "loads/scripts/%d-%s" % (node_count, algo)
out_file_path = "loads/results/%d-%s.pdf" % (node_count, algo)
execute_cmd(["/usr/bin/env", "Rscript", "%s/compare.r" % (script_folder_name)])
execute_cmd(["mv", "Rplots.pdf", out_file_path])
<file_sep>/visu/generate_repartition.py
#!/usr/bin/python
from __future__ import division
from pkg_resources import WorkingSet, DistributionNotFound
working_set = WorkingSet()
import itertools
import sys
# Printing all installed modules
#print tuple(working_set)
# Detecting if module is installed
dependency_found = True
try:
dep = working_set.require('Jinja2')
except DistributionNotFound:
dependency_found = False
pass
if not dependency_found:
try:
# Installing it (anyone knows a better way?)
from setuptools.command.easy_install import main as install
install(['Jinja2'])
print("run again as normal user to process results")
except DistributionNotFound:
print("run this script as sudo to install a missing template engine")
pass
sys.exit(0)
import csv
import subprocess
import time
import os
import json
import jinja2
import traceback
################################################################################
# Constant and parameters
################################################################################
duration = 3600
################################################################################
# Functions of the script
################################################################################
def execute_cmd(args):
print "%s" % args
# return "%s" % args
out, err = subprocess.Popen(args,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()
if not err == "":
print err
return out
def render_template(template_file_path, vars, output_file_path):
templateLoader = jinja2.FileSystemLoader( searchpath="." )
templateEnv = jinja2.Environment( loader=templateLoader )
TEMPLATE_FILE = template_file_path
template = templateEnv.get_template( TEMPLATE_FILE )
templateVars = vars
outputText = template.render( templateVars )
with open(output_file_path, "w") as text_file:
text_file.write(outputText)
################################################################################
# Clean data and scripts folders
################################################################################
execute_cmd(["rm", "-r", "repartition"])
execute_cmd(["mkdir", "repartition"])
################################################################################
# Detect algorithms used in experiments
################################################################################
algos = []
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
algo = data["algorithm"]
if not algo in algos:
algos += [algo]
print algos
################################################################################
# Detect (server_count, vm_count) combination used in experiments
################################################################################
nodes_tuples = []
vms_tuples = []
nodes_vms_tuples = []
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
compute_node_count = data["server_count"]
service_node_count = data["service_node_count"]
node_count = compute_node_count + service_node_count
if not compute_node_count in nodes_tuples:
nodes_tuples += [compute_node_count]
if not data["vm_count"] in vms_tuples:
vms_tuples += [data["vm_count"]]
# nodes_vms_tuple = "%s-%s" % (data["server_count"], data["vm_count"])
# if not nodes_vms_tuple in nodes_vms_tuples:
# nodes_vms_tuples += [nodes_vms_tuple]
# Order the tuples
nodes_tuples = sorted(nodes_tuples)
vms_tuples = sorted(vms_tuples)
nodes_vms_tuples = [str(tuple2[0])+"-"+str(tuple2[1]) for tuple2 in zip(nodes_tuples, vms_tuples)]
# nodes_vms_tuples = sorted(nodes_vms_tuples)
print nodes_tuples
print vms_tuples
print nodes_vms_tuples
################################################################################
# Fill data maps with computed metrics
################################################################################
def export_csv_data(algo, node_count, computes, migrations):
folder_name = "repartition/data/%s-%d" % (algo, node_count)
execute_cmd(["mkdir", "-p", folder_name])
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": computes, "labels": ["type", "value"]}, "%s/repartition_computations.csv" % (folder_name))
render_template("template/cloud_data.jinja2", {"algo": algo, "node_count": node_count, "violations": migrations, "labels": ["type", "value"]}, "%s/repartition_migrations.csv" % (folder_name))
map_algos_size = {}
# variable that is used to detect "violation-out", "violation-normal" and "violation-sched":
# it will store the last line about "violations-out" or "violation-det", to detect if the next
# "violation" has been already processed!
last_line = None
for dirname, dirnames, filenames in os.walk('./events'):
# print path to all subdirectories first.
for filename in filenames:
if filename.endswith(".json"):
with open("%s/%s" % (dirname, filename), 'r') as f:
header_line = f.readline()
header_data = json.loads(header_line)
data = header_data["data"]
algo = data["algorithm"]
compute_node_count = data["server_count"]
service_node_count = data["service_node_count"]
node_count = compute_node_count + service_node_count
nodes_vms_tuple = "%s-%s" % (data["algorithm"], compute_node_count)
if not map_algos_size.has_key(compute_node_count):
map_algos_size[compute_node_count] = []
map_algos_size[compute_node_count] += [algo]
computes = []
migrations = []
for line in f.readlines():
try:
data = json.loads(line)
if float(data["time"]) > duration:
continue
if data["event"] == "trace_event" and data["value"] == "migrate":
migration_time = data["duration"]
migrations += [["MIGRATION", migration_time]]
if data["event"] == "trace_event" and data["value"] == "compute":
compute_time = data["duration"]
compute_result = data["data"]["state"]
computes += [[compute_result, compute_time]]
except Exception as e:
# print traceback.format_exc()
pass
f.seek(0)
export_csv_data(algo, compute_node_count, computes, migrations)
################################################################################
# Clean results folder
################################################################################
execute_cmd(["rm", "-r", "repartition/results"])
execute_cmd(["mkdir", "-p", "repartition/results"])
################################################################################
# Prepare R scripts for each simulation
################################################################################
metrics = ["migrations", "computations"]
legends = {
"migrations": "migration time (s)",
"computations": "computation time (s)"
}
def export_repartition_single_data(algo, node_count, metric, legend):
print("%s with %s" % (algo, node_count))
folder_name = "repartition/scripts/%d-%s" % (node_count, algo)
execute_cmd(["mkdir", "-p", folder_name])
render_template("template/repartition_script.jinja2", {"algo": algo, "node_count": node_count, "metric": metric, "legend": legend}, "%s/compare_%s.r" % (folder_name, metric))
pass
for key in map_algos_size:
algos = map_algos_size[key]
node_count = key
for algo in algos:
for metric in metrics:
export_repartition_single_data(algo, node_count, metric, legends[metric])
################################################################################
# Generate repartition figures
################################################################################
for key in map_algos_size:
algos = map_algos_size[key]
node_count = key
for algo in algos:
for metric in metrics:
script_folder_name = "repartition/scripts/%d-%s" % (node_count, algo)
out_file_path = "repartition/results/%s-%s-%d.pdf" % (metric, algo, node_count)
execute_cmd(["/usr/bin/env", "Rscript", "%s/compare_%s.r" % (script_folder_name, metric)])
execute_cmd(["mv", "Rplots.pdf", out_file_path])
<file_sep>/src/main/java/scheduling/hierarchical/snooze/msg/LCAssMsg.java
package scheduling.hierarchical.snooze.msg;
/**
* Created by sudholt on 13/07/2014.
*/
public class LCAssMsg extends SnoozeMsg {
public LCAssMsg(String name, String sendBox, String origin, String replyBox) {
super(name, sendBox, origin, replyBox);
}
}
<file_sep>/src/main/java/scheduling/Scheduler.java
package scheduling;
import configuration.XHost;
import java.util.Collection;
/**
* Contract that must be followed by the implemented schedulers.
*/
public interface Scheduler {
ComputingResult computeReconfigurationPlan();
SchedulerResult checkAndReconfigure(Collection<XHost> hostsToCheck);
/**
* Result of the reconfiguration plan computation.
*/
class ComputingResult {
public enum State {
NO_RECONFIGURATION_NEEDED("NO_RECONFIGURATION_NEEDED"),
PLACEMENT_FAILED("PLACEMENT_FAILED"),
RECONFIGURATION_FAILED("RECONFIGURATION_FAILED"),
SUCCESS("SUCCESS");
private String name;
State(String name){
this.name = name;
}
public String toString(){
return name;
}
}
public State state;
/**
* Number of migrations in the computed plan
*/
public int nbMigrations;
/**
* Duration of the computing process
*/
public long duration;
/**
* The cost of the reconfiguration plan.
*/
protected int planCost;
public ComputingResult(State state, long duration, int nbMigrations, int planCost) {
this.state = state;
this.duration = duration;
this.nbMigrations = nbMigrations;
this.planCost = planCost;
}
public ComputingResult(State state, long duration) {
this(state, duration, 0, 0);
}
public ComputingResult() { this(State.SUCCESS, 0, 0, 0); }
}
/**
* Result of the reconfiguration.
*/
class SchedulerResult {
/**
* Result of the Scheduling process.
*/
public enum State {
SUCCESS("SUCCESS"),
RECONFIGURATION_PLAN_ABORTED("RECONFIGURATION_PLAN_ABORTED"),
NO_VIABLE_CONFIGURATION("NO_VIABLE_CONFIGURATION"),
NO_RECONFIGURATION_NEEDED("NO_RECONFIGURATION_NEEDED");
private String name;
State(String name){
this.name = name;
}
public String toString(){
return name;
}
}
/**
* Result of the reconfiguration.
*/
public State state;
/**
* Duration in ms of the reconfiguration.
*/
public long duration;
}
}
<file_sep>/visu/template/cloud_single_script.jinja2
#!/usr/bin/Rscript
algo1_matrix_data_smp_det_time <- read.table("clouds/data/{{algo}}-{{node_count}}/violations_smp_det.csv", header=T,sep=",")
colnames(algo1_matrix_data_smp_det_time) <- c("algo1_smp_det_time", "algo1_smp_det_duration")
attach(algo1_matrix_data_smp_det_time)
algo1_matrix_data_smp_hid_time <- read.table("clouds/data/{{algo}}-{{node_count}}/violations_smp_hid.csv", header=T,sep=",")
colnames(algo1_matrix_data_smp_hid_time) <- c("algo1_smp_hid_time", "algo1_smp_hid_duration")
attach(algo1_matrix_data_smp_hid_time)
algo1_matrix_data_out_det_time <- read.table("clouds/data/{{algo}}-{{node_count}}/violations_out_det.csv", header=T,sep=",")
colnames(algo1_matrix_data_out_det_time) <- c("algo1_out_det_time", "algo1_out_det_duration")
attach(algo1_matrix_data_out_det_time)
algo1_matrix_data_out_hid_time <- read.table("clouds/data/{{algo}}-{{node_count}}/violations_out_hid.csv", header=T,sep=",")
colnames(algo1_matrix_data_out_hid_time) <- c("algo1_out_hid_time", "algo1_out_hid_duration")
attach(algo1_matrix_data_out_hid_time)
dev.new(width=8, height=6)
g_range <- range(algo1_smp_det_duration, algo1_smp_hid_duration, algo1_out_det_duration, algo1_out_hid_duration)
plot( algo1_smp_det_time, algo1_smp_det_duration, pch=7 , col=rgb(255, 128, 0 , 200,maxColorValue=255), ylim=g_range*1.20,
,xlim=c(0, {{duration}}), xlab="Time (s)", ylab="Duration of the violation (s)")
#title(main="Duration of each violation")
points( algo1_smp_hid_time, algo1_smp_hid_duration, pch=0 , col=rgb(255, 128, 0 , 200,maxColorValue=255))
points( algo1_out_det_time, algo1_out_det_duration, pch=13, col=rgb(0 , 102, 204, 200,maxColorValue=255))
points( algo1_out_hid_time, algo1_out_hid_duration, pch=1 , col=rgb(0 , 102, 204, 200,maxColorValue=255))
legend("topleft", c(
"{{algo | capitalize}} violation detected",
"{{algo | capitalize}} violation hidden",
"{{algo | capitalize}} violation out detected",
"{{algo | capitalize}} violation out hidden"
), ncol=2,
col=c(
rgb(255, 128, 0 , 200,maxColorValue=255),
rgb(255, 128, 0 , 200,maxColorValue=255),
rgb(0 , 102, 204, 200,maxColorValue=255),
rgb(0 , 102, 204, 200,maxColorValue=255)
), pch=c(7, 0, 13, 1))
<file_sep>/run_all.sh
#! /bin/bash
PID_FILE=/tmp/vmplaces.pid
source xprc
function do_abort() {
echo Killing java process with PID `cat $PID_FILE`
kill -9 `cat $PID_FILE`
rm -f $PID_FILE
exit 2
}
trap do_abort SIGINT
error=0
# run <nb nodes> <algo> <implem>
# Ex: run 128 centralized scheduling.centralized.entropy2.Entropy2RP
function run() {
n_nodes=$1
algo=$2
implem=$3
turn_off=$4
if [ "$3" != "none" ]
then
implem="-Dsimulator.implementation=$implem"
# Yes, this is ugly
name=`echo ${implem} | rev | cut -d "." -f1 | rev`
name="${algo}-${name}-${n_nodes}-${turn_off}"
else
implem=''
name="${algo}-${n_nodes}-${turn_off}"
fi
n_service='1'
case "$algo" in
"centralized")
n_service=1
;;
"hierarchical")
n_service=$(($n_nodes / 32 + 1))
;;
"distributed")
n_service=0
;;
esac
n_vms=$(($n_nodes * 10))
mean=60
std=20
SIM_ARGS="-Dsimulator.algorithm=$algo $implem"
SIM_ARGS="$SIM_ARGS -Dhostingnodes.number=$n_nodes"
SIM_ARGS="$SIM_ARGS -Dservicenodes.number=$n_service"
SIM_ARGS="$SIM_ARGS -Dvm.number=$n_vms"
SIM_ARGS="$SIM_ARGS -Dhostingnodes.cpunumber=8"
SIM_ARGS="$SIM_ARGS -Dhostingnodes.memorytotal=32768"
SIM_ARGS="$SIM_ARGS -Dhosts.turn_off=$turn_off"
SIM_ARGS="$SIM_ARGS -Dload.mean=$mean"
SIM_ARGS="$SIM_ARGS -Dload.std=$std"
echo '----------------------------------------'
echo "Running $algo $implem with $n_nodes compute and $n_service service nodes turning off hosts: $turn_off, load.mean=$mean, load.std=$std"
echo "Command: java $VM_OPTIONS $SIM_ARGS simulation.Main $PROGRAM_ARGUMENTS"
echo "Command: VM_OPTIONS $VM_OPTIONS"
echo "Command: SIM_ARGS $SIM_ARGS"
echo "Command: PROGRAM_ARGUMENTS $PROGRAM_ARGUMENTS"
echo '----------------------------------------'
java $VM_OPTIONS $SIM_ARGS simulation.Main $PROGRAM_ARGUMENTS &
pid=$!
echo $pid > $PID_FILE
wait $pid
ret=$?
echo java returned $ret
if [ $ret -ne 0 ] && [ $ret -ne 134 ]
then
error=1
exit $ret
fi
mkdir -p visu/events/$name
cp events.json visu/events/$name/
}
#######################################
# Main #
#######################################
# Number of hosting nodes
nodes='64'
abort=0
rm -rf logs/ffd
{
for n in $nodes; do
run $n centralized scheduling.centralized.entropy2.Entropy2RP false
run $n centralized scheduling.centralized.ffd.LazyFirstFitDecreased false
run $n centralized scheduling.centralized.ffd.OptimisticFirstFitDecreased false
run $n centralized scheduling.centralized.entropy2.Entropy2RP true
run $n centralized scheduling.centralized.ffd.LazyFirstFitDecreased true
run $n centralized scheduling.centralized.ffd.OptimisticFirstFitDecreased true
#run $n hierarchical false
#run $n distributed false
done
} 2>&1 | tee run_all.log
if [ $error -eq 0 ]
then
visu/energy_plot.py run_all.log energy.dat
fi
<file_sep>/src/main/java/scheduling/hierarchical/snooze/Logger.java
package scheduling.hierarchical.snooze;
import org.simgrid.msg.Msg;
/**
* Created by sudholt on 29/06/2014.
*/
public class Logger {
public static void err(String s) {
if (SnoozeProperties.getInfoLevel() <= 4) Msg.info("ERRSNOO: " + s);
}
public static void exc(String s) {
if (SnoozeProperties.getInfoLevel() <= 4) Msg.info("EXCSNOO: " + s);
}
public static void imp(String s) {
if (SnoozeProperties.getInfoLevel() <= 3) Msg.info("IMPSNOO: " + s);
}
public static void info(String s) {
if (SnoozeProperties.getInfoLevel() <= 2) Msg.info("INFSNOO: " + s);
}
public static void debug(String s) {
if (SnoozeProperties.getInfoLevel() == 1) Msg.info("DEBSNOO: " + s);
}
public static void log(Exception e) {
Msg.info("EXCSNOO: ");
e.printStackTrace(System.err);
}
}<file_sep>/visu/energy_plot.py
#! /usr/bin/env python
from __future__ import print_function
import os, sys, re, json, math
import traceback
import operator
import pprint
pp = pprint.PrettyPrinter(indent=4).pprint
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import matplotlib.path as path
import matplotlib.animation as animation
import matplotlib.ticker as ticker
import locale
locale.setlocale(locale.LC_ALL, 'en_US')
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
# Determines the order of the bars in the plots
ORDER = ['Entropy', 'Lazy FFD', 'Optimistic FFD']
#ORDER = ['Lazy FFD', 'Optimistic FFD']
# Check arguments
if len(sys.argv) != 3:
eprint('Usage: ./energy_plot.py <log file> <energy file>')
sys.exit(1)
# Some functions
def to_bool(string):
if string in ['true', 'True']:
return True
if string in ['false', 'False']:
return False
eprint("%s is not a boolean" % string)
sys.exit(3)
def correct_name(name):
names = {
'LazyFirstFitDecreased': 'Lazy FFD',
'OptimisticFirstFitDecreased': 'Optimistic FFD',
'Entropy2RP': 'Entropy'}
return names[name]
# time_on['Entropy']['node56'] = 17546.57
time_on = {}
last_on = None
def new_experiment(alg):
global last_on
time_on[alg] = {}
last_on = {}
def end_experiment(time, alg):
for node in last_on.keys():
if last_on[node] is not None:
node_off(node, time, alg)
def node_on(name, time, alg):
if name in last_on and last_on[name] is not None:
eprint("Node %s was already on since %.2f" % (name, time))
sys.exit(1)
last_on[name] = time
def node_off(name, time, alg):
if last_on[name] is None:
eprint("None %s was not on" % name)
sys.exit(1)
if name not in time_on[alg]:
time_on[alg][name] = 0
time_on[alg][name] += time - last_on[name]
last_on[name] = None
########################################
# Get the number of turned off hosts
# and of migrations
########################################
n_turn_off = {}
n_migrations = {}
algos = []
n_on = {}
scheduler_ticks = {}
# load and standard deviation must be the same
# for all the experiments in the log file
load = None
std = None
simulation_time = None
n_hosts = None
with open(sys.argv[1], 'r') as f:
turn_off = None
curr = None
# Compile 3 patterns and read the logs
start_pattern = re.compile(r'Running (\w+)(\s-D[\w\.]+\=([\w\.]+))? with (\d+) compute and (\d+) service nodes turning off hosts: (\w+), load.mean=(\d+), load.std=(\d+)')
end_pattern = re.compile(r'\[.*\s(\d+\.\d+)\] \[.*\] End of Injection')
off_pattern = re.compile(r'\[(.*\s)?(\d+\.\d+)\] \[.*\] Turn off (node\d+)')
on_pattern = re.compile(r'\[(.* )?(\d+\.\d+)\] \[.*\] Turn on (node\d+)')
migration_pattern = re.compile(r'End of migration of VM vm-\d+ from node\d+ to node\d+')
scheduler_pattern = re.compile(r'\[(.*)\s(\d+\.\d+)\] \[.*\] Launching scheduler \(id = \d+\) - start to compute')
for line in f:
# This is a new experiment
m = re.search(start_pattern, line)
if m:
turn_off = to_bool(m.group(6))
n_hosts = int(m.group(4))
if n_hosts not in n_turn_off:
n_turn_off[n_hosts] = {True: {}, False: {}}
if n_hosts not in n_migrations:
n_migrations[n_hosts] = {True: {}, False: {}}
if n_hosts not in scheduler_ticks:
scheduler_ticks[n_hosts] = {True: {}, False: {}}
if n_hosts not in n_on:
n_on[n_hosts] = {}
algo = correct_name(m.group(3).split('.')[-1])
if algo not in algos:
algos.append(algo)
scheduler_ticks[n_hosts][turn_off][algo] = []
if turn_off:
n_on[n_hosts][algo] = {}
n_on[n_hosts][algo][0.0] = 0
n_turn_off[n_hosts][turn_off][algo] = 0
n_migrations[n_hosts][turn_off][algo] = 0
curr = turn_off
load = int(m.group(7))
std = int(m.group(8))
new_experiment(algo)
continue
# An experiment is over
m = re.search(end_pattern, line)
if m:
time = float(m.group(1))
end_experiment(time, algo)
simulation_time = int(time)
continue
# The scheduler is running
m = re.search(scheduler_pattern, line)
if m:
if algo not in scheduler_ticks[n_hosts][turn_off]:
scheduler_ticks[n_hosts][turn_off][algo] = []
scheduler_ticks[n_hosts][turn_off][algo].append(float(m.group(2)))
continue
# A node has been turned off
m = re.search(off_pattern, line)
if m:
n_turn_off[n_hosts][curr][algo] += 1
if turn_off:
n_on[n_hosts][algo][float(m.group(2))] = n_on[n_hosts][algo][max(n_on[n_hosts][algo])] - 1  # count at the most recent time, minus one
node_off(m.group(3), float(m.group(2)), algo)
continue
# A node has been turned on
m = re.search(on_pattern, line)
if m:
if turn_off:
n_on[n_hosts][algo][float(m.group(2))] = n_on[n_hosts][algo][max(n_on[n_hosts][algo])] + 1  # count at the most recent time, plus one
node_on(m.group(3), float(m.group(2)), algo)
continue
# A VM has been migrated
m = re.search(migration_pattern, line)
if m:
n_migrations[n_hosts][curr][algo] += 1
########################################
# Count the number of on VMs
########################################
n_vms = {}
dir_pattern = re.compile(r'(\w+)-([\w\d]+)-(\d+)-(true|false)')
events = os.path.join('visu', 'events')
# list dir in 'visu/events'
for item in os.listdir(events):
m = re.search(dir_pattern, item)
if m is None:
continue
# look for dirs like 'centralized-<algo>-64-true'
if m.group(1) == 'centralized':
algo = correct_name(m.group(2))
turn_off = to_bool(m.group(4))
n_hosts = int(m.group(3))
if n_hosts not in n_vms:
n_vms[n_hosts] = { True: {}, False: {} }
event_file = os.path.join(events, item, 'events.json')
print('Reading ' + event_file)
with open(event_file, 'r') as f:
n_vms[n_hosts][turn_off][algo] = {}
# each line in this file is a JSON document
for line in f:
try:
event = json.loads(line)
if event['value'] == "NB_VM":
time = float(event['time'])
value = int(event['data']['value'])
n_vms[n_hosts][turn_off][algo][time] = value
except:
t, value, tb = sys.exc_info()
print(str(t) + " " + str(value))
print(line)
traceback.print_tb(tb)
sys.exit(1)
if event['value'] != 'NB_VNS_ON':
continue
n_vms[n_hosts][turn_off][algo][float(event['time'])] = int(event['data']['value'])
migration_ordered = []
########################################
# Get the energy metrics
########################################
energy = {}
with open(sys.argv[2], 'r') as f:
p = re.compile(r'(\d+) \w+ (\w+) (\w+) ([\d\.]+)')
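# Each line of the energy file is expected to look like "<n_hosts> <word> <Implementation> <true|false> <joules>", as captured by the pattern above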
for line in f:
m = re.match(p, line)
n_hosts = int(m.group(1))
implem = correct_name(m.group(2))
turn_off = to_bool(m.group(3))
joules = float(m.group(4))
if n_hosts not in energy:
energy[n_hosts] = { True: {}, False: {} }
energy[n_hosts][turn_off][implem] = joules / simulation_time / 1000
########################################
# Make the bar plot
########################################
ind = np.arange(len(algos)) # the x locations for the groups
width = 0.35
ordered_energy = {}
off_ordered = {}
migration_ordered = {}
for n_hosts in energy.keys():
if n_hosts not in ordered_energy:
ordered_energy[n_hosts] = { True: [], False: [] }
for alg in ORDER:
if alg not in energy[n_hosts][True]:
continue
ordered_energy[n_hosts][True].append(energy[n_hosts][True][alg])
ordered_energy[n_hosts][False].append(energy[n_hosts][False][alg])
print("ordered_energy %d:" % n_hosts)
pp(ordered_energy[n_hosts])
fig, ax1 = plt.subplots()
color1 = '#888888'
color2 = '#FFFFFF'
linewidth = 1
rects1 = ax1.bar(ind, ordered_energy[n_hosts][False], width, color=color1, linewidth=linewidth)
rects2 = ax1.bar(ind + width, ordered_energy[n_hosts][True], width, color=color2, linewidth=linewidth)
ax1.set_ylabel('Average power (kW)')  # joules / simulated seconds / 1000
ax1.set_xticks(ind + width)
lim = ax1.get_ylim()
ax1.set_ylim(lim[0], lim[1])
ax1.set_xticklabels(ORDER)
########################################
# Make the line plots
########################################
# Make sure the values here are in the same order as the energy values
off_ordered[n_hosts] = []
migration_ordered[n_hosts] = []
for alg in ORDER:
if alg not in n_turn_off[n_hosts][True]:
continue
off_ordered[n_hosts].append(n_turn_off[n_hosts][True][alg])
migration_ordered[n_hosts].append(n_migrations[n_hosts][True][alg])
print("off_ordered[%d]:" % n_hosts)
pp(off_ordered[n_hosts])
print("migration_ordered[%d]:" % n_hosts)
print(migration_ordered[n_hosts])
ax2 = ax1.twinx()
migration_plot, = ax2.plot(ind + width, migration_ordered[n_hosts], 'k--^', linewidth=linewidth)
lim = ax2.get_ylim()
ax2.set_ylim(lim[0], lim[1])
ax2.set_yticks(range(0, int(math.ceil(max(migration_ordered[n_hosts]))), 500))
for i,j in zip(ind + width, migration_ordered[n_hosts]):
ax2.annotate(str(j), xy=(i,j + .5), va='bottom', weight='bold', size='large')
lgd = ax1.legend((rects1[0], rects2[0], migration_plot),
('Not turning off hosts', 'Turning off hosts', 'No. VM migrations'),
loc='lower right')
def find_filename(format):
i = 0
path = format % i
while os.path.isfile(path):
i += 1
path = format % i
return path
save_path = find_filename('energy_%d_%d_%d_%%d.pdf' % (n_hosts, load, std))
plt.savefig(save_path, transparent=True, bbox_extra_artists=(lgd,), bbox_inches='tight')
print('Saved plot as ' + save_path)
if os.system('which imgcat > /dev/null 2>&1') == 0:
os.system('imgcat ' + save_path)
########################################
# Make n_on plot
########################################
ordered_n_on = {}
plots = {}
styles = ['k-o', 'k-^', 'k-v', 'k-*']
for n_hosts in n_on:
fig, ax1 = plt.subplots()
if n_hosts not in ordered_n_on:
ordered_n_on[n_hosts] = {}
if n_hosts not in plots:
plots[n_hosts] = {}
i = 0
for alg in ORDER:
if alg not in n_on[n_hosts]:
continue
ordered_n_on[n_hosts][alg] = sorted(n_on[n_hosts][alg].items())
plots[n_hosts][alg], = ax1.plot(map(lambda t: t[0], ordered_n_on[n_hosts][alg]),
map(lambda t: t[1], ordered_n_on[n_hosts][alg]), styles[i], linewidth=linewidth, ms=8)
i += 1
print("ordered_n_on[%d]" % n_hosts)
pp(ordered_n_on[n_hosts])
lgd = ax1.legend(plots[n_hosts].values(),
n_on[n_hosts].keys(),
loc='upper right')
ax1.set_xlim(0, simulation_time)
ax1.set_ylim(20, n_hosts)
save_path = find_filename('n_on_%d_%d_%d_%%d.pdf' % (n_hosts, load, std))
plt.savefig(save_path, transparent=True, bbox_extra_artists=(lgd,), bbox_inches='tight')
print('Saved plot as ' + save_path)
if os.system('which imgcat > /dev/null 2>&1') == 0:
os.system('imgcat ' + save_path)
########################################
# Make vm_on plot
########################################
n_vms_ordered = {}
linewidth = 1
for n_hosts in n_vms:
fig, ax1 = plt.subplots()
if n_hosts not in n_vms_ordered:
n_vms_ordered[n_hosts] = {}
i = 0
colors = ['g', 'b', 'm', 'y']
for alg in ORDER:
if alg not in n_vms[n_hosts][True]:
continue
n_vms_ordered[n_hosts][alg] = sorted(n_vms[n_hosts][True][alg].items())
plots[n_hosts][alg], = ax1.plot(map(lambda t: t[0], n_vms_ordered[n_hosts][alg]),
map(lambda t: t[1], n_vms_ordered[n_hosts][alg]), colors[i] + '.-', linewidth=linewidth, ms=8)
#for tick in scheduler_ticks[n_hosts][True][alg]:
# ax1.plot((tick, tick), (450, 512), colors[i] + '-')
i += 1
ax1.set_xlim(0, simulation_time)
lgd = ax1.legend(plots[n_hosts].values(),
n_on[n_hosts].keys(),
loc='lower right')
save_path = find_filename('vms_on_%d_%d_%d_%%d.pdf' % (n_hosts, load, std))
plt.savefig(save_path, transparent=True, bbox_extra_artists=(lgd,), bbox_inches='tight')
print('Saved plot as ' + save_path)
if os.system('which imgcat > /dev/null 2>&1') == 0:
os.system('imgcat ' + save_path)
<file_sep>/visu/README.md
# Visualisation tools
This folder contains a set of tools that help to generate beautiful diagrams in order to compare different scheduling algorithms. Several metrics are reported by the SimgridInjector, and the scripts contained in this folder generate files from the reported metrics.
## Requirements
* Python
* R language
* easy_install
* Jinja (template engine for python)
## Installation
When ***generate_data.py*** is run, it will try to find *Jinja* by leveraging the *easy_install* library. To install easy_install, run the following:
```
$ apt-get install python-setuptools # Debian
$ brew install easy_install # MacOS
```
Then,
```
$ sudo ./generate_data.py
```
## Generate the diagrams
When a simulation is performed, an ***events.json*** file is generated, containing information about the experiments and the events that occurred.
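For reference, each line of ***events.json*** is a standalone JSON document. The plotting scripts in this folder only rely on the `value`, `time` and `data.value` fields, so a line reporting the number of VMs looks roughly like the following sketch (real traces may contain additional fields):
```
{"time": 42.0, "value": "NB_VM", "data": {"value": 230}}
```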
Just put this file in a folder (with the simulation name) inside the ***events*** folder, as illustrated in the following screenshot:

Once it is done, just run the following command:
```
./generate_data.py ; ./generate_figures.py
```
The diagrams will auto-magically appear in the results folder!
<file_sep>/src/main/java/injector/InjectorEvent.java
package injector;
/**
* Created with IntelliJ IDEA.
* User: alebre
* Date: 08/10/13
* Time: 10:03
* To change this template use File | Settings | File Templates.
*/
public interface InjectorEvent {
long getId();
double getTime();
void play();
}
<file_sep>/config/centralizedResolver.properties
/**
* This file is the main configuration file for the centralized scheduling management
*
**/
/**
* Main properties
**/
// The periodicity to invoke the scheduling algorithm (in seconds)
// Default: 30 seconds
centralizedresolver.scheduling-periodicity = 30
<file_sep>/config/dvms.properties
is_locality_based_scheduler = false
minimum_partition_size = 4<file_sep>/src/main/java/scheduling/centralized/ffd/LazyFirstFitDecreased.java
package scheduling.centralized.ffd;
import configuration.SimulatorProperties;
import configuration.XHost;
import configuration.XVM;
import simulation.SimulatorManager;
import java.util.*;
public class LazyFirstFitDecreased extends FirstFitDecreased {
public LazyFirstFitDecreased(Collection<XHost> hosts) {
this(hosts, new Random(SimulatorProperties.getSeed()).nextInt());
}
public LazyFirstFitDecreased(Collection<XHost> hosts, Integer id) {
super(hosts, id);
}
@Override
protected void manageOverloadedHost(List<XHost> overloadedHosts, ComputingResult result) {
// The VMs are sorted by decreasing size of CPU and RAM capacity
TreeSet<XVM> toSchedule = new TreeSet<>(new XVMComparator(true, useLoad));
Map<XVM, XHost> sources = new HashMap<>();
for(XHost host: SimulatorManager.getSGHostingHosts()) {
predictedCPUDemand.put(host, host.getCPUDemand());
predictedMemDemand.put(host, host.getMemDemand());
}
// Remove enough VMs so the overloaded hosts are no longer overloaded
for(XHost host : overloadedHosts) {
Iterator<XVM> vms = host.getRunnings().iterator();
while((host.getCPUCapacity() < predictedCPUDemand.get(host) ||
host.getMemSize() < predictedMemDemand.get(host)) && vms.hasNext()) {
XVM vm = vms.next();
toSchedule.add(vm);
sources.put(vm, host);
predictedCPUDemand.put(host, predictedCPUDemand.get(host) - vm.getCPUDemand());
predictedMemDemand.put(host, predictedMemDemand.get(host) - vm.getMemSize());
}
}
for(XVM vm: toSchedule) {
XHost dest = null;
// Try to find a new host for the VM (the candidate hosts are not sorted)
for(XHost host: SimulatorManager.getSGHostingHosts()) {
if(host.getCPUCapacity() >= predictedCPUDemand.get(host) + vm.getCPUDemand() &&
host.getMemSize() >= predictedMemDemand.get(host) + vm.getMemSize()) {
dest = host;
break;
}
}
if(dest == null) {
result.state = ComputingResult.State.RECONFIGURATION_FAILED;
return;
}
// Schedule the migration
predictedCPUDemand.put(dest, predictedCPUDemand.get(dest) + vm.getCPUDemand());
predictedMemDemand.put(dest, predictedMemDemand.get(dest) + vm.getMemSize());
XHost source = sources.get(vm);
if(!source.getName().equals(dest.getName())) {
migrations.add(new Migration(vm, source, dest));
}
}
}
}
<file_sep>/visu/generate_figures.py
#!/usr/bin/python
import csv
import subprocess
import time
import os
################################################################################
# Constant and parameters
################################################################################
duration = 3600
################################################################################
# Functions of the script
################################################################################
def execute_cmd(args):
print "%s" % args
# return "%s" % args
out, err = subprocess.Popen(args,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()
if not err == "":
print err
return out
################################################################################
# Clean results folder
################################################################################
execute_cmd(["rm", "-r", "results"])
execute_cmd(["mkdir", "results"])
################################################################################
# Regenerate allfigure.r
################################################################################
execute_cmd(["rm", "scripts/allfigure.r"])
open("scripts/allfigure.r", 'a').close()
all_figure_script = open("scripts/allfigure.r", 'a')
with all_figure_script as f:
f.write("#!/usr/bin/env Rscript")
execute_cmd(["chmod", "+x", "scripts/allfigure.r"])
############################################################################
# Append all scripts to allfigure.r
############################################################################
for dirname, dirnames, filenames in os.walk('./scripts'):
# run every R script found in the scripts folder (except allfigure.r)
for filename in filenames:
if not filename == "allfigure.r":
# print filename
# ins = open("%s/%s" % (dirname, filename), "r" )
# for line in ins:
# if not line.startswith("#!"):
# popen("ls")
# # execute_cmd(["echo", "toto"])
execute_cmd(["/usr/bin/env", "Rscript", "%s/%s" % (dirname, filename)])
basename_script = filename.split(".")[0]
execute_cmd(["mv", "Rplots.pdf", "results/%s.pdf" % (basename_script)])
<file_sep>/src/main/java/configuration/ExtendedRandom.java
package configuration;
import java.util.Random;
public class ExtendedRandom extends Random {
public ExtendedRandom() { super(); }
public ExtendedRandom(long seed) {super(seed);}
public double nextExp() {
return - Math.log(1.0-nextDouble());
}
public double nextExp(double lambda) {
return lambda*nextExp();
}
public double nextGamma(double shape) {
if(shape == 1.0) {
return nextExp();
} else if (shape < 1.0) {
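// Shape < 1: acceptance-rejection sampling based on a uniform and an exponential draw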
double uniform, exp;
while(true) {
uniform = nextDouble();
exp = nextExp();
if( uniform <= 1.0 - shape) {
double res = Math.pow(uniform, 1.0/shape);
if(res <= exp) return res;
} else {
double tmp = -Math.log((1-uniform) / shape);
double res = Math.pow(1.0 + shape*(tmp - 1.0), 1.0/shape);
if(res <= exp + tmp) return res;
}
}
} else {
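// Shape > 1: Marsaglia & Tsang's squeeze/rejection method (mshape = shape - 1/3, coef = 1/sqrt(9*mshape))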
double mshape = shape - (1.0/3.0);
double coef = 1.0/Math.sqrt(9.0 * mshape);
double g, tmp, uniform;
while(true) {
g = nextGaussian();
tmp = 1.0 + coef * g;
while (tmp <= 0.0) {
g = nextGaussian();
tmp = 1.0 + coef * g;
}
tmp = tmp * tmp * tmp;
uniform = nextDouble();
if(uniform < 1.0 - 0.0331 * (g*g*g*g)) return (mshape * tmp);
if(Math.log(uniform) < 0.5*g*g + mshape*(1.0 - tmp + Math.log(tmp))) return (mshape * tmp);
}
}
}
public double nextGamma(double shape, double scale) {
return scale * nextGamma(shape);
}
public double nextBeta(double alpha, double beta) {
if((alpha <= 1.0) && (beta <= 1.0)) {
double p1, p2;
while(true) {
p1 = Math.pow(nextDouble(), 1.0/alpha);
p2 = Math.pow(nextDouble(), 1.0/beta);
if((p1 + p2) <= 1.0)
return (p1/(p1+p2));
}
} else {
double g1 = nextGamma(alpha);
double g2 = nextGamma(beta);
return (g1/(g1+g2));
}
}
/**
*
*/
private static final long serialVersionUID = 1L;
}
<file_sep>/src/main/java/scheduling/hierarchical/snooze/SnoozeProperties.java
package scheduling.hierarchical.snooze;
import configuration.SimulatorProperties;
import scheduling.GeneralProperties;
import java.io.File;
/**
* Created by alebre on 16/07/14.
*/
public class SnoozeProperties extends GeneralProperties {
private static final long serialVersionUID = 7229931356566105645L;
//Default location of the properties file
public static final String DEFAULT_PROP_FILE = "config" + File.separator + "snooze.properties";
//Singleton
public final static SnoozeProperties INSTANCE = new SnoozeProperties();
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//Property keys
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
public final static String ALG_VARIANT = "snooze.alg-variant";
public final static String GM_NUMBER = "snooze.gm-number";
public final static String HEARTBEAT_PERIODICITY = "snooze.hb-periodicity";
public final static String HEARTBEAT_TIMEOUT = "snooze.hb-timeout";
public final static String SCHEDULING_PERIODIC = "snooze.scheduling-periodic";
public final static String SCHEDULING_PERIODICITY = "snooze.scheduling-periodicity";
public final static String INFO_LEVEL = "snooze.info-level";
public final static String INFO_PERIODICITY = "snooze.info-periodicity";
public final static String SIMULATE_LOCALCOMPUTATION = "snooze.simulate-localcomputation";
public final static String FAULT_MODE = "snooze.faultmode";
public final static String FAULT_MODE_GLPERIOD = "snooze.glcrash-period";
public final static String FAULT_MODE_GMPERIOD = "snooze.gmcrash-period";
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//Property default values
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
public final static int DEFAULT_GM_NUMBER = SimulatorProperties.getNbOfServiceNodes();
public final static long DEFAULT_HEARTBEAT_PERIODICITY = 2;
public final static boolean DEFAULT_SCHEDULING_PERIODIC = true;
public final static long DEFAULT_SCHEDULING_PERIODICITY = 30;
public final static long DEFAULT_HEARTBEAT_TIMEOUT = 5;
public final static int DEFAULT_INFO_LEVEL = 2;
public final static int DEFAULT_INFO_PERIODICITY = 5;
public static boolean DEFAULT_SIMULATE_LOCALCOMPUTATION = false;
public static boolean DEFAULT_FAULT_MODE = false ;
public static long DEFAULT_FAULT_MODE_GLPERIOD = SimulatorProperties.getDuration()/2 ;
public static long DEFAULT_FAULT_MODE_GMPERIOD = 300 ;
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//Constructors
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
public SnoozeProperties(String file) {
super(file);
}
public SnoozeProperties() {
this(DEFAULT_PROP_FILE);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//Class methods
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
public static String getAlgVariant() {
String algVariant = "";
algVariant = INSTANCE.getProperty(ALG_VARIANT);
if (algVariant.equals(""))
algVariant = "hierarchical-" +
(getSchedulingPeriodic() ? "periodic" + getSchedulingPeriodicity() + "s-" : "reactive-") +
(SimulatorProperties.getNbOfServiceNodes()-1) + "GMs-" +
SimulatorProperties.getNbOfHostingNodes() + "LCs-" +
"beatFreq" + getHeartBeatPeriodicity() +"s";
return algVariant;
}
public static int getGMNumber() {
return INSTANCE.getPropertyAsInt(GM_NUMBER, DEFAULT_GM_NUMBER);
}
public static long getHeartBeatPeriodicity(){
return INSTANCE.getPropertyAsLong(HEARTBEAT_PERIODICITY, DEFAULT_HEARTBEAT_PERIODICITY);
}
public static boolean getSchedulingPeriodic(){
return INSTANCE.getPropertyAsBoolean(SCHEDULING_PERIODIC, DEFAULT_SCHEDULING_PERIODIC);
}
public static long getSchedulingPeriodicity(){
return INSTANCE.getPropertyAsLong(SCHEDULING_PERIODICITY, DEFAULT_SCHEDULING_PERIODICITY);
}
public static long getHeartBeatTimeout() {
return INSTANCE.getPropertyAsLong(HEARTBEAT_TIMEOUT, DEFAULT_HEARTBEAT_TIMEOUT);
}
public static int getInfoLevel() {
return INSTANCE.getPropertyAsInt(INFO_LEVEL, DEFAULT_INFO_LEVEL);
}
public static int getInfoPeriodicity() {
return INSTANCE.getPropertyAsInt(INFO_PERIODICITY, DEFAULT_INFO_PERIODICITY);
}
public static boolean shouldISleep() {
return INSTANCE.getPropertyAsBoolean(SIMULATE_LOCALCOMPUTATION, DEFAULT_SIMULATE_LOCALCOMPUTATION);
}
public static boolean faultMode() {
return INSTANCE.getPropertyAsBoolean(FAULT_MODE, DEFAULT_FAULT_MODE);
}
public static long getGLFaultPeriodicity(){
return INSTANCE.getPropertyAsLong(FAULT_MODE_GLPERIOD, DEFAULT_FAULT_MODE_GLPERIOD);
}
public static long getGMFaultPeriodicity(){
return INSTANCE.getPropertyAsLong(FAULT_MODE_GMPERIOD, DEFAULT_FAULT_MODE_GMPERIOD);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//Methods for properties currently not stored in the properties file
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//Other methods
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
}
<file_sep>/src/main/java/migration/MigrationPlan.java
package migration;
import configuration.*;
/**
* Created by jonathan on 12/10/17.
*/
public class MigrationPlan {
public XHost origin;
public XHost destination;
public configuration.XVM vm;
}
<file_sep>/src/main/java/bug/MyProcess.java
package bug;
import configuration.XHost;
import configuration.XVM;
import org.simgrid.msg.*;
import org.simgrid.msg.Process;
public class MyProcess extends Process {
public MyProcess(Host host, String name, String[] args) throws HostNotFoundException {
super(host, name, args);
}
@Override
public void main(String[] args) throws MsgException {
// Create a host wrapper and a single VM on node1, then exercise the VM life-cycle
Host node1 = Host.getByName("node1");
XHost host = new XHost(node1, 4096, 8, 800,1000, null);
Msg.info("Got " + node1.getName());
XVM vm = new XVM(host, "vm", 1, 1024, 125, null, 0, 125, 40);
vm.start();
vm.setLoad(20);
waitFor(1000);
vm.suspend();
vm.setLoad(80);
waitFor(10);
vm.resume();
waitFor(500);
vm.shutdown();
Msg.info("This is the end");
}
}
<file_sep>/src/main/java/simulation/SimpleMain.java
/**
* Copyright 2012-2013-2014. The SimGrid Team. All rights reserved.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the license (GNU LGPL) which comes with this package.
*
* This file is the launcher on the Simgrid VM injector
* The main is composed of three part:
* 1./ Generate the deployment file according to the number of nodes and the algorithm you want to evaluate
* 2./ Configure, instantiate and assign each VM on the different PMs
* 3./ Launch the injector and the other simgrid processes in order to run the simulation.
*
* Please note that all parameters of the simulation are given in the ''simulator.properties'' file available
* in the ''config'' directory
*
* @author: <EMAIL>
*/
package simulation;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.core.util.StatusPrinter;
import choco.cp.solver.constraints.global.geost.geometricPrim.Obj;
import com.hubspot.jinjava.Jinjava;
import configuration.SimulatorProperties;
import configuration.XHost;
import org.docopt.Docopt;
import org.json.JSONObject;
import org.simgrid.msg.Msg;
import org.simgrid.msg.Process;
import org.slf4j.LoggerFactory;
import scheduling.hierarchical.snooze.AUX;
import trace.Trace;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class SimpleMain {
private static final String doc =
"VMPlaceS simulation launcher (simulation.Main).\n"
+ "\n"
+ "Usage:\n"
+ " simplemain [--platform=FILE] [--deployment=FILE] --algo=<value> [--impl=<value>] [--duration=<value>] [--loadperiod=<value>] [--nb_hosts=<value>] [--nb_vms=<value>] [--netbw=<value>] [--vm_maxcpuconsumption=<value>] [--vm_nbcpuconsumptionslots=<value>] [--load_mean=<value>] [--load_std=<value>] [--dry-run]\n"
+ " simplemain --list-algos\n"
+ "\n"
+ "Options:\n"
+ " -h --help Show this screen.\n"
+ "\n";
private static JSONObject load_json_file(String filename) {
String jsonContent;
try {
jsonContent = new Scanner(new File(filename)).useDelimiter("\\Z").next();
} catch (FileNotFoundException e) {
jsonContent = "{}";
}
return new JSONObject(jsonContent);
}
private static String get_template_content(String template_path) {
String template_content;
try {
template_content = new Scanner(new File(template_path)).useDelimiter("\\Z").next();
} catch (FileNotFoundException e) {
template_content = "";
}
return template_content;
}
private static String get_deployment_template(String algorithm_name) {
JSONObject algorithms_description = load_json_file("config/algorithms.json");
String template_path = algorithms_description.getJSONObject("algorithms").getJSONObject(algorithm_name).getString("deploy_xml_template");
return get_template_content(template_path);
}
private static Map<String, Object> build_context(Map<String, Object> opts) {
JSONObject algorithms_description = load_json_file("config/algorithms.json");
Map<String, Object> context = new HashMap<String, Object>();
Object selected_algo = opts.get("--algo") != null ? opts.get("--algo") : SimulatorProperties.getAlgo();
context.put("algo", selected_algo);
String defaultImpl = algorithms_description.getJSONObject("algorithms").
getJSONObject(selected_algo.toString()).
getJSONArray("scheduling_algorithm").
get(0)
.toString();
Object selected_impl = opts.get("--impl") != null? opts.get("--impl") : defaultImpl;
context.put("impl", selected_impl);
Object nb_hosts = opts.get("--nb_hosts") != null ? opts.get("--nb_hosts") : SimulatorProperties.getNbOfHostingNodes();
context.put("nb_hosts", nb_hosts);
Object nb_vms = opts.get("--nb_vms") != null ? opts.get("--nb_vms") : SimulatorProperties.getNbOfVMs();
context.put("nb_vms", nb_vms);
Object nb_service_nodes = opts.get("--nb_service_nodes") != null ? opts.get("--nb_service_nodes") : SimulatorProperties.getNbOfServiceNodes();
context.put("nb_service_nodes", nb_service_nodes);
Object nb_cpus = opts.get("--nb_cpus") != null ? opts.get("--nb_cpus") : SimulatorProperties.getNbOfCPUs();
context.put("nb_cpus", nb_cpus);
Object cpu_capacity = opts.get("--cpu_capacity") != null ? opts.get("--cpu_capacity") : SimulatorProperties.getCPUCapacity();
context.put("cpu_capacity", cpu_capacity);
Object ram_capacity = opts.get("--ram_capacity") != null ? opts.get("--ram_capacity") : SimulatorProperties.getMemoryTotal();
context.put("ram_capacity", ram_capacity);
Object netbw = opts.get("--netbw") != null ? opts.get("--netbw") : SimulatorProperties.getNetCapacity();
context.put("netbw", netbw);
Object vm_maxcpuconsumption = opts.get("--vm_maxcpuconsumption") != null ? opts.get("--vm_maxcpuconsumption") : SimulatorProperties.getVMMAXCPUConsumption();
context.put("vm_maxcpuconsumption", vm_maxcpuconsumption);
Object vm_nbcpuconsumptionslots = opts.get("--vm_nbcpuconsumptionslots") != null ? opts.get("--vm_nbcpuconsumptionslots") : SimulatorProperties.getNbOfCPUConsumptionSlots();
context.put("vm_nbcpuconsumptionslots", vm_nbcpuconsumptionslots);
Object load_mean = opts.get("--load_mean") != null ? opts.get("--load_mean") : SimulatorProperties.getMeanLoad();
context.put("load_mean", load_mean);
Object load_std = opts.get("--load_std") != null ? opts.get("--load_std") : SimulatorProperties.getStandardDeviationLoad();
context.put("load_std", load_std);
Object duration = opts.get("--duration") != null ? opts.get("--duration") : SimulatorProperties.getDuration();
context.put("duration", duration);
Object loadperiod = opts.get("--loadperiod") != null ? opts.get("--loadperiod") : SimulatorProperties.getLoadPeriod();
context.put("loadperiod", loadperiod);
Object port = opts.get("--port") != null ? opts.get("--port") : 23000;
context.put("port", port);
List<Integer> range = IntStream.range(0, Integer.parseInt(nb_hosts.toString())).boxed().collect(Collectors.toList());
context.put("node_range", range);
return context;
}
private static boolean generate_deployment_file(String output_path, Map<String, Object> context) {
Jinjava jinjava = new Jinjava ();
String templateContent = get_deployment_template(SimulatorProperties.getAlgo());
String renderedTemplate = jinjava.render(templateContent, context);
try( PrintWriter out = new PrintWriter(output_path) ){
out.println(renderedTemplate);
} catch (FileNotFoundException e) {
return false;
}
return true;
}
private static boolean generate_platform_file(String output_path, Map<String, Object> context) {
Jinjava jinjava = new Jinjava ();
String templateContent = get_template_content("./templates/cluster_platform.xml");
String renderedTemplate = jinjava.render(templateContent, context);
try( PrintWriter out = new PrintWriter(output_path) ){
out.println(renderedTemplate);
} catch (FileNotFoundException e) {
return false;
}
return true;
}
private static boolean generate_simulation_config_file(String output_path, Map<String, Object> context) {
Jinjava jinjava = new Jinjava ();
String templateContent = get_template_content("./templates/simulator.properties");
String renderedTemplate = jinjava.render(templateContent, context);
try( PrintWriter out = new PrintWriter(output_path) ){
out.println(renderedTemplate);
} catch (FileNotFoundException e) {
return false;
}
return true;
}
/**
* The Simulator launcher
*
* @param args
*/
public static void main(String[] args) throws Exception {
Map<String, Object> opts =
new Docopt(doc).withVersion("VMPlaceS 1.0").parse(args);
System.out.println(opts);
Map<String, Object> context = build_context(opts);
// Historical fix to get the internal logs of Entropy correctly
// assume SLF4J is bound to logback in the current environment
LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory();
// print logback's internal status
StatusPrinter.print(lc);
// Save the beginning time
SimulatorManager.setBeginTimeOfSimulation(System.currentTimeMillis());
// Create temporary folder
File file = new File("./tmp/config");
if (!file.exists()) {
file.mkdirs();
}
// Generate configuration file
String generated_config_path = "./tmp/config/simulator.properties";
boolean configuration_is_generated = generate_simulation_config_file(generated_config_path, context);
if (! configuration_is_generated) {
throw new Exception(String.format("Could not generate '%s'", generated_config_path));
}
SimulatorProperties.setInstance(new SimulatorProperties(generated_config_path));
// Generate platform file
String generated_platform_path = "./tmp/config/cluster_platform.xml";
boolean platform_is_generated = generate_platform_file(generated_platform_path, context);
if (! platform_is_generated) {
throw new Exception(String.format("Could not generate '%s'", generated_platform_path));
}
// Generate a deployment file
String generated_deploy_path = "./tmp/config/generated_deploy.xml";
boolean deployment_is_generated = generate_deployment_file(generated_deploy_path, context);
if (! deployment_is_generated) {
throw new Exception(String.format("Could not generate '%s'", generated_deploy_path));
}
String platformConfigurationLocation = "./tmp/config/cluster_platform.xml";
String deploymentConfigurationLocation = "./tmp/config/generated_deploy.xml";
if (opts.containsKey("--dry-run") && opts.get("--dry-run").toString().equals("true")) {
return;
}
// Init. internal values
Msg.energyInit();
String[] classpathOptions = {
platformConfigurationLocation,
deploymentConfigurationLocation,
"--cfg=cpu/optim:Full",
"--cfg=tracing:1",
"--cfg=tracing/filename:simu.trace",
"--cfg=tracing/platform:1"
};
Msg.init(classpathOptions);
Msg.createEnvironment(platformConfigurationLocation);
Msg.deployApplication(deploymentConfigurationLocation);
/* Create all VM instances and assign them on the PMs */
/* The initial deployment is based on a round robin fashion */
System.out.println("Configure simulation" + new Date().toString());
SimulatorManager.cleanLog();
// True means round robin placement.
SimulatorManager.configureHostsAndVMs(SimulatorProperties.getNbOfHostingNodes(), SimulatorProperties.getNbOfServiceNodes(), SimulatorProperties.getNbOfVMs(), true);
SimulatorManager.writeCurrentConfiguration();
String algorithmName = SimulatorProperties.getAlgo();
String algorithmDetails = "{}";
if (algorithmName.equals("hierarchical")) {
int lcsRatio = SimulatorProperties.getNbOfHostingNodes() / (SimulatorProperties.getNbOfServiceNodes() -1 );
algorithmDetails = String.format("{\"assignmentAlgorithm\": \"%s\", \"lcsRatio\": %d}", AUX.assignmentAlg, lcsRatio);
}
if(algorithmName.equals("centralized"))
algorithmName = SimulatorProperties.getImplementation().substring(SimulatorProperties.getImplementation().lastIndexOf('.') + 1);
Trace.simulationDeclare(algorithmName, SimulatorProperties.getNbOfHostingNodes(), SimulatorProperties.getNbOfServiceNodes(), SimulatorProperties.getNbOfVMs(), algorithmDetails);
/* Prepare TRACE variables */
System.out.println("Prepare TRACE module" + new Date().toString());
// A node can be underloaded
Trace.hostStateDeclare("PM");
Trace.hostStateDeclareValue("PM", "underloaded", "0 1 1");
Trace.hostStateDeclareValue("PM", "normal", "1 1 1");
Trace.hostStateDeclareValue("PM", "violation", "1 0 0");
Trace.hostStateDeclareValue("PM", "violation-det", "0 1 0");
Trace.hostStateDeclareValue("PM", "violation-out", "1 0 0");
Trace.hostStateDeclare("SERVICE");
Trace.hostStateDeclareValue("SERVICE", "free", "1 1 1");
Trace.hostStateDeclareValue("SERVICE", "booked", "0 0 1");
Trace.hostStateDeclareValue("SERVICE", "compute", "1 0 1");
Trace.hostStateDeclareValue("SERVICE", "reconfigure", "1 1 0");
Trace.hostStateDeclareValue("SERVICE", "migrate", "1 0 0");
Trace.hostVariableDeclare("LOAD");
Trace.hostVariableDeclare("NB_MC"); // Nb of microcosms (only for DVMS)
Trace.hostVariableDeclare("NB_MIG"); //Nb of migration
Trace.hostVariableDeclare("NB_VM"); //To follow number of VMs.
Trace.hostVariableDeclare("NB_VM_TRUE"); //To follow the true number of VMs.
Trace.hostVariableDeclare("ENERGY");
Trace.hostVariableDeclare("NB_OFF"); //Nb of hosts turned off
Trace.hostVariableDeclare("NB_ON"); //Nb of hosts turned on
for(XHost host: SimulatorManager.getSGHosts()) {
Trace.hostVariableSet(host.getName(), "NB_ON", 1);
Trace.hostVariableSet(host.getName(), "NB_OFF", 0);
}
// Turn off the hosts that we don't need
int nOff = 0;
if(SimulatorProperties.getHostsTurnoff()) {
for (XHost h : SimulatorManager.getSGHostingHosts())
if (h.getRunnings().size() <= 0) {
SimulatorManager.turnOff(h);
nOff++;
}
Msg.info(String.format("Turned off unused %d nodes before starting", nOff));
}
/* execute the simulation. */
System.out.println("Launcher: begin Msg.run()" + new Date().toString());
notify(String.format("Started %s with %d hosts and %d VMs", SimulatorProperties.getImplementation(), SimulatorProperties.getNbOfHostingNodes(), SimulatorProperties.getNbOfVMs()));
Msg.run();
System.out.println("Launcher: end of Msg.run()" + new Date().toString());
Trace.close();
Msg.info("End of run");
notify(String.format("End of simulation %s", SimulatorProperties.getImplementation()));
Process.killAll(-1);
Msg.info(String.format("There are still %d processes running", Process.getCount()));
}
private static void notify(String message) {
Msg.info(message);
}
}
<file_sep>/docs/javadoc/index-files/index-18.html
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_131) on Wed Dec 13 18:37:12 CET 2017 -->
<title>T-Index</title>
<meta name="date" content="2017-12-13">
<link rel="stylesheet" type="text/css" href="../stylesheet.css" title="Style">
<script type="text/javascript" src="../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="T-Index";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li><a href="../overview-tree.html">Tree</a></li>
<li><a href="../deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Index</li>
<li><a href="../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="index-17.html">Prev Letter</a></li>
<li><a href="index-19.html">Next Letter</a></li>
</ul>
<ul class="navList">
<li><a href="../index.html?index-files/index-18.html" target="_top">Frames</a></li>
<li><a href="index-18.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="contentContainer"><a href="index-1.html">A</a> <a href="index-2.html">B</a> <a href="index-3.html">C</a> <a href="index-4.html">D</a> <a href="index-5.html">E</a> <a href="index-6.html">F</a> <a href="index-7.html">G</a> <a href="index-8.html">H</a> <a href="index-9.html">I</a> <a href="index-10.html">K</a> <a href="index-11.html">L</a> <a href="index-12.html">M</a> <a href="index-13.html">N</a> <a href="index-14.html">O</a> <a href="index-15.html">P</a> <a href="index-16.html">R</a> <a href="index-17.html">S</a> <a href="index-18.html">T</a> <a href="index-19.html">U</a> <a href="index-20.html">V</a> <a href="index-21.html">W</a> <a href="index-22.html">X</a> <a name="I:T">
<!-- -->
</a>
<h2 class="title">T</h2>
<dl>
<dt><a href="../scheduling/hierarchical/snooze/msg/TermGLMsg.html" title="class in scheduling.hierarchical.snooze.msg"><span class="typeNameLink">TermGLMsg</span></a> - Class in <a href="../scheduling/hierarchical/snooze/msg/package-summary.html">scheduling.hierarchical.snooze.msg</a></dt>
<dd>
<div class="block">Created by sudholt on 20/07/2014.</div>
</dd>
<dt><span class="memberNameLink"><a href="../scheduling/hierarchical/snooze/msg/TermGLMsg.html#TermGLMsg-java.lang.String-java.lang.String-java.lang.String-java.lang.String-">TermGLMsg(String, String, String, String)</a></span> - Constructor for class scheduling.hierarchical.snooze.msg.<a href="../scheduling/hierarchical/snooze/msg/TermGLMsg.html" title="class in scheduling.hierarchical.snooze.msg">TermGLMsg</a></dt>
<dd> </dd>
<dt><a href="../scheduling/hierarchical/snooze/msg/TermGMMsg.html" title="class in scheduling.hierarchical.snooze.msg"><span class="typeNameLink">TermGMMsg</span></a> - Class in <a href="../scheduling/hierarchical/snooze/msg/package-summary.html">scheduling.hierarchical.snooze.msg</a></dt>
<dd>
<div class="block">Created by sudholt on 04/07/2014.</div>
</dd>
<dt><span class="memberNameLink"><a href="../scheduling/hierarchical/snooze/msg/TermGMMsg.html#TermGMMsg-java.lang.String-java.lang.String-java.lang.String-java.lang.String-">TermGMMsg(String, String, String, String)</a></span> - Constructor for class scheduling.hierarchical.snooze.msg.<a href="../scheduling/hierarchical/snooze/msg/TermGMMsg.html" title="class in scheduling.hierarchical.snooze.msg">TermGMMsg</a></dt>
<dd> </dd>
<dt><a href="../test/package-summary.html">test</a> - package test</dt>
<dd> </dd>
<dt><a href="../scheduling/hierarchical/snooze/Test.html" title="class in scheduling.hierarchical.snooze"><span class="typeNameLink">Test</span></a> - Class in <a href="../scheduling/hierarchical/snooze/package-summary.html">scheduling.hierarchical.snooze</a></dt>
<dd>
<div class="block">Created by sudholt on 20/07/2014.</div>
</dd>
<dt><span class="memberNameLink"><a href="../scheduling/hierarchical/snooze/Test.html#Test-org.simgrid.msg.Host-java.lang.String-">Test(Host, String)</a></span> - Constructor for class scheduling.hierarchical.snooze.<a href="../scheduling/hierarchical/snooze/Test.html" title="class in scheduling.hierarchical.snooze">Test</a></dt>
<dd> </dd>
<dt><a href="../test/TestA.html" title="class in test"><span class="typeNameLink">TestA</span></a> - Class in <a href="../test/package-summary.html">test</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../test/TestA.html#TestA-org.simgrid.msg.Host-java.lang.String-java.lang.String:A-">TestA(Host, String, String[])</a></span> - Constructor for class test.<a href="../test/TestA.html" title="class in test">TestA</a></dt>
<dd> </dd>
<dt><a href="../test/TestB.html" title="class in test"><span class="typeNameLink">TestB</span></a> - Class in <a href="../test/package-summary.html">test</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../test/TestB.html#TestB-org.simgrid.msg.Host-java.lang.String-java.lang.String:A-">TestB(Host, String, String[])</a></span> - Constructor for class test.<a href="../test/TestB.html" title="class in test">TestB</a></dt>
<dd> </dd>
<dt><a href="../scheduling/hierarchical/snooze/msg/TestFailGLMsg.html" title="class in scheduling.hierarchical.snooze.msg"><span class="typeNameLink">TestFailGLMsg</span></a> - Class in <a href="../scheduling/hierarchical/snooze/msg/package-summary.html">scheduling.hierarchical.snooze.msg</a></dt>
<dd>
<div class="block">Created by sudholt on 19/07/2014.</div>
</dd>
<dt><span class="memberNameLink"><a href="../scheduling/hierarchical/snooze/msg/TestFailGLMsg.html#TestFailGLMsg-java.lang.String-java.lang.String-java.lang.String-java.lang.String-">TestFailGLMsg(String, String, String, String)</a></span> - Constructor for class scheduling.hierarchical.snooze.msg.<a href="../scheduling/hierarchical/snooze/msg/TestFailGLMsg.html" title="class in scheduling.hierarchical.snooze.msg">TestFailGLMsg</a></dt>
<dd> </dd>
<dt><a href="../scheduling/hierarchical/snooze/msg/TestFailGMMsg.html" title="class in scheduling.hierarchical.snooze.msg"><span class="typeNameLink">TestFailGMMsg</span></a> - Class in <a href="../scheduling/hierarchical/snooze/msg/package-summary.html">scheduling.hierarchical.snooze.msg</a></dt>
<dd>
<div class="block">Created by sudholt on 20/07/2014.</div>
</dd>
<dt><span class="memberNameLink"><a href="../scheduling/hierarchical/snooze/msg/TestFailGMMsg.html#TestFailGMMsg-java.lang.String-java.lang.String-java.lang.String-java.lang.String-">TestFailGMMsg(String, String, String, String)</a></span> - Constructor for class scheduling.hierarchical.snooze.msg.<a href="../scheduling/hierarchical/snooze/msg/TestFailGMMsg.html" title="class in scheduling.hierarchical.snooze.msg">TestFailGMMsg</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../scheduling/hierarchical/snooze/Test.html#testsToBeTerminated">testsToBeTerminated</a></span> - Variable in class scheduling.hierarchical.snooze.<a href="../scheduling/hierarchical/snooze/Test.html" title="class in scheduling.hierarchical.snooze">Test</a></dt>
<dd> </dd>
<dt><a href="../scheduling/hierarchical/snooze/ThreadPool.html" title="class in scheduling.hierarchical.snooze"><span class="typeNameLink">ThreadPool</span></a> - Class in <a href="../scheduling/hierarchical/snooze/package-summary.html">scheduling.hierarchical.snooze</a></dt>
<dd>
<div class="block">Created by sudholt on 31/07/2014.</div>
</dd>
<dt><span class="memberNameLink"><a href="../scheduling/distributed/dvms2/TimeoutCheckerProcess.html#timeoutActor">timeoutActor</a></span> - Variable in class scheduling.distributed.dvms2.<a href="../scheduling/distributed/dvms2/TimeoutCheckerProcess.html" title="class in scheduling.distributed.dvms2">TimeoutCheckerProcess</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../scheduling/distributed/dvms2/TimeoutCheckerProcess.TimeoutCheckerActor.html#TimeoutCheckerActor-scheduling.distributed.dvms2.SGNodeRef-configuration.XHost-scheduling.distributed.dvms2.DVMSProcess-">TimeoutCheckerActor(SGNodeRef, XHost, DVMSProcess)</a></span> - Constructor for class scheduling.distributed.dvms2.<a href="../scheduling/distributed/dvms2/TimeoutCheckerProcess.TimeoutCheckerActor.html" title="class in scheduling.distributed.dvms2">TimeoutCheckerProcess.TimeoutCheckerActor</a></dt>
<dd> </dd>
<dt><a href="../scheduling/distributed/dvms2/TimeoutCheckerProcess.html" title="class in scheduling.distributed.dvms2"><span class="typeNameLink">TimeoutCheckerProcess</span></a> - Class in <a href="../scheduling/distributed/dvms2/package-summary.html">scheduling.distributed.dvms2</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../scheduling/distributed/dvms2/TimeoutCheckerProcess.html#TimeoutCheckerProcess-configuration.XHost-java.lang.String-int-scheduling.distributed.dvms2.SGNodeRef-scheduling.distributed.dvms2.DVMSProcess-">TimeoutCheckerProcess(XHost, String, int, SGNodeRef, DVMSProcess)</a></span> - Constructor for class scheduling.distributed.dvms2.<a href="../scheduling/distributed/dvms2/TimeoutCheckerProcess.html" title="class in scheduling.distributed.dvms2">TimeoutCheckerProcess</a></dt>
<dd> </dd>
<dt><a href="../scheduling/distributed/dvms2/TimeoutCheckerProcess.TimeoutCheckerActor.html" title="class in scheduling.distributed.dvms2"><span class="typeNameLink">TimeoutCheckerProcess.TimeoutCheckerActor</span></a> - Class in <a href="../scheduling/distributed/dvms2/package-summary.html">scheduling.distributed.dvms2</a></dt>
<dd> </dd>
<dt><a href="../scheduling/distributed/dvms2/TimeoutSnoozerProcess.html" title="class in scheduling.distributed.dvms2"><span class="typeNameLink">TimeoutSnoozerProcess</span></a> - Class in <a href="../scheduling/distributed/dvms2/package-summary.html">scheduling.distributed.dvms2</a></dt>
<dd>
<div class="block">Created by jonathan on 24/11/14.</div>
</dd>
<dt><span class="memberNameLink"><a href="../scheduling/distributed/dvms2/TimeoutSnoozerProcess.html#TimeoutSnoozerProcess-org.simgrid.msg.Host-java.lang.String-java.lang.String-int-">TimeoutSnoozerProcess(Host, String, String, int)</a></span> - Constructor for class scheduling.distributed.dvms2.<a href="../scheduling/distributed/dvms2/TimeoutSnoozerProcess.html" title="class in scheduling.distributed.dvms2">TimeoutSnoozerProcess</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../configuration/VMClasses.VMClass.html#toString--">toString()</a></span> - Method in class configuration.<a href="../configuration/VMClasses.VMClass.html" title="class in configuration">VMClasses.VMClass</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../configuration/XVM.html#toString--">toString()</a></span> - Method in class configuration.<a href="../configuration/XVM.html" title="class in configuration">XVM</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../injector/FaultEvent.html#toString--">toString()</a></span> - Method in class injector.<a href="../injector/FaultEvent.html" title="class in injector">FaultEvent</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../injector/LoadEvent.html#toString--">toString()</a></span> - Method in class injector.<a href="../injector/LoadEvent.html" title="class in injector">LoadEvent</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../injector/VMSuspendResumeEvent.html#toString--">toString()</a></span> - Method in class injector.<a href="../injector/VMSuspendResumeEvent.html" title="class in injector">VMSuspendResumeEvent</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../scheduling/distributed/dvms2/SGNodeRef.html#toString--">toString()</a></span> - Method in class scheduling.distributed.dvms2.<a href="../scheduling/distributed/dvms2/SGNodeRef.html" title="class in scheduling.distributed.dvms2">SGNodeRef</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../scheduling/hierarchical/snooze/msg/SnoozeMsg.html#toString--">toString()</a></span> - Method in class scheduling.hierarchical.snooze.msg.<a href="../scheduling/hierarchical/snooze/msg/SnoozeMsg.html" title="class in scheduling.hierarchical.snooze.msg">SnoozeMsg</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../scheduling/Scheduler.ComputingResult.State.html#toString--">toString()</a></span> - Method in enum scheduling.<a href="../scheduling/Scheduler.ComputingResult.State.html" title="enum in scheduling">Scheduler.ComputingResult.State</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../scheduling/Scheduler.SchedulerResult.State.html#toString--">toString()</a></span> - Method in enum scheduling.<a href="../scheduling/Scheduler.SchedulerResult.State.html" title="enum in scheduling">Scheduler.SchedulerResult.State</a></dt>
<dd> </dd>
<dt><a href="../trace/package-summary.html">trace</a> - package trace</dt>
<dd> </dd>
<dt><a href="../trace/Trace.html" title="class in trace"><span class="typeNameLink">Trace</span></a> - Class in <a href="../trace/package-summary.html">trace</a></dt>
<dd>
<div class="block">Created by jonathan on 17/09/14.</div>
</dd>
<dt><span class="memberNameLink"><a href="../trace/Trace.html#Trace--">Trace()</a></span> - Constructor for class trace.<a href="../trace/Trace.html" title="class in trace">Trace</a></dt>
<dd> </dd>
<dt><a href="../trace/TraceImpl.html" title="class in trace"><span class="typeNameLink">TraceImpl</span></a> - Class in <a href="../trace/package-summary.html">trace</a></dt>
<dd>
<div class="block">Copyright 2012-2013-2014.</div>
</dd>
<dt><span class="memberNameLink"><a href="../trace/TraceImpl.html#TraceImpl--">TraceImpl()</a></span> - Constructor for class trace.<a href="../trace/TraceImpl.html" title="class in trace">TraceImpl</a></dt>
<dd> </dd>
<dt><a href="../trace/TraceTest.html" title="class in trace"><span class="typeNameLink">TraceTest</span></a> - Class in <a href="../trace/package-summary.html">trace</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../trace/TraceTest.html#TraceTest--">TraceTest()</a></span> - Constructor for class trace.<a href="../trace/TraceTest.html" title="class in trace">TraceTest</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../configuration/XHost.html#turnOff--">turnOff()</a></span> - Method in class configuration.<a href="../configuration/XHost.html" title="class in configuration">XHost</a></dt>
<dd>
<div class="block">turnOff a host (the host should be off, otherwise nothing happens)</div>
</dd>
<dt><span class="memberNameLink"><a href="../simulation/SimulatorManager.html#turnOff-configuration.XHost-">turnOff(XHost)</a></span> - Static method in class simulation.<a href="../simulation/SimulatorManager.html" title="class in simulation">SimulatorManager</a></dt>
<dd>
<div class="block">Turn off the XHost host</div>
</dd>
<dt><span class="memberNameLink"><a href="../configuration/XHost.html#turnOn--">turnOn()</a></span> - Method in class configuration.<a href="../configuration/XHost.html" title="class in configuration">XHost</a></dt>
<dd>
<div class="block">Turn on a host (the host should have been turn off previously), otherwise nothing happens</div>
</dd>
<dt><span class="memberNameLink"><a href="../simulation/SimulatorManager.html#turnOn-configuration.XHost-">turnOn(XHost)</a></span> - Static method in class simulation.<a href="../simulation/SimulatorManager.html" title="class in simulation">SimulatorManager</a></dt>
<dd>
<div class="block">Turn on the XHost host</div>
</dd>
</dl>
<a href="index-1.html">A</a> <a href="index-2.html">B</a> <a href="index-3.html">C</a> <a href="index-4.html">D</a> <a href="index-5.html">E</a> <a href="index-6.html">F</a> <a href="index-7.html">G</a> <a href="index-8.html">H</a> <a href="index-9.html">I</a> <a href="index-10.html">K</a> <a href="index-11.html">L</a> <a href="index-12.html">M</a> <a href="index-13.html">N</a> <a href="index-14.html">O</a> <a href="index-15.html">P</a> <a href="index-16.html">R</a> <a href="index-17.html">S</a> <a href="index-18.html">T</a> <a href="index-19.html">U</a> <a href="index-20.html">V</a> <a href="index-21.html">W</a> <a href="index-22.html">X</a> </div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li><a href="../overview-tree.html">Tree</a></li>
<li><a href="../deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Index</li>
<li><a href="../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="index-17.html">Prev Letter</a></li>
<li><a href="index-19.html">Next Letter</a></li>
</ul>
<ul class="navList">
<li><a href="../index.html?index-files/index-18.html" target="_top">Frames</a></li>
<li><a href="index-18.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
<file_sep>/src/main/java/scheduling/centralized/ffd/OptimisticFirstFitDecreased.java
package scheduling.centralized.ffd;
import configuration.SimulatorProperties;
import configuration.XHost;
import configuration.XVM;
import org.simgrid.msg.Msg;
import simulation.SimulatorManager;
import java.util.*;
public class OptimisticFirstFitDecreased extends FirstFitDecreased {
public OptimisticFirstFitDecreased(Collection<XHost> hosts) {
this(hosts, new Random(SimulatorProperties.getSeed()).nextInt());
}
public OptimisticFirstFitDecreased(Collection<XHost> hosts, Integer id) {
super(hosts, id);
}
@Override
protected void manageOverloadedHost(List<XHost> overloadedHosts, ComputingResult result) {
TreeSet<XVM> toSchedule = new TreeSet<>(new XVMComparator(true, useLoad));
Map<XVM, XHost> sources = new HashMap<>();
for(XHost host: SimulatorManager.getSGHostingHosts()) {
predictedCPUDemand.put(host, host.getCPUDemand());
predictedMemDemand.put(host, host.getMemDemand());
}
// Remove all VMs from the overloaded hosts
for(XHost host: overloadedHosts) {
for(XVM vm: host.getRunnings()) {
toSchedule.add(vm);
sources.put(vm, host);
}
predictedCPUDemand.put(host, 0D);
predictedMemDemand.put(host, 0);
}
for(XVM vm: toSchedule) {
XHost dest = null;
// Try to find a new host for the VM
for(XHost host: SimulatorManager.getSGHostingHosts()) {
if(host.getCPUCapacity() >= predictedCPUDemand.get(host) + vm.getCPUDemand() &&
host.getMemSize() >= predictedMemDemand.get(host) + vm.getMemSize()) {
dest = host;
break;
}
}
if(dest == null) {
result.state = ComputingResult.State.RECONFIGURATION_FAILED;
return;
}
if(predictedCPUDemand.get(dest) >= dest.getCPUCapacity())
System.out.println("Warning: destination " + dest.getName() + " is already at full CPU capacity");
// Schedule the migration
predictedCPUDemand.put(dest, predictedCPUDemand.get(dest) + vm.getCPUDemand());
predictedMemDemand.put(dest, predictedMemDemand.get(dest) + vm.getMemSize());
XHost source = sources.get(vm);
if(!source.getName().equals(dest.getName())) {
migrations.add(new Migration(vm, source, dest));
}
}
}
}
<file_sep>/src/main/java/injector/VMSuspendResumeEvent.java
package injector;
import configuration.XVM;
import org.simgrid.msg.*;
import simulation.SimulatorManager;
import trace.Trace;
public class VMSuspendResumeEvent implements InjectorEvent{
private long id ;
private double time;
private XVM vm;
private boolean state; // suspend = false ; resume = true
public VMSuspendResumeEvent(long id, double time, XVM vm, boolean state) {
this.id = id;
this.time = time;
this.vm = vm;
this.state = state ;
}
public long getId(){
return this.id;
}
public double getTime() {
return this.time;
}
public XVM getVM(){
return this.vm;
}
public void play(){
if (this.state) {
SimulatorManager.resumeVM(vm.getName(), vm.getLocation().getName());
Trace.hostVariableAdd(SimulatorManager.getInjectorNodeName(), "NB_VM_TRUE", 1);
} else {
SimulatorManager.suspendVM(vm.getName(), vm.getLocation().getName());
Trace.hostVariableSub(SimulatorManager.getInjectorNodeName(), "NB_VM_TRUE", 1);
}
}
public String toString(){
return this.getTime() + "/" + this.getVM().getName() + "/" + this.state;
}
public boolean getState() {
return state;
}
}
<file_sep>/README.md
# VMPlaceS
[](https://travis-ci.org/BeyondTheClouds/VMPlaceS)
This repository contains the source of a dedicated framework to evaluate and compare VM placement algorithms.
For more details, a deeper scientific description of the project can be found [here](http://beyondtheclouds.github.io/VMPlaceS).
## Requirements
* sbt
* java 8 : **openjdk-8**
* python
* r language (optional: visualisation)
## Installation
### 1- Simgrid
#### 1.1- Get Simgrid
```
git clone git://scm.gforge.inria.fr/simgrid/simgrid.git
```
#### 1.2- Compile and install Simgrid
In the simgrid folder, run the following:
```
cmake -Denable_tracing=ON -Denable_documentation=OFF -Denable_java=ON -Denable_smpi=OFF .
```
and then
```
make
```
Please note that you can compile the source code faster by using the -j argument of make (see man make for further information). Then install:
```
make install
```
A file named **simgrid.jar**, containing the Java bindings to SimGrid, should now be located in the simgrid folder:
```
jonathan@artoo ~/simgrid (master)> ls -lh *.jar
-rw-r--r-- 1 jonathan staff 43K Nov 4 17:28 simgrid.jar
```
This file will be used during step **2.2**.
### 2- SBT
#### 2.1- Installation of sbt:
Please follow the instructions corresponding to your operating system [here](http://www.scala-sbt.org/release/tutorial/Setup.html).
#### 2.2- Installation of dependencies
Inside the project source folder, run the following:
```
$ sbt update
```
and then copy the **simgrid.jar** from **step 1.2** into the **lib** folder.
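For instance, assuming the SimGrid checkout from step 1.1 sits next to the VMPlaceS folder (adapt the path to your own layout):
```
$ mkdir -p lib
$ cp ../simgrid/simgrid.jar lib/
```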
### 3- Running the program
You can decide which algorithm you want to test (see [here](http://github.com/BeyondTheClouds/VMPlaceS/blob/master/ALGORITHMS.md)).
#### 3.1- IntelliJ
##### 3.1.1- Make the project compatible with IntelliJ
Inside the project source folder, run the following:
```
$ sbt gen-idea
```
and open the folder in IntelliJ: a fully configured project has been generated.
##### 3.1.2- Set the configuration
Click on **Run > Edit configurations...** and then the **plus (+)** button in the top left corner. Choose **Application**.
Name it as you wish, for instance _VMPlaceS_, and set the options:
Option Type | Value
------------------------|-------------
Main class | simulation.Main
VM_OPTIONS | -Xmx4G -d64 -Dlogback.configurationFile=config/logback.xml
PROGRAM_ARGUMENTS | ./config/cluster_platform.xml ./config/generated_deploy.xml --cfg=cpu/optim:Full --cfg=tracing:1 --cfg=tracing/filename:simu.trace --cfg=tracing/platform:1
Use classpath of module | VMPlaceS
##### 3.1.3- Run the program
Click **Run > Run 'VMPlaceS'**.
#### 3.2- Command line
##### 3.2.1- Set the environment variables
Environment var | Value
-------------------|-------------
VM_OPTIONS | -Xmx4G -d64 -Dlogback.configurationFile=config/logback.xml
PROGRAM_ARGUMENTS | ./config/cluster_platform.xml ./config/generated_deploy.xml --cfg=cpu/optim:Full --cfg=tracing:1 --cfg=tracing/filename:simu.trace --cfg=tracing/platform:1
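For example, in a bash-compatible shell you can export them as follows (values taken from the table above):
```
$ export VM_OPTIONS="-Xmx4G -d64 -Dlogback.configurationFile=config/logback.xml"
$ export PROGRAM_ARGUMENTS="./config/cluster_platform.xml ./config/generated_deploy.xml --cfg=cpu/optim:Full --cfg=tracing:1 --cfg=tracing/filename:simu.trace --cfg=tracing/platform:1"
```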
##### 3.2.2- Create the jar
Inside the project source folder, run the following command:
```
sbt assembly
```
It results in the creation of a **fat-jar** named **simulation.jar** in the **target** folder:
```
jonathan@artoo ~/D/w/VMPlaceS (master)> ls -lh target/*.jar
-rw-r--r-- 1 jonathan staff 12M Dec 18 14:21 target/simulation.jar
```
This jar contains all dependencies and can be run with the java command. Please note that the jar must be located in a folder that contains the **config** folder.
##### 3.2.3- Run the program
Thus it is possible to run the jar with the following command:
```
java $VM_OPTIONS -cp simulation.jar simulation.Main $PROGRAM_ARGUMENTS
```
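(This assumes the fat-jar has been copied next to the **config** folder; adapt the classpath, e.g. target/simulation.jar, to your own layout.)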
### 4- Running experiments on grid'5000
We developed some scripts to ease conducting experiments on Grid'5000. These scripts are located on the Rennes site, in the folder /home/jpastor.
Further documentation will arrive later: in case you plan to use it now, do not hesitate to contact us!
### 5- Running energy comparison of centralized algorithms
Run all the algorithms sequentially, turning off/not turning off the unused hosts:
```
$ rm -f energy.dat && sbt assembly && ./run_all.sh
```
Make the plots:
```
$ visu/energy_plot.py run_all.log energy.dat
```
<file_sep>/src/main/java/simulation/SimulatorManager.java
/**
* Copyright 2012-2013-2014. The SimGrid Team. All rights reserved.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the license (GNU LGPL) which comes with this package.
*
* This class aims at controlling the interactions between the different components of the injector simulator.
* It is mainly composed of static methods. Although it is rather ugly, this is the direct way to make a kind of
* singleton ;)
*
* @author <EMAIL>
* @contributor <EMAIL>
*/
package simulation;
import configuration.*;
import org.simgrid.msg.Host;
import org.simgrid.msg.HostNotFoundException;
import org.simgrid.msg.Msg;
import org.simgrid.msg.MsgException;
import scheduling.Scheduler;
import scheduling.hierarchical.snooze.LocalController;
import scheduling.hierarchical.snooze.Logger;
import trace.Trace;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.*;
/**
* Created with IntelliJ IDEA.
* User: alebre
* Date: 14/01/14
* Time: 10:49
* To change this template use File | Settings | File Templates.
*/
public class SimulatorManager {
public static int iSuspend = 0;
public static int iResume = 0;
/**
* Stupid variable to monitor the duration of the simulation
*/
private static double beginTimeOfSimulation = -1;
/**
* Stupid variable to monitor the duration of the simulation
*/
private static double endTimeOfSimulation = -1;
/**
* The list of XVMs that are considered as off (i.e. the hosting machine is off)
* @see configuration.XVM
*/
private static HashMap<String,XVM> sgVMsOff = null;
/**
* The list of XVMs that run
* @see configuration.XVM
*/
private static HashMap<String,XVM> sgVMsOn = null;
/**
* The list of XVMs that should be suspended (right now, it is impossible to suspend VMs that are currently being migrated)
* This collection is used to suspend VMs after the completion of the migration process.
* @see configuration.XVM
*/
public static HashMap<String,XVM> sgVMsToSuspend = null;
/**
* The list of XHosts that are off
* @see configuration.XHost
*/
private static HashMap<String,XHost> sgHostsOff= null;
/**
* The list of Xhosts that are running
*/
private static HashMap<String,XHost> sgHostsOn= null;
/**
* The list of XHosts that have been declared as hosting nodes (i.e. that can host VMs)
* @see configuration.XHost
*/
private static HashMap<String,XHost> sgHostingHosts= null;
/**
* The list of XHosts that have been declared as service nodes (i.e. that cannot host VMs)
*/
private static HashMap<String, XHost> sgServiceHosts= null;
/**
* Just a stupid sorted table to have a reference toward each host and vm
* Used by the injector when generating the different event queues.
*/
private static XHost[] xhosts = null;
private static XVM[] xvms = null;
/**
* Average CPU demand of the infrastructure (just a hack to avoid recomputing the CPU demand each time, since computing it is O(n))
*/
// TODO Adrien the currentCPUDemand is currently not correctly assigned (this should be done in the update function)
private static double currentCPUDemand = 0;
/**
* The previous energy consumption
*/
private static Map<XHost, Double> lastEnergy = new HashMap<>();
/**
* Reference toward the scheduler
*/
private static boolean isSchedulerActive;
public static boolean isSchedulerActive() {
return isSchedulerActive;
}
public static void setSchedulerActive(boolean val) {
isSchedulerActive=val;
}
/**
* Set the scheduler
*/
/**
* When the injection is complete, we turn the endOfInjection boolean to true and kill the running daemon inside each VM
*/
public static void setEndOfInjection() {
endTimeOfSimulation = System.currentTimeMillis();
}
public static void finalizeSimulation(){
Msg.info(String.format("Hosts up: %d/%d", sgHostsOn.size(), getSGHosts().size()));
Msg.info(String.format("VMs up: %d/%d", sgVMsOn.size(), getSGVMs().size()));
for (XHost host : SimulatorManager.getSGHosts()) {
Msg.info(host.getName() + " has been turned off "+host.getTurnOffNb()+" times and violated "+host.getNbOfViolations());
}
// Kill all VMs daemons in order to finalize the simulation correctly
for (XVM vm : SimulatorManager.getSGVMs()) {
Msg.info(vm.getName() + " load changes: "+vm.getNbOfLoadChanges() + "/ migrated: "+vm.getNbOfMigrations());
if(vm.isRunning()) {
Msg.info("VM is running");
Msg.info("VM is migrating: " + vm.isMigrating());
Msg.info("Daemon is suspended: " + vm.getDaemon().isSuspended());
vm.getDaemon().kill();
}
else {
Msg.info("VM is suspended");
}
}
Msg.info("Duration of the simulation in ms: "+(endTimeOfSimulation - beginTimeOfSimulation));
Msg.info(Daemon.n_daemon + " daemons are still running");
}
/**
* @return whether the injection is completed or not
*/
public static boolean isEndOfInjection(){
return (endTimeOfSimulation != -1);
}
/**
* @return the collection of XVMs: all VMs, i.e. the running ones and the ones that are considered as dead
* (i.e. hosted on hosts that have been turned off)
*/
public static Collection<XVM> getSGVMs(){
LinkedList<XVM> tmp = new LinkedList<>(sgVMsOn.values());
tmp.addAll(sgVMsOff.values());
return tmp;
}
/**
* @return the collection of running XVMs
*/
public static Collection<XVM> getSGVMsOn(){
return sgVMsOn.values();
}
/**
* @return the collection of the XVMs considered as dead
*/
public static Collection<XVM> getSGVMsOff(){
return sgVMsOff.values();
}
/**
* @return the collection of XHosts (i.e. the hosts that compose the infrastructure).
* Please note that the returned collection is not sorted. If you need a sorted structure, you should call getSGHostsToArray(), which returns a simple array
*/
public static Collection<XHost> getSGHosts(){
LinkedList<XHost> tmp = new LinkedList<XHost>(sgHostingHosts.values());
tmp.addAll(sgServiceHosts.values());
return tmp;
}
/**
* @return a simple sorted array referencing all the XHosts (i.e. the hosts that compose the infrastructure).
* If you only need a collection (without any ordering guarantee), you should call getSGHosts() instead
*/
public static XHost[] getSGHostsToArray(){
return xhosts;
}
public static XVM[] getSGVMsToArray() {
return xvms;
}
/**
* @return the collection of XHosts that have been declared as hosting nodes (i.e. that can host VMs)
* Please note that all HostingHosts are returned (without making any distinctions between on and off hosts)
*/
public static Collection<XHost> getSGHostingHosts(){
return sgHostingHosts.values();
}
/**
* @return the collection of XHosts that have been declared as hosting nodes (i.e. that can host VMs) and that are turned on.
*/
public static Collection<XHost> getSGTurnOnHostingHosts() {
LinkedList<XHost> tmp = new LinkedList<XHost>();
for (XHost h: sgHostingHosts.values())
if (!h.isOff())
tmp.add(h);
return tmp;
}
public static Collection<XHost> getSGTurnOffHostingHosts() {
LinkedList<XHost> tmp = new LinkedList<XHost>();
for(XHost h: sgHostingHosts.values())
if(h.isOff())
tmp.add(h);
return tmp;
}
/**
* @return the collection of XHosts that have been declared as services nodes (i.e. that cannot host VMs)
*/
public static Collection<XHost> getSGServiceHosts(){
return sgServiceHosts.values();
}
/**
* @return the name of the service node (generally node0, if you do not change the first part of the main regarding the generation
* of the deployment file).
* If you change it, please note that you should then update the getInjectorNodeName code.
*/
public static String getInjectorNodeName() {
return "node"+(SimulatorProperties.getNbOfHostingNodes()+SimulatorProperties.getNbOfServiceNodes());
}
/**
* For each MSG host (but the service node), the function creates an associated XHost.
* As a reminder, the XHost class extends the Host one by aggregation.
* At the end, all created hosts have been inserted into the sgHosts collection (see getSGHostingHosts function)
* @param nbOfHostingHosts the number of hosts that will be used to host VMs
* @param nbOfServiceHosts the number of hosts that will be used to host services
*/
public static void initHosts(int nbOfHostingHosts, int nbOfServiceHosts){
// Since SG does not make any distinction between Host and Virtual Host (VMs and Hosts belong to the Host SG table)
// we should retrieve first the real host in a separated table
// Please remind that node0 does not host VMs (it is a service node) and hence, it is managed separately (getInjectorNodeName())
sgHostsOn = new HashMap<String,XHost>();
sgHostsOff = new HashMap<String,XHost>();
sgHostingHosts = new HashMap<String,XHost>();
sgServiceHosts = new HashMap<String,XHost>();
xhosts = new XHost[nbOfHostingHosts+nbOfServiceHosts];
XHost xtmp;
// Hosting hosts
for(int i = 0 ; i < nbOfHostingHosts ; i ++){
try {
Host tmp = Host.getByName("node" + i);
// The SimulatorProperties.getCPUCapacity returns the value indicated by nodes.cpucapacity in the simulator.properties file
xtmp = new XHost (tmp, SimulatorProperties.getMemoryTotal(), SimulatorProperties.getNbOfCPUs(), SimulatorProperties.getCPUCapacity(), SimulatorProperties.getNetCapacity(), "127.0.0.1");
xtmp.turnOn();
sgHostsOn.put("node"+i, xtmp);
sgHostingHosts.put("node" + i, xtmp);
xhosts[i]=xtmp;
} catch (HostNotFoundException e) {
e.printStackTrace();
}
}
//Service hosts
for(int i = nbOfHostingHosts ; i < nbOfHostingHosts+nbOfServiceHosts ; i ++){
try {
Host tmp = Host.getByName("node" + i);
// The SimulatorProperties.getCPUCapacity returns the value indicated by nodes.cpucapacity in the simulator.properties file
xtmp = new XHost (tmp, SimulatorProperties.getMemoryTotal(), SimulatorProperties.getNbOfCPUs(), SimulatorProperties.getCPUCapacity(), SimulatorProperties.getNetCapacity(), "127.0.0.1");
xtmp.turnOn();
sgHostsOn.put("node" + i, xtmp);
sgServiceHosts.put("node" + i, xtmp);
xhosts[i]=xtmp;
} catch (HostNotFoundException e) {
e.printStackTrace();
}
}
}
/**
* Create and assign the VMs on the different hosts.
* For the moment, the initial placement follows a simple round robin strategy
* The algorithm fills the first host with the first n VMs until it reaches either the memory or the CPU limit, then it switches to the second host and so on.
* Note that if the ''balance'' mode is enabled then the initial placement will put the same number of VMs on each node.
* The function can crash if there are too many VMs for the physical resources.
* At the end the collection SimulatorManager.getSGVMs() is filled.
* @param nbOfHostingHosts the number of hosting hosts composing the infrastructure
* @param nbOfServiceHosts the number of service hosts composing the infrastructure
* @param nbOfVMs the number of VMs to instantiate
*/
public static void configureHostsAndVMs(int nbOfHostingHosts, int nbOfServiceHosts, int nbOfVMs, boolean balance) {
int nodeIndex = 0;
int[] nodeMemCons = new int[nbOfHostingHosts];
int[] nodeCpuCons = new int[nbOfHostingHosts];
int vmIndex= 0;
int nbVMOnNode;
Random r = new Random(SimulatorProperties.getSeed());
int nbOfVMClasses = VMClasses.CLASSES.size();
VMClasses.VMClass vmClass;
initHosts(nbOfHostingHosts, nbOfServiceHosts);
sgVMsOn = new HashMap<String,XVM>();
sgVMsOff = new HashMap<String,XVM>();
sgVMsToSuspend = new HashMap<String,XVM>();
xvms = new XVM[nbOfVMs];
XVM sgVMTmp;
Iterator<XHost> sgHostsIterator = SimulatorManager.getSGHostingHosts().iterator();
XHost sgHostTmp = sgHostsIterator.next();
nodeMemCons[nodeIndex]=0;
nodeCpuCons[nodeIndex]=0;
nbVMOnNode =0;
//Add VMs to each node, preventing memory over provisioning
while(vmIndex < nbOfVMs){
// Select the class for the VM
vmClass = VMClasses.CLASSES.get(r.nextInt(nbOfVMClasses));
//Check whether we can put this VM on the current node if not get the next one
//The first condition controls the memory over provisioning issue while the second one enables to switch to
// the next node if the ''balance'' mode is enabled.
// If there is no more nodes, then we got an exception and the simulator.properties should be modified.
double vmsPerNodeRatio = ((double) nbOfVMs)/nbOfHostingHosts;
try {
while ((nodeMemCons[nodeIndex] + vmClass.getMemSize() > sgHostTmp.getMemSize()
|| nodeCpuCons[nodeIndex] + SimulatorProperties.getMeanLoad() > sgHostTmp.getCPUCapacity())
|| (balance && nbVMOnNode >= vmsPerNodeRatio)) {
sgHostTmp = sgHostsIterator.next();
nodeMemCons[++nodeIndex] = 0;
nodeCpuCons[nodeIndex] = 0;
nbVMOnNode = 0;
}
} catch(NoSuchElementException ex){
System.err.println("There is not enough memory on the physical hosts to start all VMs");
System.err.println(String.format("Number of hosts: %d", nbOfHostingHosts));
System.err.println(String.format("Number of VMs: %d", nbOfVMs));
System.err.println(String.format("VM placed: %d", vmIndex));
System.err.println("(Please fix simulator.properties parameters and you should dive in the SimulatorManager.configureHostsAndVMs() function");
System.exit(1);
}
// Creation of the VM
sgVMTmp = new XVM(sgHostTmp, "vm-" + vmIndex,
vmClass.getNbOfCPUs(), vmClass.getMemSize(), vmClass.getNetBW(), null, -1, vmClass.getMigNetBW(), vmClass.getMemIntensity());
sgVMsOn.put("vm-"+vmIndex, sgVMTmp);
xvms[vmIndex] = sgVMTmp;
vmIndex++;
Msg.info(String.format("vm: %s, %d, %d, %s",
sgVMTmp.getName(),
vmClass.getMemSize(),
vmClass.getNbOfCPUs(),
"NO IPs defined"
));
Msg.info("vm " + sgVMTmp.getName() + " is " + vmClass.getName() + ", dp is " + vmClass.getMemIntensity());
// Assign the new VM to the current host.
sgHostTmp.start(sgVMTmp); // When the VM starts, its getCPUDemand equals 0
nbVMOnNode ++;
nodeMemCons[nodeIndex] += sgVMTmp.getMemSize();
nodeCpuCons[nodeIndex] += SimulatorProperties.getMeanLoad();
}
}
/**
* write the current configuration in the ''logs/simulatorManager/'' directory
*/
public static void writeCurrentConfiguration(){
try {
File file = new File("logs/simulatorManager/conf-"+ System.currentTimeMillis() + ".txt");
file.getParentFile().mkdirs();
BufferedWriter bw = new BufferedWriter(new FileWriter(file));
for (XHost h: SimulatorManager.getSGHostingHosts()){
bw.write(h.getName()+":");
for (XVM vm: h.getRunnings()){
bw.write(" "+vm.getName());
}
bw.write("\n");
bw.flush();
}
bw.close();
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Remove all logs from the previous run
*/
public static void cleanLog(){
try {
Runtime.getRuntime().exec("rm -rf ./logs/simulatorManager");
Runtime.getRuntime().exec("rm -rf ./logs/entropy");
Runtime.getRuntime().exec("rm -rf ./logs/entropy.log");
Runtime.getRuntime().exec("rm -rf ./logs/btrplace");
Runtime.getRuntime().exec("rm -rf ./logs/btrplace.log");
Runtime.getRuntime().exec("rm -rf ./logs/ffd");
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* @return whether the current placement is viable or not (i.e. if every VM gets its expectations).
* Please note that we are considering only the hosts that are running.
* Complexity O(n)
*/
public static boolean isViable() {
for (XHost h: sgHostsOn.values()){
if(!h.isViable())
return false;
}
return true;
}
/**
* @return the average expected load at a particular moment (i.e. average load of each node)
* Please note that we are considering only the hosts hosting VMs and that are up.
*/
public static double computeCPUDemand() {
double globalCpuDemand = 0.0;
int globalCpuCapacity = 0;
for(XHost h: sgHostingHosts.values()){
if(h.isOn()) {
globalCpuDemand += h.getCPUDemand();
globalCpuCapacity += h.getCPUCapacity();
}
}
return 100 * globalCpuDemand / globalCpuCapacity;
}
public static double getCPUDemand(){
// TODO Adrien, maintain the current CPU Demand in order to avoid O(n)
//return currentCPUDemand;
return computeCPUDemand();
}
/**
* @return the number of hosts that are active (i.e. that host at least one VM)
* Complexity O(n)
*/
public static int getNbOfUsedHosts() {
int i=0;
for (XHost h: sgHostsOn.values()){
if(h.getNbVMs()>0)
i++;
}
return i;
}
/**
* Return the XHost entitled ''name'', otherwise return null (please note that the search is performed by considering
* all hosts (i.e. On/Off and Hosting/Service ones)
* @param name the name of the host requested
* @return the corresponding XHost instance (null if there is no corresponding host in the sgHosts collection)
*/
public static XHost getXHostByName(String name) {
XHost tmp = sgHostingHosts.get(name);
if (tmp == null)
tmp = sgServiceHosts.get(name);
return tmp;
}
/**
* Return the XVM entitled ''name'', otherwise return null (please note that the search is performed by considering
* all VMs, i.e. even the off ones)
* @param name the name of the vm requested
* @return the corresponding XVM instance (null if there is no corresponding vm in the sgVMs collection)
*/
public static XVM getXVMByName(String name) {
XVM tmp = sgVMsOn.get(name);
if (tmp == null)
tmp = sgVMsOff.get(name);
if(tmp == null)
tmp = sgVMsToSuspend.get(name);
if(tmp == null) {
Msg.error("No ");
}
return tmp;
}
/**
* Change the load of a VM.
* Please note that we do not detect violations on off hosts (i.e. if the node that hosts the VM is off, we change
* the load of the VM for consistency reasons but we do not consider the violation that may stem from this change).
* @param sgVM the VM that should be updated
* @param load the new expected load
*/
public static void updateVM(XVM sgVM, double load) {
if(sgVM.isRunning()) {
XHost tmpHost = sgVM.getLocation();
boolean previouslyViable = tmpHost.isViable();
// A simple hack to avoid computing on-the-fly the CPUDemand of each host
double vmPreviousLoad = sgVM.getCPUDemand();
double hostPreviousLoad = tmpHost.getCPUDemand();
// Msg.info("Previous Load was" + hostPreviousLoad);
tmpHost.setCPUDemand(hostPreviousLoad - vmPreviousLoad + load);
// Msg.info("New Load is "+ tmpHost.getCPUDemand());
sgVM.setLoad(load);
// If the node is off, we change the VM load but we do not consider it for possible violation and do not update
// neither the global load of the node nor the global load of the cluster.
// Violations are detected only on running node
if (!tmpHost.isOff()) {
// Msg.info("Current getCPUDemand "+SimulatorManager.getCPUDemand()+"\n");
if (previouslyViable && (!tmpHost.isViable())) {
Msg.info("STARTING VIOLATION ON " + tmpHost.getName() + "\n");
tmpHost.incViolation();
Trace.hostSetState(tmpHost.getName(), "PM", "violation");
} else if ((!previouslyViable) && (tmpHost.isViable())) {
Msg.info("ENDING VIOLATION ON " + tmpHost.getName() + "\n");
Trace.hostSetState(tmpHost.getName(), "PM", "normal");
}
// else Do nothing the state does not change.
// Update getCPUDemand of the host
Trace.hostVariableSet(tmpHost.getName(), "LOAD", tmpHost.getCPUDemand());
// TODO this is costly O(HOST_NB) - SHOULD BE FIXED
//Update global getCPUDemand
Trace.hostVariableSet(SimulatorManager.getInjectorNodeName(), "LOAD", SimulatorManager.getCPUDemand());
}
double energy = tmpHost.getSGHost().getConsumedEnergy();
if (lastEnergy.containsKey(tmpHost))
energy -= lastEnergy.get(tmpHost);
Trace.hostVariableSet(tmpHost.getName(), "ENERGY", energy);
lastEnergy.put(tmpHost, tmpHost.getSGHost().getConsumedEnergy());
} else { // VM is suspended: just update the load for consistency reasons (i.e. when the VM is resumed, we should assign the expected load)
sgVM.setLoad(load);
}
}
public static boolean willItBeViableWith(XVM sgVM, int load){
XHost tmpHost = sgVM.getLocation();
double hostPreviousLoad = tmpHost.getCPUDemand();
double vmPreviousLoad = sgVM.getCPUDemand();
return ((hostPreviousLoad-vmPreviousLoad+load) <= tmpHost.getCPUCapacity());
}
/**
* Turn on the XHost host
* @param host the host to turn on
*/
public static void turnOn(XHost host) {
String name = host.getName();
if(host.isOff()) {
Msg.info("Turn on node "+name);
host.turnOn();
Trace.hostVariableAdd(host.getName(), "NB_ON", 1);
sgHostsOff.remove(name);
sgHostsOn.put(name, host);
// If you turn on a hosting node, then update the LOAD
if(sgHostingHosts.containsKey(name)) {
for (XVM vm: host.getRunnings()){
Msg.info("TURNING NODE "+name+"ON - ADD VM "+vm.getName());
sgVMsOff.remove(vm.getName());
sgVMsOn.put(vm.getName(), vm);
}
// Update getCPUDemand of the host
Trace.hostVariableSet(name, "LOAD", host.getCPUDemand());
// TODO test whether the node is violated or not (this can occur)
//Update global getCPUDemand
Trace.hostVariableSet(SimulatorManager.getInjectorNodeName(), "LOAD", SimulatorManager.getCPUDemand());
}
if (SimulatorProperties.getAlgo().equals("hierarchical")) {
int hostNo = Integer.parseInt(name.replaceAll("\\D", ""));
if (hostNo < SimulatorProperties.getNbOfHostingNodes()) {
try {
String[] lcArgs = new String[]{name, "dynLocalController-" + hostNo};
LocalController lc =
new LocalController(host.getSGHost(), "dynLocalController-" + hostNo, lcArgs);
lc.start();
Logger.info("[SimulatorManager.turnOn] Dyn. LC added: " + lcArgs[1]);
} catch (Exception e) {
e.printStackTrace();
}
}
}
} else{
Msg.info("Weird... you are asking to turn on a host that is already on !");
}
}
/**
* Turn off the XHost host
* @param host the host to turn off
*/
public static void turnOff(XHost host) {
if(host.isOnGoingMigration()){
Msg.info("WARNING = WE ARE NOT GOING TO TURN OFF HOST "+host.getName()+" BECAUSE THERE IS AN ON-GOING MIGRATION");
return;
}
if(!host.isOff()) {
Msg.info("Turn off "+host.getName());
// if this is a hosting host, then you should deal with VM aspects
if(sgHostingHosts.containsKey(host.getName())) {
// First remove all VMs hosted on the node from the global collection
// The VMs are still referenced on the node
for (XVM vm : host.getRunnings()) {
Msg.info("TURNING NODE "+host.getName()+"OFF - REMOVE VM "+vm.getName());
sgVMsOn.remove(vm.getName());
sgVMsOff.put(vm.getName(), vm);
}
// Update getCPUDemand of the host
Trace.hostVariableSet(host.getName(), "LOAD", 0);
// TODO if the node is violated then it is no more violated
//Update global getCPUDemand
Trace.hostVariableSet(SimulatorManager.getInjectorNodeName(), "LOAD", SimulatorManager.getCPUDemand());
}
int previousCount = org.simgrid.msg.Process.getCount();
// Turn the node off
host.turnOff();
// Finally, remove the node from the collection of running host and add it to the collection of off ones
sgHostsOn.remove(host.getName());
sgHostsOff.put(host.getName(), host);
// Msg.info("Nb of remaining processes on " + host.getName() + ": " + (previousCount - org.simgrid.msg.Process.getCount()));
Trace.hostVariableAdd(host.getName(), "NB_OFF", 1);
}
else{
Msg.info("Weird... you are asking to turn off a host that is already off !");
}
}
private static int getProcessCount(XHost host) {
Msg.info ("TODO");
System.exit(-1);
return -1;
}
/**
* Stupid variable to monitor the duration of the simulation
*/
public static void setBeginTimeOfSimulation(double beginTimeOfSimulation) {
SimulatorManager.beginTimeOfSimulation = beginTimeOfSimulation;
}
/**
* Stupid variable to monitor the duration of the simulation
*/
public static void setEndTimeOfSimulation(double endTimeOfSimulation) {
SimulatorManager.endTimeOfSimulation = endTimeOfSimulation;
}
/**
* Stupid variable to monitor the duration of the simulation
*/
public static double getSimulationDuration() {
return (endTimeOfSimulation != -1) ? endTimeOfSimulation - beginTimeOfSimulation : endTimeOfSimulation;
}
public static void writeEnergy(String logPath) {
Double energy = 0D;
for(XHost h: SimulatorManager.getSGHosts())
energy += h.getSGHost().getConsumedEnergy();
try {
String message = null;
if(SimulatorProperties.getAlgo().equals("centralized")) {
String implem = SimulatorProperties.getImplementation();
implem = implem.substring(implem.lastIndexOf('.') + 1, implem.length());
message = String.format(Locale.US, "%d %s %s %b %f\n", SimulatorProperties.getNbOfHostingNodes(), SimulatorProperties.getAlgo(), implem, SimulatorProperties.getHostsTurnoff(), energy);
}
else
message = String.format(Locale.US, "%d %s %b %f\n", SimulatorProperties.getNbOfHostingNodes(), SimulatorProperties.getAlgo(), SimulatorProperties.getHostsTurnoff(), energy);
Files.write(Paths.get(logPath), message.getBytes(), StandardOpenOption.CREATE, StandardOpenOption.APPEND);
} catch (IOException e) {
e.printStackTrace();
}
}
public static boolean suspendVM(String vmName, String hostName){
boolean correctlyCompleted= true;
Msg.info("Suspending VM " + vmName + " on " + hostName);
if (vmName != null) {
XVM vm = SimulatorManager.getXVMByName(vmName);
XHost host = SimulatorManager.getXHostByName(hostName);
if (vm != null) {
double timeStartingSuspension = Msg.getClock();
Trace.hostPushState(vmName, "SERVICE", "suspend", String.format("{\"vm_name\": \"%s\", \"on\": \"%s\"}", vmName, hostName));
boolean previouslyViable = host.isViable();
// 0 if success, 1 should be postponed, -1 if failure, -2 if already suspended
int res = host.suspendVM(vm);
Trace.hostPopState(vmName, "SERVICE", String.format("{\"vm_name\": \"%s\", \"state\": %d}", vmName, res));
double suspensionDuration = Msg.getClock() - timeStartingSuspension;
switch (res) {
case 0:
// Msg.info("End of suspension operation of VM " + vmName + " on " + hostName);
if (!previouslyViable && host.isViable()){
Msg.info("END OF VIOLATION ON " + host.getName() + "\n");
Trace.hostSetState(host.getName(), "PM", "normal");
}
SimulatorManager.iSuspend++;
/* Export that the suspension has finished */
Trace.hostSetState(vmName, "suspension", "finished", String.format(Locale.US, "{\"vm_name\": \"%s\", \"on\": \"%s\", \"duration\": %f}", vmName, hostName, suspensionDuration));
Trace.hostPopState(vmName, "suspension");
if (sgVMsOn.remove(vm.getName()) == null && sgVMsToSuspend.remove(vm.getName())== null){
System.err.println("You are trying to suspend a VM which is not on... weird");
System.exit(-1);
}
sgVMsOff.put(vm.getName(), vm);
Trace.hostVariableSub(SimulatorManager.getInjectorNodeName(), "NB_VM", 1);
break;
case 1:
Msg.info("Suspension of VM has been postponed" + vmName + " on " + hostName);
Trace.hostSetState(vmName, "suspension", "postponed", String.format(Locale.US, "{\"vm_name\": \"%s\", \"on\": \"%s\", \"duration\": %f}", vmName, hostName, suspensionDuration));
Trace.hostPopState(vmName, "suspension");
sgVMsOn.remove(vm.getName());
sgVMsToSuspend.put(vm.getName(), vm);
break;
default:
correctlyCompleted = false;
System.err.println("Unexpected state from XHost.suspend()");
System.exit(-1);
}
}
} else {
System.err.println("You are trying to suspend a non-existing VM");
System.exit(-1);
}
return correctlyCompleted;
}
public static boolean resumeVM(String vmName, String hostName){
boolean correctlyCompleted = true;
Msg.info("Resuming VM " + vmName + " on " + hostName);
if (vmName != null) {
XVM vm = SimulatorManager.getXVMByName(vmName);
XHost host = SimulatorManager.getXHostByName(hostName);
if (vm != null) {
double timeStartingSuspension = Msg.getClock();
Trace.hostPushState(vmName, "SERVICE", "resume", String.format("{\"vm_name\": \"%s\", \"on\": \"%s\"}", vmName, hostName));
boolean previouslyViable = host.isViable();
// 0 if success, -1 if failure, 1 if already running
int res = host.resumeVM(vm);
Msg.info(vm.getName() + " resume returned " + res);
Trace.hostPopState(vmName, "SERVICE", String.format("{\"vm_name\": \"%s\", \"state\": %d}", vmName, res));
double suspensionDuration = Msg.getClock() - timeStartingSuspension;
switch (res) {
case 0:
// Msg.info("End of operation resume of VM " + vmName + " on " + hostName);
if (sgVMsOff.remove(vmName) == null) { // If the VM is not marked off, there is an issue
System.err.println("Unexpected state from XHost.resume()");
System.exit(-1);
}
sgVMsOn.put(vm.getName(), vm);
Trace.hostVariableAdd(SimulatorManager.getInjectorNodeName(), "NB_VM", 1);
SimulatorManager.iResume++;
if ((previouslyViable) && (!host.isViable())) {
Msg.info("STARTING VIOLATION ON " + host.getName() + "\n");
Trace.hostSetState(host.getName(), "PM", "violation");
}
Trace.hostSetState(vmName, "resume", "finished", String.format(Locale.US, "{\"vm_name\": \"%s\", \"on\": \"%s\", \"duration\": %f}", vmName, hostName, suspensionDuration));
Trace.hostPopState(vmName, "resume");
break;
case 1:
if(sgVMsToSuspend.remove(vmName) == null) { // If the VM was not marked as to-be-suspended, there is an issue
System.err.println("Unexpected state from XHost.resume()");
System.exit(-1);
}
sgVMsOn.put(vm.getName(), vm);
//SimulatorManager.iResume++;
/* Export that the suspension has finished */
Trace.hostSetState(vmName, "resume", "cancelled", String.format(Locale.US, "{\"vm_name\": \"%s\", \"on\": \"%s\", \"duration\": %f}", vmName, hostName, suspensionDuration));
Trace.hostPopState(vmName, "resume");
break;
default:
correctlyCompleted = false;
System.err.println("Unexpected state from XHost.resume()");
System.exit(-1);
}
}
} else {
System.err.println("You are trying to resume a non-existing VM");
System.exit(-1);
}
return correctlyCompleted;
}
/**
* Migrate a VM
* @param vmName
* @param sourceName
* @param destName
* @return true migration has been correctly performed, false migration cannot complete.
*/
public static boolean migrateVM(String vmName, String sourceName, String destName) {
boolean completionOk = true;
double timeStartingMigration = Msg.getClock();
Trace.hostPushState(vmName, "SERVICE", "migrate", String.format("{\"vm_name\": \"%s\", \"from\": \"%s\", \"to\": \"%s\"}", vmName, sourceName, destName));
XHost sourceHost = SimulatorManager.getXHostByName(sourceName);
XHost destHost = SimulatorManager.getXHostByName(destName);
int res = sourceHost.migrate(vmName, destHost);
// TODO, we should record the res of the migration operation in order to count for instance how many times a migration crashes ?
// To this aim, please extend the hostPopState API to add meta data information
Trace.hostPopState(vmName, "SERVICE", String.format("{\"vm_name\": \"%s\", \"state\": %d}", vmName, res));
double migrationDuration = Msg.getClock() - timeStartingMigration;
if (res == 0) {
Msg.info("End of migration of VM " + vmName + " from " + sourceName + " to " + destName);
if (!destHost.isViable()) {
Msg.info("ARTIFICIAL VIOLATION ON " + destHost.getName() + "\n");
// If Trace.hostGetState(destHost.getName(), "PM").equals("normal")
Trace.hostSetState(destHost.getName(), "PM", "violation-out");
}
if (sourceHost.isViable()) {
Msg.info("END OF VIOLATION ON " + sourceHost.getName() + "\n");
Trace.hostSetState(sourceHost.getName(), "PM", "normal");
}
/* Export that the migration has finished */
Trace.hostSetState(vmName, "migration", "finished", String.format(Locale.US, "{\"vm_name\": \"%s\", \"from\": \"%s\", \"to\": \"%s\", \"duration\": %f}", vmName, sourceName, destName, migrationDuration));
Trace.hostPopState(vmName, "migration");
// Patch to handle a postponed suspend that may have been requested during the migration.
XVM suspendedVm = SimulatorManager.sgVMsToSuspend.remove(vmName);
if (suspendedVm != null) { // The VM has been marked to be suspended, so do it
Msg.info("The VM " + vmName + "has been marked to be suspended after migration");
SimulatorManager.sgVMsOn.put(vmName, suspendedVm);
SimulatorManager.suspendVM(vmName, destName);
}
} else {
Trace.hostSetState(vmName, "migration", "failed", String.format(Locale.US, "{\"vm_name\": \"%s\", \"from\": \"%s\", \"to\": \"%s\", \"duration\": %f}", vmName, sourceName, destName, migrationDuration));
Trace.hostPopState(vmName, "migration");
Msg.info("Something was wrong during the migration of " + vmName + " from " + sourceName + " to " + destName);
Msg.info("Reconfiguration plan cannot be completely applied so abort it");
completionOk = false;
}
return completionOk;
}
}
<file_sep>/src/main/java/simulation/Main.java
/**
* Copyright 2012-2013-2014. The SimGrid Team. All rights reserved.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the license (GNU LGPL) which comes with this package.
*
* This file is the launcher on the Simgrid VM injector
* The main is composed of three parts:
* 1./ Generate the deployment file according to the number of nodes and the algorithm you want to evaluate
* 2./ Configure, instantiate and assign each VM on the different PMs
* 3./ Launch the injector and the other simgrid processes in order to run the simulation.
*
* Please note that all parameters of the simulation are given in the ''simulator.properties'' file available
* in the ''config'' directory
*
* @author: <EMAIL>
*/
package simulation;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.core.util.StatusPrinter;
import configuration.SimulatorProperties;
import configuration.XHost;
import org.simgrid.msg.Msg;
import org.simgrid.msg.Process;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Date;
import scheduling.hierarchical.snooze.AUX;
import trace.Trace;
public class Main {
/**
* The Simulator launcher
*
* @param args
*/
public static void main(String[] args) {
// Historical fix to get the internal logs of Entropy correctly
// assume SLF4J is bound to logback in the current environment
LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory();
// print logback's internal status
StatusPrinter.print(lc);
// Save the beginning time
SimulatorManager.setBeginTimeOfSimulation(System.currentTimeMillis());
// Automatically generate deployment file that is mandatory for launching the simgrid simulation.
// TODO - implement a more generic way to generate the deployment file
try {
String[] cmd = null;
if (SimulatorProperties.getAlgo().equals("distributed")) {
Msg.info("Distributed scheduling selected (generating deployment file)");
cmd = new String[] {"/bin/sh", "-c", "python generate.py " + SimulatorProperties.getAlgo() + " " +
SimulatorProperties.getNbOfHostingNodes() + " " +
SimulatorProperties.getNbOfCPUs() + " " +
SimulatorProperties.getCPUCapacity() + " " +
SimulatorProperties.getMemoryTotal() + " 23000 > config/generated_deploy.xml"};
//"Usage: python generate.py nb_nodes nb_cpu total_cpu_cap ram port >
} else if (SimulatorProperties.getAlgo().equals("hierarchical")) {
Msg.info("Hierarchical scheduling selected (generating deployment file for hierarchical approach)");
//"Usage: python generate.py nb_nodes
cmd = new String[] {"/bin/sh", "-c", "python generate.py " + SimulatorProperties.getAlgo() + " " + SimulatorProperties.getNbOfHostingNodes() + " " + SimulatorProperties.getNbOfServiceNodes() + " > config/generated_deploy.xml"};
} else if (SimulatorProperties.getAlgo().equals("example")) {
Msg.info("Simple scheduling selected (generating deployment file for simple approach)");
//"Usage: python generate.py nb_nodes
cmd = new String[] {"/bin/sh", "-c", "python generate.py " + SimulatorProperties.getAlgo() + " " + SimulatorProperties.getNbOfHostingNodes() + " " + SimulatorProperties.getNbOfServiceNodes() + " > config/generated_deploy.xml"};
} else { //(SimulatorProperties.getAlgo().equals("centralized"))
Msg.info("Default selected (generating deployment file for centralized approach)");
//"Usage: python generate.py nb_nodes
cmd = new String[] {"/bin/sh", "-c", "python generate.py " + SimulatorProperties.getAlgo() + " " + SimulatorProperties.getNbOfHostingNodes() + " > config/generated_deploy.xml"};
}
try {
Runtime.getRuntime().exec(cmd).waitFor();
} catch (InterruptedException e) {
// Ignore
}
} catch (IOException e) {
e.printStackTrace();
}
// Init. internal values
Msg.energyInit();
Msg.init(args);
/* construct the platform and deploy the application */
Msg.createEnvironment(args[0]);
Msg.deployApplication(args[1]);
/* Create all VM instances and assign them on the PMs */
/* The initial deployment is based on a round robin fashion */
System.out.println("Configure simulation" + new Date().toString());
SimulatorManager.cleanLog();
// True means round robin placement.
SimulatorManager.configureHostsAndVMs(SimulatorProperties.getNbOfHostingNodes(), SimulatorProperties.getNbOfServiceNodes(), SimulatorProperties.getNbOfVMs(), true);
SimulatorManager.writeCurrentConfiguration();
String algorithmName = SimulatorProperties.getAlgo();
String algorithmDetails = "{}";
if (algorithmName.equals("hierarchical")) {
int lcsRatio = SimulatorProperties.getNbOfHostingNodes() / (SimulatorProperties.getNbOfServiceNodes() -1 );
algorithmDetails = String.format("{\"assignmentAlgorithm\": \"%s\", \"lcsRatio\": %d}", AUX.assignmentAlg, lcsRatio);
}
if(algorithmName.equals("centralized"))
algorithmName = SimulatorProperties.getImplementation().substring(SimulatorProperties.getImplementation().lastIndexOf('.') + 1);
Trace.simulationDeclare(algorithmName, SimulatorProperties.getNbOfHostingNodes(), SimulatorProperties.getNbOfServiceNodes(), SimulatorProperties.getNbOfVMs(), algorithmDetails);
/* Prepare TRACE variables */
System.out.println("Prepare TRACE module" + new Date().toString());
// A node can be underloaded
Trace.hostStateDeclare("PM");
Trace.hostStateDeclareValue("PM", "underloaded", "0 1 1");
Trace.hostStateDeclareValue("PM", "normal", "1 1 1");
Trace.hostStateDeclareValue("PM", "violation", "1 0 0");
Trace.hostStateDeclareValue("PM", "violation-det", "0 1 0");
Trace.hostStateDeclareValue("PM", "violation-out", "1 0 0");
Trace.hostStateDeclare("SERVICE");
Trace.hostStateDeclareValue("SERVICE", "free", "1 1 1");
Trace.hostStateDeclareValue("SERVICE", "booked", "0 0 1");
Trace.hostStateDeclareValue("SERVICE", "compute", "1 0 1");
Trace.hostStateDeclareValue("SERVICE", "reconfigure", "1 1 0");
Trace.hostStateDeclareValue("SERVICE", "migrate", "1 0 0");
Trace.hostVariableDeclare("LOAD");
Trace.hostVariableDeclare("NB_MC"); // Nb of microcosms (only for DVMS)
Trace.hostVariableDeclare("NB_MIG"); //Nb of migration
Trace.hostVariableDeclare("NB_VM"); //To follow number of VMs.
Trace.hostVariableDeclare("NB_VM_TRUE"); //To follow the true number of VMs.
Trace.hostVariableDeclare("ENERGY");
Trace.hostVariableDeclare("NB_OFF"); //Nb of hosts turned off
Trace.hostVariableDeclare("NB_ON"); //Nb of hosts turned on
for(XHost host: SimulatorManager.getSGHosts()) {
Trace.hostVariableSet(host.getName(), "NB_ON", 1);
Trace.hostVariableSet(host.getName(), "NB_OFF", 0);
}
// Turn off the hosts that we don't need
int nOff = 0;
if(SimulatorProperties.getHostsTurnoff()) {
for (XHost h : SimulatorManager.getSGHostingHosts())
if (h.getRunnings().size() <= 0) {
SimulatorManager.turnOff(h);
nOff++;
}
Msg.info(String.format("Turned off unused %d nodes before starting", nOff));
}
/* execute the simulation. */
System.out.println("Launcher: begin Msg.run()" + new Date().toString());
notify(String.format("Started %s with %d hosts and %d VMs", SimulatorProperties.getImplementation(), SimulatorProperties.getNbOfHostingNodes(), SimulatorProperties.getNbOfVMs()));
Msg.run();
System.out.println("Launcher: end of Msg.run()" + new Date().toString());
Trace.close();
Msg.info("End of run");
notify(String.format("End of simulation %s", SimulatorProperties.getImplementation()));
Process.killAll(-1);
Msg.info(String.format("There are still %d processes running", Process.getCount()));
}
private static void notify(String message) {
Msg.info(message);
}
}
| b98273a1a4cab384054c8ca7018c74a5b89dccf2 | [
"HTML",
"Markdown",
"INI",
"Java",
"Python",
"R",
"Dockerfile",
"Shell"
] | 58 | Python | BeyondTheClouds/VMPlaceS | f29817aac6de0d11885ff1bd9062c087e0e4ef37 | e860c01e2d134d23df9f5c920b176a9219ee8783 | |
refs/heads/master | <file_sep><?php
namespace App\Http\Controllers\Secure;
use Illuminate\Http\Request;
use App\Http\Controllers\Controller;
use App\Models\Usuario;
use App\Http\Requests\ValCrearUsuario;
class UsuariosController extends Controller
{
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
public function index()
{
$usuarios = Usuario::orderBy('id', 'ASC')->paginate(5);
return view('admin/usuarios/index')-> with('usuarios', $usuarios);
}
/**
* Show the form for creating a new resource.
*
* @return \Illuminate\Http\Response
*/
public function create()
{
return view('secure/create');
}
/**
* Store a newly created resource in storage.
*
* @param \Illuminate\Http\Request $request
* @return \Illuminate\Http\Response
*/
public function store(ValCrearUsuario $request)
{
$usuario = new Usuario($request->all());
$usuario->password = <PASSWORD>($request->password);
$usuario->rol =2;
$usuario->save();
flash('Usuario registrado exitosamente')->success();
return redirect()->route('login')->with('mensaje', 'Se ha registrado con exito');
}
/**
* Display the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function show($id)
{
//
}
/**
* Show the form for editing the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function edit($id)
{
//
}
/**
* Update the specified resource in storage.
*
* @param \Illuminate\Http\Request $request
* @param int $id
* @return \Illuminate\Http\Response
*/
public function update(Request $request, $id)
{
//
}
/**
* Remove the specified resource from storage.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function destroy($id)
{
//
}
}
<file_sep><?php
namespace App\Http\Controllers\Admin;
use Illuminate\Http\Request;
use App\Http\Controllers\Controller;
use App\Models\Tasa;
use App\Models\Pais;
use App\Models\Moneda;
use Illuminate\Support\Facades\DB;
class TasaController extends Controller
{
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
public function index()
{
$tasas = DB::table('tasas')->select('tasas.id as idTasa', 'tasas.tasa as tasa', 'paises.nombre as pais', 'paises.moneda as pmoneda',
'monedas.moneda as moneda')
->join('paises', 'tasas.pais', 'paises.id')
->join('monedas', 'tasas.moneda', 'monedas.id')
->orderBy('paises.id', 'ASC')->paginate(5);
//dd($tasas);
return view('admin/tasas/index')->with('tasas', $tasas);
}
/**
* Show the form for creating a new resource.
*
* @return \Illuminate\Http\Response
*/
public function create()
{
$paises = Pais::pluck('nombre', 'id');
$monedas = Moneda::pluck('moneda', 'id');
return view('admin/tasas/create', compact('paises', 'monedas'));
}
/**
* Store a newly created resource in storage.
*
* @param \Illuminate\Http\Request $request
* @return \Illuminate\Http\Response
*/
public function store(Request $request)
{
$Tasas = new Tasa($request->all());
$Tasas->save();
flash('Tasa registrada')->success();
return redirect()->route('tasas.index');
}
/**
* Display the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function show($id)
{
//
}
/**
* Show the form for editing the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function edit($id)
{
$tasa = Tasa::select('tasas.id as idTasa', 'tasas.tasa as tasa', 'paises.nombre as pais', 'monedas.moneda as moneda')
->join('paises', 'tasas.pais', 'paises.id')
->join('monedas', 'tasas.moneda', 'monedas.id')
->where('tasas.id', $id)->get()->toArray();
//dd($tasa);
return view('admin/tasas/edit')->with('tasa', $tasa);
}
/**
* Update the specified resource in storage.
*
* @param \Illuminate\Http\Request $request
* @param int $id
* @return \Illuminate\Http\Response
*/
public function update(Request $request, $id)
{
$tasa = Tasa::find($id);
$tasa->tasa = $request->tasa;
$tasa->save();
flash('Tasa actualizada')->success();
return redirect()->route('tasas.index');
}
/**
* Remove the specified resource from storage.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function destroy($id)
{
$user = Tasa::find($id);
$user->delete();
flash('Tasa eliminada con éxito')->error();
return redirect()->route('tasas.index');
}
}
<file_sep><?php
use Illuminate\Database\Seeder;
class TablaPaisSeeder extends Seeder
{
/**
* Run the database seeds.
*
* @return void
*/
public function run()
{
DB::table('paises')->insert([
['nombre' => 'Colombia', 'bandera' => null ,'moneda' => 'COP'],
['nombre' => 'Venezuela', 'bandera' => null ,'moneda' => 'BsS'],
['nombre' => 'Chile', 'bandera' => null ,'moneda' => 'CLP'],
['nombre' => 'Perú', 'bandera' => null ,'moneda' => 'PEN'],
['nombre' => 'EEUU', 'bandera' => null ,'moneda' => 'USD'],
['nombre' => 'Todos', 'bandera' => null ,'moneda' => null],
]);
}
}
<file_sep><?php
namespace App\Models;
use Illuminate\Database\Eloquent\Model;
class Domicilio extends Model
{
protected $table = "domicilios";
protected $fillable = ['usuario', 'pais', 'ciudad', 'codigo_postal', 'direccion'];
protected $guarded = ['id'];
public function usuario()
{
return $this->belongsTo(Usuario::class, 'usuario');
}
}
<file_sep><?php
use Illuminate\Database\Seeder;
class TablaMonedaSeeder extends Seeder
{
/**
* Run the database seeds.
*
* @return void
*/
public function run()
{
DB::table('monedas')->insert([
[
'moneda' => 'Peso Colombiano',
'abrev' => 'COP'
],
[
'moneda' => 'Bolivar Soberano',
'abrev' => 'BsS'
],
[
'moneda' => 'Peso Chileno',
'abrev' => 'CLP'
],
[
'moneda' => 'Sol',
'abrev' => 'PEN'
],
[
'moneda' => 'Dólar Americano',
'abrev' => 'USD'
],
[
'moneda' => 'PayPal',
'abrev' => 'PPL'
]
]);
}
}
<file_sep><?php
namespace App\Models;
use Illuminate\Foundation\Auth\User as Autenticable;
use Illuminate\Support\Facades\Session;
class Usuario extends Autenticable
{
protected $remember_token = false;
protected $table = "usuarios";
protected $fillable = ['email', 'password'];
protected $guarded = ['id'];
public function rol()
{
return $this->belongsTo(Rol::class, 'rol');
}
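/**
 * Caches the authenticated user's role, personal data and domicile in the session.
 * When the user has no personal data or domicile record yet, an empty record is
 * created and placeholder values ('no especifica') are stored instead.
 */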
public function setSession($rol)
{
$usd= Usuario::where('id', $this->id)->get()->toArray();
$datosp = DatosPersonales::where('usuario', $this->id)->get()->toArray();
$datosd = Domicilio::select('domicilios.*', 'paises.nombre as paisd')->where('usuario', $this->id)->
join('paises', 'domicilios.pais', 'paises.id')->get()->toArray();
Session::put(
[
'rol' => $rol[0]['rol'],
'rol_id' => $this->rol,
'idUsuario' => $this->id,
'emailUsuario' => $this->email,
]);
if ($datosp) {
Session::put([
'idDat' => $datosp[0]['id'],
'nombre' => $datosp[0]['nombre'],
'apellido' => $datosp[0]['apellido'],
'iddoctype' => $datosp[0]['iddoctype'],
'iddocnum' => $datosp[0]['iddocnum'],
]);
}else {
$datp = new DatosPersonales($usd);
$datp->usuario = $usd[0]['id'];
$datp->save();
$datosp = DatosPersonales::where('usuario', $this->id)->get()->toArray();
Session::put([
'idDat' => $datosp[0]['id'],
'nombre' => 'no especifica',
'apellido' => 'no especifica',
'iddoctype' => 'no especifica',
'iddocnum' => 'no especifica',
]);
}
if ($datosd) {
Session::put([
'pais' => $datosd[0]['pais'],
'idDom' => $datosd[0]['id'],
'paisd' => $datosd[0]['paisd'],
'ciudad' => $datosd[0]['ciudad'],
'codigopl' => $datosd[0]['codigo_postal'],
'direccion' => $datosd[0]['direccion']
]);
}else {
$datd = new Domicilio($usd);
$datd->usuario = $usd[0]['id'];
$datd->save();
$datosd = Domicilio::where('usuario', $usd[0]['id'])->get()->toArray();
Session::put([
'pais' => 'no especifica',
'idDom' => $datosd[0]['id'],
'paisd' => 'no especifica',
'ciudad' => 'no especifica',
'codigopl' => 'no especifica',
'direccion' => 'no especifica'
]);
}
}
}
<file_sep><?php
use Illuminate\Database\Seeder;
class TablaDatosPersonalesSeeders extends Seeder
{
/**
* Run the database seeds.
*
* @return void
*/
public function run()
{
DB::table('datos_personales')->insert([
'usuario' => 1,
'nombre' => 'John',
'apellido' => 'Doe',
'iddoctype' => 'V',
'iddocnum' => '12345678998'
]);
DB::table('datos_personales')->insert([
'usuario' => 2,
'nombre' => 'Jane',
'apellido' => 'Doe',
'iddoctype' => 'E',
'iddocnum' => '89987654321'
]);
}
}
<file_sep><?php
use Illuminate\Database\Seeder;
class DatabaseSeeder extends Seeder
{
/**
* Seed the application's database.
*
* @return void
*/
public function run()
{
$this->truncateTablas([
'roles', 'usuarios', 'datos_personales', 'paises', 'bancos', 'monedas', 'tasas'
]);
$this->call(TablaPaisSeeder::class);
$this->call(TablaRolSeeder::class);
$this->call(TablaUsuarioSeeder::class);
$this->call(TablaDatosPersonalesSeeders::class);
$this->call(TablaBancosSeeder::class);
$this->call(TablaMonedaSeeder::class);
$this->call(TablaTasasSeeder::class);
}
protected function truncateTablas(array $tablas)
{
DB::statement('SET FOREIGN_KEY_CHECKS = 0;');
foreach($tablas as $tabla){
DB::table($tabla)->truncate();
}
DB::statement('SET FOREIGN_KEY_CHECKS = 1;');
}
}
<file_sep><?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CrearTablaDatosPersonales extends Migration
{
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
Schema::create('datos_personales', function (Blueprint $table) {
$table->bigIncrements('id');
$table->timestamps();
$table->unsignedBigInteger('usuario');
$table->foreign('usuario')->references('id')->on('usuarios')->onDelete('cascade')->onUpdate('cascade');
$table->string('nombre',20)->nullable();
$table->string('apellido',20)->nullable();
$table->string('iddoctype', 2)->nullable();
$table->string('iddocnum', 20)->unique()->nullable();
$table->charset ='utf8mb4';
$table->collation = 'utf8mb4_spanish_ci';
});
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
Schema::dropIfExists('datos_personales');
}
}
<file_sep><?php
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/
Route::get('/', 'Secure\LoginController@index')->name('login');
Route::get('/secure/login', 'Secure\LoginController@index')->name('login');
Route::post('/secure/login', 'Secure\LoginController@login')->name('login_post');
Route::get('secure/logout', 'Secure\loginController@logout')->name('logout');
Route::get('/midred', 'MidRedController@index')->name('midred');
Route::get('/secure/create', 'Secure\UsuariosController@create');
Route::get('/secure/store', 'Secure\UsuariosController@store');
//RUTAS MODO ADMINISTRADOR
Route::group(['prefix' => 'admin', 'namespace' => 'Admin', 'middleware' => 'auth'], function(){
Route::get('/adminhome', 'AdminController@index')->name('adminhome');
//PAISES
Route::resource('paises', 'PaisController');
//BANCOS
Route::resource('bancos', 'BancosController');
Route::get('bancos/{id}/destroy', [
'uses' => 'BancosController@destroy',
'as' => 'bancos.destroy'
]);
//TASAS
Route::resource('tasas', 'TasaController');
Route::get('tasas/{id}/destroy', [
'uses' => 'TasaController@destroy',
'as' => 'tasas.destroy'
]);
//USUARIOS
Route::resource('usuarios', 'UsuariosController');
//PERFIL DE USUARIO
Route::resource('perfil', 'PerfilUsuarioController');
Route::resource('datospersonales', 'DatosPersonalesController');
Route::resource('datosbancarios', 'DatosBancariosController');
Route::resource('domicilios', 'DomicilioController');
});
//RUTAS MODO USUARIO
Route::group(['prefix' => 'user', 'namespace' => 'User', 'middleware' => 'auth'], function(){
Route::get('/usershome', 'UserController@index')->name('usershome');
//BANCOS
//USUARIOS
});
<file_sep><?php
namespace App\Models;
use Illuminate\Database\Eloquent\Model;
class DatosPersonales extends Model
{
protected $table = "datos_personales";
protected $fillable = ['nombre', 'apellido', 'iddoctype', 'iddocnum'];
protected $guarded = ['id'];
public function usuario()
{
return $this->belongsTo(Usuario::class, 'usuario');
}
}
<file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
class MidRedController extends Controller
{
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
public function index()
{
if(session()->get('rol_id') == 1){
return redirect('/admin/adminhome');
}
else {
return redirect('/user/usershome');
}
}
}
<file_sep><?php
namespace App\Http\Controllers\Secure;
use Illuminate\Http\Request;
use App\Http\Controllers\Controller;
use Illuminate\Foundation\Auth\AuthenticatesUsers;
class LoginController extends Controller
{
use AuthenticatesUsers;
protected $redirectTo = '/midred';
public function __construct()
{
$this->middleware('guest')->except('logout');
}
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
public function index()
{
return view('secure/index');
}
protected function authenticated(Request $request, $user)
{
$rol = $user->rol()->get();
auth()->user()->setSession($rol->toArray());
}
}
<file_sep><?php
use Illuminate\Database\Seeder;
class TablaBancosSeeder extends Seeder
{
/**
* Run the database seeds.
*
* @return void
*/
public function run()
{
DB::table('bancos')->insert([
'pais' => 2,
'nombre' => 'Banco Nacional de Credito',
'abrev' => 'BNC',
'prefijo' => '0191',
]);
}
}
<file_sep><?php
namespace App\Http\Controllers\Admin;
use Illuminate\Http\Request;
use App\Http\Controllers\Controller;
use App\Models\Pais;
use App\Models\Domicilio;
use Illuminate\Support\Facades\Session;
class DomicilioController extends Controller
{
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
public function index()
{
//
}
/**
* Show the form for creating a new resource.
*
* @return \Illuminate\Http\Response
*/
public function create()
{
//
}
/**
* Store a newly created resource in storage.
*
* @param \Illuminate\Http\Request $request
* @return \Illuminate\Http\Response
*/
public function store(Request $request)
{
//
}
/**
* Display the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function show($id)
{
//
}
/**
* Show the form for editing the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function edit($id)
{
$paises = Pais::pluck('nombre', 'id');
return view('admin/usuarios/editarDomicilio')->with('paises', $paises);
}
/**
* Update the specified resource in storage.
*
* @param \Illuminate\Http\Request $request
* @param int $id
* @return \Illuminate\Http\Response
*/
public function update(Request $request, $id)
{
$dom = Domicilio::find($id);
$dom->pais = $request->pais;
$dom->ciudad = $request->ciudad;
$dom->codigo_postal = $request->codigopl;
$dom->direccion = $request->direccion;
$dom->save();
$datosd = Domicilio::select('domicilios.*', 'paises.nombre as paisd')->where('usuario', session()->get('idUsuario'))->
join('paises', 'domicilios.pais', 'paises.id')->get()->toArray();
Session::put(
[
'paisd' => $datosd[0]['paisd'],
'pais' => $datosd[0]['pais'],
'ciudad' => $datosd[0]['ciudad'],
'codigopl' => $datosd[0]['codigo_postal'],
'direccion' => $datosd[0]['direccion']
]);
flash('Domicilio actualizado')->success();
return redirect()->route('perfil.index');
}
/**
* Remove the specified resource from storage.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function destroy($id)
{
//
}
}
<file_sep><?php
namespace App\Http\Requests;
use Illuminate\Foundation\Http\FormRequest;
class ValCrearUsuario extends FormRequest
{
/**
* Determine if the user is authorized to make this request.
*
* @return bool
*/
public function authorize()
{
return true;
}
/**
* Get the validation rules that apply to the request.
*
* @return array
*/
public function rules()
{
return [
'email' => 'required|max:50|unique:usuarios,email,' . $this->route('id'),
'password' => '<PASSWORD>|max:12|min:10',
'checkpp' => 'required'
];
}
public function messages()
{
return [
'email.required' => 'El campo correo es obligatorio',
            'password.required' => 'El campo contraseña es obligatorio',
'checkpp.required' => 'Para registrarse, debe aceptar los términos y condiciones',
];
}
}
<file_sep><?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CrearTablaTasas extends Migration
{
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
Schema::create('tasas', function (Blueprint $table) {
$table->bigIncrements('id');
$table->timestamps();
$table->unsignedBigInteger('pais');
$table->foreign('pais')->references('id')->on('paises')->onDelete('cascade')->onUpdate('cascade');
$table->unsignedBigInteger('moneda');
$table->foreign('moneda')->references('id')->on('monedas')->onDelete('cascade')->onUpdate('cascade');
$table->float('tasa', 255,5)->nullable();
$table->charset ='utf8mb4';
$table->collation = 'utf8mb4_spanish_ci';
});
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
Schema::dropIfExists('tasas');
}
}
<file_sep><?php
namespace App\Http\Controllers\Admin;
use Illuminate\Http\Request;
use App\Http\Controllers\Controller;
use App\Models\Tasa;
use App\Models\Pais;
use App\Models\PerfilUsuario;
use Illuminate\Support\Facades\Session;
use App\Models\DatosPersonales;
use App\Models\Domicilio;
class AdminController extends Controller
{
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
public function index()
{
$tasas = Tasa::join('paises', 'tasas.pais', 'paises.id' )
->join('monedas', 'tasas.moneda', 'monedas.id')
->whereIn('tasas.pais', [session()->get('pais'), 6])->get()->toArray();
//dd($datosp);
return view('/admin/admin/inicio', compact('tasas'));
}
}
<file_sep><?php
namespace App\Models;
use Illuminate\Database\Eloquent\Model;
class Pais extends Model
{
protected $table ="paises";
protected $fillable = ['nombre', 'bandera', 'moneda'];
protected $guarded = ['id'];
public function pais()
{
return $this->belongsTo(Pais::class, 'paises');
}
}
<file_sep><?php
namespace App\Models;
use Illuminate\Database\Eloquent\Model;
class Tasa extends Model
{
protected $table = "tasas";
protected $fillable = ['pais', 'moneda', 'tasa'];
protected $guarded = ['id'];
public function pais()
{
return $this->belongsTo(Pais::class, 'paises');
}
}
<file_sep><?php
namespace App\Models;
use Illuminate\Database\Eloquent\Model;
class Domicilio extends Model
{
protected $table = "domicilios";
protected $fillable = ['usuario', 'pais', 'ciudad', 'codigo_postal', 'direccion'];
protected $guarded = ['id'];
public function usuario()
{
return $this->belongsTo(Usuario::class, 'usuarios');
}
public function pais()
{
return $this->belongsTo(Pais::class, 'paises');
}
}
<file_sep><?php
namespace App\Models;
use Illuminate\Database\Eloquent\Model;
class Banco extends Model
{
protected $table = "bancos";
protected $fillable = ['nombre',
'prefijo'
];
}
<file_sep><?php
namespace App\Models;
use Illuminate\Database\Eloquent\Model;
class DatosBancarios extends Model
{
protected $table = "datos_bancarios";
protected $fillable = ['usuario', 'banco', 'tcuenta', 'cuenta'];
protected $guarded = ['id'];
public function usuario()
{
return $this->belongsTo(Usuario::class, 'usuarios');
}
public function banco()
{
return $this->belongsTo(Banco::class, 'bancos');
}
}
<file_sep><?php
namespace App\Http\Controllers\User;
use Illuminate\Http\Request;
use App\Http\Controllers\Controller;
use App\Models\Tasa;
class UserController extends Controller
{
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
public function index()
{
$tasas = Tasa::join('paises', 'tasas.pais', 'paises.id' )
->join('monedas', 'tasas.moneda', 'monedas.id')
->whereIn('tasas.pais', [session()->get('pais'), 6])->get()->toArray();
return view('/users/users/inicio', compact('tasas'));
}
}
| f5d2478d5628ab34e59077b2cce7c65b9bdfb3cb | [
"PHP"
] | 24 | PHP | kyraxtec/CambiosCarupano | a2687f2dec324f963dddddba0330535413dc5fef | 365e8921e901d4d628aa363fb937a35772e8cf63 | |
refs/heads/master | <file_sep># emoji-totext
npm package to translate emojis to text
## Installation
`npm install emoji-totext`
## Usage
```js
var convertemoji = require('emoji-totext');
var str = "I want to eat 🍕";
convertemoji.toText(str,function(err,sentence){
console.log(sentence);
})
```
Output should be: `I want to eat a slice of pizza`
## Limitations
- At this point it can only evaluate sentences that contain a single emoji
- Not all emojis are "readable"
<file_sep>var iconv = require('iconv-lite');
var fs = require('fs');
var os = require('os');
var nodepos = require('node-pos');
var fastcsv = require('fast-csv');
//var str = "I think it's <NAME> standing in a dark room,and he seems 😁. ";
var preposition = "";
var str;
exports.toText = function (sentence, callback) {
str = sentence;
var utf8 = unescape(encodeURIComponent(str));
var arr = []
//convers to unicode
for (var i = 0; i < utf8.length; i++) {
arr.push(utf8.charCodeAt(i).toString(16));
}
var unicode = []
var u = 0;
for(var i = 0; i < arr.length; i++){
if(arr[i] == "f0" || arr[i] == "e2"){
unicode[u] = "\\x" + arr[i].toUpperCase()+
"\\x" + arr[i+1].toUpperCase()+
"\\x" + arr[i+2].toUpperCase()+
"\\x" + arr[i+3].toUpperCase();
u++;
}
}
var stream = fs.createReadStream(__dirname+"/emDict.csv");
var newstr = -1;
var csvStream = fastcsv.parse({objectMode:true,headers: true, delimiter:";"})
.on("data", function(data){
for(var u = 0; u < unicode.length ; u++){
var unicodeSmile = unicode[u].replace(/\\\\/g,"\\");
if(data.Bytes == unicodeSmile){
str = generatePreposition(str,data.Description,data.Native);
var str_split = str.toString().split(data.Native)
var index
if(str_split[0].length == 0){ //in the begining
index = 1
}else if(str_split[str_split.length-1].length == 0){ //in the end
index = 0
}else{ //in the middle
index = -1
}
if(index == 0){
newstr = str_split[index]+ preposition + data.Description.toLowerCase();
}else if(index == 1){
newstr = preposition+data.Description.toLowerCase()+","+str_split[index];
newstr = newstr.toString().charAt(0).toUpperCase() + newstr.toString().slice(1).toLowerCase();
}else{
if(str_split[1].charAt(0) != " ")
str_split[1]= " "+str_split[1]
newstr = str_split[0] + data.Description.toLowerCase()+str_split[1];
}
}
}
})
.on("end", function(){
callback(null, newstr);
});
stream.pipe(csvStream);
};
function generatePreposition(str,desc,emoji) {
var sentence = str.split(" ");
var e_index = -1;
var found = false;
var withWord = false;
//get emoji index
for(var e = 0; e < sentence.length; e++){
if(sentence[e] == emoji){
e_index = e;
found = true;
}
}
console.log(str);
var tmp_str = str.split(emoji)
if(!found){
if( str.indexOf(emoji) > -1 ){
e_index = str.indexOf(emoji);
withWord = true;
}
}
if(withWord){
if(str.charAt(e_index-1) != " "){ //has space before emoji
str = tmp_str[0]+" "+emoji + tmp_str[1];
console.log(str);
}else if(str.charAt(e_index+1) != " "){ //has space after emoji
str = tmp_str[0]+emoji+ " " + tmp_str[1];
}
}
if(desc.indexOf("FACE") > -1){
preposition = "with a "
}else{
        // `e` from the loop above always ends up equal to sentence.length here, so use
        // the emoji's word index (e_index) and check the word right before the emoji instead
        if(e_index > 0){
            if(sentence[e_index - 1] != "a" )
                preposition = "a "
        }
}
return str;
}
| e73b65a67fa248e0fa2b73734c21fd3b9f5ef0c6 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | malayka66/emoji-totext | 39d8c011d3140b3053a72eea8e003da718ba5b04 | a237f969251e5e0cd5b4a3c4b53149449617adb6 | |
refs/heads/master | <repo_name>markmeehan99/CalendarScript<file_sep>/README.md
# CalendarScript
This is a small project I developed so that I could easily check on my Google Calendar which events are soon to come. By running this script, and granting access to your Google Account, you can:
- Check which events are next in line
- Add events by type, just by selecting the date and time. Colors will be generated accordingly, taking into account the event type
- Delete event
This project was built by accessing [Google Calendar's API](https://developers.google.com/calendar)
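Under the hood the script talks to the API through the official `google-api-python-client`. As a rough sketch of what the "check next events" feature boils down to (assuming you already have OAuth `creds`, e.g. from the token flow the script sets up; the `next_events` helper name is only for illustration):

```python
import datetime
from googleapiclient.discovery import build

def next_events(creds, how_many=10):
    # Build the Calendar service and ask for the next upcoming events
    # on the primary calendar, ordered by start time.
    service = build('calendar', 'v3', credentials=creds)
    now = datetime.datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time
    result = service.events().list(calendarId='primary', timeMin=now,
                                   maxResults=how_many, singleEvents=True,
                                   orderBy='startTime').execute()
    return result.get('items', [])
```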
<file_sep>/script.py
from __future__ import print_function
import datetime
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/calendar'] #READONLY scope
#TODO: VERIFY SPECIFIC COLOR CODES FOR EACH EVENT
EXAM_COLOR = "1"
LEISURE_COLOR = "3"
DELIVERY_COLOR = "6"
YELLOW = "5"
BLUEBERRY = "9"
def selectEventType():
print("What type of event would you like to choose?")
print("1 - Exam/Test")
print('2 - Project delivery')
print('3 - Leisure event')
type = str(input())
return type
def setEvent(service):
type = selectEventType();
    if type == "1":
color = EXAM_COLOR
    elif type == "2":
color = DELIVERY_COLOR
    elif type == "3":
color = LEISURE_COLOR
else:
color = BLUEBERRY
GMT_0FF = '+00:00'
name = raw_input("What name do you wish your event to have?")
startTime = raw_input("What day/time does your event start? Please enter (YYYY)-(MM)-(DD)T(HH):(MM):(SS)")
endTime = raw_input("What day/time does your event end? Please enter (YYYY)-(MM)-(DD)T(HH):(MM):(SS)")
EVENT = {
'summary': '%s' % name,
        'start': {'dateTime' : '%s%s' % (startTime, GMT_0FF)},
        'end' : {'dateTime' : '%s%s' % (endTime, GMT_0FF)},
"colorId": "%s" % color
}
e = service.events().insert(calendarId = 'primary', sendNotifications = False, body = EVENT).execute()
print('****%r event added!****' % e['summary'].encode('utf-8'))
print('Start: %s' % e['start']['dateTime'])
print('End: %s' % e['end']['dateTime'])
def deleteEvent(service):
print('Event name?')
name = raw_input()
events = getEvents(service, 100)
for event in events:
if event['summary'] == name:
id = event['id']
print(id)
            service.events().delete(calendarId='primary', eventId=id).execute()
            print('****%r event deleted!****' % name)
def printEvents(events):
eventCounter = 0
for event in events:
start = event['start'].get('dateTime', event['start'].get('date'))
print(start, event['summary'])
eventCounter = eventCounter + 1
def getEvents(service, numEvents):
# Call the Calendar API
now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time
events_result = service.events().list(
calendarId='primary',
timeMin=now,
maxResults=numEvents,
singleEvents=True,
orderBy='startTime').execute()
events = events_result.get('items', [])
if not events:
print('No upcoming events found.')
return
return events
def main():
"""Shows basic usage of the Google Calendar API.
Prints the start and name of the next 10 events on the user's calendar.
"""
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.pickle'):
with open('token.pickle', 'rb') as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
'credentials.json', SCOPES)
creds = flow.run_local_server(port=0)
# Save the credentials for the next run
with open('token.pickle', 'wb') as token:
pickle.dump(creds, token)
service = build('calendar', 'v3', credentials=creds)
run = True
while(run):
print("What would you like to do?")
print("1 - Check next events")
print("2 - Set event")
print('3 - Delete event')
print('4 - Quit')
# colors = service.colors().get().execute()
# print(colors)
option = input()
option = int(option)
if option == 1:
print("How many events would you like to view?")
numEvents = input()
printEvents(getEvents(service, numEvents))
if option == 2:
setEvent(service)
if (option == 3):
deleteEvent(service)
if option == 4:
run = False
if __name__ == '__main__':
main() | e9dae8217ec1dbd3809aeb76c1d814e92ee83673 | [
"Markdown",
"Python"
] | 2 | Markdown | markmeehan99/CalendarScript | aae660b08728ca3af7e5aab3e83311cdd82092a7 | d0f496eaffb1ab6a8febf756de8300266b85e44c | |
refs/heads/master | <file_sep>package com.medgenome.controller;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import com.medgenome.model.Customer;
import com.medgenome.repo.CustomerRepository;
@RestController
public class WebController {
@Autowired
CustomerRepository repository;
@RequestMapping("/save")
public String process() {
repository.save(new Customer("Jack", "Smith"));
repository.save(new Customer("Adam", "Johnson"));
repository.save(new Customer("Kim", "Smith"));
repository.save(new Customer("David", "Williams"));
repository.save(new Customer("Peter", "Davis"));
return "Done";
}
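    // The stream-returning queries below run inside read-only transactions; the try-with-resources
    // blocks close each Stream before the transaction (and the underlying cursor) ends.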
@Transactional(readOnly = true)
@RequestMapping("/findall")
public String findAllByStream() {
List<String> mapstream = Collections.emptyList();
try (Stream<Customer> stream = repository.findAllCustomers()) {
mapstream = stream.map(customer -> customer.toString()).collect(Collectors.toList());
}
return mapstream.toString();
}
@Transactional(readOnly = true)
@RequestMapping("/findalllastname")
public String findAllLastNameByStream() {
		List<String> mapstream = Collections.emptyList();
try (Stream<String> stream = repository.findAllandShowLastName()) {
mapstream = stream.collect(Collectors.toList());
}
return mapstream.toString();
}
@Transactional(readOnly = true)
@RequestMapping("/findbylastname")
public String fetchDataByLastNameWithStream(@RequestParam("lastname") String lastName) {
List<String> mapstream = Collections.emptyList();
try (Stream<Customer> stream = repository.findByLastName(lastName)) {
mapstream = stream.map(customer -> customer.toString()).collect(Collectors.toList());
}
return mapstream.toString();
}
}
<file_sep>spring.datasource.url=jdbc:postgresql://localhost/postgres
spring.datasource.username=postgres
spring.datasource.password=<PASSWORD>
spring.jpa.generate-ddl=true | 1f59cd9f5bf2f8367f358c630c730084d044c610 | [
"Java",
"INI"
] | 2 | Java | narayana1830/SpringApplications | 208cc68c407790eca8dedc435ecc0d482716435c | e77d7780621befa6d20292687c6c97db98c0b56a | |
refs/heads/master | <file_sep>#!/usr/bin/env python3
import os
import venv
from pathlib import Path
import sys
testSuit = sys.argv[1]
root_path = Path(__file__).parent.resolve()
venv_dir = root_path / '.venv'
def runner():
    if not venv_dir.exists():
        try:
            venv.create(venv_dir, with_pip=True)
        except SystemError:
            print("Virtualenv was NOT created")
        else:
            print("Virtualenv was created")
    if venv_dir.exists():
        try:
            os.system("pip install -r requirements.txt")
        except SystemError:
            print("Requirements were NOT installed")
        else:
            print("Requirements were installed")
    # run the tests once the environment is ready (the original `elif` branch
    # could never be reached after the previous `if` had run)
    if venv_dir.exists():
        try:
            os.system("python -m robot.run --outputdir logs_folder " + testSuit)
        except SystemError:
            print("Tests were not started")
        finally:
            print("Finish!")
if __name__ == "__main__":
runner()<file_sep>#!/usr/bin/python
# -*- coding: utf-8 -
import random
from datetime import timedelta
from faker import Factory
from faker.providers.company.en_US import Provider as CompanyProviderEnUs
from faker.providers.company.ru_RU import Provider as CompanyProviderRuRu
from munch import munchify
from data.op_faker.op_faker import OP_Provider
from local_time import get_now
fake_en = Factory.create(locale='en_US')
fake_ru = Factory.create(locale='ru_RU')
fake_uk = Factory.create(locale='uk_UA')
fake_uk.add_provider(OP_Provider)
fake = fake_uk
used_identifier_id = []
mode_open = ["belowThreshold", "aboveThresholdUA", "aboveThresholdEU",
"aboveThresholdUA.defense", "competitiveDialogueUA", "competitiveDialogueEU", "esco"]
mode_limited = ["reporting", "negotiation.quick", "negotiation"]
violationType = ["corruptionDescription", "corruptionProcurementMethodType", "corruptionChanges",
"corruptionPublicDisclosure", "corruptionBiddingDocuments", "documentsForm",
"corruptionAwarded", "corruptionCancelled", "corruptionContracting"]
# This workaround fixes an error caused by missing "catch_phrase" class method
# for the "ru_RU" locale in Faker >= 0.7.4
fake_ru.add_provider(CompanyProviderEnUs)
fake_ru.add_provider(CompanyProviderRuRu)
def create_fake_sentence():
return fake.sentence(nb_words=10, variable_nb_words=True)
def create_fake_funder():
return fake.funders_data()
def get_fake_funder_scheme():
return fake.funder_scheme()
def create_fake_amount(award_amount):
return round(random.uniform(1, award_amount), 2)
def create_fake_number(min_number, max_number):
return random.randint(int(min_number), int(max_number))
def create_fake_float(min_number, max_number):
return random.uniform(float(min_number), float(max_number))
def create_fake_title():
return u"[ТЕСТУВАННЯ] {}".format(fake.title())
def create_fake_date():
return get_now().isoformat()
def create_fake_period(days=0, hours=0, minutes=0):
data = {
"startDate": get_now().isoformat(),
"endDate": (get_now() + timedelta(days=days, hours=hours, minutes=minutes)).isoformat()
}
return data
def subtraction(value1, value2):
    if "." in str(value1) or "." in str(value2):
        return float(value1) - float(value2)
    else:
        return int(value1) - int(value2)
def create_fake_value_amount():
return fake.random_int(min=1)
def get_number_of_minutes(days, accelerator):
return 1440 * int(days) / accelerator
def field_with_id(prefix, sentence):
return u"{}-{}: {}".format(prefix, fake.uuid4()[:8], sentence)
"""Method for data criteria"""
def data_for_criteria(scheme='ДК021'):
data = fake.fake_item(scheme)
data_json = munchify(
{
"name": fake_uk.name(),
"nameEng": fake_en.name(),
"classification": {
"id": data["classification"]["id"],
"scheme": "ДК021",
"description": data["classification"]["description"]
},
"additionalClassification": {
"id": data["additionalClassifications"][0]["id"],
"scheme": data["additionalClassifications"][0]["scheme"],
"description": data["additionalClassifications"][0]["description"],
},
"minValue": str(create_fake_float(0, 10)),
"maxValue": str(create_fake_float(11, 42)),
"dataType": random.choice(['number', 'integer', 'boolean', 'string']),
"unit": {
"name": data['unit']['name'],
"code": data['unit']['code']
}
}
)
return data_json
def data_for_edit():
data = munchify(
{
"name": fake_uk.name(),
"nameEng": fake_en.name(),
"minValue": str(create_fake_float(0, 22)),
"maxValue": str(create_fake_float(43, 54)),
"status": "active"
}
)
return data
<file_sep>from data.op_faker import *<file_sep>import os
from munch import Munch, munchify
from json import load, loads
def load_data_from(file_name):
"""We assume that 'external_params' is a a valid json if passed
"""
if not os.path.exists(file_name):
file_name = os.path.join(os.path.dirname(__file__), 'data', file_name)
with open(file_name) as file_obj:
if file_name.endswith('.json'):
file_data = Munch.fromDict(load(file_obj))
elif file_name.endswith('.yaml'):
file_data = Munch.fromYAML(file_obj)
return munchify(file_data)<file_sep>from http.client import BadStatusLine
from client.criteria_client import ClientCriteria
from retrying import retry
def retry_if_request_failed(exception):
status_code = getattr(exception, 'status_code', None)
print(status_code)
    if status_code is not None and (500 <= status_code < 600 or status_code in (409, 429, 412)):
return True
else:
return isinstance(exception, BadStatusLine)
class StableClient(ClientCriteria):
@retry(stop_max_attempt_number=100, wait_random_min=500,
wait_random_max=4000, retry_on_exception=retry_if_request_failed)
def request(self, *args, **kwargs):
return super(StableClient, self).request(*args, **kwargs)
def prepare_api_wrapper(username, password, api_host, api_version):
return StableClient(username, password, api_host, api_version)<file_sep>#!/usr/bin/python
import os
import json
from json import loads
import asyncio
import requests
from munch import munchify
from requests.auth import HTTPBasicAuth
from client.exceptions import InvalidResponse
class ClientCriteria(object):
def __init__(self, username='', password='', api_host='http://localhost:8000',
api_version='0', loop=asyncio.get_event_loop()):
self._username = username
self._password = <PASSWORD>
self.api_host = api_host
self.api_version = api_version
self.loop = loop
self.url = api_host + '/api/' + api_version + '/'
self.headers = {'Content-Type': 'application/json'}
#######Method GET####################
def get_criteria(self, path='', criteria_id=None):
if isinstance(criteria_id, type(None)):
response = requests.get(
url=self.url + path + '/',
auth=HTTPBasicAuth(self._username, self._password),
headers=self.headers
)
else:
response = requests.get(
url=self.url + path + '/' + criteria_id + '/',
auth=HTTPBasicAuth(self._username, self._password),
headers=self.headers,
)
if response.status_code == 200:
return munchify(loads(response.text))
raise InvalidResponse(response)
def get_status(self, path='', status_path=None):
response = requests.get(
url=self.url + path + '/' + status_path,
auth=HTTPBasicAuth(self._username, self._password),
headers=self.headers,
)
if response.status_code == 200:
return munchify(loads(response.text))
raise InvalidResponse(response)
######Method POST########################
def create_criteria(self, path='', data={}):
response = requests.post(
url=self.url + path + '/',
auth=HTTPBasicAuth(self._username, self._password),
data=json.dumps(data),
headers=self.headers
)
if response.status_code == 201:
return munchify(loads(response.text))
raise InvalidResponse(response)
##############Method PATCH#############
def update_criteria(self, criteria_id, path='', data={}):
response = requests.patch(
url=self.url + path + '/' + criteria_id + '/',
auth=HTTPBasicAuth(self._username, self._password),
headers=self.headers,
data=json.dumps(data)
)
if response.status_code == 200:
return munchify(loads(response.text))
raise InvalidResponse(response)
##########Method DELETE###################
def delete_criteria(self, criteria_id, path=''):
response = requests.delete(
url=self.url + path + '/' + criteria_id + '/',
auth=HTTPBasicAuth(self._username, self._password),
headers=self.headers
)
if response.status_code == 200:
return response.status_code
raise InvalidResponse(response)
<file_sep>certifi==2019.11.28
chardet==3.0.4
Faker==3.0.0
idna==2.8
munch==2.5.0
python-dateutil==2.8.1
pytz==2019.3
PyYAML==5.2
requests==2.22.0
retrying==1.3.3
robotframework==3.1.2
six==1.13.0
text-unidecode==1.3
urllib3==1.25.7
<file_sep># -*- coding: utf-8 -*-
from faker.generator import random
from faker.providers import BaseProvider
from copy import deepcopy
from munch import Munch
import random
from json import load
import os
def load_data_from_file(file_name):
if not os.path.exists(file_name):
file_name = os.path.join(os.path.dirname(__file__), file_name)
with open(file_name) as file_obj:
if file_name.endswith(".json"):
return Munch.fromDict(load(file_obj))
elif file_name.endswith(".yaml"):
return Munch.fromYAML(file_obj)
class OP_Provider(BaseProvider):
_fake_data = load_data_from_file("op_faker_data.json")
word_list = _fake_data.words
procuringEntities = _fake_data.procuringEntities
funders = _fake_data.funders
funders_scheme_list = _fake_data.funders_scheme
addresses = _fake_data.addresses
classifications = _fake_data.classifications
cpvs = _fake_data.cpvs
moz_cpvs = _fake_data.moz_cpvs
items_base_data = _fake_data.items_base_data
rationale_types = _fake_data.rationale_types
units = _fake_data.units
@classmethod
def randomize_nb_elements(self, number=10, le=60, ge=140):
"""
Returns a random value near number.
:param number: value to which the result must be near
:param le: lower limit of randomizing (percents). Default - 60
:param ge: upper limit of randomizing (percents). Default - 140
:returns: a random int in range [le * number / 100, ge * number / 100]
with minimum of 1
"""
if le > ge:
raise Exception("Lower bound: {} is greater then upper: {}.".format(le, ge))
return int(number * self.random_int(min=le, max=ge) / 100) + 1
@classmethod
def word(self):
"""
:example 'Курка'
"""
return self.random_element(self.word_list)
@classmethod
def words(self, nb=3):
"""
Generate an array of random words
:example: array('Надіньте', 'фуражка', 'зелено')
:param nb: how many words to return
"""
return random.sample(self.word_list, nb)
@classmethod
def sentence(self, nb_words=5, variable_nb_words=True):
"""
Generate a random sentence
:example: 'Курка надіньте пречудовий зелено на.'
:param nb_words: how many words the sentence should contain
:param variable_nb_words: set to false if you want exactly $nbWords returned,
otherwise $nbWords may vary by +/-40% with a minimum of 1
"""
if nb_words <= 0:
return ''
if variable_nb_words:
nb_words = self.randomize_nb_elements(number=nb_words)
words = self.words(nb_words)
words[0] = words[0].title()
return " ".join(words) + '.'
@classmethod
def title(self):
return self.sentence(nb_words=3)
@classmethod
def description(self):
return self.sentence(nb_words=10)
@classmethod
def procuringEntity(self):
return deepcopy(self.random_element(self.procuringEntities))
@classmethod
def funders_data(self):
return self.random_element(self.funders)
@classmethod
def funder_scheme(self):
return self.random_element(self.funders_scheme_list)
@classmethod
def cpv(self, cpv_group=None):
if cpv_group:
cpvs = []
for cpv_element in self.cpvs:
if cpv_element.startswith(cpv_group):
cpvs.append(cpv_element)
return self.random_element(cpvs)
else:
return self.random_element(self.cpvs)
@classmethod
def fake_item(self, scheme_group):
# """
# Generate a random item for criteria
scheme_group = str(scheme_group)
similar_scheme = []
        actual_schema = ['ДК021', 'CPV_EN', 'CPV_RU', 'ДК003', 'ДК015', 'ДК018', 'КЕКВ', 'NONE', 'specialNorms', 'UA-ROAD', 'GMDN']
for scheme_element in self.classifications:
if scheme_element["classification"]["scheme"].startswith(scheme_group) \
and scheme_element["additionalClassifications"][0]["scheme"] in actual_schema \
and len(scheme_element["additionalClassifications"][0]["id"]) <= 10:
similar_scheme.append(scheme_element)
scheme = random.choice(similar_scheme)
similar_units = []
for unit_element in self.units:
similar_units.append(unit_element)
unit = random.choice(similar_units)
data = dict(scheme)
data['unit'] = unit
        return deepcopy(data)<file_sep>## TO RUN THE TESTS, FOLLOW THESE STEPS:
### 1. CLONE THE REPOSITORY
* Run in the console: git clone https://github.com/orest77/eCatalogues_tests.git
### 2. To run the test scenario you need to:
* Make the <run_test.py> file executable (run in the console: sudo chmod +x run_test.py)
* Run the tests: ./run_test.py e_catalogues_tests.robot
### 3. YOU CAN ALSO SKIP STEP 2 AND RUN THE TESTS WITH THE COMMAND
* python3 run_test.py e_catalogues_tests.robot
"Markdown",
"Python",
"Text"
] | 9 | Python | orest77/eCatalogues_tests | 29c5ab256cbb92d5dbc2b3ba788932c238810503 | 75c7caee8f5d28348c6a6ec7e2946be048ddf25d | |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace Day5
{
class Program
{
//Part 1
//private const int inputParameter = 1;
//Part 2
private const int inputParameter = 5;
static void Main(string[] args)
{
using (var sr = new StreamReader("input.txt"))
{
var originalOpcodes = sr.ReadToEnd().Split(',').Select(x => int.Parse(x)).ToList();
// Part 1
CalculateOpcodes(originalOpcodes);
}
}
static void CalculateOpcodes(List<int> opCodes)
{
int pointerPosition = 0;
while(true)
{
var instruction = CalculateInstruction(opCodes[pointerPosition]).ToList();
pointerPosition = ExecuteInstruction(instruction, pointerPosition, opCodes);
if (pointerPosition == -1)
return;
}
}
private static int ExecuteInstruction(List<int> instruction, int pointerPosition, List<int> opCodes)
{
switch(instruction[4])
{
case 1:
opCodes[opCodes[pointerPosition + 3]] = CalculateVariable(pointerPosition + 1, instruction[2], opCodes) + CalculateVariable(pointerPosition + 2, instruction[1], opCodes);
return pointerPosition + 4;
case 2:
opCodes[opCodes[pointerPosition + 3]] = CalculateVariable(pointerPosition + 1, instruction[2], opCodes) * CalculateVariable(pointerPosition + 2, instruction[1], opCodes);
return pointerPosition + 4;
case 3:
opCodes[opCodes[pointerPosition + 1]] = inputParameter;
return pointerPosition + 2;
case 4:
Console.Write(CalculateVariable(pointerPosition + 1, instruction[2], opCodes));
return pointerPosition + 2;
case 5:
if (CalculateVariable(pointerPosition + 1, instruction[2], opCodes) != 0)
return CalculateVariable(pointerPosition + 2, instruction[1], opCodes);
return pointerPosition + 3;
case 6:
if (CalculateVariable(pointerPosition + 1, instruction[2], opCodes) == 0)
return CalculateVariable(pointerPosition + 2, instruction[1], opCodes);
return pointerPosition + 3;
case 7:
if (CalculateVariable(pointerPosition + 1, instruction[2], opCodes) < CalculateVariable(pointerPosition + 2, instruction[1], opCodes))
opCodes[opCodes[pointerPosition + 3]] = 1;
else
opCodes[opCodes[pointerPosition + 3]] = 0;
return pointerPosition + 4;
case 8:
if (CalculateVariable(pointerPosition + 1, instruction[2], opCodes) == CalculateVariable(pointerPosition + 2, instruction[1], opCodes))
opCodes[opCodes[pointerPosition + 3]] = 1;
else
opCodes[opCodes[pointerPosition + 3]] = 0;
return pointerPosition + 4;
case 9:
return -1;
}
return 0;
int CalculateVariable(int position, int mode, List<int> opCodes) => mode == 1 ? opCodes[position] : opCodes[opCodes[position]];
}
private static IEnumerable<int> CalculateInstruction(int wholeOpcode)
{
var stringOpcode = wholeOpcode.ToString();
for (int i = 4; i >= 0; i--)
{
if (stringOpcode.Length - 1 < i)
yield return 0;
else
yield return int.Parse(stringOpcode[stringOpcode.Length - (i + 1)].ToString());
}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
namespace Day4
{
class Program
{
static void Main(string[] args)
{
const int begin = 153517, end = 630395;
var listOfPasswords = new List<int>();
// Part 1
bool check = HasExactlyTwoAdjacentDigits(123444);
for (int currentPassword = begin; currentPassword <= end; currentPassword++)
{
if(HasTwoAdjacentDigits(currentPassword) && !IsDecreasing(currentPassword))
listOfPasswords.Add(currentPassword);
}
Console.WriteLine($"Number of valid passwords for part 1: {listOfPasswords.Count}");
// Part 2
listOfPasswords = new List<int>();
for (int currentPassword = begin; currentPassword <= end; currentPassword++)
{
if(HasExactlyTwoAdjacentDigits(currentPassword) && !IsDecreasing(currentPassword))
listOfPasswords.Add(currentPassword);
}
Console.WriteLine($"Number of valid passwords for part 2: {listOfPasswords.Count}");
}
private static bool HasTwoAdjacentDigits(int currentPassword)
{
string passwordInString = currentPassword.ToString();
for (int letterNumber = 0; letterNumber < passwordInString.Length - 1; letterNumber++)
if (passwordInString[letterNumber] == passwordInString[letterNumber + 1])
return true;
return false;
}
private static bool IsDecreasing(int currentPassword)
{
string passwordInString = currentPassword.ToString();
for (int letterNumber = 0; letterNumber < passwordInString.Length - 1; letterNumber++)
if (passwordInString[letterNumber] > passwordInString[letterNumber + 1])
return true;
return false;
}
private static bool HasExactlyTwoAdjacentDigits(int currentPassword)
{
var numberCounts = new Dictionary<char,int>();
string passwordInString = currentPassword.ToString();
for (int letterNumber = 0; letterNumber < passwordInString.Length - 1; letterNumber++)
{
if (passwordInString[letterNumber] == passwordInString[letterNumber + 1])
{
if (numberCounts.ContainsKey(passwordInString[letterNumber]))
numberCounts[passwordInString[letterNumber]]++;
else
numberCounts.Add(passwordInString[letterNumber], 2);
}
}
return numberCounts.ContainsValue(2);
}
}
}
<file_sep>using System;
using System.IO;
using System.Linq;
namespace Day8
{
class Program
{
static void Main(string[] args)
{
const int width = 25, height = 6, lettersInLayer = width * height;
var text = File.ReadAllText("./input.txt");
int numberOfLayers = text.Length / lettersInLayer;
int[][][] layers = new int[numberOfLayers][][];
for (int layerNumber = 0; layerNumber < numberOfLayers; layerNumber++)
{
layers[layerNumber] = new int[height][];
for (int currentHeight = 0; currentHeight < height; currentHeight++)
{
layers[layerNumber][currentHeight] = new int[width];
for (int currentWidth = 0; currentWidth < width; currentWidth++)
{
layers[layerNumber][currentHeight][currentWidth] = int.Parse(text[(layerNumber * lettersInLayer) + (currentHeight * width) + currentWidth].ToString());
}
}
}
// Part 1
int smallestNumOfZeros = int.MaxValue;
int indexOfLayerWithSmallestNumberOfZeros = 0;
for(int i = 0; i < numberOfLayers; i++)
{
int numberOfZerosInCurrentLayer = CountNumberOfGivenNumberInLayer(layers[i], 0);
if (numberOfZerosInCurrentLayer < smallestNumOfZeros)
{
smallestNumOfZeros = numberOfZerosInCurrentLayer;
indexOfLayerWithSmallestNumberOfZeros = i;
}
}
int result = CountNumberOfGivenNumberInLayer(layers[indexOfLayerWithSmallestNumberOfZeros], 1) * CountNumberOfGivenNumberInLayer(layers[indexOfLayerWithSmallestNumberOfZeros], 2);
Console.WriteLine($"Part 1 solution: {result}");
// Part 2
int[,] renderedImage = new int[height,width];
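            // For every pixel, scan the layers front to back and keep the first value that is not 2
            // (transparent); that value becomes the rendered pixel.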
for (int currentHeight = 0; currentHeight < height; currentHeight++)
{
for (int currentWidth = 0; currentWidth < width; currentWidth++)
{
int layerNumber = 0;
int currentPixel = 2;
do
{
currentPixel = layers[layerNumber][currentHeight][currentWidth];
layerNumber++;
}
while (currentPixel == 2);
renderedImage[currentHeight,currentWidth] = currentPixel;
}
}
// Printing password
for (int currentHeight = 0; currentHeight < height; currentHeight++)
{
for (int currentWidth = 0; currentWidth < width; currentWidth++)
{
if(renderedImage[currentHeight,currentWidth] == 0)
Console.Write(" ");
else
Console.Write("X");
}
Console.WriteLine();
}
}
private static int CountNumberOfGivenNumberInLayer(int[][] layer, int givenNumber) => layer.SelectMany(x => x).Count(x => x == givenNumber);
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace Day10
{
class Program
{
static void Main(string[] args)
{
var asteroids = new List<Asteroid>();
var lines = File.ReadAllLines("input.txt");
for (int i = 0; i < lines.Length; i++)
{
for (int j = 0; j < lines[i].Length; j++)
{
if (lines[i][j].Equals('#'))
asteroids.Add(new Asteroid(j,i));
}
}
// Part 1
int maxSeenAsteroids = 0;
Asteroid selectedAsteroid = null;
foreach (var asteroid in asteroids)
{
var seenAsteroids = GetSeenAsteroids(asteroid, asteroids);
if (seenAsteroids.Count > maxSeenAsteroids)
{
maxSeenAsteroids = seenAsteroids.Count;
selectedAsteroid = asteroid;
}
}
Console.WriteLine($"Solution part 1: {maxSeenAsteroids}");
// Part 2
int countOfDestroyedAsteroids = 0;
Queue<Asteroid> asteroidsToDestroy = null;
while (true)
{
var currentlySeenAsteroid = GetSeenAsteroids(selectedAsteroid, asteroids);
var rightSide = currentlySeenAsteroid.Where(x => x.Key.side == true).OrderBy(x => x.Key.sinus).Select(x => x.Value);
var leftSide = currentlySeenAsteroid.Where(x => x.Key.side == false).OrderByDescending(x => x.Key.sinus).Select(x => x.Value);
asteroidsToDestroy = new Queue<Asteroid>(rightSide.Concat(leftSide));
while (asteroidsToDestroy.Count > 0)
{
if (countOfDestroyedAsteroids == 199)
{
var asteroid = asteroidsToDestroy.Dequeue();
Console.WriteLine($"200th destroyed asteroid will be: ({asteroid.X}, {asteroid.Y})");
return;
}
else
{
asteroids.Remove(asteroidsToDestroy.Dequeue());
countOfDestroyedAsteroids++;
}
}
}
}
private static Dictionary<(bool side, double sinus), Asteroid> GetSeenAsteroids(Asteroid asteroid, List<Asteroid> asteroids)
{
var dictionary = new Dictionary<(bool, double), Asteroid>();
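            // Asteroids that lie on the same ray from the origin share the same (side, sinus) key,
            // so only the closest asteroid in each direction ends up in the dictionary.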
foreach (var asteroidToCheck in asteroids)
{
if (asteroidToCheck == asteroid)
continue;
var sideAndSinus = CalculateSideAndSinus(asteroid, asteroidToCheck);
var distance = CalculateDistance(asteroid, asteroidToCheck);
if (dictionary.ContainsKey(sideAndSinus) )
{
if (distance < CalculateDistance(asteroid, dictionary[sideAndSinus]))
{
dictionary.Remove(sideAndSinus);
dictionary.Add(sideAndSinus, asteroidToCheck);
}
else
continue;
}
else
dictionary.Add(sideAndSinus, asteroidToCheck);
}
return dictionary;
(bool, double) CalculateSideAndSinus(Asteroid asteroid, Asteroid asteroidToCheck)
{
bool side = asteroid.X <= asteroidToCheck.X;
double sinus = CalculateSinus(asteroid, asteroidToCheck);
return (side, RoundSinusToFivePlaces(sinus));
}
double CalculateSinus(Asteroid asteroid, Asteroid asteroidToCheck) => (asteroidToCheck.Y - asteroid.Y) / CalculateDistance(asteroid, asteroidToCheck);
double CalculateDistance(Asteroid asteroid, Asteroid asteroidToCheck) => Math.Sqrt( Math.Pow(asteroid.X - asteroidToCheck.X,2) + Math.Pow(asteroid.Y - asteroidToCheck.Y,2));
double RoundSinusToFivePlaces(double exactSinus) => (double)((int)(exactSinus * 100000)) / 100000;
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Numerics;
using System.Threading;
using System.Threading.Tasks;
namespace Day13
{
class Program
{
static async Task Main(string[] args)
{
var opcodes = File.ReadAllText("input.txt").Split(',').Select(x => BigInteger.Parse(x)).ToList();
for (int i = 0; i < 1000; i++)
opcodes.Add(0);
var inputQueue = new Queue<int>();
var outputQueue = new Queue<int>();
var opcodesCalculator = new OpcodesCalculator(opcodes);
await opcodesCalculator.CalculateOpcodes(inputQueue, outputQueue);
int boardSize = 1000;
var board = new int[boardSize][];
for (int i = 0; i < boardSize; i++)
board[i] = new int[boardSize];
// Part 1
while (outputQueue.Count > 0)
{
int x = outputQueue.Dequeue();
int y = outputQueue.Dequeue();
int tileId = outputQueue.Dequeue();
board[y][x] = tileId;
}
int numberOfBlockTiles = board.SelectMany(x => x).Where(x => x == 2).Count();
Console.WriteLine($"First part solution: {numberOfBlockTiles}");
// Part 2
boardSize = 40;
var tileBoard = new Tile[boardSize][];
for (int i = 0; i < boardSize; i++)
{
tileBoard[i] = new Tile[boardSize];
for (int j = 0; j < boardSize; j++)
{
tileBoard[i][j] = new Tile(i, j, 0);
}
}
opcodes[0] = 2;
bool waitedForOutput = false;
bool gameInitialized = false;
opcodesCalculator = new OpcodesCalculator(opcodes);
long score = 0;
            bool firstCheckForEndScore = true;
var task = Task.Factory.StartNew(() => opcodesCalculator.CalculateOpcodes(inputQueue, outputQueue)).Unwrap();
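            // Game loop: read (x, y, tileId) triples from the program's output, keep the tile board
            // up to date, and whenever the program waits for input push a joystick move that follows the ball.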
while (true)
{
if (!waitedForOutput)
{
await Task.Delay(50);
waitedForOutput = true;
}
if (outputQueue.Count < 3)
{
//DrawBoard(tileBoard); // for manual playing
var ball = tileBoard.SelectMany(x => x).First(x => x.Type == 4);
var paddle = tileBoard.SelectMany(x => x).First(x => x.Type == 3);
int move = Math.Sign(ball.X - paddle.X);
//int move = ParseInput(Console.ReadKey()); // for manual playing
inputQueue.Enqueue(move);
waitedForOutput = false;
gameInitialized = true;
continue;
}
int x = outputQueue.Dequeue();
int y = outputQueue.Dequeue();
int tileId = outputQueue.Dequeue();
if (x == -1 && y == 0)
{
score = tileId;
}
else
{
if (tileId == 3 || tileId == 4)
{
var obj = tileBoard.SelectMany(x => x).FirstOrDefault(x => x.Type == tileId);
if (obj != null)
{
obj.Type = 0;
}
}
tileBoard[x][y].Type = tileId;
}
if (gameInitialized && tileBoard.SelectMany(x => x).Where(x => x.Type == 2).Count() == 0)
{
                    if (!firstCheckForEndScore) // the score needs some time to propagate
{
Console.WriteLine($"Second part solution: number of points = {score}");
break;
}
                    firstCheckForEndScore = false;
}
}
}
private static int ParseInput(ConsoleKeyInfo consoleKeyInfo) // for manual playing
{
switch(consoleKeyInfo.Key)
{
case ConsoleKey.LeftArrow:
return -1;
case ConsoleKey.RightArrow:
return 1;
default:
return 0;
}
}
private static void DrawBoard(Tile[][] tileBoard)
{
Console.Clear();
Console.WriteLine("Board state:");
for (int i = 0; i < tileBoard.Length; i++)
{
for (int j = 0; j < tileBoard[i].Length; j++)
{
Console.Write(ChangeIntToChar(tileBoard[j][i].Type));
}
Console.WriteLine();
}
char ChangeIntToChar(int tileId)
{
switch(tileId)
{
case 0:
return ' ';
case 1:
return 'X';
case 2:
return 'B';
case 3:
return '-';
case 4:
return 'O';
}
return ' ';
}
}
}
}
<file_sep>using System.Collections.Generic;
namespace Day11
{
public class Floor
{
private int[,] floor;
private HashSet<(int, int)> paintedPanels = new HashSet<(int, int)>();
int currentX;
int currentY;
Direction currentDirectionOfRobot = Direction.Up;
public Floor(int floorSize)
{
floor = new int[floorSize, floorSize];
currentX = floorSize / 2;
currentY = floorSize / 2;
}
public int GetCurrentColor() => floor[currentX, currentY];
public void Paint(int color)
{
floor[currentX, currentY] = color;
paintedPanels.Add((currentX, currentY));
}
public int GetNumberOfPaintedPanels() => paintedPanels.Count;
public int[,] GetFloor() => floor;
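        // A turn of 0 rotates the robot left and 1 rotates it right; the heading is tracked modulo 4
        // and the robot then advances one panel in the new direction.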
public void Move(int turn)
{
turn = turn == 0 ? -1 : 1;
currentDirectionOfRobot = (Direction)Mod(((int)currentDirectionOfRobot + turn), 4);
switch (currentDirectionOfRobot)
{
case Direction.Up:
currentY++;
break;
case Direction.Right:
currentX++;
break;
case Direction.Down:
currentY--;
break;
case Direction.Left:
currentX--;
break;
}
}
private int Mod(int x, int m) => (x%m + m)%m;
}
public enum Direction
{
Up = 0,
Right = 1,
Down = 2,
Left = 3
}
}<file_sep>using System.Collections.Generic;
namespace Day6
{
internal class OrbitingObject
{
public string Name { get; set; }
public OrbitingObject DirectOrbit { get; set; }
public HashSet<OrbitingObject> AllNeighbours { get; set; } = new HashSet<OrbitingObject>();
public OrbitingObject PreviousNode { get; set; }
public OrbitingObject(string name, OrbitingObject directOrbit)
{
Name = name;
DirectOrbit = directOrbit;
}
}
}<file_sep>using System;
using System.Collections.Generic;
namespace Day3
{
internal class Field
{
private int[,] internalField;
public List<(int, int)> Crosses { get; } = new List<(int, int)>();
public Field(int xSize, int ySize)
{
internalField = new int[xSize, ySize];
}
internal (int x, int y) Apply(Command command, int startingX, int startingY)
{
switch(command.Direction)
{
case DirectionEnum.Left:
MarkAsVisited(command, startingX, startingY, -1, 0);
return (startingX - command.Number, startingY);
case DirectionEnum.Up:
MarkAsVisited(command, startingX, startingY, 0, 1);
return (startingX , startingY + command.Number);
case DirectionEnum.Right:
MarkAsVisited(command, startingX, startingY, 1, 0);
return (startingX + command.Number, startingY);
case DirectionEnum.Down:
MarkAsVisited(command, startingX, startingY, 0, -1);
return (startingX, startingY - command.Number);
}
throw new Exception("Unknown direction");
}
private void MarkAsVisited(Command command, int startingX, int startingY, int moveX, int moveY)
{
for(int i = 0; i < command.Number; i++)
{
internalField[startingX + (i * moveX), startingY + (i * moveY)] += command.NumberOfPath + 1;
if (internalField[startingX + (i * moveX), startingY + (i * moveY)] == 3)
Crosses.Add((startingX + (i * moveX), startingY + (i * moveY)));
}
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Numerics;
using System.Threading;
using System.Threading.Tasks;
namespace Day11
{
class Program
{
static async Task Main(string[] args)
{
var opcodes = File.ReadAllText("input.txt").Split(',').Select(x => BigInteger.Parse(x)).ToList();
for (int i = 0; i < 1000; i++)
opcodes.Add(0);
var inputQueue = new Queue<int>();
var outputQueue = new Queue<int>();
var floor = new Floor(200);
var opcodesCalculator = new OpcodesCalculator(opcodes);
var task = Task.Factory.StartNew(() => opcodesCalculator.CalculateOpcodes(inputQueue, outputQueue)).Unwrap();
//Part 1
while (!task.IsCompleted)
{
inputQueue.Enqueue(floor.GetCurrentColor());
while (outputQueue.Count != 2)
{
if (task.IsCompleted)
break;
else
await Task.Delay(5);
}
if (task.IsCompleted)
break;
floor.Paint(outputQueue.Dequeue());
floor.Move(outputQueue.Dequeue());
}
Console.WriteLine($"Part 1: Number of painted {floor.GetNumberOfPaintedPanels()}");
//Part 2
inputQueue.Clear();
outputQueue.Clear();
floor = new Floor(100);
opcodesCalculator = new OpcodesCalculator(opcodes);
task = Task.Factory.StartNew(() => opcodesCalculator.CalculateOpcodes(inputQueue, outputQueue)).Unwrap();
floor.Paint(1);
while (!task.IsCompleted)
{
inputQueue.Enqueue(floor.GetCurrentColor());
while (outputQueue.Count != 2)
{
if (task.IsCompleted)
break;
else
await Task.Delay(5);
}
if (task.IsCompleted)
break;
floor.Paint(outputQueue.Dequeue());
floor.Move(outputQueue.Dequeue());
}
Console.WriteLine("Part 2:");
var floorAfterPainting = floor.GetFloor();
for(int i = 0; i < floorAfterPainting.GetLength(0); i++)
{
for (int j = 0; j < floorAfterPainting.GetLength(1); j++)
{
Console.Write(floorAfterPainting[i, j] == 0 ? " " : "X");
}
Console.WriteLine();
}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
namespace Day12
{
class Moon
{
public Vector3D Position { get; private set; }
public Vector3D BasePosition { get; }
public Vector3D Velocity { get; private set; } = new Vector3D();
public Moon(int x, int y, int z)
{
Position = new Vector3D { X = x, Y = y, Z = z };
BasePosition = new Vector3D { X = x, Y = y, Z = z };
}
public void ResetToBasePosition()
{
Position = BasePosition;
Velocity = new Vector3D();
}
public void CalculateGravity(List<Moon> moons)
{
foreach(var moon in moons)
{
if (this == moon)
continue;
var velocityGain = CalculateVelocityGain(this, moon);
Velocity = new Vector3D(Velocity.X + velocityGain.x, Velocity.Y + velocityGain.y, Velocity.Z + velocityGain.z);
}
(int x, int y, int z) CalculateVelocityGain(Moon moon1, Moon moon2)
{
int x, y, z;
x = Math.Sign(moon2.Position.X - moon1.Position.X);
y = Math.Sign(moon2.Position.Y - moon1.Position.Y);
z = Math.Sign(moon2.Position.Z - moon1.Position.Z);
return (x, y, z);
}
}
public void ApplyVelocity()
{
Position = new Vector3D(Position.X + Velocity.X, Position.Y + Velocity.Y, Position.Z + Velocity.Z);
}
public int CalculateTotalEnergy()
{
return CalculatePotentialEnergy() * CalculateKineticEnergy();
}
private int CalculatePotentialEnergy() => Math.Abs(Position.X) + Math.Abs(Position.Y) + Math.Abs(Position.Z);
private int CalculateKineticEnergy() => Math.Abs(Velocity.X) + Math.Abs(Velocity.Y) + Math.Abs(Velocity.Z);
}
}
<file_sep>using System;
namespace Day3
{
internal class Command
{
public DirectionEnum Direction { get; set; }
public int Number { get; set; }
public int NumberOfPath { get; set; }
public Command(string commandAsString, int numberOfPath)
{
switch(commandAsString[0])
{
case 'L':
Direction = DirectionEnum.Left;
break;
case 'U':
Direction = DirectionEnum.Up;
break;
case 'R':
Direction = DirectionEnum.Right;
break;
case 'D':
Direction = DirectionEnum.Down;
break;
}
Number = int.Parse(commandAsString.Substring(1));
NumberOfPath = numberOfPath;
}
}
internal enum DirectionEnum
{
Left,
Up,
Right,
Down
}
}<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Numerics;
using System.Threading.Tasks;
namespace Day9
{
class Program
{
static async Task Main(string[] args)
{
var originalOpcodes = File.ReadAllText("input.txt").Split(',').Select(x => BigInteger.Parse(x)).ToList();
for (int i = 0; i < 10000; i++)
originalOpcodes.Add(0);
// Part 1
var inputQueue = new Queue<int>();
inputQueue.Enqueue(1);
var opCalc = new OpcodesCalculator(originalOpcodes);
Console.WriteLine("Part 1 solution: ");
await opCalc.CalculateOpcodes(inputQueue);
// Part 2
inputQueue = new Queue<int>();
inputQueue.Enqueue(2);
Console.WriteLine("Part 2 solution: ");
opCalc = new OpcodesCalculator(originalOpcodes);
await opCalc.CalculateOpcodes(inputQueue);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace Day6
{
class Program
{
static void Main(string[] args)
{
using (var sr = new StreamReader("input.txt"))
{
var stringOrbits = sr.ReadToEnd().Split('\n').ToList();
var dictionaryOfObjects = new Dictionary<string, OrbitingObject>();
dictionaryOfObjects.Add("COM",new OrbitingObject("COM",null));
foreach (var stringOrbit in stringOrbits)
{
var firstObjectName = stringOrbit.Substring(0,3);
var secondObjectName = stringOrbit.Substring(4,3);
// first object
if (!dictionaryOfObjects.ContainsKey(firstObjectName))
{
dictionaryOfObjects.Add(firstObjectName, new OrbitingObject(firstObjectName, null));
}
// second object
if (!dictionaryOfObjects.ContainsKey(secondObjectName))
{
dictionaryOfObjects.Add(secondObjectName, new OrbitingObject(secondObjectName, dictionaryOfObjects[firstObjectName]));
}
else
{
dictionaryOfObjects[secondObjectName].DirectOrbit = dictionaryOfObjects[firstObjectName];
}
// needed for 2nd part
dictionaryOfObjects[firstObjectName].AllNeighbours.Add(dictionaryOfObjects[secondObjectName]);
dictionaryOfObjects[secondObjectName].AllNeighbours.Add(dictionaryOfObjects[firstObjectName]);
}
// Part 1
int sumOfOrbits = 0;
foreach (var orbitingObject in dictionaryOfObjects.Values)
{
sumOfOrbits += CountOrbits(orbitingObject);
}
Console.WriteLine($"Number of orbits = {sumOfOrbits}");
// Part 2 BFS search
var startObject = dictionaryOfObjects["YOU"];
var endObject = dictionaryOfObjects["SAN"];
var visitedNodes = new List<OrbitingObject>();
visitedNodes.Add(startObject);
var queue = new Queue<OrbitingObject>();
queue.Enqueue(startObject);
while (queue.Count != 0)
{
var currentNode = queue.Dequeue();
if (currentNode == endObject)
break;
foreach (var neighbour in currentNode.AllNeighbours)
{
if (visitedNodes.Contains(neighbour))
continue;
neighbour.PreviousNode = currentNode;
queue.Enqueue(neighbour);
visitedNodes.Add(neighbour);
}
}
int sumOfHops = 0;
var tempNode = endObject;
while (tempNode.PreviousNode != startObject)
{
sumOfHops++;
tempNode = tempNode.PreviousNode;
}
Console.WriteLine($"Number of hops = {sumOfHops - 1}");
}
}
private static int CountOrbits(OrbitingObject orbitingObject)
{
if (orbitingObject.DirectOrbit == null)
return 0;
else
return 1 + CountOrbits(orbitingObject.DirectOrbit);
}
}
}
<file_sep>using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace Day7
{
class Program
{
static void Main(string[] args)
{
using (var sr = new StreamReader("input.txt"))
{
var originalOpcodes = sr.ReadToEnd().Split(',').Select(x => int.Parse(x)).ToList();
// Part 1
var permutations = GenerateAllPermutations(Enumerable.Range(0, 5), 5);
int part1MaxSignal = GetMaxThrusterSignal(originalOpcodes, permutations);
Console.WriteLine($"Part 1 highest signal = {part1MaxSignal}");
// Part 2
permutations = GenerateAllPermutations(Enumerable.Range(5, 5), 5);
int part2MaxSignal = GetMaxThrusterSignal(originalOpcodes, permutations);
Console.WriteLine($"Part 2 highest signal = {part2MaxSignal}");
}
}
private static List<List<int>> GenerateAllPermutations(IEnumerable<int> list, int length)
{
if (length == 1)
return list.Select(x => new List<int> { x }).ToList();
return GenerateAllPermutations(list, length - 1)
.SelectMany(x => list.Where(y => !x.Contains(y)),
(t1, t2) => t1.Concat(new List<int> { t2 }).ToList()).ToList();
}
private static int GetMaxThrusterSignal(List<int> originalOpcodes, List<List<int>> permutations)
{
var concurrentBag = new ConcurrentBag<int>();
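            // Each permutation wires the five amplifier programs in a ring: queue i is amplifier i's input
            // and receives the previous amplifier's output, with amplifier E feeding back into queue 0,
            // so the final thruster signal for the permutation ends up in queue 0.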
Parallel.For(0, permutations.Count, number =>
{
var permutation = permutations[number];
var taskParameterQueues = new List<Queue<int>>();
for (int i = 0; i < permutation.Count; i++)
{
taskParameterQueues.Add(new Queue<int>(new int[] { permutation[i] }));
}
taskParameterQueues[0].Enqueue(0);
var tasks = new List<Task>();
tasks.Add(Task.Factory.StartNew(() => CalculateOpcodes(originalOpcodes, taskParameterQueues[0], taskParameterQueues[1])).Unwrap());
tasks.Add(Task.Factory.StartNew(() => CalculateOpcodes(originalOpcodes, taskParameterQueues[1], taskParameterQueues[2])).Unwrap());
tasks.Add(Task.Factory.StartNew(() => CalculateOpcodes(originalOpcodes, taskParameterQueues[2], taskParameterQueues[3])).Unwrap());
tasks.Add(Task.Factory.StartNew(() => CalculateOpcodes(originalOpcodes, taskParameterQueues[3], taskParameterQueues[4])).Unwrap());
tasks.Add(Task.Factory.StartNew(() => CalculateOpcodes(originalOpcodes, taskParameterQueues[4], taskParameterQueues[0])).Unwrap());
Task.WaitAll(tasks.ToArray());
concurrentBag.Add(taskParameterQueues[0].Dequeue());
});
return concurrentBag.Max();
}
static async Task CalculateOpcodes(List<int> originalOpCodes, Queue<int> input, Queue<int> output)
{
var opCodes = new List<int>(originalOpCodes);
int pointerPosition = 0;
while(true)
{
var instruction = CalculateInstruction(opCodes[pointerPosition]).ToList();
pointerPosition = await ExecuteInstruction(instruction, pointerPosition, opCodes, input, output);
if (pointerPosition == -1)
return;
}
}
private static async Task<int> ExecuteInstruction(List<int> instruction, int pointerPosition, List<int> opCodes, Queue<int> input, Queue<int> output)
{
switch(instruction[4])
{
case 1:
opCodes[opCodes[pointerPosition + 3]] = CalculateVariable(pointerPosition + 1, instruction[2], opCodes) + CalculateVariable(pointerPosition + 2, instruction[1], opCodes);
return pointerPosition + 4;
case 2:
opCodes[opCodes[pointerPosition + 3]] = CalculateVariable(pointerPosition + 1, instruction[2], opCodes) * CalculateVariable(pointerPosition + 2, instruction[1], opCodes);
return pointerPosition + 4;
case 3:
while (input.Count == 0)
{
await Task.Delay(5);
}
opCodes[opCodes[pointerPosition + 1]] = input.Dequeue();
return pointerPosition + 2;
case 4:
output.Enqueue(CalculateVariable(pointerPosition + 1, instruction[2], opCodes));
return pointerPosition + 2;
case 5:
if (CalculateVariable(pointerPosition + 1, instruction[2], opCodes) != 0)
return CalculateVariable(pointerPosition + 2, instruction[1], opCodes);
return pointerPosition + 3;
case 6:
if (CalculateVariable(pointerPosition + 1, instruction[2], opCodes) == 0)
return CalculateVariable(pointerPosition + 2, instruction[1], opCodes);
return pointerPosition + 3;
case 7:
if (CalculateVariable(pointerPosition + 1, instruction[2], opCodes) < CalculateVariable(pointerPosition + 2, instruction[1], opCodes))
opCodes[opCodes[pointerPosition + 3]] = 1;
else
opCodes[opCodes[pointerPosition + 3]] = 0;
return pointerPosition + 4;
case 8:
if (CalculateVariable(pointerPosition + 1, instruction[2], opCodes) == CalculateVariable(pointerPosition + 2, instruction[1], opCodes))
opCodes[opCodes[pointerPosition + 3]] = 1;
else
opCodes[opCodes[pointerPosition + 3]] = 0;
return pointerPosition + 4;
case 9:
return -1;
}
return 0;
int CalculateVariable(int position, int mode, List<int> opCodes) => mode == 1 ? opCodes[position] : opCodes[opCodes[position]];
}
private static IEnumerable<int> CalculateInstruction(int wholeOpcode)
{
var stringOpcode = wholeOpcode.ToString();
for (int i = 4; i >= 0; i--)
{
if (stringOpcode.Length - 1 < i)
yield return 0;
else
yield return int.Parse(stringOpcode[stringOpcode.Length - (i + 1)].ToString());
}
}
}
}
<file_sep>namespace Day3
{
internal class Cell
{
public bool [] VisitedByPath { get; set; } = new bool[2];
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Text;
namespace Day13
{
class Tile
{
public int X { get; set; }
public int Y { get; set; }
public int Type { get; set; }
public Tile(int x, int y, int type)
{
X = x;
Y = y;
Type = type;
}
}
}
<file_sep>namespace Day10
{
internal class Asteroid
{
public int X { get; }
public int Y { get; }
public int NumberOfSeenAsteroids { get; set; }
public Asteroid(int x, int y)
{
X = x;
Y = y;
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Numerics;
using System.Threading.Tasks;
namespace Day11
{
public class OpcodesCalculator
{
private int relativeBase = 0;
private List<BigInteger> opCodes;
public OpcodesCalculator(List<BigInteger> originalOpCodes)
{
this.opCodes = new List<BigInteger>(originalOpCodes);
}
public async Task CalculateOpcodes(Queue<int> input, Queue<int> output = null)
{
int pointerPosition = 0;
while(true)
{
var instruction = CalculateInstruction(opCodes[pointerPosition]).ToList();
pointerPosition = await ExecuteInstruction(instruction, pointerPosition, opCodes, input, output);
if (pointerPosition == -1)
return;
}
}
private async Task<int> ExecuteInstruction(List<int> instruction, int pointerPosition, List<BigInteger> opCodes, Queue<int> input, Queue<int> output)
{
switch(instruction[4])
{
case 1:
opCodes[CalculatePosition(pointerPosition + 3, instruction[0], opCodes)] = CalculateVariable(pointerPosition + 1, instruction[2], opCodes) + CalculateVariable(pointerPosition + 2, instruction[1], opCodes);
return pointerPosition + 4;
case 2:
opCodes[CalculatePosition(pointerPosition + 3, instruction[0], opCodes)] = CalculateVariable(pointerPosition + 1, instruction[2], opCodes) * CalculateVariable(pointerPosition + 2, instruction[1], opCodes);
return pointerPosition + 4;
case 3:
while (input.Count == 0)
{
await Task.Delay(5);
}
opCodes[CalculatePosition(pointerPosition + 1, instruction[2], opCodes)] = input.Dequeue();
return pointerPosition + 2;
case 4:
if (output != null)
output.Enqueue(CalculateVariableWithCastToInt(pointerPosition + 1, instruction[2], opCodes));
else
Console.WriteLine(CalculateVariable(pointerPosition + 1, instruction[2], opCodes));
return pointerPosition + 2;
case 5:
if (CalculateVariable(pointerPosition + 1, instruction[2], opCodes) != 0)
return CalculateVariableWithCastToInt(pointerPosition + 2, instruction[1], opCodes);
return pointerPosition + 3;
case 6:
if (CalculateVariable(pointerPosition + 1, instruction[2], opCodes) == 0)
return CalculateVariableWithCastToInt(pointerPosition + 2, instruction[1], opCodes);
return pointerPosition + 3;
case 7:
if (CalculateVariable(pointerPosition + 1, instruction[2], opCodes) < CalculateVariable(pointerPosition + 2, instruction[1], opCodes))
opCodes[CalculatePosition(pointerPosition + 3, instruction[0], opCodes)] = 1;
else
opCodes[CalculatePosition(pointerPosition + 3, instruction[0], opCodes)] = 0;
return pointerPosition + 4;
case 8:
if (CalculateVariable(pointerPosition + 1, instruction[2], opCodes) == CalculateVariable(pointerPosition + 2, instruction[1], opCodes))
opCodes[CalculatePosition(pointerPosition + 3, instruction[0], opCodes)] = 1;
else
opCodes[CalculatePosition(pointerPosition + 3, instruction[0], opCodes)] = 0;
return pointerPosition + 4;
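// A last digit of 9 covers two instructions: 99 (tens digit also 9) halts, while plain opcode 9 adjusts the relative base.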
case 9:
if (instruction[3] == 9)
return -1;
else
{
relativeBase += CalculateVariableWithCastToInt(pointerPosition + 1, instruction[2], opCodes);
return pointerPosition + 2;
}
}
return 0;
// local functions
BigInteger CalculateVariable(int position, int mode, List<BigInteger> opCodes) => opCodes[CalculatePosition(position, mode, opCodes)];
int CalculatePosition(int position, int mode, List<BigInteger> opCodes)
{
switch(mode)
{
case 0:
return (int)opCodes[position];
case 1:
return position;
case 2:
return relativeBase + (int)opCodes[position];
}
return -1;
}
int CalculateVariableWithCastToInt(int position, int mode, List<BigInteger> opCodes) => (int)CalculateVariable(position, mode, opCodes);
}
private IEnumerable<int> CalculateInstruction(BigInteger wholeOpcode)
{
var stringOpcode = wholeOpcode.ToString();
for (int i = 4; i >= 0; i--)
{
if (stringOpcode.Length - 1 < i)
yield return 0;
else
yield return int.Parse(stringOpcode[stringOpcode.Length - (i + 1)].ToString());
}
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Numerics;
using System.Text.RegularExpressions;
namespace Day12
{
class Program
{
static void Main(string[] args)
{
var moons = new List<Moon>();
var lines = File.ReadAllLines("input.txt");
var regex = new Regex(@"^<x=(?<x>-?\d+), y=(?<y>-?\d+), z=(?<z>-?\d+)>");
foreach (var line in lines)
{
var match = regex.Match(line);
moons.Add(new Moon(int.Parse(match.Groups["x"].Value), int.Parse(match.Groups["y"].Value), int.Parse(match.Groups["z"].Value)));
}
for (int i = 0; i < 1000; i++)
{
foreach (var moon in moons)
moon.CalculateGravity(moons);
foreach (var moon in moons)
moon.ApplyVelocity();
}
int totalEnergy = 0;
foreach (var moon in moons)
totalEnergy += moon.CalculateTotalEnergy();
Console.WriteLine($"Total energy in the system after 1000 steps: {totalEnergy}");
// Part 2
foreach (var moon in moons)
moon.ResetToBasePosition();
long xPeriod = 0, yPeriod = 0, zPeriod = 0;
int numberOfSteps = 0;
while (true)
{
foreach (var moon in moons)
moon.CalculateGravity(moons);
foreach (var moon in moons)
moon.ApplyVelocity();
numberOfSteps++;
if (xPeriod == 0 && moons.All(x => x.Position.X == x.BasePosition.X) && moons.All(x => x.Velocity.X == 0))
xPeriod = numberOfSteps;
if (yPeriod == 0 && moons.All(x => x.Position.Y == x.BasePosition.Y) && moons.All(x => x.Velocity.Y == 0))
yPeriod = numberOfSteps;
if (zPeriod == 0 && moons.All(x => x.Position.Z == x.BasePosition.Z) && moons.All(x => x.Velocity.Z == 0))
zPeriod = numberOfSteps;
if (xPeriod != 0 && yPeriod != 0 && zPeriod != 0)
{
Console.WriteLine("Cycle period:");
Console.WriteLine(MathNet.Numerics.Euclid.LeastCommonMultiple(new[] { xPeriod, yPeriod, zPeriod }));
break;
}
}
}
static bool SaveCurrentStates(HashSet<((Vector3D, Vector3D), (Vector3D, Vector3D), (Vector3D, Vector3D), (Vector3D, Vector3D))> set, List<Moon> moons)
{
return set.Add(((moons[0].Position, moons[0].Velocity), (moons[1].Position, moons[1].Velocity), (moons[2].Position, moons[2].Velocity), (moons[3].Position, moons[3].Velocity)));
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace Day3
{
class Program
{
const int fieldSize = 50000;
const int startingCoordinate = fieldSize / 2;
static void Main(string[] args)
{
using (var sr = new StreamReader("input.txt"))
{
var lines = sr.ReadToEnd().Split('\n');
var firstPathCommands = lines[0].Split(',')
.Select(x => new Command(x, 0)).ToList();
var secondPathCommands = lines[1].Split(',')
.Select(x => new Command(x, 1)).ToList();
// Part 1
var field = new Field(fieldSize, fieldSize);
int currentX = startingCoordinate;
int currentY = startingCoordinate;
foreach(var command in firstPathCommands)
{
(currentX, currentY) = field.Apply(command, currentX, currentY);
}
currentX = startingCoordinate;
currentY = startingCoordinate;
foreach(var command in secondPathCommands)
{
(currentX, currentY) = field.Apply(command, currentX, currentY);
}
var crosses = field.Crosses;
crosses.Remove((startingCoordinate,startingCoordinate));
int minDistance = int.MaxValue;
foreach (var cross in crosses)
{
int distance = CalculateManhattanDistance((startingCoordinate, startingCoordinate), cross);
if (distance < minDistance)
minDistance = distance;
}
Console.WriteLine($"Distance to closest intersection = {minDistance}");
// Part 2
var firstPathSteps = CalculateNumberOfStepsToAllCrosses(firstPathCommands, crosses);
var secondPathSteps = CalculateNumberOfStepsToAllCrosses(secondPathCommands, crosses);
var zippedList = firstPathSteps.Zip(secondPathSteps, (x, y) => (x + y));
Console.WriteLine($"Number of steps = {zippedList.Min()}");
}
}
private static int CalculateManhattanDistance((int x, int y) startingPoint, (int x, int y) cross)
{
return Math.Abs(startingPoint.x - cross.x) + Math.Abs(startingPoint.y - cross.y);
}
private static List<int> CalculateNumberOfStepsToAllCrosses(List<Command> commands, List<(int,int)> crosses)
{
var numbersOfStepsInPath = new List<int>();
var field = new Field(fieldSize, fieldSize);
int crossNumber = 0;
foreach (var cross in crosses)
{
numbersOfStepsInPath.Add(0);
var startingPos = (startingCoordinate, startingCoordinate);
foreach(var command in commands)
{
var currentPos = field.Apply(command, startingPos.Item1, startingPos.Item2);
numbersOfStepsInPath[crossNumber] += command.Number;
if (IsBetween(startingPos, currentPos, cross))
{
numbersOfStepsInPath[crossNumber] -= Math.Abs((currentPos.x - cross.Item1) + (currentPos.y - cross.Item2));
break;
}
startingPos = currentPos;
}
crossNumber++;
}
return numbersOfStepsInPath;
static bool IsBetween((int x, int y) startingPos, (int x, int y) currentPos, (int x, int y) cross)
{
if (startingPos.y == cross.y)
{
if (Math.Abs(currentPos.x - startingPos.x) > Math.Abs(currentPos.x - cross.x))
return true;
}
else if (startingPos.x == cross.x)
{
if (Math.Abs(currentPos.y - startingPos.y) > Math.Abs(currentPos.y - cross.y))
return true;
}
return false;
}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace Day2
{
class Program
{
static void Main(string[] args)
{
using (var sr = new StreamReader("input.txt"))
{
var originalOpcodes = sr.ReadToEnd().Split(',').Select(x => int.Parse(x)).ToList();
// Part 1
int result = CalculateOpcodes(originalOpcodes, 12, 2);
Console.WriteLine($"Solution for part 1: {result}");
// Part 2
int numberToSearch = 19690720;
for (int noun = 0; noun < 100; noun++)
{
for (int verb = 0; verb < 100; verb++)
{
if (CalculateOpcodes(originalOpcodes, noun, verb) == numberToSearch)
{
Console.WriteLine($"Solution for part 2: {100 * noun + verb}");
break;
}
}
}
}
}
static int CalculateOpcodes(List<int> originalOpcodes, int noun, int verb)
{
var opCodes = new List<int>(originalOpcodes);
opCodes[1] = noun;
opCodes[2] = verb;
int currentOpcodePosition = 0;
while(true)
{
int currentOpcode = opCodes[currentOpcodePosition];
switch(currentOpcode)
{
case 1:
opCodes[opCodes[currentOpcodePosition + 3]] = opCodes[opCodes[currentOpcodePosition + 1]] + opCodes[opCodes[currentOpcodePosition + 2]];
currentOpcodePosition += 4;
break;
case 2:
opCodes[opCodes[currentOpcodePosition + 3]] = opCodes[opCodes[currentOpcodePosition + 1]] * opCodes[opCodes[currentOpcodePosition + 2]];
currentOpcodePosition += 4;
break;
case 99:
return opCodes[0];
}
}
}
}
}
<file_sep>using System;
using System.IO;
using System.Linq;
namespace Day1
{
class Program
{
static void Main(string[] args)
{
using (var sr = new StreamReader("input.txt"))
{
var moduleMasses = sr.ReadToEnd().Split('\n').Select(x => int.Parse(x)).ToList();
// Part 1
int fuelSum = 0;
foreach (var mass in moduleMasses)
{
fuelSum += CalculateFuelForModule(mass);
}
Console.WriteLine($"Solution for part 1: {fuelSum}");
// Part 2
fuelSum = 0;
foreach (var mass in moduleMasses)
{
var moduleFuel = CalculateFuelForModule(mass);
while (moduleFuel >= 0)
{
fuelSum += moduleFuel;
moduleFuel = CalculateFuelForModule(moduleFuel);
}
}
Console.WriteLine($"Solution for part 2: {fuelSum}");
}
}
public static int CalculateFuelForModule(int moduleMass) => moduleMass / 3 - 2;
}
}
| 042a950235e85bcc11d2048c80b7563eb17a0a3b | [
"C#"
] | 22 | C# | kswider/AdventOfCode2019 | ccaf03309f249a1b4e0101f72fde30deaf3067d4 | a325a2e69bbc92cb67b6cad33395b4a172973119 | |
refs/heads/main | <file_sep>Installation instructions
1. Download and install the Python 3.6 version of Anaconda.
https://www.anaconda.com/download/
2. Create the Anaconda environment and activate it.
```
$ conda env create -f environment.yml
$ source activate collab_design
```
3. Download the trained lamem model.
http://memorability.csail.mit.edu/memnet.tar.gz
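For example, from a terminal (any download tool works; `wget` is just one option):
```
$ wget http://memorability.csail.mit.edu/memnet.tar.gz
```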
4. Unzip and untar the model in the current directory.
```
$ gunzip memnet.tar.gz
$ tar xvf memnet.tar
```
You should have a directory called memnet alongside this file.
5. Launch ipython and run the demo code.
```
$ ipython
In[1]: %run prototype.py
In[2]: demo('hats')
```
Look in the data directory for the results.
<file_sep>from __future__ import print_function
import os
import datetime
import re
import time
from PIL import Image
import numpy as np
import requests
import caffe
import haiku_client
OUTPUT_DIR = 'data'
# locate important imagenet data file installed with caffe
IMAGENET_DATA_FILE = os.path.join(
os.path.split(caffe.__file__)[0], 'imagenet', 'ilsvrc_2012_mean.npy')
def get_img_urls(tag, limit):
url = 'https://www.instagram.com/explore/tags/{0}/'.format(tag)
response = requests.get(url)
# poorly written regex for parsing out the urls
image_urls = re.findall(r"https://scontent[^x]*?jpg", response.text)
image_urls = np.random.choice(image_urls, limit, replace=False)
return image_urls
def download_images(image_urls):
now = datetime.datetime.now()
output_dir = os.path.join(OUTPUT_DIR, now.strftime('%Y%m%d_%H%M%S'))
os.makedirs(output_dir)
img_data = list(enumerate(image_urls))
for num, url in img_data:
print('downloading {0}'.format(url))
response = requests.get(url)
with open(os.path.join(output_dir, '{}.jpg'.format(num)), 'wb') as f:
f.write(response.content)
time.sleep(1 + np.random.normal(0, 1)**2)
return output_dir, img_data
def evaluate_images(img_dir, img_data):
model_def = 'memnet/deploy.prototxt'
model_weights = 'memnet/memnet.caffemodel'
net = caffe.Net(model_def, model_weights, caffe.TEST)
mu = np.load(IMAGENET_DATA_FILE).mean(1).mean(1)
# create transformer for the input called 'data'
transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
transformer.set_transpose('data', (2, 0, 1))
transformer.set_mean('data', mu)
transformer.set_raw_scale('data', 255)
transformer.set_channel_swap('data', (2, 1, 0))
net.blobs['data'].reshape(len(img_data), 3, 227, 227)
for num, url in img_data:
img = caffe.io.load_image(
os.path.join(img_dir, '{0}.jpg'.format(num)))
transformed_img = transformer.preprocess('data', img)
net.blobs['data'].data[num, ...] = transformed_img
nn_values = list(net.forward().values())[0]
result = [(a[0], a[1], b[0]) for a, b in zip(img_data, nn_values)]
return result
def make_html(img_dir, data, best_limit):
data = sorted(data, key=lambda x: x[2], reverse=True)[:best_limit]
body = ''.join(['<div>{1}<img src="{0}.jpg"></div>'.format(i, r)
for i, _, r in data])
html = "<html><head></head><body>{0}</body></html>".format(body)
with open(os.path.join(img_dir, 'out.html'), 'w') as f:
f.write(html)
def make_haiku_polaroid(img_dir, data):
best_image = sorted(data, key=lambda x: x[2], reverse=True)[0]
input_img = os.path.join(img_dir, str(best_image[0]) + '.jpg')
output_img = os.path.join(img_dir, 'haiku.jpg')
print("Making haiku for instagram image.")
print("Most memorable instagram image:", input_img)
print("Haiku image:", output_img)
haiku_client.send_recv_img(input_img, output_img)
def show_top_images(img_dir, data, num=3):
top_images = sorted(data, key=lambda x: x[2], reverse=True)[:num]
for img in top_images:
Image.open(os.path.join(img_dir, f'{img[0]}.jpg')).show()
def demo(tag, limit=10):
image_urls = get_img_urls(tag, limit)
output_dir, img_data = download_images(image_urls)
data = evaluate_images(output_dir, img_data)
show_top_images(output_dir, data)
# make_html(output_dir, data, 5)
# make_haiku_polaroid(output_dir, data)
| df36bb5b5b03be6f8f64e860c2b63e7e0e17288b | [
"Markdown",
"Python"
] | 2 | Markdown | hx2A/itp_collaborative_design_class | 6d406bc9cc3d2f8b97ec59295c309bc5d06f29ca | 3a30a21ae9555a6616eaace76ac29753d9be78cb | |
refs/heads/master | <repo_name>hitjedi/Oil<file_sep>/src/name/babkov/oilproject/Pipe.java
package name.babkov.oilproject;
import java.util.ArrayList;
import java.util.List;
import name.babkov.oilproject.event.PipeEvent;
import name.babkov.oilproject.listener.PipeListener;
public class Pipe {
private final double thresh;
private double volume;
private List<PipeListener> listOfPipeListener;
public Pipe(double thresh) {
this.volume = 0;
this.thresh = thresh;
this.listOfPipeListener = new ArrayList<PipeListener>();
}
public double getVolume() {
return volume;
}
public synchronized void addPipeListener(PipeListener listener){
listOfPipeListener.add(listener);
}
public synchronized void removePipeListener(PipeListener listener){
listOfPipeListener.remove(listener);
}
public synchronized void addOilToPipe(double volume){
if(this.volume != thresh){
if(this.volume+volume>thresh){
this.volume = thresh;
firePipeEvent();
}
else{
this.volume+= volume;
System.out.println("Volume in pipe = " + String.format("%6.2f",this.volume));
}
}
}
private void firePipeEvent(){
PipeEvent event = new PipeEvent(this);
for(PipeListener listener:listOfPipeListener){
listener.pipeFulled(event);
}
}
}
<file_sep>/src/name/babkov/oilproject/listener/InstallationEngineListener.java
package name.babkov.oilproject.listener;
import java.util.EventObject;
public interface InstallationEngineListener {
void failureOccured(EventObject event);
}
<file_sep>/src/name/babkov/oilproject/InstallationEngine.java
package name.babkov.oilproject;
import name.babkov.oilproject.event.InstallationEngineEvent;
import name.babkov.oilproject.listener.InstallationEngineListener;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Random;
public class InstallationEngine extends Thread{
private final int interval = 500;
private Installation installation;
private Pipe pipe;
private List<InstallationEngineListener> listOfInstallationEngineListener;
private Date failureDate;
public InstallationEngine(Installation installation, Pipe pipe) {
this.installation = installation;
this.pipe = pipe;
this.listOfInstallationEngineListener = new ArrayList<InstallationEngineListener>();
}
public Installation getInstallation() {
return installation;
}
public synchronized void addInstallationEngineListener(InstallationEngineListener listener){
listOfInstallationEngineListener.add(listener);
}
public synchronized void removeInstallationEngineListener(InstallationEngineListener listener){
listOfInstallationEngineListener.remove(listener);
}
@Override
public void run() {
boolean isWorked = true;
Random random = new Random();
while(isWorked && !Thread.interrupted()){
double volume = installation.produceVolume();
System.out.println(
installation.getName() +
" produced " +
String.format("%6.2f",installation.getFullVolume())
+ " of oil");
pipe.addOilToPipe(volume);
try {
Thread.sleep(interval);
} catch (InterruptedException ex) {
break;
}
if(random.nextInt(20)==10){
fireFailure();
isWorked = false;
}
}
}
public boolean isFailure()
{
return failureDate != null;
}
public Date getFailureDate() {
return failureDate;
}
private void fireFailure(){
failureDate = new Date();
InstallationEngineEvent event = new InstallationEngineEvent(this);
for(InstallationEngineListener listener: listOfInstallationEngineListener){
listener.failureOccured(event);
}
}
}
<file_sep>/src/name/babkov/oilproject/Installation.java
package name.babkov.oilproject;
public class Installation {
private double volume;
private double distance;
private String name;
private double fullVolume;
public Installation(double volume, double distance, String name) {
this.volume = volume;
this.distance = distance;
this.name = name;
fullVolume = 0;
}
public double getDistance() {
return distance;
}
public String getName() {
return name;
}
public double getFullVolume() {
return fullVolume;
}
public double produceVolume() {
fullVolume+=volume;
return volume;
}
}
<file_sep>/src/name/babkov/oilproject/OilProject.java
package name.babkov.oilproject;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
public class OilProject {
private final int numOfInstallation = 5;
private final int numOfWorkInstallation = 3;
private final int volumeInterval = 50;
private final int distanceInterval = 10;
private List<Installation> listOfInstallation;
private List<InstallationEngine> listOfInstallationEngine;
private Dispatcher dispatcher;
private Pipe pipe;
public static void main(String[] args) {
new OilProject().Start();
}
public void Start(){
Random random = new Random();
listOfInstallation = new ArrayList<Installation>(numOfInstallation);
listOfInstallationEngine = new ArrayList<InstallationEngine>(numOfWorkInstallation);
for(int i = 0;i<numOfInstallation;i++){
listOfInstallation.add(new Installation(random.nextDouble()*volumeInterval,
random.nextDouble()*distanceInterval,
"Installation #"+Integer.toString(i)));
}
pipe = new Pipe(500);
dispatcher = new Dispatcher(listOfInstallation, numOfWorkInstallation, pipe);
dispatcher.start();
}
}
| d39ad3a9170b75187c5d6f3f50d88dc2dc641b9e | [
"Java"
] | 5 | Java | hitjedi/Oil | 85580bf248b9a903bef11f9978ef0f3fcf991ff7 | 53cc834396b0724742ded4305e52e359e646cdfa | |
refs/heads/main | <repo_name>75asa/deno-github-contributions-api<file_sep>/color_scheme.ts
import { hexStrToHexNum } from "./utils.ts";
import {
CONTRIBUTION_LEVELS,
ContributionLevelName,
isValidContributionLevelName,
} from "./contributions.ts";
const COLOR_SCHEMES = {
// by [williambelle/github-contribution-color-graph](https://github.com/williambelle/github-contribution-color-graph)
github: ["#eeeeee", "#9be9a8", "#40c463", "#30a14e", "#216e39"],
halloween: ["#eeeeee", "#fdf156", "#ffc722", "#ff9711", "#04001b"],
amber: ["#eeeeee", "#ffecb3", "#ffd54f", "#ffb300", "#ff6f00"],
blue: ["#eeeeee", "#bbdefb", "#64b5f6", "#1e88e5", "#0d47a1"],
bluegrey: ["#eeeeee", "#cfd8dc", "#90a4ae", "#546e7a", "#263238"],
brown: ["#eeeeee", "#d7ccc8", "#a1887f", "#6d4c41", "#3e2723"],
cyan: ["#eeeeee", "#b2ebf2", "#4dd0e1", "#00acc1", "#006064"],
deeporange: ["#eeeeee", "#ffccbc", "#ff8a65", "#f4511e", "#bf360c"],
deeppurple: ["#eeeeee", "#d1c4e9", "#9575cd", "#5e35b1", "#311b92"],
green: ["#eeeeee", "#c8e6c9", "#81c784", "#43a047", "#1b5e20"],
grey: ["#eeeeee", "#e0e0e0", "#9e9e9e", "#616161", "#212121"],
indigo: ["#eeeeee", "#c5cae9", "#7986cb", "#3949ab", "#1a237e"],
lightblue: ["#eeeeee", "#b3e5fc", "#4fc3f7", "#039be5", "#01579b"],
lightgreen: ["#eeeeee", "#dcedc8", "#aed581", "#7cb342", "#33691e"],
lime: ["#eeeeee", "#f0f4c3", "#dce775", "#c0ca33", "#827717"],
orange: ["#eeeeee", "#ffe0b2", "#ffb74d", "#fb8c00", "#e65100"],
pink: ["#eeeeee", "#f8bbd0", "#f06292", "#e91e63", "#880e4f"],
purple: ["#eeeeee", "#e1bee7", "#ba68c8", "#8e24aa", "#4a148c"],
red: ["#eeeeee", "#ffcdd2", "#e57373", "#e53935", "#b71c1c"],
teal: ["#eeeeee", "#b2dfdb", "#4db6ac", "#00897b", "#004d40"],
yellowMd: ["#eeeeee", "#fff9c4", "#fff176", "#ffd835", "#f57f17"],
unicorn: ["#eeeeee", "#6dc5fb", "#f6f68c", "#8affa4", "#f283d1"],
summer: ["#eeeeee", "#eae374", "#f9d62e", "#fc913a", "#ff4e50"],
sunset: ["#eeeeee", "#fed800", "#ff6f01", "#fd2f24", "#811d5e"],
moon: ["#eeeeee", "#6bcdff", "#00a1f3", "#48009a", "#4f2266"],
psychedelic: ["#eeeeee", "#faafe1", "#fb6dcc", "#fa3fbc", "#ff00ab"],
yellow: ["#eeeeee", "#d7d7a2", "#d4d462", "#e0e03f", "#ffff00"],
// by kawarimidoll
gameboy: ["#eeeeee", "#ccdc5f", "#91a633", "#606520", "#2c370b"],
};
type ColorSchemeName = keyof typeof COLOR_SCHEMES;
const isValidColorSchemeName = (name?: string): name is ColorSchemeName =>
!!name && Object.hasOwn(COLOR_SCHEMES, name);
const randomColorScheme = () => {
const values = Object.values(COLOR_SCHEMES);
return values[(Math.random() * values.length) << 0];
};
const getColorScheme = (name = "github") => {
if (name != "random" && !isValidColorSchemeName(name)) {
throw new Error(
`'${name}' is invalid color scheme name! Choose from: ${
Object.keys(COLOR_SCHEMES)
},random`,
);
}
const hexStrColors = name === "random"
? randomColorScheme()
: COLOR_SCHEMES[name];
const hexNumColors = hexStrColors.map((color) => hexStrToHexNum(color));
const getByLevel = (levelName?: ContributionLevelName) =>
hexNumColors[
isValidContributionLevelName(levelName)
? CONTRIBUTION_LEVELS[levelName]
: 0
];
return { hexStrColors, hexNumColors, getByLevel };
};
export { COLOR_SCHEMES, getColorScheme, isValidColorSchemeName };
export type { ColorSchemeName };
<file_sep>/health_check.ts
import { ky } from "./deps.ts";
const prefixUrl = "https://github-contributions-api.deno.dev";
const k = ky.create({ prefixUrl });
try {
console.log("Root");
await k("").text();
console.log("User");
await k("kawarimidoll").text();
console.log("Text");
await k("kawarimidoll.text").text();
console.log("Json");
await k("kawarimidoll.json").text();
console.log("Term");
await k("kawarimidoll.term").text();
console.log("Svg");
await k("kawarimidoll.svg").text();
console.log("Parameters");
await k("kawarimidoll.svg", {
searchParams: {
scheme: "random",
"no-total": true,
bg: "123abc",
},
}).text();
console.log("System all green!");
} catch (error) {
console.error(`${error}`);
Deno.exit(1);
}
<file_sep>/utils_test.ts
import { assertEquals } from "./deps.ts";
import {
confirmHex,
convertToSixChars,
hexStrToHexNum,
hexStrToRgbObj,
} from "./utils.ts";
Deno.test("confirmHex", () => {
assertEquals(confirmHex("#123456"), "#123456");
assertEquals(confirmHex("123456"), "123456");
assertEquals(confirmHex("#12A"), "#12A");
assertEquals(confirmHex("12A"), "12A");
assertEquals(confirmHex("#12345"), "eee");
assertEquals(confirmHex("12345"), "eee");
assertEquals(confirmHex("#12"), "eee");
assertEquals(confirmHex("12"), "eee");
assertEquals(confirmHex("hex"), "eee");
});
Deno.test("convertToSixChars", () => {
assertEquals(convertToSixChars("#123456"), "123456");
assertEquals(convertToSixChars("123456"), "123456");
assertEquals(convertToSixChars("#12A"), "1122AA");
assertEquals(convertToSixChars("12A"), "1122AA");
assertEquals(convertToSixChars("12"), "eeeeee");
assertEquals(convertToSixChars("hex"), "eeeeee");
});
Deno.test("hexStrToRgbObj", () => {
assertEquals(hexStrToRgbObj("#123456"), { r: 18, g: 52, b: 86 });
assertEquals(hexStrToRgbObj("123456"), { r: 18, g: 52, b: 86 });
assertEquals(hexStrToRgbObj("#12A"), { r: 17, g: 34, b: 170 });
assertEquals(hexStrToRgbObj("12A"), { r: 17, g: 34, b: 170 });
assertEquals(hexStrToRgbObj("12"), { r: 238, g: 238, b: 238 });
assertEquals(hexStrToRgbObj("hex"), { r: 238, g: 238, b: 238 });
});
Deno.test("hexStrToHexNum", () => {
assertEquals(hexStrToHexNum("#123456"), 0x123456);
assertEquals(hexStrToHexNum("123456"), 0x123456);
assertEquals(hexStrToHexNum("#12A"), 0x1122aa);
assertEquals(hexStrToHexNum("12A"), 0x1122aa);
});
<file_sep>/mod.ts
// constants and functions
export {
CONTRIBUTION_LEVELS,
getContributions,
isValidContributionLevelName,
} from "./contributions.ts";
export { COLOR_SCHEMES, getColorScheme } from "./color_scheme.ts";
export {
confirmHex,
convertToSixChars,
hexStrToHexNum,
hexStrToRgbObj,
} from "./utils.ts";
// types and interfaces
export type {
ContributionDay,
ContributionLevelName,
} from "./contributions.ts";
export type { ColorSchemeName } from "./color_scheme.ts";
<file_sep>/utils.ts
const defaultPixelColor = "eee";
const confirmHex = (str: string, defaultColor = defaultPixelColor) =>
/^#?([0-9a-f]{3}){1,2}$/i.test(str) ? str : defaultColor;
const convertToSixChars = (str: string, defaultColor = defaultPixelColor) =>
confirmHex(str, defaultColor).replace(
/^#?(.*)$/,
(_, hex) => (hex.length == 3) ? hex.replace(/./g, "$&$&") : hex,
);
const hexStrToRgbObj = (color: string, defaultColor = defaultPixelColor) =>
Object.fromEntries(
(convertToSixChars(color || defaultColor).match(/../g) ?? []).map((
c,
i,
) => ["rgb".charAt(i), parseInt("0x" + c)]),
);
const hexStrToHexNum = (color: string, defaultColor = defaultPixelColor) =>
parseInt("0x" + convertToSixChars(color || defaultColor));
export { confirmHex, convertToSixChars, hexStrToHexNum, hexStrToRgbObj };
<file_sep>/README.md
# deno-github-contributions-api
[](.github/workflows/ci.yml)
[](https://github-contributions-api.deno.dev)
[](https://deno.land)
[](https://velociraptor.run)
[](LICENSE)
Get your GitHub contributions data powered by Deno!

## Usage
### as API
In your terminal:
```
$ curl https://github-contributions-api.deno.dev
# Then follow the messages...
```
Of course, you can access the endpoint from the web browser:
https://github-contributions-api.deno.dev
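For example, an extension selects the output format and query parameters tweak the result (placeholder username shown; the root endpoint prints the full list of options):
```
$ curl "https://github-contributions-api.deno.dev/your-github-username.term?scheme=random"
$ curl "https://github-contributions-api.deno.dev/your-github-username.svg?scheme=unicorn&no-total=true" -o contributions.svg
```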
### as deno module
In your deno script file:
```ts
import { getContributions } from "https://github.com/kawarimidoll/deno-github-contributions-api/raw/main/mod.ts";
const username = "your-github-username";
const token = "<KEY>";
const contributions = await getContributions(username, token);
console.log(contributions.toTerm({ scheme: "random" }));
```
You can see an example in
[main.ts](https://github.com/kawarimidoll/deno-github-contributions-api/blob/main/main.ts)
A personal access token with the "read:user" scope is required.
Generate your token from this page: https://github.com/settings/tokens/new
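A minimal local run of the example script could look like this, assuming the token is exported as `GH_READ_USER_TOKEN` (the variable `main.ts` reads); the exact permission flags are an assumption:
```
$ export GH_READ_USER_TOKEN=<KEY>
$ deno run --allow-net --allow-env main.ts
```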
## Extra
If you are using [GitHub CLI](https://github.com/cli/cli), you can call this API
from [gh-graph](https://github.com/kawarimidoll/gh-graph).
<!-- ## TODO -->
<!-- - add more tests -->
<!-- - add Month/Day names -->
---
```ts
if (this.repo.isAwesome || this.repo.isHelpful) {
star(this.repo);
}
```
<!-- this part is inspired by https://github.com/bhumijgupta/Deno-news-cli -->
<file_sep>/color_scheme_test.ts
import { assert, assertEquals, assertThrows } from "./deps.ts";
import {
COLOR_SCHEMES,
getColorScheme,
isValidColorSchemeName,
} from "./color_scheme.ts";
Deno.test("getColorScheme", () => {
const correctScheme = ["#eeeeee", "#9be9a8", "#40c463", "#30a14e", "#216e39"];
const scheme1 = getColorScheme("github");
assertEquals(scheme1.hexStrColors, correctScheme);
const scheme2 = getColorScheme();
assertEquals(scheme2.hexStrColors, correctScheme);
assertEquals(scheme2.getByLevel("NONE"), 0xeeeeee);
assertEquals(scheme2.getByLevel("FIRST_QUARTILE"), 0x9be9a8);
assertEquals(scheme2.getByLevel(), 0xeeeeee);
const scheme3 = getColorScheme("random");
assert(Object.values(COLOR_SCHEMES).includes(scheme3.hexStrColors));
assertThrows(() => {
getColorScheme("123456");
});
});
Deno.test("isValidColorSchemeName", () => {
assert(isValidColorSchemeName("github"));
assert(isValidColorSchemeName("unicorn"));
assert(!isValidColorSchemeName(""));
assert(!isValidColorSchemeName("nothub"));
});
<file_sep>/contributions.ts
import { getColorScheme } from "./color_scheme.ts";
import { bgRgb24, h, ky, rgb24, stringWidth } from "./deps.ts";
import { confirmHex, convertToSixChars } from "./utils.ts";
interface ContributionDay {
contributionCount: number;
contributionLevel: ContributionLevelName;
date: string;
color: string;
}
const CONTRIBUTION_LEVELS = {
NONE: 0,
FIRST_QUARTILE: 1,
SECOND_QUARTILE: 2,
THIRD_QUARTILE: 3,
FOURTH_QUARTILE: 4,
};
type ContributionLevelName = keyof typeof CONTRIBUTION_LEVELS;
const isValidContributionLevelName = (
name?: string,
): name is ContributionLevelName =>
!!name && Object.hasOwn(CONTRIBUTION_LEVELS, name);
const getContributionCalendar = async (
userName: string,
token: string,
) => {
if (!userName || !token) {
throw new Error("Missing required arguments");
}
const query = `
query($userName:String!) {
user(login: $userName){
contributionsCollection {
contributionCalendar {
totalContributions
weeks {
contributionDays {
color
contributionCount
contributionLevel
date
}
}
}
}
}
}
`;
const variables = `
{
"userName": "${userName}"
}
`;
const json = { query, variables };
const url = "https://api.github.com/graphql";
const { data } = await ky.post(url, {
headers: { Authorization: `Bearer ${token}` },
json,
}).json();
const contributionCalendar = data?.user?.contributionsCollection
?.contributionCalendar;
if (
!contributionCalendar || !Object.hasOwn(contributionCalendar, "weeks") ||
!Object.hasOwn(contributionCalendar, "totalContributions")
) {
throw new Error("Could not get contributions data");
}
const { weeks, totalContributions }: {
weeks: { contributionDays: ContributionDay[] }[];
totalContributions: number;
} = contributionCalendar;
const contributions = weeks.map((week) => week.contributionDays);
return { contributions, totalContributions };
};
const totalMsg = (totalNum: number): string =>
totalNum + " contributions in the last year\n";
const moreContributionDay = (a: ContributionDay, b: ContributionDay) =>
a.contributionCount > b.contributionCount ? a : b;
const getMaxContributionDay = (
contributions: ContributionDay[][],
): ContributionDay =>
contributions.reduce(
(max, week) =>
moreContributionDay(
max,
week.reduce(
(maxInWeek, current) => moreContributionDay(maxInWeek, current),
week[0],
),
),
contributions[0][0],
);
const contributionsToJson = (
contributions: ContributionDay[][],
totalContributions: number,
{
flat = false,
} = {},
) =>
JSON.stringify({
contributions: flat ? contributions.flat() : contributions,
totalContributions,
});
const contributionsToTerm = (
contributions: ContributionDay[][],
totalContributions: number,
{
noTotal = false,
noLegend = false,
scheme = "github",
pixel = "■",
invert = false,
} = {},
) => {
const pixelWidth = stringWidth(pixel);
if (pixelWidth > 2) {
// width == 2 is ok
// like as "[]", "草", " "
throw new Error(`Pixel '${pixel}' is too long. Max width of pixel is 2.`);
}
const colorScheme = getColorScheme(scheme);
const total = !noTotal ? totalMsg(totalContributions) : "";
// 10 is length of 'Less More'
// 5 is count of colored pixels as legend
const legendOffset = " ".repeat(
(contributions.length - 5) * pixelWidth - 10,
);
const legend = !noLegend
? legendOffset +
"Less " + colorScheme.hexNumColors.map((color) =>
invert ? bgRgb24(pixel, color) : rgb24(pixel, color)
).join("") + " More\n"
: "";
const grass = (day?: ContributionDay) =>
day?.contributionLevel
? invert
? bgRgb24(pixel, colorScheme.getByLevel(day?.contributionLevel))
: rgb24(pixel, colorScheme.getByLevel(day?.contributionLevel))
: "";
return total +
contributions[0].reduce(
(acc, _, i) =>
acc + contributions.map((row) => grass(row[i])).join("") + "\n",
"",
) + legend;
};
const contributionsToText = (
contributions: ContributionDay[][],
totalContributions: number,
maxContributionDay: ContributionDay,
{
noTotal = false,
} = {},
) => {
const total = !noTotal ? totalMsg(totalContributions) : "";
const pad = String(maxContributionDay.contributionCount).length;
return total +
contributions[0].reduce(
(acc, _, i) =>
acc + contributions.map((row) =>
`${row[i]?.contributionCount ?? ""}`.padStart(pad)
).join(",") +
"\n",
"",
);
};
const contributionsToSvg = (
contributions: ContributionDay[][],
totalContributions: number,
{
noTotal = false,
noLegend = false,
scheme = "github",
fontColor = "000",
frame = "none",
bg = "none",
} = {},
): string => {
const svgID = "deno-github-contributions-graph";
const rectSize = 10;
const rectSpan = 3;
const rectRadius = 2;
const rectStep = rectSize + rectSpan;
const weekCounts = 53;
const dayCounts = 7;
const topPadding = noTotal ? 0 : 1;
const bottomPadding = noLegend ? 0 : 1;
const width = rectStep * (weekCounts + 2) - rectSpan;
const height = rectStep * (dayCounts + 2 + topPadding + bottomPadding) -
rectSpan;
const offset = { x: rectStep, y: rectStep * (topPadding + 1) };
// the left top position of the 5 pixels of legend
const legendPos = {
x: width - rectStep * 10 + rectSpan,
y: offset.y + rectStep * dayCounts + rectSpan,
};
const styles = `#${svgID} .pixel {
width: ${rectSize}px;
height: ${rectSize}px;
rx: ${rectRadius}px;
ry: ${rectRadius}px;
stroke: rgba(27,31,35,0.06);
stroke-width: 2px;
}
#${svgID} text {
font-family: monospace;
font-size: ${rectSize * 1.5}px;
fill: #${convertToSixChars(fontColor, "000")};
}
`;
try {
const colorScheme = getColorScheme(scheme);
const rect = (x: number, y: number, {
contributionLevel = "",
date = "",
contributionCount = 0,
}): string =>
contributionLevel == null ? "" : h("rect", {
class: `pixel ${contributionLevel}`,
x: x * rectStep,
y: y * rectStep,
"data-date": date,
"data-count": contributionCount,
}, h("title", `${date}: ${contributionCount}`));
frame = confirmHex(frame, "none");
const stroke = frame === "none" ? frame : "#" + convertToSixChars(frame);
bg = confirmHex(bg, "none");
const fill = bg === "none" ? bg : "#" + convertToSixChars(bg);
return h(
"svg",
{ width, height, xmlns: "http://www.w3.org/2000/svg", id: svgID },
h(
"style",
styles,
...Object.entries(CONTRIBUTION_LEVELS).map(([k, v]) =>
`#${svgID} .${k} { fill: ${colorScheme.hexStrColors[v]}; }`
),
),
h("rect", { width, height, stroke, "stroke-width": "2px", fill }),
noTotal ? "" : h(
"g",
h(
"text",
{ transform: `translate(${offset.x}, ${offset.y - rectSpan * 2})` },
totalMsg(totalContributions),
),
),
h(
"g",
{ transform: `translate(${offset.x}, ${offset.y})` },
contributions.map((column, i) =>
column.map((pixel, j) => rect(i, j, pixel)).join("")
).join(""),
),
noLegend ? "" : h(
"g",
{ transform: `translate(${legendPos.x}, ${legendPos.y})` },
h(
"text",
{
transform: `translate(-${rectStep * 1}, ${rectSize * 1})`,
"text-anchor": "end",
},
"Less",
),
Object.keys(CONTRIBUTION_LEVELS).map((levelName, idx) =>
rect(idx, 0, { contributionLevel: levelName })
).join(""),
h(
"text",
{
transform: `translate(${rectStep * 5 + rectSize}, ${rectSize * 1})`,
},
"More",
),
),
);
} catch (error) {
return h(
"svg",
{ width, height, xmlns: "http://www.w3.org/2000/svg", id: svgID },
h(
"text",
{ y: height },
`${error}`,
),
);
}
};
const getContributions = async (
userName: string,
token: string,
) => {
const { contributions, totalContributions } = await getContributionCalendar(
userName,
token,
);
const maxContributionDay = getMaxContributionDay(contributions);
const toJson = ({ flat = false } = {}) =>
contributionsToJson(contributions, totalContributions, { flat });
const toTerm = (
{
noTotal = false,
noLegend = false,
scheme = "github",
pixel = "■",
invert = false,
} = {},
) =>
contributionsToTerm(contributions, totalContributions, {
noTotal,
noLegend,
scheme,
pixel,
invert,
});
const toText = (
{
noTotal = false,
} = {},
) =>
contributionsToText(contributions, totalContributions, maxContributionDay, {
noTotal,
});
const toSvg = (
{
noTotal = false,
noLegend = false,
scheme = "github",
fontColor = "000",
frame = "none",
bg = "none",
} = {},
) =>
contributionsToSvg(contributions, totalContributions, {
noTotal,
noLegend,
scheme,
fontColor,
frame,
bg,
});
return {
contributions,
totalContributions,
maxContributionDay,
toJson,
toTerm,
toText,
toSvg,
};
};
export {
CONTRIBUTION_LEVELS,
contributionsToJson,
contributionsToSvg,
contributionsToTerm,
contributionsToText,
getContributionCalendar,
getContributions,
getMaxContributionDay,
isValidContributionLevelName,
moreContributionDay,
totalMsg,
};
export type { ContributionDay, ContributionLevelName };
<file_sep>/deps.ts
import ky from "https://cdn.skypack.dev/[email protected]?dts";
import testdouble from "https://esm.sh/[email protected]/dist/testdouble.js";
import stringWidth from "https://cdn.skypack.dev/[email protected]?dts";
import { bgRgb24, rgb24 } from "https://deno.land/[email protected]/fmt/colors.ts";
import {
assert,
assertEquals,
assertRejects,
assertThrows,
} from "https://deno.land/[email protected]/testing/asserts.ts";
import { Env } from "https://deno.land/x/[email protected]/env.js";
const env = new Env();
import { tag as h } from "https://deno.land/x/[email protected]/mod.ts";
export {
assert,
assertEquals,
assertRejects,
assertThrows,
bgRgb24,
env,
h,
ky,
rgb24,
stringWidth,
testdouble,
};
<file_sep>/main.ts
import { getContributions } from "./contributions.ts";
import { env } from "./deps.ts";
const username = "kawarimidoll";
const token = env.require("GH_READ_USER_TOKEN");
const contributions = await getContributions(username, token);
// console.log(contributions.toJson());
console.log(contributions.toTerm({ scheme: "random" }));
// console.log(contributions.toText());
// console.log(contributions.toTerm({ invert: true, pixel: " " }));
<file_sep>/contributions_test.ts
import {
assert,
assertEquals,
assertRejects,
assertThrows,
ky,
testdouble,
} from "./deps.ts";
import {
ContributionDay,
contributionsToJson,
contributionsToSvg,
contributionsToTerm,
contributionsToText,
getContributionCalendar,
getContributions,
getMaxContributionDay,
isValidContributionLevelName,
moreContributionDay,
totalMsg,
} from "./contributions.ts";
const {
contributions,
totalContributions,
}: {
contributions: ContributionDay[][];
totalContributions: number;
} = JSON.parse(
await Deno.readTextFile("./resources/tests/example_contributions.json"),
);
const weeks = contributions.map((week) => ({ contributionDays: week }));
const max: ContributionDay = {
contributionCount: 32,
contributionLevel: "FOURTH_QUARTILE",
date: "2021-03-22",
color: "#216e39",
};
Deno.test("contributionsToJson", () => {
assertEquals(
contributionsToJson(contributions, totalContributions),
JSON.stringify({ contributions, totalContributions }),
);
assertEquals(
contributionsToJson(contributions, totalContributions, { flat: true }),
JSON.stringify({ contributions: contributions.flat(), totalContributions }),
);
});
Deno.test("contributionsToSvg", async () => {
const resultToSvg = await Deno.readTextFile(
"./resources/tests/to_svg.svg",
);
const resultToSvgWithParams = await Deno.readTextFile(
"./resources/tests/to_svg_bg_font_frame_scheme.svg",
);
assertEquals(
contributionsToSvg(contributions, totalContributions),
resultToSvg,
);
assertEquals(
contributionsToSvg(contributions, totalContributions, {
bg: "786688",
fontColor: "#d7f07b",
frame: "#f03153",
scheme: "amber",
}),
resultToSvgWithParams,
);
});
Deno.test("contributionsToTerm", async () => {
const resultToTerm = await Deno.readTextFile(
"./resources/tests/to_term_github.text",
);
const resultToTermUnicorn = await Deno.readTextFile(
"./resources/tests/to_term_unicorn.text",
);
const resultToTermNoTotal = await Deno.readTextFile(
"./resources/tests/to_term_no_total.text",
);
const resultToTermNoLegend = await Deno.readTextFile(
"./resources/tests/to_term_no_legend.text",
);
const resultToTermPixelX = await Deno.readTextFile(
"./resources/tests/to_term_pixel_x.text",
);
const resultToTermInvert = await Deno.readTextFile(
"./resources/tests/to_term_invert.text",
);
assertEquals(
contributionsToTerm(contributions, totalContributions),
resultToTerm,
);
assertEquals(
contributionsToTerm(contributions, totalContributions, {
noTotal: false,
noLegend: false,
scheme: "github",
pixel: "■",
invert: false,
}),
resultToTerm,
);
assertEquals(
contributionsToTerm(contributions, totalContributions, {
scheme: "unicorn",
}),
resultToTermUnicorn,
);
assertEquals(
contributionsToTerm(contributions, totalContributions, { noTotal: true }),
resultToTermNoTotal,
);
assertEquals(
contributionsToTerm(contributions, totalContributions, { noLegend: true }),
resultToTermNoLegend,
);
assertEquals(
contributionsToTerm(contributions, totalContributions, { pixel: "x" }),
resultToTermPixelX,
);
assertThrows(
() => {
contributionsToTerm(contributions, totalContributions, { pixel: "xxx" });
},
Error,
);
assertEquals(
contributionsToTerm(contributions, totalContributions, { invert: true }),
resultToTermInvert,
);
});
Deno.test("contributionsToText", async () => {
const resultToText = await Deno.readTextFile(
"./resources/tests/to_text.text",
);
const resultToTextNoTotal = await Deno.readTextFile(
"./resources/tests/to_text_no_total.text",
);
assertEquals(
contributionsToText(contributions, totalContributions, max),
resultToText,
);
assertEquals(
contributionsToText(contributions, totalContributions, max, {
noTotal: true,
}),
resultToTextNoTotal,
);
});
Deno.test("getContributions", async () => {
testdouble.replace(
ky,
"post",
(_: string) => ({
json: () => ({ data: null }),
}),
);
assertRejects(
() => {
return getContributions("a", "a");
},
Error,
"Could not get contributions data",
);
testdouble.replace(
ky,
"post",
() => ({
json: () => ({
data: {
user: {
contributionsCollection: {
contributionCalendar: {
weeks,
totalContributions,
},
},
},
},
}),
}),
);
const obj = await getContributions("a", "a");
assert(obj);
assertEquals(obj.contributions, contributions);
assertEquals(obj.totalContributions, totalContributions);
assertEquals(obj.maxContributionDay, max);
assert(obj.toJson());
assert(obj.toTerm());
assert(obj.toText());
});
Deno.test("getContributionCalendar", async () => {
assertRejects(
() => {
return getContributionCalendar("userName", "");
},
Error,
"Missing required arguments",
);
assertRejects(
() => {
return getContributionCalendar("", "token");
},
Error,
"Missing required arguments",
);
testdouble.replace(
ky,
"post",
(_: string) => ({
json: () => ({ data: null }),
}),
);
assertRejects(
() => {
return getContributionCalendar("a", "a");
},
Error,
"Could not get contributions data",
);
testdouble.replace(
ky,
"post",
() => ({
json: () => ({
data: {
user: {
contributionsCollection: {
contributionCalendar: {
// weeks: [{ contributionDays: [max] }],
weeks,
totalContributions,
},
},
},
},
}),
}),
);
assertEquals(
await getContributionCalendar("a", "a"),
{ contributions, totalContributions },
);
});
Deno.test("getMaxContributionDay", () => {
assertEquals(getMaxContributionDay(contributions), max);
});
Deno.test("isValidContributionLevelName", () => {
assert(isValidContributionLevelName("NONE"));
assert(isValidContributionLevelName("FIRST_QUARTILE"));
assert(isValidContributionLevelName("SECOND_QUARTILE"));
assert(isValidContributionLevelName("THIRD_QUARTILE"));
assert(isValidContributionLevelName("FOURTH_QUARTILE"));
assert(!isValidContributionLevelName(""));
assert(!isValidContributionLevelName("none"));
});
Deno.test("moreContributionDay", () => {
const a: ContributionDay = {
contributionCount: 10,
contributionLevel: "FIRST_QUARTILE",
date: "2000-01-01",
color: "#eeeeee",
};
const b: ContributionDay = {
contributionCount: 3,
contributionLevel: "FIRST_QUARTILE",
date: "2000-01-01",
color: "#eeeeee",
};
assertEquals(moreContributionDay(a, b), a);
});
Deno.test("totalMsg", () => {
assertEquals(totalMsg(10), "10 contributions in the last year\n");
});
<file_sep>/server.ts
/// <reference path="./deploy.d.ts" />
import { getContributions } from "./contributions.ts";
import { env } from "./deps.ts";
// cache one hour
const CACHE_MAX_AGE = 3600;
function getPathExtension(request: Request): string {
const { pathname } = new URL(request.url);
const split = pathname.split(".");
return split.length > 1 ? split[split.length - 1] : "";
}
async function handleRequest(request: Request) {
const { pathname, searchParams, host } = new URL(request.url);
if (pathname === "/") {
return [
"Welcome to deno-github-contributions-api!",
`Access to ${host}/[username] to get your contributions data.`,
].join("\n");
}
const paths = pathname.split("/");
if (paths.length > 2) {
throw new Error(
`'${request.url}' is invalid path. Access to ${host}/[username].`,
);
}
const username = paths[1].replace(/\..*$/, "");
const ext = getPathExtension(request);
const contributions = await getContributions(
username,
env.require("GH_READ_USER_TOKEN"),
);
const scheme = searchParams.get("scheme") ?? "github";
const pixel = searchParams.get("pixel") ?? undefined;
const noTotal = searchParams.get("no-total") == "true";
const noLegend = searchParams.get("no-legend") == "true";
const flat = searchParams.get("flat") == "true";
const invert = searchParams.get("invert") == "true";
const fontColor = searchParams.get("font-color") ?? "#000";
const frame = searchParams.get("frame") ?? "none";
const bg = searchParams.get("bg") ?? "none";
if (ext === "json") {
return contributions.toJson({ flat });
}
if (ext === "term") {
return contributions.toTerm({ scheme, pixel, noTotal, noLegend, invert });
}
if (ext === "text") {
return contributions.toText({ noTotal });
}
if (ext === "svg") {
return contributions.toSvg({
scheme,
noTotal,
noLegend,
frame,
bg,
fontColor,
});
}
return [
`${contributions.totalContributions} contributions in the last year.`,
"",
`Use extensions like as '${host}/${username}.text'.`,
" - .json : return data as a json",
" - .term : return data as a colored pixels graph (works in the terminal with true color)",
" - .text : return data as a table-styled text",
" - .svg : return data as a svg image",
"",
"You can use other parameters. Each of them works on specific extensions.",
" - no-total=true : remove total contributions count (term/text/svg)",
" - no-legend=true : remove legend (term/svg)",
" - invert=true : change the background colors instead of the foreground colors (term)",
" - flat=true : return contributions as one-dimensional array (json)",
" - scheme=[name] : use specific color scheme (term/svg)",
" - pixel=[char] : use the character as pixels, URL encoding is required (term)",
" - frame=[color] : use the color as a frame of image (svg)",
" - bg=[color] : use the color as a background of image (svg)",
" - font-color=[color] : use the color as a font color (svg)",
"",
"Color parameters allows hex color string without # like '123abc'.",
].join("\n");
}
addEventListener("fetch", async (event) => {
const ext = getPathExtension(event.request);
const type = {
json: "application/json",
svg: "image/svg+xml",
}[ext] || "text/plain";
const headers = {
"Content-Type": `${type}; charset=utf-8`,
"Cache-Control": `public, max-age=${CACHE_MAX_AGE}`,
};
try {
const body = await handleRequest(event.request);
event.respondWith(new Response(body, { headers }));
} catch (error) {
console.error(error);
const body = ext == "json"
? JSON.stringify({ error: `${error}` })
: `${error}`;
event.respondWith(
new Response(body, {
status: 400,
headers,
}),
);
}
});
| 87bb13c0863fe2af3cd85a056c121449a3e73fb8 | [
"Markdown",
"TypeScript"
] | 12 | TypeScript | 75asa/deno-github-contributions-api | 6e7521638bccb40fd76f72636f20416d1728d8c9 | 6c240125099e7127e3303fe1424bfeaac2a24dc2 | |
refs/heads/master | <file_sep># learn-to-calculate-in-mind-for-children
A training exercise for children to practice mental arithmetic (+ or -)
It is a small task that trains children to add and subtract numbers from 1 to 1000 in their head.
To start, you just need to download and run the script.
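For example, assuming the script has been saved as `quiz.py` (the file name here is only an assumption):
```
$ python3 quiz.py
```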
<file_sep># question function
def function(x1, x2, action, actions):
answer = actions.get(action)(x1,x2)
print('\nWhat is the result \n\n {} \n{} \n {}\n'.format(x1, action, x2))
return answer
# guess function
def askQuestion(x1, x2, action, actions):
answer = function(x1, x2, action, actions)
try:
guess = float(input(" "))
except ValueError:
print("\nOnly numbers to be entered!\n")
else:
return guess == answer
# Main function
def quiz():
print('Hello. Please answer 10 questions\n')
import random
import operator
actions = {'+':operator.add,'-':operator.sub}
score = 0
for i in range(10):
x2 = random.randint(0,1000)
x1 = random.randint(x2,1000)
action = random.choice(list(actions.keys()))
repeat = True
while repeat:
correct = askQuestion(x1, x2, action, actions)
if correct:
score += 1
print('\nWell done!\nCorrect answer!\n')
repeat = False
else:
print('\nWrong answer, Try once more!\n')
return 'You have answered correctly {}/10'.format(score)
# Start
print(quiz()) | 76e4662492dd7cc6f62a3b48d1fb1d0472baa88d | [
"Markdown",
"Python"
] | 2 | Markdown | Ebanchous/learn-to-calculate-in-mind-for-children | 6d95df59b54173807e06d37a095afeeae76cc1af | 3d6bb12799c9a3b5225797e22baabbf03fce8496 | |
refs/heads/master | <file_sep># First App
## Release notes
### 0.0.1-1
- First version of the application
### 0.1.0-2
- Added a button that opens an activity with information about beer types
### 0.2.1-1
- Added a button that opens an activity with a stopwatch
- Fixed how Ricardo is displayed together with the text
### 0.3.0-1
- Added a button that opens an activity with cards from the game "Berserk"
- Minor fixes
### x.x.x-x
- Coming soon...
## Downloads
- Version 0.0.1-1 - <a href="https://github.com/sk1ly/first_app/raw/master/apks/first_app_debug_0.0.1-1.apk" download>download</a>
- Version 0.1.0-2 - <a href="https://github.com/sk1ly/first_app/raw/master/apks/first_app_debug_0.1.0-2.apk" download>download</a>
- Version 0.2.1-1 - <a href="https://github.com/sk1ly/first_app/raw/master/apks/first_app_debug_0.2.1-1.apk" download>download</a>
- Version 0.3.0-1 - <a href="https://github.com/sk1ly/first_app/raw/master/apks/first_app_debug_0.3.0-1.apk" download>download</a>
- Coming soon...
<file_sep>package ru.sk1ly.firstapp;
/**
 * Encapsulates a card entity
*/
public class BerserkCard {
/**
 * A test set of cards
*/
public final static BerserkCard[] TEST_BERSERK_CARDS = {
new BerserkCard("Анкаб", 3, R.drawable.berserk_card_1),
new BerserkCard("Агатервол", 5, R.drawable.berserk_card_2),
new BerserkCard("Акванит", 3, R.drawable.berserk_card_3)
};
private final String name;
private final int cost;
private final int imageResourceId;
public BerserkCard(String name, int cost, int imageResourceId) {
this.name = name;
this.cost = cost;
this.imageResourceId = imageResourceId;
}
public String getName() {
return name;
}
public int getCost() {
return cost;
}
public int getImageResourceId() {
return imageResourceId;
}
}
<file_sep>package ru.sk1ly.firstapp;
import android.app.Activity;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Spinner;
import android.widget.TextView;
public class BeerInfoActivity extends Activity {
private Spinner mBeerGlobalTypeSpinner;
private Spinner mBeerSubtypeSpinner;
private TextView mBeerInfoTextOnPaper;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_beer_info);
mBeerGlobalTypeSpinner = findViewById(R.id.beer_global_type_spinner);
mBeerSubtypeSpinner = findViewById(R.id.beer_subtype_spinner);
mBeerInfoTextOnPaper = findViewById(R.id.beer_info_text_on_paper);
mBeerGlobalTypeSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parentView, View selectedItemView, int position, long id) {
String itemText = (String) ((TextView) selectedItemView).getText();
ArrayAdapter<CharSequence> adapter;
if (itemText.equals("Эль")) {
adapter = ArrayAdapter.createFromResource(getApplicationContext(), R.array.ale_beer_types, android.R.layout.simple_spinner_item);
} else if (itemText.equals("Лагер")) {
adapter = ArrayAdapter.createFromResource(getApplicationContext(), R.array.lager_beer_types, android.R.layout.simple_spinner_item);
} else {
adapter = ArrayAdapter.createFromResource(getApplicationContext(), R.array.mixed_beer_types, android.R.layout.simple_spinner_item);
}
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
mBeerSubtypeSpinner.setAdapter(adapter);
}
@Override
public void onNothingSelected(AdapterView<?> parentView) {
// nothing to do
}
});
}
public void onClickShowBeerInfoButton(View view) {
String selectedBeerGlobalType = String.valueOf(mBeerGlobalTypeSpinner.getSelectedItem());
String selectedBeerSubtype = String.valueOf(mBeerSubtypeSpinner.getSelectedItem());
if (selectedBeerGlobalType.equals("Эль")) {
setAleBeerInfo(selectedBeerSubtype);
} else if (selectedBeerGlobalType.equals("Лагер")) {
setLagerBeerInfo(selectedBeerSubtype);
} else {
setMixedBeerInfo(selectedBeerSubtype);
}
}
private void setAleBeerInfo(String selectedBeerSubtype) {
switch (selectedBeerSubtype) {
case "Пшеничное пиво":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_witbier);
break;
case "Берлинское белое":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_berliner_weisse);
break;
case "Блонд эль":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_belgian_blond_ale);
break;
case "Светлый эль":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_pale_ale);
break;
case "Кёльш":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_kolsch);
break;
case "Золотой эль":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_golden_ale);
break;
case "Трипель":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_tripel_ale);
break;
case "Индийский светлый эль":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_india_pale_ale);
break;
case "Старый эль":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_old_ale);
break;
case "Янтарный эль":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_amber_ale);
break;
case "Квадрупель":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_quadrupel);
break;
case "Мягкий эль":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_mild_ale);
break;
case "Старое коричневое":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_flanders_brown_ale);
break;
case "Коричневый эль":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_brown_ale);
break;
case "Портер":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_porter);
break;
case "Стаут":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_ale_stout);
break;
}
}
private void setLagerBeerInfo(String selectedBeerSubtype) {
switch (selectedBeerSubtype) {
case "Мюнхенское светлое":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_lager_munchner_helles_lager);
break;
case "Пильзнер":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_lager_pilsner);
break;
case "Экспорт (дортмундер)":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_lager_export);
break;
case "Венский лагер":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_lager_vienna_lager);
break;
case "Келлербир":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_lager_kellerbier);
break;
case "Бок":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_lager_bockbier);
break;
case "Темный лагер":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_lager_dark_lager);
break;
case "Черное пиво":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_lager_schwarzbier);
break;
}
}
private void setMixedBeerInfo(String selectedBeerSubtype) {
switch (selectedBeerSubtype) {
case "Сливочный эль":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_mixed_cream_ale);
break;
case "Ламбик":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_mixed_lambic);
break;
case "Мартовское пиво":
getStringResourceAndSetHisToBeerInfoTextView(R.string.beer_info_mixed_marzen);
break;
}
}
private void getStringResourceAndSetHisToBeerInfoTextView(int stringResource) {
mBeerInfoTextOnPaper.setText(getString(stringResource));
}
}
<file_sep>package ru.sk1ly.firstapp;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.EditText;
import android.widget.Toast;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import static ru.sk1ly.firstapp.Constants.*;
public class AuthorizeActivity extends AppCompatActivity {
public EditText mUserFirstName;
public EditText mUserSecondName;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_authorize);
mUserFirstName = findViewById(R.id.input_first_user_name);
mUserSecondName = findViewById(R.id.input_second_user_name);
}
public void onClickAuthorizeCompleteButton(View view) {
Intent answerIntent = new Intent();
if (mUserFirstName.getText().toString().isEmpty() ||
mUserSecondName.getText().toString().isEmpty()) {
Toast.makeText(getApplicationContext(),
"Необходимо заполнить два поля! Вы не были авторизованы", Toast.LENGTH_LONG).show();
setResult(RESULT_CANCELED, answerIntent);
} else {
answerIntent.putExtra(Keys.USER_FIRST_NAME_ANSWER, mUserFirstName.getText().toString());
answerIntent.putExtra(Keys.USER_SECOND_NAME_ANSWER, mUserSecondName.getText().toString());
setResult(RESULT_OK, answerIntent);
}
finish();
}
}
<file_sep>package ru.sk1ly.firstapp;
import android.content.Intent;
import android.content.res.Configuration;
import android.os.Bundle;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import static ru.sk1ly.firstapp.Constants.*;
public class MainActivity extends AppCompatActivity {
private static final int AUTHORIZE_USER = 0;
private static final String KEK_WORD = "Kek";
private static final String PORTRAIT_ORIENTATION_WORD_RUS = "портретная";
private static final String LANDSCAPE_ORIENTATION_WORD_RUS = "альбомная";
private static final String UNKNOWN_ORIENTATION_WORD_RUS = "неизвестная";
private TextView mUserFirstName;
private TextView mUserSecondName;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setTitle(R.string.activity_main_label);
mUserFirstName = findViewById(R.id.user_first_name);
mUserSecondName = findViewById(R.id.user_second_name);
if (savedInstanceState != null) {
mUserFirstName.setText(savedInstanceState.getCharSequence(Keys.USER_FIRST_NAME));
mUserSecondName.setText(savedInstanceState.getCharSequence(Keys.USER_SECOND_NAME));
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
public void onAuthorizeMenuClick(MenuItem item) {
Toast.makeText(getApplicationContext(), "Скоро тут будет авторизация!", Toast.LENGTH_SHORT).show();
}
public void onSettingsMenuClick(MenuItem item) {
Toast.makeText(getApplicationContext(), "Скоро тут будут настройки!", Toast.LENGTH_SHORT).show();
}
public void onAboutAppMenuClick(MenuItem item) {
Intent aboutAppIntent = new Intent(MainActivity.this, AboutActivity.class);
startActivity(aboutAppIntent);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == AUTHORIZE_USER && resultCode == RESULT_OK) {
mUserFirstName.setText(data.getStringExtra(Keys.USER_FIRST_NAME_ANSWER));
mUserSecondName.setText(data.getStringExtra(Keys.USER_SECOND_NAME_ANSWER));
		} // TODO AUTHORIZE CODE? And is it correct to use RESULT_CANCELED here?
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putCharSequence(Keys.USER_FIRST_NAME, mUserFirstName.getText());
outState.putCharSequence(Keys.USER_SECOND_NAME, mUserSecondName.getText());
}
public void onClickAuthorizeButton(View view) {
Intent authorizeIntent = new Intent(MainActivity.this, AuthorizeActivity.class);
startActivityForResult(authorizeIntent, AUTHORIZE_USER);
}
public void onClickSayKekButton(View view) {
Toast.makeText(getApplicationContext(), KEK_WORD, Toast.LENGTH_LONG).show();
}
public void onClickSayKekWithRicardoButton(View view) {
LayoutInflater inflater = getLayoutInflater();
View layout = inflater.inflate(R.layout.ricardo_with_kek_toast,
(ViewGroup) findViewById(R.id.ricardo_with_kek_toast_container));
TextView text = layout.findViewById(R.id.kek_word);
text.setText(KEK_WORD);
Toast toast = new Toast(getApplicationContext());
toast.setGravity(Gravity.CENTER_VERTICAL, 0, 0);
toast.setDuration(Toast.LENGTH_SHORT);
toast.setView(layout);
toast.show();
}
public void onClickGetOrientationButton(View view) {
int currentOrientation = getResources().getConfiguration().orientation;
if (currentOrientation == Configuration.ORIENTATION_PORTRAIT) {
makeOrientationToastText(PORTRAIT_ORIENTATION_WORD_RUS);
} else if (currentOrientation == Configuration.ORIENTATION_LANDSCAPE) {
makeOrientationToastText(LANDSCAPE_ORIENTATION_WORD_RUS);
} else {
makeOrientationToastText(UNKNOWN_ORIENTATION_WORD_RUS);
}
}
public void onClickWorkWithDisplayButton(View view) {
Intent workWithDisplayIntent = new Intent(MainActivity.this, WorkWithDisplayActivity.class);
startActivity(workWithDisplayIntent);
}
public void onClickBeerInfoButton(View view) {
Intent beerInfoIntent = new Intent(MainActivity.this, BeerInfoActivity.class);
startActivity(beerInfoIntent);
}
public void onClickStopwatchButton(View view) {
Intent stopwatchIntent = new Intent(MainActivity.this, StopwatchActivity.class);
startActivity(stopwatchIntent);
}
public void onClickBerserkCardsInfoButton(View view) {
Intent berserkCardsMainIntent = new Intent(MainActivity.this, BerserkCardsInfoMainActivity.class);
startActivity(berserkCardsMainIntent);
}
private void makeOrientationToastText(String orientationType) {
Toast.makeText(getApplicationContext(), "В данный момент " + orientationType + " ориентация", Toast.LENGTH_LONG).show();
}
}
<file_sep>package ru.sk1ly.firstapp;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
/**
* Activity with the detailed description of a card.
* If the application is running on a tablet, this activity is not used.
*/
public class BerserkCardsInfoDetailActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_berserk_cards_info_detail);
BerserkCardsInfoDetailFragment frag = (BerserkCardsInfoDetailFragment)
getSupportFragmentManager().findFragmentById(R.id.berserk_cards_info_detail_fragment);
int cardId = (int) getIntent().getExtras().get(Constants.Keys.BERSERK_CARD_ID);
frag.setCardId(cardId);
}
}<file_sep>package ru.sk1ly.firstapp;
public class Constants {
public static class Keys {
public static final String USER_FIRST_NAME = "ru.sk1ly.firstapp.USER_FIRST_NAME";
public static final String USER_SECOND_NAME = "ru.sk1ly.firstapp.USER_SECOND_NAME";
public static final String USER_FIRST_NAME_ANSWER = "ru.sk1ly.firstapp.USER_FIRST_NAME_ANSWER";
public static final String USER_SECOND_NAME_ANSWER = "ru.sk1ly.firstapp.USER_SECOND_NAME_ANSWER";
public static final String STOPWATCH_STATUS = "ru.sk1ly.firstapp.STOPWATCH_STATUS";
public static final String STOPWATCH_SECONDS = "ru.sk1ly.firstapp.SECONDS";
public static final String BERSERK_CARD_ID = "ru.sk1ly.firstapp.BERSERK_CARD_ID";
}
}
| ea534f5cf806948a496b251b0ee3c8cc267592d3 | [
"Markdown",
"Java"
] | 7 | Markdown | sk1ly/first_app | 9595d716738fb71cf7b3ee6cc2e8718c88c24726 | d20b1eb36c8806e174e7b2694ae0eafc990e6dfa | |
refs/heads/master | <repo_name>deboshas/BlockChainUsingJavaScript<file_sep>/dev/transcation.ts
export class Transcation {
Amount: number;
sender: string;
recepient: string;
}<file_sep>/dev/Block.ts
import { Transcation } from "./transcation";
export class Block {
index: number;
Hash: string;
previousBlockHash: string;
Transcations: Transcation[];
TimeStamp: string;
nance: number;
    constructor(hash, prevHash, transactionData, nance) {
        this.Hash = hash;
        this.previousBlockHash = prevHash;
        this.nance = nance;
        this.TimeStamp = new Date().toISOString();
        // initialise the array before adding, otherwise push() on an undefined field fails
        this.Transcations = [transactionData];
    }
}<file_sep>/dev/blockChain.js
const sha256 = require('sha256');
const currentNodeurl = process.argv[3];
const uuid = require('uuid/v1'); // unique identifier for this current node
class BlockChain {
constructor() {
this.chain = [];
this.pendingTransactions = [];
this.addBlocktoChain(this.createNewBlock(100, '0', '0'));//genesys block,first block
this.currentNodeUrl = currentNodeurl;
this.networkNodes = [];
this.node_address = uuid().split('-').join('');
}
createNewBlock(nance, previousBlockHash, hash) {
const newBlock = {
index: this.chain.length + 1,
timestamp: Date.now(),
transactions: this.pendingTransactions,
nance: nance,
hash: hash,
previousBlockHash: previousBlockHash
};
this.pendingTransactions = [];
return newBlock;
}
addBlocktoChain(block) {
this.chain.push(block);
}
getLastBlock() {
return this.chain[this.chain.length - 1];
}
createNewTransaction(amount, sender, receiver) {
        // add logic here to validate the transaction before adding it to the pending transactions
const newTransaction = {
amount: amount,
sender: sender,
receiver: receiver,
transcationId: uuid().split('-').join('')
};
return newTransaction;
}
addTransactionToPendingTransactions(transactionObj) {
this.pendingTransactions.push(transactionObj);
}
hashBlock(previousBlockHash, currentBlockData, nance) {
const dataAsString = previousBlockHash + nance.toString() + JSON.stringify(currentBlockData);
const hash = sha256(dataAsString);
return hash;
}
proofOfWork(previousBlockHash, currentBlockData) {
let nance = 0;
let hash = this.hashBlock(previousBlockHash, currentBlockData, nance);
while (hash.substring(0, 4) !== '0000') {
nance++;
hash = this.hashBlock(previousBlockHash, currentBlockData, nance);
}
return nance;
}
    // validate a full chain: re-hash every block, check the hash links, and check the genesis block
ChainIsValid(blockChain) {
let isValidChain = true;
let isvalidGenesysBlock = true;
let genesysBlock = blockChain.chain[0];
        // longest chain rule helper:
        // validate each and every block by comparing the previous block's hash with the link stored in the
        // current block, and re-hash each block's data to make sure it has not been tampered with
for (var i = 1; i < blockChain.chain.length; i++) {
let prevBlock = blockChain.chain[i - 1];
let currentBlock = blockChain.chain[i];
if (this.hashBlock(prevBlock.hash, { transcations: currentBlock.transactions, index: currentBlock.index }, currentBlock.nance).substring(0, 4) != '0000') {
isValidChain = false;
break;
}
if (prevBlock.hash != currentBlock.previousBlockHash) {
isValidChain = false;
break;
}
}
        // check the genesis block: it must have the fixed nonce and hashes, and no transactions
        if ((genesysBlock.nance != "100") || (genesysBlock.hash != "0") || (genesysBlock.previousBlockHash != "0")
            || (genesysBlock.transactions.length != 0)) {
            isvalidGenesysBlock = false
        }
return isValidChain && isvalidGenesysBlock;
        // the data of every block is validated above by re-hashing it
}
}
module.exports = BlockChain<file_sep>/dev/networknode.js
const express = require('express');
const BlockChain = require('./blockChain');
const bodyParser = require('body-parser');
const port = process.argv[2];//port for different nodes
const url = process.argv[3];
const rp = require('request-promise');
var app = express();
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
const bitcoin = new BlockChain();
app.get('/blockchain', function (req, res) {
res.send(bitcoin);
})
app.post('/transcation', function (req, res) {
bitcoin.addTransactionToPendingTransactions(req.body.transcationData);
    res.json({ note: 'Transaction will be added to the next mined block' });
})
//create a new block for us by mining
app.post('/mine', function (req, res) {
registerNodePromises = [];
const lastBlock = bitcoin.getLastBlock();
const prevblockHash = lastBlock.hash;
const currentBlockdata = {
transcations: bitcoin.pendingTransactions,
index: lastBlock.index + 1
}
let nance = bitcoin.proofOfWork(prevblockHash, currentBlockdata);
let hash = bitcoin.hashBlock(prevblockHash, currentBlockdata, nance);
let newBlock = bitcoin.createNewBlock(nance, prevblockHash, hash);
    // broadcast the mined block to the entire network
bitcoin.networkNodes.forEach(networkNodeUrl => {
let requestOptions = {
url: networkNodeUrl + "/receive/new/block",
method: 'Post',
body: { block: newBlock },
json: true
}
registerNodePromises.push(rp(requestOptions));
});
Promise.all(registerNodePromises)
.then(data => {
//add the block to chain if teh block is accepted
bitcoin.addBlocktoChain(newBlock);
            // reward the miner with some bitcoin
let newTranscation = bitcoin.createNewTransaction(12.5, "mine-reward-programme", bitcoin.node_address);
            // broadcast the new reward transaction to the entire network
let requestOptions = {
url: bitcoin.currentNodeUrl + "/transcation/broadcast",
method: 'Post',
body: {
transcationData: {
"amount": "12.5",
"sender": "mine reward programme",
"receiver": bitcoin.currentNodeUrl
}
},
json: true
}
return rp(requestOptions);
})
.then(data => {
res.json({
note: `New block mined successfully at ${newBlock.index} and broadcasted successfully`,
block: newBlock
})
})
//reward this current node address with uuid unique id
})
//create a decentralized network--start
//register a node and broadcast it in the block chain network
app.post('/register-and-broadcast', function (req, res) {
//new node wants to join our network
const newNodeUrl = req.body.newNodeUrl;
let registerNodePromises = [];
    // register the new node with self
if (bitcoin.networkNodes.indexOf(newNodeUrl) === -1) {
bitcoin.networkNodes.push(newNodeUrl);
}
//broadcast the new node to the entire network
bitcoin.networkNodes.forEach(networkNodeUrl => {
//hit register node endpoint for all nodes
let requestOptions = {
url: networkNodeUrl + "/register-node",
method: 'Post',
body: { newNodeUrl: newNodeUrl },
json: true
}
registerNodePromises.push(rp(requestOptions));
});
Promise.all(registerNodePromises)
.then(data => {
//use the data
let bulkrequestOptions = {
url: newNodeUrl + "/register-node-bulk",
method: 'Post',
body: { allnetworkNodes: [...bitcoin.networkNodes, bitcoin.currentNodeUrl] },
json: true
}
return rp(bulkrequestOptions)
}).then(data => {
res.json({ note: "New Node registered successfully" });
})
})
//register a node to the block chain network
app.post('/register-node', function (req, res) {
if (bitcoin.networkNodes.indexOf(req.body.newNodeUrl) === -1 && req.body.newNodeUrl != bitcoin.currentNodeUrl) {
bitcoin.networkNodes.push(req.body.newNodeUrl);
res.json({ note: `New node is registerd successfully at ${bitcoin.currentNodeUrl} ` })
}
else {
res.json({ note: `New node is already registred with ${bitcoin.currentNodeUrl} ` })
}
})
//register multiple nodes at once
app.post('/register-node-bulk', function (req, res) {
let allnetworkNodes = req.body.allnetworkNodes;
allnetworkNodes.forEach(url => {
if (url != bitcoin.currentNodeUrl && bitcoin.networkNodes.indexOf(url) === -1) {
bitcoin.networkNodes.push(url);
}
})
res.json({ note: `New node is registerd with all the network nodes available` })
})
//create a decentralized network--END
//sync the nodes for transactions and mining --start
//this endpoint will be hit whenever we try to create a new transaction
app.post('/transcation/broadcast', function (req, res) {
let network_nodes = bitcoin.network_nodes;
let transcationData = req.body.transcationData;
let registerNodePromises = [];
    // create the transaction in the current node
let newTranscation = bitcoin.createNewTransaction(transcationData.amount, transcationData.sender, transcationData.receiver);
bitcoin.addTransactionToPendingTransactions(newTranscation);
    // broadcast the transaction to the entire network
bitcoin.networkNodes.forEach(networkNodeUrl => {
let requestOptions = {
url: networkNodeUrl + "/transcation",
method: 'Post',
body: { transcationData: newTranscation },
json: true
}
registerNodePromises.push(rp(requestOptions));
});
Promise.all(registerNodePromises)
.then(data => {
            res.json({ note: 'Transaction has been broadcast to every other node' });
});
});
app.post('/receive/new/block', function (req, res) {
let newBlock = req.body.block;
    // validate the new block upon receiving it, to check whether the block is legitimate or not
let lastBlock = bitcoin.getLastBlock();
let correctHash = lastBlock.hash === newBlock.previousBlockHash;
let correctindex = lastBlock.index === (newBlock.index - 1);
if (correctHash && correctindex) {
bitcoin.addBlocktoChain(newBlock);
bitcoin.pendingTransactions = [];
res.json({
            note: `New block received, accepted, and added to the chain ${newBlock}`
})
}
else {
res.json({
note: `New block is rejected by the network ${newBlock}`
})
}
})
app.get('/consensus', (req, res) => {
registerNodePromises = [];
bitcoin.networkNodes.forEach(networkNodeUrl => {
let requestOptions = {
url: networkNodeUrl + "/blockchain",
method: 'Get',
json: true
}
registerNodePromises.push(rp(requestOptions));
});
    // longest chain rule implementation: adopt the longest valid chain found on the network
Promise.all(registerNodePromises)
.then(blockChains => {
            let newLongestChain = null;
            let newPendingTransactions = null;
            let maxChainLength = bitcoin.chain.length;
            blockChains.forEach(blockchain => {
                // a longer chain only wins if it also passes validation
                if (blockchain.chain.length > maxChainLength && bitcoin.ChainIsValid(blockchain)) {
                    maxChainLength = blockchain.chain.length;
                    newLongestChain = blockchain.chain;
                    newPendingTransactions = blockchain.pendingTransactions;
                }
            })
            if (newLongestChain) {
                bitcoin.chain = newLongestChain;
                bitcoin.pendingTransactions = newPendingTransactions;
            }
            res.json({
                note: newLongestChain ? 'This chain has been replaced.' : 'Current chain has not been replaced.',
                chain: bitcoin.chain
            })
})
//sync the nodes for transcation and mining --End
app.listen(port, function () {
console.log(`Listing on port ${port}`);
});
<file_sep>/dev/blockChainData.ts
import { Block } from "./Block";
import { Transcation } from "./transcation";
const sha256 = require('sha256');
const uuid = require('uuid/v1'); // unique identifier for this current node
export class BlockChain {
Chain: Block[];
CurrentNodeUrl: string;
NetworkNodeUrls: string[]
PendingTranscation: Transcation[];
node_address: string;
constructor() {
        // initialise the collections so later pushes do not fail
        this.Chain = [];
        this.PendingTranscation = [];
        this.NetworkNodeUrls = [];
        this.node_address = uuid().split('-').join('');
}
createTranscation(amount, sender, receiver) {
this.PendingTranscation.push({
Amount: amount,
sender: sender,
recepient: receiver
});
}
getLastBlockIndex() {
return this.Chain.length - 1;
}
createBlock(hash, prevHash, nance) {
//validate wheather hash,previous and nance are empty before creating new block and pushing to chain
let block = new Block(hash, prevHash, this.PendingTranscation, nance);
block.index = this.Chain.length + 1;
//push the block into chain/ledger
this.Chain.push(block);
}
createHash(prevHash, currentData, nance) {
//validate wheather hash,previous and nance are empty before creating new block and pushing to chain
let payload = prevHash + JSON.stringify(currentData) + nance;
return sha256(payload);
}
//mine the block,after that only u can create the block and and it to the chain
    proofOfWork(prevHash, currentData) {
        let nance = 0;
        let hash: string = this.createHash(prevHash, currentData, nance);
        // increment the nonce until the hash meets the difficulty target,
        // always re-hashing with createHash so the result matches later validation
        while (hash.substring(0, 4) != "0000") {
            nance++;
            hash = this.createHash(prevHash, currentData, nance);
        }
        return nance;
    }
}<file_sep>/dev/test.js
var BlockChain = require('./blockChain');
var bitcoin = new BlockChain();
let blockChaindata = {
"chain": [
{
"index": 1,
"timestamp": 1564544724877,
"transactions": [
],
"nance": 100,
"hash": "0",
"previousBlockHash": "0"
},
{
"index": 2,
"timestamp": 1564544819793,
"transactions": [
{
"amount": "10",
"sender": "tax",
"receiver": "public expenditure"
},
{
"amount": "10",
"sender": "1",
"receiver": "public expenditure"
}
],
"nance": 124840,
"hash": "0000ec477f70dc528acdfc6fdd91ba5be2ca812ff3ee64cf439ae1ee0e51c7fb",
"previousBlockHash": "0"
},
{
"index": 3,
"timestamp": 1564544841472,
"transactions": [
{
"amount": "12.5",
"sender": "mine reward programme",
"receiver": "http://localhost:3001",
"transcationId": "d9b3b7e0b34511e9b170897154018a7b"
},
{
"amount": "10",
"sender": "1",
"receiver": "public expenditure"
},
{
"amount": "10",
"sender": "1",
"receiver": "public expenditure"
},
{
"amount": "10",
"sender": "1",
"receiver": "public expenditure"
}
],
"nance": 86458,
"hash": "0000e66932943be9a3aaa3fd672a560b5f7093d6f916ce2682c1090803e0f2d3",
"previousBlockHash": "0000ec477f70dc528acdfc6fdd91ba5be2ca812ff3ee64cf439ae1ee0e51c7fb"
},
{
"index": 4,
"timestamp": 1564544891957,
"transactions": [
{
"amount": "12.5",
"sender": "mine reward programme",
"receiver": "http://localhost:3001",
"transcationId": "e69e9a60b34511e9b170897154018a7b"
},
{
"amount": "70",
"sender": "1",
"receiver": "public expenditure"
},
{
"amount": "90",
"sender": "1",
"receiver": "public expenditure"
},
{
"amount": "90",
"sender": "1",
"receiver": "public expenditure"
}
],
"nance": 473192,
"hash": "00004bb91732d1930b3e8c064f44a44889145a7b6f3c116e4dc8dba5fe4cb52d",
"previousBlockHash": "0000e66932943be9a3aaa3fd672a560b5f7093d6f916ce2682c1090803e0f2d3"
},
{
"index": 5,
"timestamp": 1564544903292,
"transactions": [
{
"amount": "12.5",
"sender": "mine reward programme",
"receiver": "http://localhost:3001",
"transcationId": "04b5b290b34611e9b170897154018a7b"
}
],
"nance": 22379,
"hash": "00009e3237f5db39c5787ce1167e4a90ec635c419d7b5ae5c8800e816755d103",
"previousBlockHash": "00004bb91732d1930b3e8c064f44a44889145a7b6f3c116e4dc8dba5fe4cb52d"
},
{
"index": 6,
"timestamp": 1564544909176,
"transactions": [
{
"amount": "12.5",
"sender": "mine reward programme",
"receiver": "http://localhost:3001",
"transcationId": "0b76d2d0b34611e9b170897154018a7b"
}
],
"nance": 2981,
"hash": "0000677ccff4a72bafd5a0ced7cc456299d202b2ddb48266146bab88c7f9ddc4",
"previousBlockHash": "00009e3237f5db39c5787ce1167e4a90ec635c419d7b5ae5c8800e816755d103"
}
],
"pendingTransactions": [
{
"amount": "12.5",
"sender": "mine reward programme",
"receiver": "http://localhost:3001",
"transcationId": "0ef8cda0b34611e9b170897154018a7b"
}
],
"currentNodeUrl": "http://localhost:3001",
"networkNodes": [
],
"node_address": "a11edae0b34511e9b170897154018a7b"
};
console.log('validChain', bitcoin.ChainIsValid(blockChaindata)); | 3b6db8faab5ed95fc4a996a999d84049b0f48dfa | [
"JavaScript",
"TypeScript"
] | 6 | TypeScript | deboshas/BlockChainUsingJavaScript | 46298cd619b9ce202fe3736921acf02e85693766 | 6d698cff81f22daaea2af7c4dfced5beb95baa60 | |
refs/heads/master | <repo_name>farukkhan14/ClassicalPHP<file_sep>/details.php
<?php
session_start();
mysql_connect("localhost", "root", "") or die(mysql_error());
mysql_select_db("example") or die(mysql_error());
$ID = $_GET['id'];
$query = "SELECT * FROM students WHERE id='$ID'";
$result = mysql_query($query);
$data = mysql_fetch_object( $result );
?>
<!DOCTYPE html>
<html>
<head>
<title>Update</title>
</head>
<body>
<label for="id">ID: </label><?php echo $data->id ?>
<br/>
<label for="email">Email: </label><?php echo $data->email ?>
<br/>
<a href="index.php">Back to List</a>
</body>
</html><file_sep>/store.php
<?php
session_start();
/*
var_dump($_POST);
echo "<pre>";
print_r($_REQUEST);
print_r($_FILES);
echo "</pre>";
die();
*/
//collect the data
$email = $_POST["email"];
//validate
if(empty($_POST['email']) ){
$message = "Email field cannot be empty";
$_SESSION['message'] = $message;
header('location:create.php');
exit();
}
//connect to database (the connection must exist before mysqli_real_escape_string can use it)
$conn = mysqli_connect("localhost", "example", "example","example") or die(mysqli_connect_error());

//sanitize data
$email = mysqli_real_escape_string($conn, $email);
//prepare query
$query = "INSERT into students (email) values ('$email')";
//insert data
if (mysqli_query($conn, $query)) //if successfull
{
$_SESSION['message'] = "Data is inserted successfully";
header('location:index.php');
exit();
}
else
{ //failed
$_SESSION['message'] = mysqli_error($conn);
header('location:create.php');
exit();
}
?>
<file_sep>/edit.php
<?php
mysql_connect("localhost", "root", "") or die(mysql_error());
mysql_select_db("example") or die(mysql_error());
$ID = $_GET['id'];
$query = "SELECT * FROM students WHERE id='$ID'";
$result = mysql_query($query);
$data = mysql_fetch_object( $result );
?>
<!DOCTYPE html>
<html>
<head>
<title>Update</title>
</head>
<body>
<form action="update.php" method="post">
<label for="email">Email:</label>
<input type="hidden" name="id" value="<?php echo $data->id?>">
<input type="text" name="email" value="<?php echo $data->email ?>"/>
<input type="submit" value="Update" name="update"/>
</form>
</body>
</html><file_sep>/delete.php
<?php
session_start();
mysql_connect("localhost", "root", "") or die(mysql_error());
mysql_select_db("example") or die(mysql_error());
$id= $_GET["id"];
if(mysql_query("DELETE from students where id = $id"))
{
$deleted = "Record deleted successfully";
$_SESSION['deleted'] = $deleted;
header('location:index.php');
}
<file_sep>/create.php
<?php session_start(); ?>
<!DOCTYPE html>
<html>
<head>
<title>create</title>
</head>
<body>
<div class="message">
        <?php if (isset($_SESSION['message'])) { echo $_SESSION['message']; $_SESSION['message'] = ""; } ?>
</div>
<form action="store.php" method="post" enctype="multipart/form-data">
<label for="email">Email:</label>
<input type="text" name="email" value="<EMAIL>" />
<!--
<input type="checkbox" name="hobby[]" value="Cycling" />Cycling
<input type="checkbox" name="hobby[]" value="Singing" />Singing
<input type="checkbox" name="hobby[]" value="Painting" />Painting
<input type="radio" name="hobby" value="Cycling" checked="checked" />Cycling
<input type="radio" name="hobby" value="Singing" />Singing
<input type="radio" name="hobby" value="Painting" />Painting
<select name="options">
<option value="option1">Option 1</option>
<option value="option2">Option 2</option>
<option value="option3">Option 3</option>
</select>
<select name="multioptions[]" multiple="multiple">
<option value="option1">Option 1</option>
<option value="option2" selected="selected">Option 2</option>
<option value="option3">Option 3</option>
<option value="option4">Option 4</option>
<option value="option5">Option 5</option>
</select>
<input type="file" name="aFile" />
-->
<input type="submit" value="Submit"/>
</form>
</body>
</html><file_sep>/index.php
<?php session_start();?>
<html>
<head></head>
<body>
<?php if(isset($_SESSION['added'])){
$message = $_SESSION['added'];
?>
<div style="color: green"><?php echo $message;?></div>
<?php
session_unset();
}?>
<?php if(isset($_SESSION['deleted'])){
$message = $_SESSION['deleted'];
?>
<div style="color: red"><?php echo $message;?></div>
<?php
session_unset();
}?>
<table>
<tr>
<th>Serial</th>
<th>Email</th>
<th colspan="2">Actions</th>
</tr>
<?php
mysql_connect("localhost", "root", "") or die(mysql_error());
mysql_select_db("example") or die(mysql_error());
$query= mysql_query("SELECT id,email from students");
$i = 0;
while($list=mysql_fetch_object($query)){
echo "<tr>";
echo"<td>".++$i."</td>";
echo"<td>".$list->email."</td>";
echo"<td>"."<a href='details.php?id=".$list->id."'><button>detail</button></a>"."</td>";
echo"<td>"."<a href='edit.php?id=".$list->id."'><button>Edit</button></a>"."</td>";
echo"<td>"."<a href='delete.php?id=".$list->id."'><button>Delete</button></a>"."</td>";
echo "</tr>";
}
?>
</table>
<a href="create.php"><button>Create New</button></a>
</body>
</html><file_sep>/update.php
<?php
session_start();
mysql_connect("localhost", "root", "") or die(mysql_error());
mysql_select_db("example") or die(mysql_error());
if( isset( $_POST['update'] ) ){
$id = $_POST['id'];
$email = $_POST['email'];
mysql_query("UPDATE students SET email = '$email' WHERE id = '$id'")
or die(mysql_error());
$message = "Record updated successfully";
$_SESSION['added'] = $message;
header('location:index.php');
exit();
}
?> | 9e6ab94ffcd9b8351b1c14561612ced50b9343d5 | [
"PHP"
] | 7 | PHP | farukkhan14/ClassicalPHP | 6558a665231750e466454dacd233b22f8c31eeec | 1cedabac81a8e356e2958446a5e682462355a847 | |
refs/heads/master | <file_sep>
//create an object for the default data
const defaultState = { posts };
const store = createStore(rootReducer, defaultState);
export const history = syncHistoryWithStore(browserHistory, store);
export default store;
| 9b5c0e12868099292293b6268a2c24f1049b97ab | [
"JavaScript"
] | 1 | JavaScript | olavea/Redux-for-dads-og-Ola-6 | 882e7f39a25312d5eae98ddef93b896b5ff39230 | 0981d21eb184ad2cf4817c4204ac3c57492fb3dc | |
refs/heads/master | <repo_name>mihai-varga/fileChangeNotifier<file_sep>/main.py
import webapp2, os, jinja2, sys, cgi, urllib, urllib2, hashlib
from google.appengine.api import users, mail, urlfetch
sys.path.append('./models')
import models
from webapp2_extras import sessions
JINJA_ENVIRONMENT = jinja2.Environment(
loader = jinja2.FileSystemLoader(os.path.dirname(__file__)),
extensions = ['jinja2.ext.autoescape'])
config = {}
config['webapp2_extras.sessions'] = {
'secret_key' : 'super-banan-girl-unguessable',
}
class BaseHandler(webapp2.RequestHandler):
def dispatch(self):
self.session_store = sessions.get_store(request=self.request)
try:
webapp2.RequestHandler.dispatch(self)
finally:
self.session_store.save_sessions(self.response)
@webapp2.cached_property
def session(self):
return self.session_store.get_session()
class LoginPage(BaseHandler):
def get(self):
template_values = {
'g_sign_in' : users.create_login_url(self.request.uri)}
template = JINJA_ENVIRONMENT.get_template('templates/login_page.html')
self.response.write(template.render(template_values))
def post(self):
# Facebook does a POST request when loading the page in the iFrame
template_values = {
'g_sign_in' : users.create_login_url(self.request.uri)}
template = JINJA_ENVIRONMENT.get_template('templates/login_page.html')
self.response.write(template.render(template_values))
class SessionData(BaseHandler):
def get(self):
# noting to do here
self.redirect('/')
def post(self):
# save the userID for this session
self.session['user_id'] = self.request.get('userID')
self.session['email'] = self.request.get('email')
if self.session['email'] == '':
# we're on facebook
self.session['on_facebook'] = True
else:
self.session['user_id'] = self.session['user_id'] + '@google'
# encode it, in case a user_id exists both on facebook and google
class MainPage(BaseHandler):
def get(self):
user_id = self.session.get('user_id')
if user_id == None:
# session has expired
self.redirect('/')
file_list = []
file_query = models.Files.query( ancestor = models.FilesKey(
user_id)).fetch()
for entry in file_query:
dict = {}
dict['url'] = entry.file_url
dict['name'] = entry.file_name
file_list.append(dict)
template_values = { 'file_list' : file_list}
template = JINJA_ENVIRONMENT.get_template('templates/index.html')
self.response.write(template.render(template_values))
def post(self):
user_id = self.session.get('user_id')
if user_id == None:
# session has expired
self.redirect('/')
new_file = models.Files(parent = models.FilesKey(user_id))
if self.session.get('on_facebook'):
new_file.fb_user = user_id
else:
new_file.g_user = user_id
new_file.email = self.session.get('email')
new_file.file_url = str(cgi.escape(self.request.get('file_url')))
new_file.file_name = str(cgi.escape(self.request.get('file_name')))
if new_file.file_url != '' and new_file.file_url != None:
new_file.put()
self.redirect('/index')
class LogoutHandler(BaseHandler):
def get(self):
self.session['user_id'] = None
self.redirect('/')
class UnWatch(BaseHandler):
def get(self):
user_id = self.session.get('user_id')
if user_id == None:
# session has expired
self.redirect('/')
file_list = []
file_query = models.Files.query( ancestor = models.FilesKey(
user_id)).fetch()
for entry in file_query:
dict = {}
dict['url'] = entry.file_url
dict['name'] = entry.file_name
dict['chbox'] = 'chbox' + entry.file_url
file_list.append(dict)
if len(file_list) == 0:
self.redirect('/index')
template_values = { 'file_list' : file_list}
template = JINJA_ENVIRONMENT.get_template('templates/unwatch.html')
self.response.write(template.render(template_values))
def post(self):
user_id = self.session.get('user_id')
if user_id == None:
# session has expired
self.redirect('/')
file_query = models.Files.query( ancestor = models.FilesKey(
user_id)).fetch()
files_to_unwatch = []
for entry in file_query:
if ('on' == str(cgi.escape(self.request.get('chbox'+entry.file_url)))):
files_to_unwatch.append(entry)
for entry in files_to_unwatch:
entry.key.delete()
self.redirect('/unwatch')
class ChannelUrl(webapp2.RequestHandler):
def get(self):
template = JINJA_ENVIRONMENT.get_template('templates/channelUrl.html')
self.response.write(template.render())
class PrivacyPolicy(webapp2.RequestHandler):
def get(self):
template = JINJA_ENVIRONMENT.get_template('templates/privacyPolicy.html')
self.response.write(template.render())
def post(self):
template = JINJA_ENVIRONMENT.get_template('templates/privacyPolicy.html')
self.response.write(template.render())
class TermsOfService(webapp2.RequestHandler):
def get(self):
template = JINJA_ENVIRONMENT.get_template('templates/termsOfUse.html')
self.response.write(template.render())
def post(self):
template = JINJA_ENVIRONMENT.get_template('templates/termsOfUse.html')
self.response.write(template.render())
application = webapp2.WSGIApplication([
('/', LoginPage),
('/index', MainPage),
('/unwatch', UnWatch),
('/session', SessionData),
('/channelUrl', ChannelUrl),
('/logout', LogoutHandler),
('/privacy', PrivacyPolicy),
('/terms', TermsOfService),
], debug = True,
config = config)
<file_sep>/watcher.py
import webapp2, sys, urllib, hashlib
from google.appengine.api import users, mail, urlfetch
sys.path.append('./models')
import models
FACEBOOK_APP_ID = '189988587852276'
FACEBOOK_APP_SECRET = ''
class Watcher(webapp2.RequestHandler):
def get(self):
file_query = models.Files.query().fetch()
rpcs = []
def handle_result(rpc, entry):
response = rpc.get_result().content
md5 = hashlib.md5(response).hexdigest()
if entry.md5 == None or entry.md5 == '':
entry.md5 = md5
entry.put()
elif entry.md5 != md5:
entry.md5 = md5
entry.put()
if entry.fb_user != None:
# Send notification on facebook:
template = 'The file: "' + entry.file_name + '" has changed!'
url = 'https://graph.facebook.com/' + entry.fb_user + '/notifications/'
data = { 'access_token' : FACEBOOK_APP_ID + '|' + FACEBOOK_APP_SECRET,
'href' : 'index',
'template' : template}
data = urllib.urlencode(data)
response = urlfetch.fetch(url=url,
payload=data,
method=urlfetch.POST,
headers={'Content-Type': 'application/x-www-form-urlencoded'})
else:
# we have a google+ user
sender = 'File Change Notifier <<EMAIL>>'
subject = 'A file has changed!'
body = ('The file: "' + entry.file_name + '" at ' +
entry.file_url + ' has changed.')
mail.send_mail(sender, entry.email, subject, body)
def create_callback(rpc, entry):
return lambda: handle_result(rpc, entry)
for entry in file_query:
rpc = urlfetch.create_rpc()
rpc.callback = create_callback(rpc, entry)
urlfetch.make_fetch_call(rpc, entry.file_url,
headers={'Content-Type': 'application/x-www-form-urlencoded'},
follow_redirects = False)
rpcs.append(rpc)
for rpc in rpcs:
rpc.wait()
application = webapp2.WSGIApplication([
('/watcher', Watcher)
], debug = True)
<file_sep>/models/models.py
from google.appengine.ext import ndb
class Files(ndb.Model):
g_user = ndb.StringProperty()
fb_user = ndb.StringProperty()
email = ndb.StringProperty()
file_url = ndb.StringProperty()
file_name = ndb.StringProperty()
md5 = ndb.StringProperty()
def FilesKey(user_id):
return ndb.Key('profile', user_id)
<file_sep>/README.md
A Facebook and Google+ app that sends a notification when a watched file on the web changes. <br>
Available for Facebook: https://apps.facebook.com/flchangenotifier/ <br>
and Google+: http://filechgnotifier.appspot.com/
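
How the check works — a minimal sketch of the idea in `watcher.py` (the deployed app fetches with App Engine's `urlfetch` and then notifies through a Facebook notification or an e-mail):

```python
import hashlib
import urllib2  # stand-in here; the real watcher uses App Engine's urlfetch


def check_for_change(url, stored_md5):
    """Fetch the file at `url` and report whether its MD5 differs from the stored one."""
    content = urllib2.urlopen(url).read()
    new_md5 = hashlib.md5(content).hexdigest()
    return new_md5 != stored_md5, new_md5
```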
| e0221ab474bcbb0b83c8ff077561c4a0641f7344 | [
"Markdown",
"Python"
] | 4 | Python | mihai-varga/fileChangeNotifier | c49cce9557798b820cc3c54dcccbb4f100091d7f | a8aa1b3df8c75d9f7b6d8b19a77d8999e5ebda14 | |
refs/heads/master | <repo_name>onuryilmaz/google-maps-place-fetcher<file_sep>/www/js/app.js
angular.module('starter', ['ionic'])
// Start application
.run(function ($ionicPlatform) {
$ionicPlatform.ready(function () {
if (window.cordova && window.cordova.plugins.Keyboard) {
cordova.plugins.Keyboard.hideKeyboardAccessoryBar(true);
}
if (window.StatusBar) {
StatusBar.styleDefault();
}
})
})
// Configuration for app
.config(function ($stateProvider, $urlRouterProvider) {
$stateProvider
.state('map', {
url: '/',
templateUrl: 'templates/map.html',
controller: 'MapCtrl'
});
$urlRouterProvider.otherwise("/");
})
// Place manager factory
.factory('PlaceManager', function () {
// All places
var places = [];
// Insert into place
function insertPlace(item) {
places.push(item);
}
// Get places
function get() {
return places;
}
return {
insert: function (item) {
insertPlace(item)
},
getPlaces: function () {
console.save(JSON.stringify(get()), "places.json");
},
getCount: function () {
return places.length;
}
}
})
// Google Maps factory
.factory('GoogleMaps', function (PlaceManager, $ionicLoading) {
var map = null;
// Initialize map
function initMap() {
$ionicLoading.show();
var options = {timeout: 10000, enableHighAccuracy: true};
var latLng = new google.maps.LatLng(41, 29); // Istanbul
        navigator.geolocation.getCurrentPosition(function (pos) {
            var latLng = new google.maps.LatLng(pos.coords.latitude, pos.coords.longitude);
            // re-center the map on the user's position once it is known
            map.setCenter(latLng);
            $ionicLoading.hide();
        }, function (error) {
            console.log("Couldn't get your current location!");
            $ionicLoading.hide();
        }, options);
var mapOptions = {
center: latLng,
zoom: 7,
mapTypeId: google.maps.MapTypeId.ROADMAP,
streetViewControl: false,
mapTypeControl: false
};
map = new google.maps.Map(document.getElementById("map"), mapOptions);
var drawingManager = new google.maps.drawing.DrawingManager({
drawingMode: google.maps.drawing.OverlayType.MARKER,
drawingControl: true,
drawingControlOptions: {
position: google.maps.ControlPosition.TOP_CENTER,
drawingModes: [
google.maps.drawing.OverlayType.RECTANGLE
]
},
rectangleOptions: {editable: false}
});
drawingManager.setMap(map);
// Start when rectangle is drawn
google.maps.event.addListener(drawingManager, 'rectanglecomplete', function (rectangle) {
drawingManager.setOptions({drawingControl: false})
drawingManager.setDrawingMode(null);
var places = new google.maps.places.PlacesService(map);
bounds = rectangle.getBounds();
southWest = bounds.getSouthWest();
northEast = bounds.getNorthEast();
tileWidth = (northEast.lng() - southWest.lng()) / 2;
tileHeight = (northEast.lat() - southWest.lat()) / 2;
for (x = 0; x < 2; x++) {
for (y = 0; y < 2; y++) {
var x1 = southWest.lat() + (tileHeight * x);
var y1 = southWest.lng() + (tileWidth * y);
var x2 = x1 + tileHeight;
var y2 = y1 + tileWidth;
var tempCell = new google.maps.LatLngBounds(new google.maps.LatLng(x1, y1), new google.maps.LatLng(x2, y2));
places.radarSearch({
bounds: tempCell,
types: [
'establishment'
]
}, function (results, status) {
if (status == google.maps.places.PlacesServiceStatus.OK) {
for (var i = 0; i < results.length; i++) {
                                    // use block-scoped bindings so each click listener below captures its own place and marker
                                    let placeLoc = results[i].geometry.location;
                                    let place = results[i];
                                    let marker = new google.maps.Marker({
map: map,
position: placeLoc
});
PlaceManager.insert(place);
service = new google.maps.places.PlacesService(map);
google.maps.event.addListener(marker, 'click', function () {
var infoWindow = new google.maps.InfoWindow();
service.getDetails(place, function (result, status) {
if (status !== google.maps.places.PlacesServiceStatus.OK) {
console.error(status);
return;
}
infoWindow.setContent(result.name);
infoWindow.open(map, marker);
});
});
}
}
else {
console.log("Error: " + status)
}
});
}
}
});
}
return {
init: function () {
initMap();
}
}
})
// Map controller
.controller('MapCtrl', function ($scope, $window, $ionicPopup, $ionicLoading, $interval, GoogleMaps, PlaceManager) {
GoogleMaps.init();
// Download places as JSON
$scope.download = function () {
PlaceManager.getPlaces();
}
// Reload the page
$scope.reloadPage = function () {
$window.location.reload();
}
// Check four counts with interval
$interval(function () {
$scope.placeCount = PlaceManager.getCount();
}, 100);
// Alert at startup
$ionicPopup.alert({
title: 'User Guide',
template: 'Draw a rectangle and wait until all places are gathered. <br> You can download places as JSON or check on the map.'
});
});<file_sep>/README.md
Google Maps Place Fetcher
=========================
* This application gets all the places (within API limits) in a rectangle that you draw on the map.
* The main approach is to divide the rectangle into **n x n** partitions and search each of them with [Place Radar Search](https://developers.google.com/maps/documentation/javascript/examples/place-radar-search), as sketched below.
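A minimal sketch of the tiling idea (generalising the 2 x 2 version in `www/js/app.js`; `searchTile` stands in for the actual `PlacesService` search callback):

```js
// Split a google.maps.LatLngBounds into n x n tiles and run a search on each one.
function searchInTiles(bounds, n, searchTile) {
  var sw = bounds.getSouthWest();
  var ne = bounds.getNorthEast();
  var tileWidth = (ne.lng() - sw.lng()) / n;
  var tileHeight = (ne.lat() - sw.lat()) / n;
  for (var x = 0; x < n; x++) {
    for (var y = 0; y < n; y++) {
      var south = sw.lat() + tileHeight * x;
      var west = sw.lng() + tileWidth * y;
      var tile = new google.maps.LatLngBounds(
        new google.maps.LatLng(south, west),
        new google.maps.LatLng(south + tileHeight, west + tileWidth)
      );
      // e.g. places.radarSearch({ bounds: tile, types: ['establishment'] }, handleResults)
      searchTile(tile);
    }
  }
}
```

Each tile stays small enough that the per-request result cap is less likely to hide places inside a large selection.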
## Build & Start
### Requirements
* [Node.js](http://nodejs.org/)
### Installation
#### Local
```sh
npm install
npm start
```
Your app should now be running on [localhost:5000](http://localhost:5000/).
#### Heroku
Alternatively, you can deploy to Heroku using this button:
[](https://heroku.com/deploy?template=https://github.com/onuryilmaz/google-maps-fetcher)
| 9846842a90ec308e0652bc2e6bbc6e741a72ac5a | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | onuryilmaz/google-maps-place-fetcher | 3d6cf81880c4de4f6066a850424cfc407b1b2caf | d0c5b730cffbb5b06a80db52c69af13ea84ac982 | |
refs/heads/main | <file_sep>"use strict";
let map;
//**var Bogota =document.getElementById('Bogota');**//
//** Bogota.addEventListener('Click',function ('Bogota') {})**//
function initMap() {
//Se imprime el json
$("#btnFetch").click(function(){
})
$("#ciudades").change(function () {
var optionSelected = $(this).find("option:selected");
var valueSelected = optionSelected.val(); //Nombre de ciudad
console.log(valueSelected)
        // load the data
loadCity(valueSelected)
loadWeather(valueSelected)
loadCountryData(valueSelected)
loadCountryCovid(valueSelected)
});
map = new google.maps.Map(document.getElementById("map"), {
center: { lat: 30.06263 , lng: 31.24967 },
zoom: 5,
});
function loadWeather(nameCity){
fetch('https://api.openweathermap.org/data/2.5/weather?q='+nameCity+'&appid=becd8278c865cdb08238db3bdf46026b')
.then(response => response.json())
.then(data => {
$("#state").text(data['weather'][0]['description'])
$("#temp").text(data['main']['temp']+"°C")
$("#max_temp").text(data['main']['temp_max']+"°F")
$("#min_temp").text(data['main']['temp_min']+"°F")
$("#country").text(data['sys']['country'])
$("#jsonWheater").val(JSON.stringify(data))
})
.catch(err => alert("data: "+data))
}
function loadCountryData(nameCity){
fetch('https://restcountries.eu/rest/v2/capital/'+nameCity)
.then(response => response.json())
.then(data => {
console.log(JSON.stringify(data))
var indivData = data[0];
                // set the country flag image
document.getElementById('flagImg').src=indivData['flag']
$("#countryName").text(indivData['name'])
$("#capital").text(indivData['capital'])
$("#dial_code").text("+"+indivData['callingCodes'][0])
$("#population").text(indivData['population'])
$("#currency").text(indivData['currencies'][0]['name'] + " ("+indivData['currencies'][0]['code']+")")
$("#region").text(indivData['region'])
$("#sub_region").text(indivData['subregion'])
})
.catch(err => alert("error: "+err))
}
//load city
function loadCity(nameCity) {
console.log("carga ciudad")
let locationCoord;
switch (nameCity) {
case "Bogota":locationCoord={ lat: 4.570868, lng :-74.297333}
break;
case "Asuncion": locationCoord= {lat: -23.442503 , lng:-58.443832 }
break;
case "Brasilia":locationCoord ={ lat: -15.7801, lng: -47.9292}
break;
case "Buenos Aires":locationCoord={lat: -34.61315, lng: -58.37723}
break;
case "Caracas":locationCoord= {lat: 10.48801, lng: -66.87919}
break;
case "Georgetown":locationCoord={lat: 6.80448, lng: -58.15527}
break;
case "Lima":locationCoord={lat: -12.04318, lng: -77.02824}
break;
case "Montevideo":locationCoord={lat: -34.90328,lng: -56.18816}
break;
case "Paramaribo":locationCoord={lat:5.86638 , lng:-55.16682}
break;
case "Quito":locationCoord={lat:-0.22985 , lng: -78.52495}
break;
case "<NAME>":locationCoord={lat:-33.4569400, lng: -70.6482700}
break;
case "Sucre":locationCoord={lat: -19.03332, lng: -65.26274}
break;
}
map = new google.maps.Map( document.getElementById("map"),{
center: locationCoord,
zoom:10,
})
    }

    // Covid statistics lookup (declared at initMap level so the select-change handler above can reach it)
    function loadCountryCovid(countryName) {
        fetch('https://covid-193.p.rapidapi.com/statistics?country='+countryName, {
            "method": "GET",
            "headers": {
                "x-rapidapi-key": "<KEY>",
                "x-rapidapi-host": "covid-193.p.rapidapi.com"
            }
        })
        .then(response => response.json())
        .then(data => {
            console.log(JSON.stringify(data))
            // the covid-193 API wraps its results in a `response` array,
            // and the figures live in the nested `cases` object
            var dataCov = data['response'][0];
            $("#countryName").text(dataCov['country'])
            // NOTE: separate target elements are presumably intended here; the original reused #cases for every figure
            $("#cases").text(dataCov['cases']['new'])
            $("#cases").text(dataCov['cases']['active'])
            $("#cases").text(dataCov['cases']['critical'])
            $("#cases").text(dataCov['cases']['recovered'])
            $("#cases").text(dataCov['cases']['total'])
        })
        .catch(err => alert("error: " + err))
    }
} | 816a1634ac0a1c6109c3e1fa0e4587ea53f7b3a0 | [
"JavaScript"
] | 1 | JavaScript | LinaNiampira/apis | aa4ffb7cad9ac8070d673785943983bb8b3a38cf | 84d1a22df9f380c5a5059a065adabbdd6f482fec | |
refs/heads/master | <repo_name>gabmaxs/ionic-firebase<file_sep>/src/app/firebase.credentials.ts
export const FIREBASE_CONFIG = {
apiKey: "<KEY>",
authDomain: "cursoudemy-82ac8.firebaseapp.com",
databaseURL: "https://cursoudemy-82ac8.firebaseio.com",
projectId: "cursoudemy-82ac8",
storageBucket: "cursoudemy-82ac8.appspot.com",
messagingSenderId: "568343983243"
}; | 3e2519d860cf5bc1a80ff9a6347e59c0b30967a2 | [
"TypeScript"
] | 1 | TypeScript | gabmaxs/ionic-firebase | b66bd7ae2cdff38bb2fd311d006d06d824bb54a1 | c9dd682cc5249a2eb3d8f1cd58688a26bb77f1f0 | |
refs/heads/master | <file_sep># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='ShopOwner',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('phoneIp', models.CharField(max_length=50, verbose_name=b'\xe6\x89\x8b\xe6\x9c\xbaIP')),
('phoneMac', models.CharField(max_length=50, verbose_name=b'\xe6\x89\x8b\xe6\x9c\xbaMac')),
('wifiSsid', models.CharField(max_length=50, verbose_name=b'wifiSSID')),
('wifiMac', models.CharField(max_length=50, verbose_name=b'wifiMac')),
('register_time', models.DateTimeField(verbose_name=b'\xe7\xad\xbe\xe5\x88\xb0\xe6\x97\xb6\xe9\x97\xb4')),
],
),
]
<file_sep>from django.shortcuts import render
import json
from django.http import HttpResponse
# Create your views here.
def bdWifi(request):
username = request.GET.get('username', False)
password = request.GET.get('password', False)
if username=='sdy' and password=='<PASSWORD>':
serverMsg="login success"
jStr = {'serverMsg': serverMsg,'flag':'True'}
else:
serverMsg="login defied"
jStr = {'serverMsg': serverMsg,'flag':'True'}
return HttpResponse(json.JSONEncoder().encode(jStr))
<file_sep># -*- coding:utf-8 -*-
from django.shortcuts import render
# from urllib import urlencode
import json
from django.http import HttpResponse
# import logging
# from freeradius.models import Radcheck, AuthUser, SystemConfig
# from django.utils import timezone
# from accounting.models import *
from wifiSign.models import *
import time
from datetime import datetime,timedelta
# from datetime import datetime
# Create your views here.
def wifiSign(request):
netName = request.GET.get('netName', False)
netMac = request.GET.get('netMac', False)
localMac = request.GET.get('localMac', False)
localIp = request.GET.get('localIp', False)
curDate = request.GET.get('curDate', False)
# from datetime import datetime,timedelta
# now = datetime.now()
# yestoday = now - timedelta(days=1)
# tommorow = now + timedelta(days=1)
# or netMac != 'xxx'MERCURY_701 netName != 'SuccessfulTeam' or
# /wifi_sign?netName="SuccessfulTeam"&netMac=ca:3a:35:4b:03:69&localMac=9c:99:a0:22:11:cb&localIp=192.168.0.111
# /wifi_sign?netName="SuccessfulTeam"&netMac=ca:3a:35:4b:03:69&localMac=08:57:00:c0:1c:c5&localIp=192.168.0.100
if ( netMac == 'ca:3a:35:4b:03:69' or netMac =='c8:3a:35:4b:03:68'):
dateNow = datetime.now()
print(dateNow)
startTime=dateNow.date()
# startDate=datetime.datetime.strptime(time.gmtime(time.time()),'%Y-%m-%d')
endTime = startTime + timedelta(days=1)
print(endTime)
signHistory = SignHistory.objects.filter(phoneMac=localMac, register_time__lte=endTime,
register_time__gte=startTime)
if signHistory.count()>=1:
serverMsg="今天已经签到,不用重复签到"
jstr = {'serverMsg': serverMsg}
return HttpResponse(json.JSONEncoder().encode(jstr))
if signHistory.count()==0:
newSignHistory=SignHistory()
newSignHistory.phoneMac=localMac
newSignHistory.phoneIp=localIp
newSignHistory.wifiMac=netMac
newSignHistory.wifiSsid=netName
newSignHistory.register_time=dateNow
newSignHistory.save()
serverMsg="本次签到成功"
jstr = {'serverMsg': serverMsg}
return HttpResponse(json.JSONEncoder().encode(jstr))
else:
serverMsg = "请链接公司wifi"
jstr = {'serverMsg': serverMsg}
return HttpResponse(json.JSONEncoder().encode(jstr))
<file_sep># -*- coding:utf-8 -*-
from django.db import models
# Create your models here.
class SignHistory(models.Model):
    phoneIp = models.CharField('phone IP', max_length=50)
    phoneMac = models.CharField('phone MAC', max_length=50, blank=False, null=False)
    wifiSsid = models.CharField('wifiSSID', max_length=50)
    wifiMac = models.CharField('wifiMac', max_length=50)
    register_time = models.DateTimeField('sign-in time')
# employee_name=models.CharField('employee_name',max_length=50)
# employee_id=models.CharField('employee_id',max_length=20)
# class Meta:
# verbose_name='手机MAC'
def __str__(self):
return self.phoneMac
def __unicode__(self):
return self.phoneMac | 2daa365bb896f9b42b39b9cf199695323f48c820 | [
"Python"
] | 4 | Python | FrankHitman/bdyun | 4b11b0cf1b2497eb15d0f63380afe2a1cec1bf08 | 7ea7d482d3630bc9ccd84a4918c1d62fea7f2d0f | |
refs/heads/master | <repo_name>AkilakumariA/React-17-project<file_sep>/src/page/Tensorflow.js
import React, { useRef } from 'react';
import useTFClassify from '../utils/hooks/useTFClassify';
export default function Tensorflow() {
const imageRef = useRef();
const { predict, predictions, isLoading } = useTFClassify();
return (
<div className='flex justify-center'>
<div className='w-1/3'>
<h1 className='text-center'>TensorFlow Example</h1>
<img src='https://images.unsplash.com/photo-1587300003388-59208cc962cb?crop=entropy&cs=tinysrgb&fit=max&fm=jpg&ixid=MnwyNDI1MTZ8MHwxfHNlYXJjaHw0fHxkb2d8ZW58MHx8fHwxNjI1NTgzNDYy&ixlib=rb-1.2.1&q=80&w=1080' alt='' width='400' crossOrigin='anonymous' ref={imageRef} />
<div className="text-center my-5">
{predictions.length > 0 &&
predictions.map((prediction) => (
                            <div className='flex justify-between text-sm' key={prediction.className}>
<p>{prediction.className}</p>
<p>{Math.floor(prediction.probability * 100)}%</p>
</div>
))}
<button className="p-2 rounded bg-gray-900 text-white w-64" onClick={() => predict(imageRef.current)}>
{isLoading && '⏳'}
{!isLoading && 'Predict Result'}
</button>
</div>
</div>
</div>
)
}
<file_sep>/src/components/Header.js
import React, { useContext } from 'react';
import { NavLink, useHistory } from 'react-router-dom';
import firebase from '../congif/firebase';
import AppContext from '../store/AppContext';
export default function Header() {
// const [isLoggedIn, setisLoggedIn] = useState(false);
const [isLoggedIn] = useContext(AppContext);
const history = useHistory();
// console.log(user);
function logout() {
firebase.auth().signOut().then(res => {
// setisLoggedIn(false);
history.replace('/login');
}).catch(e => {
console.log(e.response.data);
})
}
return (
<nav className='py-5 bg-gray-900 text-white flex justify-between'>
<ul className='flex justify-between px-10'>
<li className='mr-5'>
<NavLink to="/" exact className='hover:underline hover:text-blue-200' activeClassName='underline text-blue-200'>Home</NavLink>
</li>
<li className='mr-5'>
<NavLink to="/gallery" className='hover:underline hover:text-blue-200' activeClassName='underline text-blue-200'>Gallery</NavLink>
</li>
<li className='mr-5'>
<NavLink to="/tensorflow" className='hover:underline hover:text-blue-200' activeClassName='underline text-blue-200'>Tensorflow</NavLink>
</li>
</ul>
<ul className='flex justify-between px-10'>
<li>
{isLoggedIn ? (<button className='hover:underline hover:text-blue-200' onClick={logout}>Logout</button>) : (<NavLink to="/login" className='hover:underline hover:text-blue-200'>Login</NavLink>)}
</li>
{!isLoggedIn &&
<li className='ml-5'>
<NavLink to="/signup" className='hover:underline hover:text-blue-200'>Sign Up</NavLink>
</li>}
</ul>
</nav >
)
}
<file_sep>/src/page/Gallery.js
import React from 'react'
import Images from '../components/Images'
export default function Gallery() {
return (
<section className='flex justify-center text-center'>
{/* {console.log('re-render')} */}
<div className='w-10/12'>
<div className='text-center'>
{/* <div className='my-4'>{title}</div> */}
<Images />
</div>
{/* <div>
<button onClick={handleClick} className='my-2 p-1 bg-blue-700 text-white'>Toggle image</button>
</div>
{isShowing ? <Images /> : null} */}
</div>
</section>
)
}
<file_sep>/src/page/Login.js
import React, { useState } from 'react'
import firebase from '../congif/firebase';
import "firebase/auth";
import { useHistory } from 'react-router-dom';
export default function Login() {
const [isLoading, setisLoading] = useState(false);
const [error, seterror] = useState('');
const [form, setform] = useState({ email: "", password: "" });
// const [isLoggedIn, setisLoggedIn] = useState(false);
const history = useHistory();
function handleForm(e) {
if (isLoading) return;
setisLoading(true);
e.preventDefault();
firebase.auth().signInWithEmailAndPassword(form.email, form.password).then(res => {
setisLoading(false);
seterror('');
history.replace('/');
// setisLoggedIn(true)
}).catch((error) => {
setisLoading(false)
seterror(error.message);
});
}
function handleInput(e) {
setform({ ...form, [e.target.name]: e.target.value })
}
// if (isLoggedIn) return <Redirect to='/' />;
return (
<div className='flex h-screen bg-gray-200'>
<div className='bg-white flex flex-wrap justify-center m-auto rounded-lg shadow-lg w-1/3'>
<form className='m-5 w-10/12' onSubmit={handleForm}>
{error !== '' && <p>{error}</p>}
<h1 className='w-full text-4xl tracking-widest text-center my-6'>Login</h1>
<div className='w-full my-6'>
<input type='email' className='border-b-2 border-dashed border-gray-300 py-1.5 text-black w-full' placeholder='Email or Username' name='email' value={form.email} onChange={handleInput} />
</div>
<div className='w-full my-6'>
<input type='password' className='border-b-2 border-dashed border-gray-300 py-1.5 text-black w-full' placeholder='<PASSWORD>' name='password' value={form.password} onChange={handleInput} />
</div>
<div className='w-full my-10'>
<button type='submit' className='bg-red-300 p-2 rounded shadow text-white w-full'>
{isLoading ? (<i className='fas fa-circle-notch fa-spin '></i>) : ('Login')}
</button>
</div>
</form>
</div>
</div>
)
}
<file_sep>/src/components/Image.js
import React, { useRef, useState } from 'react';
import PropTypes from 'prop-types';
import useTFClassify from '../utils/hooks/useTFClassify';
function Image({ image, index, handleRemove, show }) {
const [isHovering, setIsHovering] = useState(false);
const { predict, predictions, setPredictions, isLoading } = useTFClassify();
const imageRef = useRef();
return (
<div className='relative' onMouseEnter={() => setIsHovering(true)} onMouseLeave={() => setIsHovering(false)}>
{(predictions.length > 0 || isLoading) &&
                (<span className="absolute bg-gray-800 text-white rounded-lg shadow px-2 left-0 ml-5" onClick={() => setPredictions([])}>
{isLoading && <p>Fetching results...</p>}
{predictions.map((prediction) => (
                        <div key={prediction.className} className="flex justify-between text-sm">
<p>{prediction.className}</p>
<p>{Math.floor(prediction.probability * 100)}%</p>
</div>
))}
</span>)
}
<i className={`fas fa-times absolute right-0 cursor-pointer opacity-25 hover:opacity-100 ${isHovering ? '' : 'hidden'}`} onClick={() => handleRemove(index)}></i>
<i className={`fas fa-search absolute left-0 cursor-pointer opacity-25 hover:opacity-100 ${isHovering ? '' : 'hidden'}`} onClick={() => predict(imageRef.current)}></i>
<img ref={imageRef} onClick={show} src={image} alt='nature' width='100%' height='auto' crossOrigin='anonymous' />
</div>
);
}
// const types = {
// function(props, propName) {
// if (typeof props[propName] !== 'function') {
// return new Error(`'${propName}' must be a function but you have provided ${typeof props[propName]}`);
// }
// }, number(props, propName) {
// if (typeof props[propName] !== 'number') {
// return new Error(`'${propName}' must be a number but you have provided ${typeof props[propName]}`);
// }
// }
// }
Image.propTypes = {
// show: types.function,
// index: types.number,
show: PropTypes.func,
index: PropTypes.number,
image: PropTypes.string,
handleRemove: PropTypes.func
}
export default Image;
<file_sep>/src/page/SignUp.js
import React from 'react'
import firebase from '../congif/firebase';
import "firebase/auth";
import { useHistory } from 'react-router-dom';
import { Formik, Form, Field, ErrorMessage } from 'formik';
import * as Yup from 'yup';
export default function SignUp() {
// const formik = useFormik({
// initialValues: {
// email: '',
// password: ''
// },
// // validate: value => {
// // const errors = {};
// // if (!value.email) {
// // errors.email = "Email field is Required.";
// // } else if (!/^[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,4}$/i.test(value.email)) {
// // errors.email = 'Email is Invalid ';
// // }
// // if (!value.password) {
// // errors.password = "Password field is Required.";
// // } else if (value.password.length <= 8) {
// // errors.password = "<PASSWORD>.";
// // } else if (!/^(?=.*\d)(?=.*[a-z])(?=.*[A-Z]).{8,32}$/i.test(value.password)) {
// // errors.password = '<PASSWORD>';
// // }
// // return errors;
// // },
// validationSchema: Yup.object({
// email: Yup.string().required('Email field is Required').email(),
// password: Yup.string().required('Password field is Required.').min(8),
// }),
// onSubmit: value => {
// console.log(value);
// }
// });
const history = useHistory();
return (
<Formik
initialValues={{
email: '',
password: ''
}}
validationSchema={Yup.object({
email: Yup.string().required('Email field is Required').email(),
password: Yup.string().required('Password field is Required.').min(8),
})}
onSubmit={(value, formikBag) => {
firebase.auth().createUserWithEmailAndPassword(value.email, value.password).then(
res => {
history.replace('/');
}).catch(e => {
formikBag.setFieldError('email', e.message);
});
}}
>
{formik => (
<div className='flex h-screen bg-gray-200'>
<div className='bg-white flex flex-wrap justify-center m-auto rounded-lg shadow-lg w-1/3'>
<Form className='m-5 w-10/12'>
<h1 className='w-full text-4xl tracking-widest text-center my-6'>Sign Up Here</h1>
<div className='w-full my-6'>
<Field name="email" type="email" className='border-b-2 border-dashed border-gray-300 py-1.5 text-black w-full' placeholder='Email or Username' />
<ErrorMessage name="email" />
</div>
<div className='w-full my-6'>
                            <Field name="password" type="password" className='border-b-2 border-dashed border-gray-300 py-1.5 text-black w-full' placeholder='Password' />
<ErrorMessage name="password" />
</div>
<div className='w-full my-10'>
<button type='submit' className='bg-red-300 p-2 rounded shadow text-white w-full'>Sign Up
{/* {isLoading ? (<i className='fas fa-circle-notch fa-spin '></i>) : ('Sign Up')} */}
</button>
</div>
</Form>
</div>
</div>
)}
</Formik >
)
}
<file_sep>/src/congif/firebase.js
import firebase from "firebase/app";
// Your web app's Firebase configuration
var firebaseConfig = {
apiKey: "<KEY>",
authDomain: "in-depth-react-1-45e3e.firebaseapp.com",
projectId: "in-depth-react-1-45e3e",
storageBucket: "in-depth-react-1-45e3e.appspot.com",
messagingSenderId: "857943629592",
appId: "1:857943629592:web:9e6f20f148cb840b47886d"
};
// Initialize Firebase
firebase.initializeApp(firebaseConfig);
export default firebase; | 65e7bc6d0aea18b46abaf045f57cf73242276a19 | [
"JavaScript"
] | 7 | JavaScript | AkilakumariA/React-17-project | 948d42ba7b6190755604499626bae9ef097c425e | e2984ff6c064e419cc52d85fa58eccb29b6960ce | |
refs/heads/master | <file_sep>import tkinter
from tkinter import messagebox, simpledialog
root = tkinter.Tk()
root.title("Minesweeper")
frame = tkinter.Frame(root)
frame.pack()
tile = tkinter.PhotoImage(file = "tile_plain.png")
button = tkinter.Button(frame, image = tile)
def hello(event):
messagebox.showinfo("Game Over", "You Win!")
button.bind('<Button-1>', hello)
button.grid(row = 0, column = 0)
label1 = tkinter.Label(frame, text = "Mines: 10")
label1.grid(row = 1, column = 0, columnspan = 5)
label2 = tkinter.Label(frame, text = "Flags: 0")
label2.grid(row = 1, column = 5, columnspan = 5)
simpledialog.askstring("Input", "Enter Your Name")
root.mainloop()<file_sep>""" QF205 G1 Team 6 - MineSweeper Application """
from random import randint
from tkinter import messagebox, simpledialog
import tkinter
import time
import math
import googlesheet as gs
class Minesweeper:
def __init__(self, root):
# import images
self.tile_plain = tkinter.PhotoImage(file = "images/tile_plain.png")
self.tile_clicked = tkinter.PhotoImage(file = "images/tile_clicked.png")
self.tile_mine = tkinter.PhotoImage(file = "images/tile_mine.png")
self.tile_flag = tkinter.PhotoImage(file = "images/tile_flag.png")
self.tile_wrong = tkinter.PhotoImage(file = "images/tile_wrong.png")
self.tile_no = {}
#for tile numbers 1-8
for x in range(1, 9):
self.tile_no[x]=tkinter.PhotoImage(file = "images/tile_"+str(x)+".png")
# set up frame
frame = tkinter.Frame(root)
frame.pack()
self.gridsize = 10
# flag and clicked tile variables
self.flags = 0
self.clicked = 0
# timer counter
self.timerCounter = 0
# gameover flag
self.gameoverstatus = 0
# dictionary of buttons
self.buttons = dict()
# mine and numberofmines variables
self.mines = 0
self.numberofmines = 10
# set default image
default = self.tile_plain
for y in range(self.gridsize):
for x in range(self.gridsize):
# 0 = Button widget
# 1 = if a mine y/n (1/0)
# 2 = state (0 = unclicked, 1 = clicked, 2 = flagged)
# 3 = [y, x] coordinates in the grid
# 4 = nearby mines, 0 by default, calculated after placement in grid
self.buttons[y, x] = [ tkinter.Button(frame, image = default),
0,
0,
[y, x],
0 ]
#if left clicked, run lclicked_wrapper function,
#else right clicked run rclicked_wrapper function
self.buttons[y, x][0].bind('<Button-1>',
self.lclicked_wrapper(self.buttons[y,x]))
self.buttons[y, x][0].bind('<Button-3>',
self.rclicked_wrapper(self.buttons[y,x]))
# lay buttons in grid
for key in self.buttons:
self.buttons[key][0].grid(row = self.buttons[key][3][0],
column = self.buttons[key][3][1])
# create label for number of mines in game
self.label2 = tkinter.Label(frame, text = "Mines: "+str(self.numberofmines))
self.label2.grid(row = self.gridsize+1,
column = 0,
columnspan = self.gridsize//3)
# create label for number of flags user set
self.label3 = tkinter.Label(frame, text = "Flags: "+str(self.flags))
self.label3.grid(row = self.gridsize+1,
column = self.gridsize//3,
columnspan = self.gridsize//3)
# create label for timer
self.label4 = tkinter.Label(frame, text = "Time: "+str(self.timerCounter))
self.label4.grid(row = self.gridsize+1,
column = round(self.gridsize//1.5),
columnspan = self.gridsize//3)
# start timer
self.update_time()
def lclicked_wrapper(self, button_data):
return lambda Button: self.lclicked(button_data)
def rclicked_wrapper(self, button_data):
return lambda Button: self.rclicked(button_data)
def lclicked(self, button_data):
# initialize 2D grid of size 10 by 10
grid = [[0] * self.gridsize] * self.gridsize
# this is only ran once, on the very first click
if self.mines == 0:
# generate the grid with mines marked 'X'
# and number of mines nearby if not a mine
grid = self.setupgrid(button_data[3])
for y in range(self.gridsize):
for x in range(self.gridsize):
# if mine, set mine to be y
if grid[y][x] == 'X':
self.buttons[y,x][1] = 1
# else, set number of mines nearby
else:
self.buttons[y,x][4] = grid[y][x]
self.mines=self.numberofmines
# if user clicked a mine
if button_data[1] == 1:
# show all mines and check for flags
for key in self.buttons:
# if not a mine and flagged, show wrong flag image
if self.buttons[key][1] != 1 and self.buttons[key][2] == 2:
self.buttons[key][0].config(image = self.tile_wrong)
# if mine and not flagged, show mine image
if self.buttons[key][1] == 1 and self.buttons[key][2] != 2:
self.buttons[key][0].config(image = self.tile_mine)
# end game
self.gameover()
else:
# if neighbors have no mine, open up tiles until numbers or edges
if button_data[4] == 0:
self.showcells(grid,button_data)
# show number of nearby mines
else:
button_data[2] = 1
self.clicked += 1
button_data[0].config(image = self.tile_no[button_data[4]])
# if number of left clicks equal to total tiles - total mines in game
if self.clicked == self.gridsize*self.gridsize - self.numberofmines:
self.victory()
def rclicked(self, button_data):
# if tile not clicked, flag
if button_data[2] == 0:
button_data[0].config(image = self.tile_flag)
button_data[2] = 2
button_data[0].unbind('<Button-1>')
self.flags += 1
self.update_flags()
# if already flagged, unflag
elif button_data[2] == 2:
button_data[0].config(image = self.tile_plain)
button_data[2] = 0
button_data[0].bind('<Button-1>', self.lclicked_wrapper(button_data))
self.flags -= 1
self.update_flags()
def update_time(self):
if self.gameoverstatus == 0:
global root
            self.timerCounter += 1
self.label4.config(text = "Time: "+str(self.timerCounter))
root.after(1000, self.update_time)
def update_flags(self):
self.label3.config(text = "Flags: "+str(self.flags))
def gameover(self):
global root
self.gameoverstatus = 1
messagebox.showinfo("Game Over", "You Lose!")
root.destroy()
def victory(self):
global root
self.gameoverstatus = 1
name = simpledialog.askstring("Input", "Enter Your Name")
messagebox.showinfo("Game Over", "You Win!")
self.writeToFile(name)
root.destroy()
def writeToFile(self, username):
# username is case insensitive
username = username.lower()
newScore = self.calculateScore()
#write to googlesheet
gs.write_to_googleSpreadsheet(username, newScore)
#only calculate score when player wins
def calculateScore(self):
# first 1 min, decrease in score will decrease with time
if self.timerCounter <= 60:
rawScore = (300 ** 2 - 60 ** 2) * 60 / self.timerCounter
return math.ceil(rawScore) #math function
# next 4 min, decrease in score will increase with time
elif self.timerCounter <= 300:
rawScore = 300 ** 2 - self.timerCounter ** 2
return rawScore
# winning after 5 min, 0 score
else:
return 0
"""
This function generates a numbered grid by
1) locating positions of mines through getmines()
2) marking these mines in an empty 2D grid
3) passing this marked grid to getnumbers(),
which then locates the neighbors of each cell using getneighbors()
and count the number of mines in these neighbors
and return the numbers on the marked grid
"""
def setupgrid(self, start):
# emptygrid = [[0 for j in range(self.gridsize)] for i in range(self.gridsize)]
emptygrid = []
for i in range(self.gridsize):
row = []
for j in range(self.gridsize):
row.append(0)
emptygrid.append(row)
mines = self.getmines(emptygrid, start)
for i, j in mines:
emptygrid[i][j] = 'X'
grid = self.getnumbers(emptygrid)
return grid
def getrandomcell(self):
a = randint(0, self.gridsize - 1)
b = randint(0, self.gridsize - 1)
return (a, b)
def getneighbors(self, grid, rowno, colno):
neighbors = []
for i in range(-1, 2):
for j in range(-1, 2):
if i == 0 and j == 0:
continue
if (-1 < (rowno + i) < len(grid)
and -1 < (colno + j) < len(grid)):
neighbors.append((rowno + i, colno + j))
return neighbors
def getmines(self, grid, start):
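        # Mines are placed randomly, but never on the first-clicked cell or any of its neighbors.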
mines = []
neighbors = self.getneighbors(grid, *start)
for i in range(self.numberofmines):
cell = self.getrandomcell()
while list(cell) == start or cell in mines or cell in neighbors:
cell = self.getrandomcell()
mines += [cell]
return mines
def getnumbers(self, grid):
for rowno, row in enumerate(grid):
for colno, cell in enumerate(row):
if cell != 'X':
# get the values of the neighbors
values = [grid[r][c]
for r, c in self.getneighbors(grid, rowno, colno)]
# count how many neighbors are mines
grid[rowno][colno] = values.count('X')
return grid
def showcells(self, grid, button_data):
# exit function if the cell was already clicked
if button_data[2]==1:
return
button_data[2]=1
self.clicked += 1
# show current cell
if button_data[4]==0:
button_data[0].config(image = self.tile_clicked)
else:
button_data[0].config(image = self.tile_no[button_data[4]])
# get the neighbors if the cell has 0 nearby mines
if button_data[4] == 0:
for r, c in self.getneighbors(grid, *button_data[3]):
# repeat function for each neighbor that doesn't have a flag
if self.buttons[r,c][2] != 2:
self.showcells(grid, self.buttons[r,c])
def main():
global root
# create Tk widget
root = tkinter.Tk()
# set program title
root.title("Minesweeper")
# create game instance
minesweeper = Minesweeper(root)
# run event loop
root.mainloop()
if __name__ == "__main__":
main()<file_sep>time_now = time.localtime().tm_hour
if 7 <= time_now <= 11:
print("Good Morning")
elif 12 <= time_now <= 19:
print("Good Afternoon")
else:
print("Good Night")
| 921f3e1b462ce3d13eb3c36bb97f0812b566ea8b | [
"Python"
] | 3 | Python | qiminwang/minesweeper | fae4ab722072ae28fd6e0b62c882a967ff67aa3a | 43da42170257f915ee00719e7be07ac366717749 | |
refs/heads/master | <repo_name>OnTheGoSystems/wpml-link-manager<file_sep>/tests/tests/classes/test-wpml-link-manager-helper.php
<?php
class Test_WPML_Link_Manager_Helper extends WPML_UnitTestCase {
private $lm_helper;
public function setUp() {
parent::setUp();
$package_type = 'Link Manager';
$this->lm_helper = new WPML_Link_Manager_Helper( $package_type );
}
public function test_get_package_type() {
$package_type = 'Link Manager';
$this->lm_helper = new WPML_Link_Manager_Helper( $package_type );
$this->assertEquals( $this->lm_helper->get_package_type(), $package_type);
}
public function test_get_package() {
$link = new stdClass();
$link->link_id = 3;
$link->link_name = 'test link';
$link->link_url = 'http://example.com';
$package = $this->lm_helper->get_package( $link );
$this->assertEquals( 'link', substr( $package['kind'], -4, 4 ) );
$cat = new stdClass();
$cat->term_id = 6;
$cat->name = 'test cat';
$package = $this->lm_helper->get_package( $cat, 'category' );
$this->assertEquals( 'category', substr( $package['kind'], -8, 8 ) );
$package = $this->lm_helper->get_package( 19 );
$this->assertEquals( 'link', substr( $package['kind'], -4, 4 ) );
$package = $this->lm_helper->get_package( 19, 'category' );
$this->assertEquals( 'category', substr( $package['kind'], -8, 8 ) );
}
public function test_get_link_string_name() {
$name = 'name';
$link = new stdClass();
$link->link_id = 12;
$str_name = $this->lm_helper->get_link_string_name( $name, $link );
$this->assertEquals( 'link-' . $link->link_id . '-' . $name, $str_name );
}
public function test_get_category_string_name() {
$name = 'name';
$cat = new stdClass();
$cat->term_id = 15;
$str_name = $this->lm_helper->get_category_string_name( $name, $cat );
$this->assertEquals( 'link-category-' . $cat->term_id . '-' . $name, $str_name );
}
}<file_sep>/tests/bootstrap.php
<?php
if ( ! defined( 'WPML_CORE_PATH' ) ) {
define( 'WPML_CORE_PATH', dirname( __FILE__ ) . '/../../sitepress-multilingual-cms' );
}
if ( ! defined( 'WPML_CORE_ST_PATH' ) ) {
define( 'WPML_CORE_ST_PATH', dirname( __FILE__ ) . '/../../wpml-string-translation' );
}
if ( ! defined( 'WPML_CORE_TM_PATH' ) ) {
define( 'WPML_CORE_TM_PATH', dirname( __FILE__ ) . '/../../wpml-translation-management' );
}
define( 'WPML_ST_TEST_DIR', dirname( __FILE__ ) . '/../../wpml-string-translation/tests' );
$_tests_dir = isset( $_ENV['WP_TEST_DIR'] ) ? $_ENV['WP_TEST_DIR'] : 'wordpress-tests-lib';
require_once $_tests_dir . '/includes/functions.php';
function _manually_load_plugin() {
require WPML_CORE_PATH . '/tests/util/functions.php';
require WPML_CORE_PATH . '/sitepress.php';
require WPML_CORE_ST_PATH . '/plugin.php';
require WPML_CORE_TM_PATH . '/plugin.php';
require dirname( __FILE__ ) . '/../plugin.php';
}
tests_add_filter( 'muplugins_loaded', '_manually_load_plugin' );
function _make_wpml_setup_complete() {
icl_set_setting( 'setup_complete', 1, true );
}
tests_add_filter( 'wpml_loaded', '_make_wpml_setup_complete' );
// Make sure Link Manager plugin is on
function _enable_link_manager() {
return true;
}
tests_add_filter( 'pre_option_link_manager_enabled', '_enable_link_manager', 1000 );
require $_tests_dir . '/includes/bootstrap.php';
require WPML_CORE_PATH . '/tests/util/wpml-unittestcase.class.php';
//require WPML_CORE_ST_PATH . '/tests/util/wpml-st-unittestcase.class.php';<file_sep>/readme.md
# WPML Link Manager
> Makes the plugin Link Manager compatible with WPML translating features
## Minimum requirements
* **Sitepress Multilingual CMS** installed and activated
* **WPML Translation management** installed and activated
* **WPML String translation** installed and activated
* **Link Manager plugin** installed and activated (available in the [WordPress official repository](https://wordpress.org/plugins/link-manager/))
## Installation
1. Download **WPML Link Manager** folder and paste it in `wp-content/plugins`
2. In **Plugins** admin page, activate **WPML Link Manager**
## How it works?
Basically, all link information such as link title, link description and category title are translated as strings.
So all the links (already existing or added) are considered in the default language of the site.
The link related strings are translatable in **WPML > String translation**
1. Create (or edit) a link in **Links** admin page.
2. Go to **WPML > String translation**
3. Select the domain related to this link (`link-manager-link-{link_id}`)
4. Translate the title and description in secondary languages
The same process applies for link categories:
1. Create (or edit) a link category in **Links** admin page.
2. Go to **WPML > String translation**
3. Select the domain related to this link category (`link-manager-category-{category_id}`)
4. Translate the title and description in secondary languages
<file_sep>/tests/tests/classes/test-wpml-link-manager.php
<?php
class Test_WPML_Link_Manager extends WPML_UnitTestCase {
/**
* @var WPML_Link_Manager
*/
private $lm;
/**
* @var WPML_Link_Manager_Helper
*/
private $lm_helper;
public function test_add_or_edit_link_action() {
$this->instantiate_link_manager( 'link.php' );
$args = array(
"link_url" => 'http://test.com',
"link_name" => 'The link name',
"link_description" => 'The link description',
);
$link_id = wp_insert_link( $args );
// check if name & description strings are registered
$link = get_bookmark( $link_id );
$context = $this->get_link_string_context( $link_id );
$name_name = $this->lm_helper->get_link_string_name( 'name', $link );
$name_desc = $this->lm_helper->get_link_string_name( 'description', $link );
$link_has_strings = false;
if ( icl_get_string_id( $link->link_name, $context, $name_name ) && icl_get_string_id( $link->link_description, $context, $name_desc ) ) {
$link_has_strings = true;
}
$this->assertTrue( $link_has_strings );
}
public function test_get_bookmarks_filter() {
global $sitepress, $wpdb, $WPML_String_Translation;
$orig_lang = 'en';
$sec_lang = 'fr';
$name_base = 'The link ';
$desc_base = 'The link description ';
$wpml_installation = new WPML_Installation( $wpdb, $sitepress );
$wpml_installation->set_active_languages( array( $orig_lang, $sec_lang) );
$WPML_String_Translation->init_active_languages();
$this->instantiate_link_manager( 'link.php' );
$sitepress->switch_lang( $orig_lang );
$links = array();
for ($i=0; $i < 3; $i++) {
$args = array(
"link_url" => 'http://test' . $i,
"link_name" => $name_base . $i,
"link_description" => $desc_base . $i,
);
$link_id = wp_insert_link( $args );
$links[ $i ] = get_bookmark( $link_id );
$context = $this->get_link_string_context( $link_id );
$name_st_id = icl_get_string_id( $links[ $i ]->link_name, $context, $this->lm_helper->get_link_string_name( 'name', $links[ $i ] ) );
$desc_st_id = icl_get_string_id( $links[ $i ]->link_description, $context, $this->lm_helper->get_link_string_name( 'description', $links[ $i ] ) );
icl_add_string_translation( $name_st_id, $sec_lang, $name_base . $i . $sec_lang, ICL_TM_COMPLETE );
icl_add_string_translation( $desc_st_id, $sec_lang, $desc_base . $i . $sec_lang, ICL_TM_COMPLETE );
}
$sitepress->switch_lang( $sec_lang );
$this->instantiate_link_manager( 'front' ); // Switch to front end
$translated_links = $this->lm->get_bookmarks_filter( $links );
foreach ( $translated_links as $i => $link ) {
$this->assertEquals( $name_base . $i . $sec_lang, $link->link_name );
$this->assertEquals( $desc_base . $i . $sec_lang, $link->link_description );
}
}
public function test_deleted_link_action() {
$this->instantiate_link_manager( 'link.php' );
$args = array(
"link_url" => 'http://test.com',
"link_name" => 'The link name',
"link_description" => 'The link description',
);
$link_id = wp_insert_link( $args );
$link = get_bookmark( $link_id );
$package = $this->lm_helper->get_package( $link, 'link' );
$this->assertTrue( $this->package_exist_in_DB( $package ) );
$this->lm->deleted_link_action( $link_id );
$this->assertFalse( $this->package_exist_in_DB( $package ) );
}
public function test_plugin_activation_action() {
wpml_link_manager_activation();
$option = get_option( 'wpml-package-translation-refresh-required' );
$this->assertTrue( $option );
}
public function test_get_terms_filter() {
global $sitepress, $wpdb, $WPML_String_Translation;
$orig_lang = 'en';
$sec_lang = 'fr';
$name_base = 'The cat ';
$desc_base = 'The cat description ';
$taxonomy = 'link_category';
$wpml_installation = new WPML_Installation( $wpdb, $sitepress );
$wpml_installation->set_active_languages( array( $orig_lang, $sec_lang) );
$WPML_String_Translation->init_active_languages();
$this->instantiate_link_manager( 'edit-tags.php' );
$sitepress->switch_lang( $orig_lang );
$cats = $cat_ids = array();
for ($i=0; $i < 3; $i++) {
$term = wp_insert_term( $name_base . $i, $taxonomy, array( 'description' => $desc_base . $i ) );
$cats[ $i ] = get_term( $term['term_id'], $taxonomy );
$cat_ids[] = $term['term_id'];
$context = $this->get_category_string_context( $cats[ $i ] );
$name_st_id = icl_get_string_id( $cats[ $i ]->name, $context, $this->lm_helper->get_category_string_name( 'name', $cats[ $i ] ) );
$desc_st_id = icl_get_string_id( $cats[ $i ]->description, $context, $this->lm_helper->get_category_string_name( 'description', $cats[ $i ] ) );
icl_add_string_translation( $name_st_id, $sec_lang, $name_base . $i . $sec_lang, ICL_TM_COMPLETE );
icl_add_string_translation( $desc_st_id, $sec_lang, $desc_base . $i . $sec_lang, ICL_TM_COMPLETE );
}
$sitepress->switch_lang( $sec_lang );
$this->instantiate_link_manager( 'front' );
/** @var WPML_String_Translation $WPML_String_Translation */
global $WPML_String_Translation;
$WPML_String_Translation->clear_string_filter( 'fr' );
$translated_cats = $this->lm->get_terms_filter( $cats, array( $taxonomy ) );
foreach ( $translated_cats as $i => $cat ) {
$this->assertEquals( $name_base . $i . $sec_lang, $cat->name );
$this->assertEquals( $desc_base . $i . $sec_lang, $cat->description );
}
}
public function test_created_or_edited_link_category_action() {
$this->instantiate_link_manager( 'edit-tags.php' );
$cat_name = "My cat";
$taxonomy = 'link_category';
$args = array(
'description' => "My cat description",
);
$term = wp_insert_term( $cat_name, $taxonomy, $args );
$cat = get_term( $term['term_id'], $taxonomy );
// check if name & description strings are registered
$context = $this->get_category_string_context( $cat );
$name_name = $this->lm_helper->get_category_string_name( 'name', $cat );
$name_desc = $this->lm_helper->get_category_string_name( 'description', $cat );
$cat_has_strings = false;
if ( icl_get_string_id( $cat->name, $context, $name_name ) && icl_get_string_id( $cat->description, $context, $name_desc ) ) {
$cat_has_strings = true;
}
$this->assertTrue( $cat_has_strings );
}
public function test_delete_term_action() {
$this->instantiate_link_manager( 'edit-tags.php' );
$cat_name = "My cat";
$taxonomy = 'link_category';
$args = array(
'description' => "My cat description",
);
$term = wp_insert_term( $cat_name, $taxonomy, $args );
$cat = get_term( $term['term_id'], $taxonomy );
$package = $this->lm_helper->get_package( $cat, 'category' );
$this->assertTrue( $this->package_exist_in_DB( $package ) );
$this->lm->delete_term_action( $cat->term_id, null, $taxonomy );
$this->assertFalse( $this->package_exist_in_DB( $package ) );
}
/**
* @param string $pagenow
*/
private function instantiate_link_manager( $pagenow = 'front' ) {
set_current_screen( $pagenow );
$this->reload_package_translation();
$package_type = 'Link Manager';
$this->lm_helper = new WPML_Link_Manager_Helper( $package_type );
$this->lm = new WPML_Link_Manager( $pagenow, $this->lm_helper );
// Fire again plugins_loaded action
$this->lm->plugins_loaded_action();
}
private function reload_package_translation() {
global $WPML_package_translation;
$WPML_package_translation = new WPML_Package_Translation();
$WPML_package_translation->loaded();
}
/**
* @param int $link_id
*
* @return string
*/
private function get_link_string_context( $link_id ) {
$package_helper = new WPML_Package_Helper();
return $package_helper->get_string_context_from_package( $this->lm_helper->get_package( $link_id, 'link' ) );
}
/**
* @param object $cat
*
* @return string
*/
private function get_category_string_context( $cat ) {
$package_helper = new WPML_Package_Helper();
return $package_helper->get_string_context_from_package( $this->lm_helper->get_package( $cat->term_id, 'category' ) );
}
/**
* @param array $package
*
* @return bool
*/
private function package_exist_in_DB( $package ) {
global $wpdb;
$query = "SELECT ID FROM {$wpdb->prefix}icl_string_packages WHERE kind=%s AND name=%s";
$query_prepare = $wpdb->prepare( $query, $package['kind'], $package['name'] );
$ret = $wpdb->get_var( $query_prepare );
return (bool) $ret;
}
}<file_sep>/classes/class-wpml-link-manager.php
<?php
Class WPML_Link_Manager {
private $pagenow;
private $helper;
/**
* @param string $pagenow
* @param object $helper
*/
public function __construct( &$pagenow, &$helper ) {
$this->pagenow = &$pagenow;
$this->helper = &$helper;
add_action( 'plugins_loaded', array( $this, 'plugins_loaded_action' ) );
}
public function plugins_loaded_action() {
if ( !apply_filters( 'pre_option_link_manager_enabled', false ) ) {
return;
}
$this->hooks();
$this->maybe_add_package_language_switcher();
}
public function hooks() {
add_action( 'add_link', array( $this, 'add_or_edit_link_action' ) );
add_action( 'edit_link', array( $this, 'add_or_edit_link_action' ) );
add_filter( 'get_bookmarks', array( $this, 'get_bookmarks_filter' ) );
add_action( 'deleted_link', array( $this, 'deleted_link_action' ) );
add_action( 'add_meta_boxes', array( $this, 'add_meta_boxes_action' ) );
add_action( 'wpml_register_string_packages', array( $this, 'wpml_register_string_packages_action' ) );
add_filter( 'get_terms', array( $this, 'get_terms_filter' ), 10, 2 );
add_action( 'created_link_category', array( $this, 'created_or_edited_link_category_action' ) );
add_action( 'edited_link_category', array( $this, 'created_or_edited_link_category_action' ) );
add_action( 'deleted_link', array( $this, 'deleted_link_action' ), 10, 4 );
add_action( 'delete_term', array( $this, 'delete_term_action' ), 10, 3 );
}
public function maybe_add_package_language_switcher() {
if ( $this->pagenow === 'link.php'
&& isset( $_GET['action'], $_GET['link_id'] )
&& $_GET['action'] === 'edit'
) {
$link_id = filter_input(INPUT_GET, 'link_id');
$package = $this->helper->get_package($link_id);
do_action('wpml_show_package_language_admin_bar', $package);
} else if ( $this->pagenow === 'edit-tags.php'
&& isset( $_GET['taxonomy'], $_GET['tag_ID'] )
&& $_GET['taxonomy'] === 'link_category'
) {
$tag_id = filter_input( INPUT_GET, 'tag_ID' );
$package = $this->helper->get_package( $tag_id, 'category' );
do_action( 'wpml_show_package_language_admin_bar', $package );
}
}
/**
* @param int $link_id
*/
public function add_or_edit_link_action( $link_id ) {
$this->add_strings_package( $link_id );
}
/**
* @param int $link_id
*/
private function add_strings_package( $link_id ) {
$link = get_bookmark( $link_id );
$package = $this->helper->get_package( $link );
$name_string_name = $this->helper->get_link_string_name( 'name', $link );
$description_string_name = $this->helper->get_link_string_name( 'description', $link );
do_action( 'wpml_register_string', $link->link_name, $name_string_name, $package, 'Link title', 'LINE');
do_action( 'wpml_register_string', $link->link_description, $description_string_name, $package, 'Link description', 'AREA');
}
/**
* @param array $links objects from get_bookmark()
*
* @return array
*/
public function get_bookmarks_filter( $links ) {
if ( !is_admin() ) {
foreach ( $links as &$link ) {
$package = $this->helper->get_package($link);
$name_string_name = $this->helper->get_link_string_name('name', $link);
$description_string_name = $this->helper->get_link_string_name('description', $link);
$link->link_name = apply_filters('wpml_translate_string', $link->link_name, $name_string_name, $package);
$link->link_description = apply_filters('wpml_translate_string', $link->link_description, $description_string_name, $package);
}
}
return $links;
}
/**
* @param int $link_id
*/
public function deleted_link_action( $link_id ) {
$this->delete_strings_package( $link_id, 'link' );
}
/**
* @param int $link_id
* @param string $subtype
*/
private function delete_strings_package( $link_id, $subtype ) {
do_action( 'wpml_delete_package_action', $link_id, $this->helper->get_package_type() . ' - ' . $subtype );
}
public function add_meta_boxes_action() {
add_meta_box( 'link-translation', __( 'Link translation', 'wpml-link-manager' ), array( $this, 'render_package_language_ui' ), 'link', 'side', 'default' );
}
public function render_package_language_ui() {
$link_id = isset( $_GET['link_id'] ) ? $_GET['link_id'] : false;
$package = $this->helper->get_package( $link_id );
do_action( 'wpml_show_package_language_ui', $package );
}
public function wpml_register_string_packages_action() {
$links = get_bookmarks();
if ( $links ) {
foreach ( $links as $link ) {
$this->add_strings_package( $link->link_id );
}
}
$link_categories = get_terms( 'link_category' );
if ( $link_categories ) {
foreach ( $link_categories as $link_category ) {
$this->created_or_edited_link_category_action( $link_category->term_id );
}
}
ICL_AdminNotifier::add_instant_message(
__( 'Previous existing links are now available for translation', 'wpml-link-manager' ),
'update'
);
}
/**
* @param array $categories
* @param array $taxonomies
*
	 * @return array Filtered link categories
*/
public function get_terms_filter( $categories, $taxonomies ) {
if ( !is_admin() && in_array( 'link_category', $taxonomies ) ) {
foreach ( $categories as &$category ) {
$package = $this->helper->get_package( $category, 'category' );
$name_string_name = $this->helper->get_category_string_name( 'name', $category );
$description_string_name = $this->helper->get_category_string_name( 'description', $category );
$category->name = apply_filters( 'wpml_translate_string', $category->name, $name_string_name, $package );
$category->description = apply_filters( 'wpml_translate_string', $category->description, $description_string_name, $package );
}
}
return $categories;
}
/**
* @param int $term_id Term ID.
*/
public function created_or_edited_link_category_action( $term_id ) {
$link_category = get_term( $term_id, 'link_category' );
if ( $link_category ) {
$package = $this->helper->get_package( $link_category, 'category' );
$name_string_name = $this->helper->get_category_string_name( 'name', $link_category );
$description_string_name = $this->helper->get_category_string_name( 'description', $link_category );
do_action( 'wpml_register_string', $link_category->name, $name_string_name, $package, 'Link Category title', 'LINE');
do_action( 'wpml_register_string', $link_category->description, $description_string_name, $package, 'Link Category description', 'AREA');
}
}
/**
* @param int $term Term ID.
* @param int $tt_id Term taxonomy ID.
* @param string $taxonomy Taxonomy slug.
*/
public function delete_term_action( $term, $tt_id, $taxonomy ) {
if ( 'link_category' === $taxonomy ) {
$this->delete_strings_package( $term, 'category' );
}
}
}<file_sep>/plugin.php
<?php
/* Plugin Name: WPML Link Manager
* Description: Makes Link Manager (in the core before WP 3.5) compatible with WPML > 3.2
* Author: OnTheGoSystems
* Author URL: http://wpml.org/
* Version: 0.1-dev
*/
define( 'WPML_LINK_MANAGER_PATH', dirname( __FILE__ ) );
function wpml_link_manager_load_plugin() {
global $pagenow;
$wpml_auto_loader_instance = WPML_Auto_Loader::get_instance();
$wpml_auto_loader_instance->register( WPML_LINK_MANAGER_PATH . '/' );
$package_type = 'Link Manager';
$helper = new WPML_Link_Manager_Helper( $package_type );
new WPML_Link_Manager( $pagenow, $helper );
}
add_action( 'wpml_loaded', 'wpml_link_manager_load_plugin' );
function wpml_link_manager_maybe_remove_admin_ls() {
global $pagenow;
if ( $pagenow === 'link.php'
|| $pagenow === 'link-manager.php'
|| $pagenow === 'link-add.php'
|| ( $pagenow === 'edit-tags.php' && isset( $_GET['taxonomy'] ) && $_GET['taxonomy'] === 'link_category' ) ) {
add_filter( 'wpml_show_admin_language_switcher', '__return_false' );
}
}
add_action( 'wpml_before_init', 'wpml_link_manager_maybe_remove_admin_ls' );
function wpml_link_manager_activation() {
update_option( 'wpml-package-translation-refresh-required', true );
}
register_activation_hook( __FILE__, 'wpml_link_manager_activation' );<file_sep>/classes/class-wpml-link-manager-helper.php
<?php
Class WPML_Link_Manager_Helper {
private $package_type;
/**
* @param string $package_type
*/
public function __construct( $package_type ) {
$this->package_type = $package_type;
}
public function get_package_type() {
return $this->package_type;
}
/**
* @param int|object $link_or_cat
* @param string $subtype
*
* @return array $package
*/
public function get_package( $link_or_cat, $subtype = 'link' ) {
$package = null;
$package_subtype = $this->package_type . ' - ' . $subtype;
if ( is_object( $link_or_cat ) ) {
if ( 'link' === $subtype ) {
$package = array(
'kind' => $package_subtype,
'name' => $link_or_cat->link_id,
'title' => $link_or_cat->link_name,
'edit_link' => admin_url( 'link.php?action=edit&link_id=' . $link_or_cat->link_id ),
'view_link' => $link_or_cat->link_url,
);
} elseif ( 'category' === $subtype ) {
$package = array(
'kind' => $package_subtype,
'name' => $link_or_cat->term_id,
'title' => $link_or_cat->name,
'edit_link' => admin_url('edit-tags.php?action=edit&taxonomy=link_category&tag_ID=' . $link_or_cat->term_id),
);
}
} else {
$package = array(
'kind' => $package_subtype,
'name' => $link_or_cat,
);
}
return $package;
}
/**
* @param string $name
* @param object $link
*
* @return string formatted
*/
public function get_link_string_name( $name, $link ) {
return 'link-' . $link->link_id . '-' . $name;
}
/**
* @param string $name
* @param object $category
*
* @return string formatted
*/
public function get_category_string_name( $name, $category ) {
return 'link-category-' . $category->term_id . '-' . $name;
}
} | c75e1cc0414c1186de22ec8fb6194f9adea75680 | [
"Markdown",
"PHP"
] | 7 | PHP | OnTheGoSystems/wpml-link-manager | 6e1e68c949189442b4e5c79cdc07e3483c2aaae8 | d52c712a8a92193a766ea1b12be730a8c7ab95a9 | |
refs/heads/main | <repo_name>Shivam-Shandilya1/GetMaxandMin<file_sep>/main.cpp
#include<iostream>
using namespace std;
void getMaxandMin(int number[], int sizes, int* minm, int* maxm)
{
    for (int i = 0; i < sizes; i++)
    {
        if (*maxm < number[i])
        {
            *maxm = number[i];
        }
        if (*minm > number[i])
        {
            *minm = number[i];
        }
    }
}
int main()
{
int number[4];
for(int i=0;i<4;i++)
{
cout<<"Number : ";
cin>>number[i];
}
int maxm = number[0];
int minm = number[0];
for(int i=0;i<4;i++)
{
cout<<number[i]<< " ";
}
getMaxandMin(number,4,&minm,&maxm);
cout<<"Max number is:- "<<maxm <<endl;
cout<<"Min number is:- "<<minm<<endl;
system("pause>0");
}
| 8c1e43301e9713b54fc37b889f575f94d5e3871f | [
"C++"
] | 1 | C++ | Shivam-Shandilya1/GetMaxandMin | 5f98292171f0a6d2920b9a7d32056d1d3224a59c | fa9b5ae90b16e100f5305e4ecc11cbc88f86b95c | |
refs/heads/master | <repo_name>DiacuR/EjemploDeGit<file_sep>/Clase 1/main.c
#include <stdio.h>
#include <stdlib.h>
int main()
{
int dinero;
char tipo;
int i;
int maxImporteDolares;
int flag = 0;
float promedioEnPesos;
int contadorDePesos = 0;
int acumuladorPesos = 0;
int porcentajeEnPesos;
for(i = 0; i < 5; i++)
{
printf("\nIngrese moneda: ");
fflush(stdin); //serbuff y fpurge en Linux.
scanf("%c", &tipo);
if(tipo == 'd' || tipo == 'p' || tipo == 'b' || tipo == 'l')
{
printf("\nIngrese valor: ");
fflush(stdin);
scanf("%d", &dinero);
if(tipo == 'd')
{
                if(flag == 0 || dinero > maxImporteDolares)
{
maxImporteDolares = dinero;
flag = 1;
}
}
if(tipo == 'p')
{
acumuladorPesos += dinero;
contadorDePesos++;
}
}
else
{
printf("ERROR. Tipo de moneda invalido\n");
}
}
porcentajeEnPesos = contadorDePesos*100/5;
if(flag == 0)
{
printf("\nNo se ingresaron dolares");
}
else
{
printf("\nMayor importe en dolares: %d", maxImporteDolares);
}
    if(contadorDePesos > 0)
    {
        promedioEnPesos = ((float)acumuladorPesos)/contadorDePesos;
        printf("\nPromedio en Pesos: %f", promedioEnPesos);
    }
    else
    {
        printf("\nNo se ingresaron pesos");
    }
    printf("\nPorcentaje de transacciones hechas en pesos: %d%%", porcentajeEnPesos);
return 0;
}
| 0b5ec921b3ec1942cdec852b2d9d2e999770897b | [
"C"
] | 1 | C | DiacuR/EjemploDeGit | 64f14ca57ff3ba1421a5836258b7b53265aeacb6 | f245acd9983460829e3d7d94bbd5579b9ee976a4 | |
refs/heads/master | <repo_name>W0nsu/Fitverse.CalendarService<file_sep>/Fitverse.CalendarService/Commands/SignOutOfClassCommand.cs
using Fitverse.CalendarService.Dtos;
using MediatR;
namespace Fitverse.CalendarService.Commands
{
public class SignOutOfClassCommand : IRequest<ReservationDtoSetter>
{
public SignOutOfClassCommand(int reservationId)
{
ReservationId = reservationId;
}
public int ReservationId { get; }
}
}<file_sep>/Fitverse.CalendarService/Queries/GetClassTypeByIdQuery.cs
using Fitverse.CalendarService.Dtos;
using MediatR;
namespace Fitverse.CalendarService.Queries
{
public class GetClassTypeByIdQuery : IRequest<ClassTypeDto>
{
public GetClassTypeByIdQuery(int classTypeId)
{
ClassTypeId = classTypeId;
}
public int ClassTypeId { get; }
}
}<file_sep>/Fitverse.CalendarService/MessageBus/Senders/SignOutOfClassSender.cs
using System;
using Fitverse.CalendarService.Interfaces;
using Fitverse.Shared.MessageBus;
using Microsoft.Extensions.Options;
namespace Fitverse.CalendarService.MessageBus.Senders
{
public class SignOutOfClassSender : ISignOutOfClassSender
{
private readonly IOptions<RabbitMqConfiguration> _rabbitMqOptions;
public SignOutOfClassSender(IOptions<RabbitMqConfiguration> rabbitMqOptions)
{
_rabbitMqOptions = rabbitMqOptions;
}
public void DeleteReservation(int reservationId)
{
var exchangeConfig = new Tuple<string, string>("classes", "signOutOfClass");
SendEventHandler.SendEvent(reservationId, _rabbitMqOptions, exchangeConfig);
}
}
}<file_sep>/Fitverse.CalendarService/Handlers/GetClassByDateHandler.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Queries;
using Mapster;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Handlers
{
public class GetClassByDateHandler : IRequestHandler<GetClassByDateQuery, List<CalendarClassDto>>
{
private readonly CalendarContext _dbContext;
public GetClassByDateHandler(CalendarContext dbContext)
{
_dbContext = dbContext;
}
public async Task<List<CalendarClassDto>> Handle(GetClassByDateQuery request,
CancellationToken cancellationToken)
{
var classList = await _dbContext
.Classes
.Where(c => c.Date.Date >= request.DateRange.DateFrom && c.Date.Date <= request.DateRange.DateTo)
.ToListAsync(cancellationToken);
if (classList is null)
{
throw new NullReferenceException(
$"There is no classes for given period. [Date from: {request.DateRange.DateFrom}, Date to: {request.DateRange.DateTo}]");
}
return classList
.Select(calendarClass => calendarClass.Adapt<CalendarClassDto>())
.OrderBy(x => x.Date)
.ThenBy(x => x.StartingTime.TimeOfDay)
.ToList();
}
}
}<file_sep>/Fitverse.CalendarService/Controllers/SettingsController.cs
using System.Threading.Tasks;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Models;
using Fitverse.CalendarService.Queries;
using MediatR;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.JsonPatch;
using Microsoft.AspNetCore.Mvc;
namespace Fitverse.CalendarService.Controllers
{
[Authorize]
[Route("/api/cs/calendar/settings")]
[ApiController]
public class SettingsController : Controller
{
private readonly IMediator _mediator;
public SettingsController(IMediator mediator)
{
_mediator = mediator;
}
[HttpGet]
public async Task<IActionResult> GetAllClassTypes()
{
var query = new GetAllClassTypesQuery();
var result = await _mediator.Send(query);
return Ok(result);
}
[HttpPost]
public async Task<IActionResult> AddClassType([FromBody] ClassTypeDto membershipDto)
{
var command = new AddClassTypeCommand(membershipDto);
var result = await _mediator.Send(command);
return Ok(result);
}
[HttpGet("{classTypeId}")]
public async Task<IActionResult> GetClassTypeById([FromRoute] int classTypeId)
{
var query = new GetClassTypeByIdQuery(classTypeId);
var result = await _mediator.Send(query);
return Ok(result);
}
[HttpPatch("{classTypeId}")]
public async Task<IActionResult> EditClassType([FromRoute] int classTypeId,
[FromBody] JsonPatchDocument<ClassType> classTypeEntity)
{
var command = new EditClassTypeCommand(classTypeId, classTypeEntity);
var result = await _mediator.Send(command);
return Ok(result);
}
[HttpDelete("{classTypeId}")]
public async Task<IActionResult> DeleteClassType([FromRoute] int classTypeId)
{
var command = new DeleteClassTypeCommand(classTypeId);
var result = await _mediator.Send(command);
return Ok($"Class type [ClassTypeId: {result.ClassTypeId}] has been deleted");
}
}
}<file_sep>/Fitverse.CalendarService/Handlers/EditClassTypeHandler.cs
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Validators;
using FluentValidation;
using Mapster;
using MediatR;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Handlers
{
public class EditClassTypeHandler : ControllerBase, IRequestHandler<EditClassTypeCommand, ClassTypeDto>
{
private readonly CalendarContext _dbContext;
public EditClassTypeHandler(CalendarContext dbContext)
{
_dbContext = dbContext;
}
public async Task<ClassTypeDto> Handle(EditClassTypeCommand request, CancellationToken cancellationToken)
{
var classTypeEntity = await _dbContext
.ClassTypes
.SingleOrDefaultAsync(m => m.ClassTypeId == request.ClassTypeId, cancellationToken);
if (classTypeEntity is null)
{
throw new NullReferenceException($"ClassType [classTypeId: {request.ClassTypeId}] not found");
}
var editedClassType = request.NewClassTypeEntity;
var nameBeforeChange = classTypeEntity.Name;
editedClassType.ApplyTo(classTypeEntity, ModelState);
var nameAfterChange = classTypeEntity.Name;
// var validator =
// new ClassTypeValidator(_dbContext, new Tuple<string, string>(nameBeforeChange, nameAfterChange));
// var validationResult = await validator.ValidateAsync(classTypeEntity, cancellationToken);
//
// if (!validationResult.IsValid)
// throw new ValidationException(validationResult.Errors.ToList());
_ = await _dbContext.SaveChangesAsync(cancellationToken);
var patchedClassTypeEntity = await _dbContext
.ClassTypes
.SingleOrDefaultAsync(m => m.ClassTypeId == request.ClassTypeId, cancellationToken);
if (patchedClassTypeEntity is null)
{
throw new NullReferenceException(
$"Failed to fetch patched class type [ClassTypeId: {request.ClassTypeId}]");
}
var patchedClassTypeDto = patchedClassTypeEntity.Adapt<ClassTypeDto>();
return patchedClassTypeDto;
}
}
}<file_sep>/Fitverse.CalendarService/Handlers/DeleteClassByIdHandler.cs
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Mapster;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Handlers
{
public class DeleteClassByIdHandler : IRequestHandler<DeleteClassByIdCommand, CalendarClassDto>
{
private readonly CalendarContext _dbContext;
public DeleteClassByIdHandler(CalendarContext dbContext)
{
_dbContext = dbContext;
}
        public async Task<CalendarClassDto> Handle(DeleteClassByIdCommand request, CancellationToken cancellationToken)
        {
            var classEntity = await _dbContext
                .Classes
                .SingleOrDefaultAsync(m => m.ClassId == request.ClassId, cancellationToken);
            if (classEntity is null)
            {
                throw new NullReferenceException($"Class [ClassId: {request.ClassId}] not found");
            }
            _dbContext.Remove(classEntity);
            _ = await _dbContext.SaveChangesAsync(cancellationToken);
            var deletedReservationsForClass = await _dbContext.Reservations
                .Where(x => x.ClassId == request.ClassId)
                .ToListAsync(cancellationToken);
            foreach (var reservation in deletedReservationsForClass)
                _ = _dbContext.Remove(reservation);
            _ = await _dbContext.SaveChangesAsync(cancellationToken);
var classDto = classEntity.Adapt<CalendarClassDto>();
return classDto;
}
}
}<file_sep>/Fitverse.CalendarService/Dtos/CalendarClassDto.cs
using System;
using System.Collections.Generic;
using System.Globalization;
namespace Fitverse.CalendarService.Dtos
{
public class CalendarClassDto
{
public int ClassId { get; private set; }
public int ClassTypeId { get; set; }
public DateTime Date { get; set; }
public DateTime StartingTime { get; set; }
public DateTime EndingTime { get; set; }
public string ClassName { get; set; }
public int Limit { get; set; }
public string Description { get; set; }
public int TimetableId { get; set; }
public List<int> Reservations { get; set; }
public string ShortDate => Date.ToShortDateString();
}
}<file_sep>/Fitverse.CalendarService/Queries/GetClassByDateQuery.cs
using System;
using System.Collections.Generic;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Helpers;
using MediatR;
namespace Fitverse.CalendarService.Queries
{
public class GetClassByDateQuery : IRequest<List<CalendarClassDto>>
{
public GetClassByDateQuery(DateRange dateRange)
{
DateRange = dateRange;
}
public DateRange DateRange { get; }
}
}<file_sep>/Fitverse.CalendarService/Data/CalendarContext.cs
using Fitverse.CalendarService.Models;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Data
{
public class CalendarContext : DbContext
{
public CalendarContext(DbContextOptions<CalendarContext> options) : base(options)
{
}
public DbSet<ClassType> ClassTypes { get; set; }
public DbSet<CalendarClass> Classes { get; set; }
public DbSet<Timetable> Timetables { get; set; }
public DbSet<Reservation> Reservations { get; set; }
public DbSet<Member> Members { get; set; }
protected override void OnModelCreating(ModelBuilder builder)
{
builder.Entity<Member>()
.HasIndex(u => u.MemberId)
.IsUnique();
builder.Entity<ClassType>()
.Property(x => x.IsDeleted)
.HasDefaultValue(false);
}
}
}<file_sep>/Fitverse.CalendarService/Commands/DeleteTimetableByIdCommand.cs
using Fitverse.CalendarService.Dtos;
using MediatR;
namespace Fitverse.CalendarService.Commands
{
public class DeleteTimetableByIdCommand : IRequest<TimetableDto>
{
public DeleteTimetableByIdCommand(int timetableId)
{
TimetableId = timetableId;
}
public int TimetableId { get; }
}
}<file_sep>/Fitverse.CalendarService/Helpers/DateRange.cs
using System;
namespace Fitverse.CalendarService.Helpers
{
public class DateRange
{
public DateTime DateFrom { get; set; }
public DateTime DateTo { get; set; }
}
}<file_sep>/Fitverse.CalendarService/Interfaces/ISignUpForClassSender.cs
using Fitverse.CalendarService.Models;
namespace Fitverse.CalendarService.Interfaces
{
public interface ISignUpForClassSender
{
public void AddReservation(Reservation reservation);
}
}<file_sep>/Fitverse.CalendarService/Dtos/TimetableDto.cs
using System;
namespace Fitverse.CalendarService.Dtos
{
public class TimetableDto
{
public int TimetableId { get; private set; }
public int ClassTypeId { get; set; }
public string ClassTypeName { get; set; }
public DateTime StartingDate { get; set; }
public DateTime EndingDate { get; set; }
public DateTime ClassesStartingTime { get; set; }
public int PeriodType { get; set; }
}
}<file_sep>/Fitverse.CalendarService/Controllers/MembersController.cs
using System.Threading.Tasks;
using Fitverse.CalendarService.Queries;
using MediatR;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
namespace Fitverse.CalendarService.Controllers
{
[Authorize]
[ApiController]
[Route("/api/cs/members")]
public class MembersController : Controller
{
private readonly IMediator _mediator;
public MembersController(IMediator mediator)
{
_mediator = mediator;
}
[HttpGet]
public async Task<IActionResult> GetAllMembers()
{
var query = new GetAllMembersQuery();
var result = await _mediator.Send(query);
return Ok(result);
}
}
}<file_sep>/Fitverse.CalendarService/Handlers/SignUpForClassHandler.cs
using System;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Interfaces;
using Fitverse.CalendarService.Models;
using Mapster;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Handlers
{
public class SignUpForClassHandler : IRequestHandler<SignUpForClassCommand, ReservationDtoSetter>
{
private readonly CalendarContext _dbContext;
private readonly ISignUpForClassSender _signUpForClassSender;
private int _classTypeParticipantsLimit;
public SignUpForClassHandler(CalendarContext dbContext, ISignUpForClassSender signUpForClassSender)
{
_dbContext = dbContext;
_signUpForClassSender = signUpForClassSender;
}
public async Task<ReservationDtoSetter> Handle(SignUpForClassCommand request, CancellationToken cancellationToken)
{
var reservationEntity = request.Reservation.Adapt<Reservation>();
if (await IsLimitExceeded(reservationEntity, cancellationToken))
{
throw new ArgumentException(
$"Limit of participants for this classes has been exceeded [Limit: {_classTypeParticipantsLimit}]");
}
_ = await _dbContext.AddAsync(reservationEntity, cancellationToken);
_ = await _dbContext.SaveChangesAsync(cancellationToken);
var newReservation = await _dbContext
.Reservations
.SingleOrDefaultAsync(
m => m.ClassId == reservationEntity.ClassId && m.MemberId == reservationEntity.MemberId,
cancellationToken);
if (newReservation is null)
throw new NullReferenceException("Failed to sign in for classes. Try again");
_signUpForClassSender.AddReservation(newReservation);
var newReservationDto = newReservation.Adapt<ReservationDtoSetter>();
return newReservationDto;
}
private async Task<bool> IsLimitExceeded(Reservation newReservation,
CancellationToken cancellationToken = default)
{
var classEntity = await _dbContext
.Classes
.SingleOrDefaultAsync(x => x.ClassId == newReservation.ClassId, cancellationToken);
var classTypeEntity = await _dbContext
.ClassTypes
.SingleOrDefaultAsync(x => x.ClassTypeId == classEntity.ClassTypeId, cancellationToken);
_classTypeParticipantsLimit = classTypeEntity.Limit;
var numberOfReservations = await _dbContext
.Reservations
.CountAsync(x => x.ClassId == newReservation.ClassId, cancellationToken);
return numberOfReservations >= _classTypeParticipantsLimit;
}
}
}<file_sep>/Fitverse.CalendarService/Validators/AddTimetableCommandValidator.cs
using System;
using System.Linq;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using Fitverse.Shared.Helpers;
using FluentValidation;
namespace Fitverse.CalendarService.Validators
{
public class AddTimetableCommandValidator : AbstractValidator<AddTimetableCommand>
{
public AddTimetableCommandValidator(CalendarContext dbContext)
{
RuleFor(x => x.NewTimetableDto.ClassTypeId)
.GreaterThan(0);
RuleFor(x => x.NewTimetableDto.ClassTypeId)
.Must(id => dbContext.ClassTypes.Any(m => m.ClassTypeId == id))
.WithMessage(x => $"ClassType [ClassTypeId: {x.NewTimetableDto.ClassTypeId}] doesn't exists.");
RuleFor(x => x.NewTimetableDto.StartingDate)
.NotEmpty();
RuleFor(x => x.NewTimetableDto.StartingDate)
.Must(startingDate => startingDate >= DateTime.Now)
.WithMessage($"Select a date later than {DateTime.Now.ToShortDateString()}");
RuleFor(x => x.NewTimetableDto.EndingDate)
.NotEmpty();
RuleFor(x => x.NewTimetableDto.EndingDate)
.Must(endingDate => endingDate > DateTime.Now)
.WithMessage("Select a date later than Timetable starting date");
RuleFor(x => x.NewTimetableDto.ClassesStartingTime)
.NotEmpty();
RuleFor(x => x.NewTimetableDto.PeriodType)
.NotEmpty();
RuleFor(x => x.NewTimetableDto.PeriodType)
.Must(periodType => Enum.IsDefined(typeof(PeriodType), periodType))
.WithMessage("Available PeriodType: " + Enum.GetNames(typeof(PeriodType))
.Aggregate("", (current, value) => current + value + ", "));
}
}
}<file_sep>/Fitverse.CalendarService/Queries/GetClassByIdQuery.cs
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using MediatR;
namespace Fitverse.CalendarService.Queries
{
public class GetClassByIdQuery : IRequest<CalendarClassDto>
{
public GetClassByIdQuery(int classId)
{
ClassId = classId;
}
public int ClassId { get; }
}
}<file_sep>/Fitverse.CalendarService/Queries/GetAllTimetablesQuery.cs
using System.Collections.Generic;
using Fitverse.CalendarService.Dtos;
using MediatR;
namespace Fitverse.CalendarService.Queries
{
public class GetAllTimetablesQuery : IRequest<List<TimetableDto>>
{
}
}<file_sep>/Fitverse.CalendarService/Dtos/ReservationDtoSetter.cs
namespace Fitverse.CalendarService.Dtos
{
public class ReservationDtoSetter
{
public int ReservationId { get; private set; }
public int ClassId { get; set; }
public int MemberId { get; set; }
}
}<file_sep>/Fitverse.CalendarService/Queries/GetReservationsByClassIdCommand.cs
using System.Collections.Generic;
using Fitverse.CalendarService.Dtos;
using MediatR;
using Microsoft.EntityFrameworkCore.Query.Internal;
namespace Fitverse.CalendarService.Queries
{
public class GetReservationsByClassIdCommand : IRequest<List<ReservationDtoGetter>>
{
public GetReservationsByClassIdCommand(int classId)
{
ClassId = classId;
}
public int ClassId { get; }
}
}<file_sep>/Fitverse.CalendarService/Queries/GetAllMembersQuery.cs
using System.Collections.Generic;
using Fitverse.CalendarService.Dtos;
using MediatR;
namespace Fitverse.CalendarService.Queries
{
public class GetAllMembersQuery : IRequest<List<MemberDto>>
{
}
}<file_sep>/Fitverse.CalendarService/Queries/GetTimetableByIdQuery.cs
using Fitverse.CalendarService.Dtos;
using MediatR;
namespace Fitverse.CalendarService.Queries
{
public class GetTimetableByIdQuery : IRequest<TimetableDto>
{
public GetTimetableByIdQuery(int timetableId)
{
TimetableId = timetableId;
}
public int TimetableId { get; }
}
}<file_sep>/Fitverse.CalendarService/Dtos/MemberDto.cs
namespace Fitverse.CalendarService.Dtos
{
public class MemberDto
{
public int MemberId { get; set; }
public string Name { get; set; }
public string SurName { get; set; }
public string Email { get; set; }
}
}<file_sep>/Fitverse.CalendarService/Migrations/20210424164208_ClassNameInClassModel.cs
using Microsoft.EntityFrameworkCore.Migrations;
namespace Fitverse.CalendarService.Migrations
{
public partial class ClassNameInClassModel : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AlterColumn<bool>(
name: "IsDeleted",
table: "ClassTypes",
type: "bit",
nullable: false,
defaultValue: false,
oldClrType: typeof(bool),
oldType: "bit");
migrationBuilder.AddColumn<string>(
name: "ClassName",
table: "Classes",
type: "nvarchar(max)",
nullable: false,
defaultValue: "");
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "ClassName",
table: "Classes");
migrationBuilder.AlterColumn<bool>(
name: "IsDeleted",
table: "ClassTypes",
type: "bit",
nullable: false,
oldClrType: typeof(bool),
oldType: "bit",
oldDefaultValue: false);
}
}
}
<file_sep>/Fitverse.CalendarService/Validators/ClassTypeValidator.cs
using System;
using System.Linq;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Models;
using FluentValidation;
namespace Fitverse.CalendarService.Validators
{
public class ClassTypeValidator : AbstractValidator<ClassType>
{
// public ClassTypeValidator(CalendarContext dbContext, Tuple<string, string> nameBeforeAndAfterChange)
// {
// string nameBeforeChange, nameAfterChange;
// (nameBeforeChange, nameAfterChange) = nameBeforeAndAfterChange;
//
// RuleFor(x => x.Name)
// .NotEmpty()
// .MinimumLength(3)
// .MaximumLength(30);
//
// if (nameBeforeChange != nameAfterChange)
// {
// RuleFor(x => x.Name)
// .Must(name => !dbContext.ClassTypes.Any(m => m.Name == name))
// .WithMessage(x => $"Name [Name: {x.Name}] already in use");
// }
//
// RuleFor(x => x.Description)
// .MaximumLength(255);
//
// RuleFor(x => x.Limit)
// .NotEmpty()
// .GreaterThan(0);
//
// RuleFor(x => x.Room)
// .MinimumLength(3)
// .MaximumLength(30);
//
// RuleFor(x => x.Duration)
// .NotEmpty()
// .GreaterThan(0);
// }
}
}<file_sep>/Fitverse.CalendarService/Dtos/ReservationDtoGetter.cs
namespace Fitverse.CalendarService.Dtos
{
public class ReservationDtoGetter
{
public int ReservationId { get; private set; }
public int ClassId { get; set; }
public int MemberId { get; set; }
public MemberDto Member { get; set; }
}
}<file_sep>/Fitverse.CalendarService/Handlers/GetTimetableByIdHandler.cs
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Queries;
using Mapster;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Handlers
{
public class GetTimetableByIdHandler : IRequestHandler<GetTimetableByIdQuery, TimetableDto>
{
private readonly CalendarContext _dbContext;
public GetTimetableByIdHandler(CalendarContext dbContext)
{
_dbContext = dbContext;
}
public async Task<TimetableDto> Handle(GetTimetableByIdQuery request, CancellationToken cancellationToken)
{
var timetableEntity = await _dbContext
.Timetables
.SingleOrDefaultAsync(m => m.TimetableId == request.TimetableId, cancellationToken);
if (timetableEntity is null)
throw new NullReferenceException($"Timetable[TimetableId: {request.TimetableId} not found]");
var timetableDto = timetableEntity.Adapt<TimetableDto>();
timetableDto.ClassTypeName = _dbContext
.ClassTypes
.FirstOrDefault(x => x.ClassTypeId == timetableEntity.ClassTypeId)
?.Name;
return timetableDto;
}
}
}<file_sep>/Fitverse.CalendarService/Validators/EditClassTypeCommandValidator.cs
using System.Linq;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using FluentValidation;
namespace Fitverse.CalendarService.Validators
{
public class EditClassTypeCommandValidator : AbstractValidator<EditClassTypeCommand>
{
public EditClassTypeCommandValidator(CalendarContext dbContext)
{
RuleFor(x => x.ClassTypeId)
.GreaterThan(0);
RuleFor(x => x.ClassTypeId)
.Must(id => dbContext.ClassTypes.Any(m => m.ClassTypeId == id))
.WithMessage(x => $"Class type [ClassTypeId: {x.ClassTypeId}] not found");
}
}
}<file_sep>/Fitverse.CalendarService/Handlers/AddTimetableHandler.cs
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Helpers;
using Fitverse.CalendarService.Models;
using Mapster;
using MediatR;
namespace Fitverse.CalendarService.Handlers
{
public class AddTimetableHandler : IRequestHandler<AddTimetableCommand, TimetableDto>
{
private readonly CalendarContext _dbContext;
public AddTimetableHandler(CalendarContext dbContext)
{
_dbContext = dbContext;
}
public async Task<TimetableDto> Handle(AddTimetableCommand request, CancellationToken cancellationToken)
{
var timetableEntity = request.NewTimetableDto.Adapt<Timetable>();
_ = await _dbContext.AddAsync(timetableEntity, cancellationToken);
_ = await _dbContext.SaveChangesAsync(cancellationToken);
var newTimetable = _dbContext
.Timetables
.Where(m => m.ClassTypeId == request.NewTimetableDto.ClassTypeId)
.AsEnumerable()
.LastOrDefault();
if (newTimetable is null)
throw new NullReferenceException("Failed to add timetable. Try again");
var classGenerator = new ClassGenerator(_dbContext);
await classGenerator.AddClassesForTimetableAsync(timetableEntity, cancellationToken);
var newTimetableDto = newTimetable.Adapt<TimetableDto>();
return newTimetableDto;
}
}
}<file_sep>/Fitverse.CalendarService/Validators/SignUpForClassCommandValidator.cs
using System.Linq;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using FluentValidation;
namespace Fitverse.CalendarService.Validators
{
public class SignUpForClassCommandValidator : AbstractValidator<SignUpForClassCommand>
{
public SignUpForClassCommandValidator(CalendarContext dbContext)
{
RuleFor(x => x.Reservation.ClassId)
.GreaterThan(0);
RuleFor(x => x.Reservation.ClassId)
.Must(id => dbContext.Classes.Any(m => m.ClassId == id))
.WithMessage(x => $"Classes [ClassId: {x.Reservation.ClassId}] doesn't exists.");
RuleFor(x => x.Reservation.MemberId)
.GreaterThan(0);
RuleFor(x => x.Reservation.MemberId)
.Must(id => dbContext.Members.Any(m => m.MemberId == id))
.WithMessage(x => $"Member [MemberId: {x.Reservation.MemberId}] doesn't exists.");
RuleFor(x => x.Reservation)
.Must(reservation =>
!dbContext.Reservations.Any(r =>
r.ClassId == reservation.ClassId && r.MemberId == reservation.MemberId))
.WithMessage(x =>
$"Member [MemberId: {x.Reservation.MemberId}] is already registered to Classes [ClassId: {x.Reservation.ClassId}]");
}
}
}<file_sep>/Fitverse.CalendarService/Handlers/DeleteTimetableByIdHandler.cs
using System;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Helpers;
using Fitverse.CalendarService.Models;
using Mapster;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Handlers
{
public class DeleteTimetableByIdHandler : IRequestHandler<DeleteTimetableByIdCommand, TimetableDto>
{
private readonly CalendarContext _dbContext;
public DeleteTimetableByIdHandler(CalendarContext dbContext)
{
_dbContext = dbContext;
}
public async Task<TimetableDto> Handle(DeleteTimetableByIdCommand request, CancellationToken cancellationToken)
{
var timetableEntity = await _dbContext
.Timetables
.SingleOrDefaultAsync(m => m.TimetableId == request.TimetableId, cancellationToken);
var classGenerator = new ClassGenerator(_dbContext);
await classGenerator.DeleteAllClassesByTimetableIdAsync(timetableEntity, cancellationToken);
_dbContext.Remove(timetableEntity);
_ = await _dbContext.SaveChangesAsync(cancellationToken);
var timetableDto = timetableEntity.Adapt<TimetableDto>();
return timetableDto;
}
}
}<file_sep>/Fitverse.CalendarService/Models/Reservation.cs
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace Fitverse.CalendarService.Models
{
public class Reservation
{
[Key]
[Required]
[DatabaseGenerated(DatabaseGeneratedOption.Identity)]
public int ReservationId { get; set; }
[Required] public int ClassId { get; set; }
[Required] public int MemberId { get; set; }
}
}<file_sep>/Fitverse.CalendarService/Validators/DeleteTimetableByIdCommandValidator.cs
using System.Linq;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using FluentValidation;
namespace Fitverse.CalendarService.Validators
{
public class DeleteTimetableByIdCommandValidator : AbstractValidator<DeleteTimetableByIdCommand>
{
public DeleteTimetableByIdCommandValidator(CalendarContext dbContext)
{
RuleFor(x => x.TimetableId)
.GreaterThan(0);
            RuleFor(x => x.TimetableId)
                .Must(id => dbContext.Timetables.Any(m => m.TimetableId == id))
                .WithMessage(x => $"Timetable [TimetableId: {x.TimetableId}] not found");
}
}
}<file_sep>/Fitverse.CalendarService/Commands/AddTimetableCommand.cs
using Fitverse.CalendarService.Dtos;
using MediatR;
namespace Fitverse.CalendarService.Commands
{
public class AddTimetableCommand : IRequest<TimetableDto>
{
public AddTimetableCommand(TimetableDto timetableDto)
{
NewTimetableDto = timetableDto;
}
public TimetableDto NewTimetableDto { get; }
}
}<file_sep>/Fitverse.CalendarService/Models/Timetable.cs
using System;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace Fitverse.CalendarService.Models
{
public class Timetable
{
[Key]
[Required]
[DatabaseGenerated(DatabaseGeneratedOption.Identity)]
public int TimetableId { get; set; }
[Required] public int ClassTypeId { get; set; }
[Required] [Column(TypeName = "Date")] public DateTime StartingDate { get; set; }
[Required] [Column(TypeName = "Date")] public DateTime EndingDate { get; set; }
[Required] public DateTime ClassesStartingTime { get; set; }
[Required] public int PeriodType { get; set; }
}
}<file_sep>/Fitverse.CalendarService/Interfaces/ISignOutOfClassSender.cs
namespace Fitverse.CalendarService.Interfaces
{
public interface ISignOutOfClassSender
{
public void DeleteReservation(int reservationId);
}
}<file_sep>/Fitverse.CalendarService/Handlers/GetAllClassTypesHandler.cs
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Queries;
using Mapster;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Handlers
{
public class GetAllClassTypesHandler : IRequestHandler<GetAllClassTypesQuery, List<ClassTypeDto>>
{
private readonly CalendarContext _dbContext;
public GetAllClassTypesHandler(CalendarContext dbContext)
{
_dbContext = dbContext;
}
public async Task<List<ClassTypeDto>> Handle(GetAllClassTypesQuery request, CancellationToken cancellationToken)
{
var classTypesList = await _dbContext
.ClassTypes
.Where(x => !x.IsDeleted)
.ToListAsync(cancellationToken);
return classTypesList.Select(classType => classType.Adapt<ClassTypeDto>()).ToList();
}
}
}<file_sep>/Fitverse.CalendarService/Commands/DeleteClassByIdCommand.cs
using Fitverse.CalendarService.Dtos;
using MediatR;
namespace Fitverse.CalendarService.Commands
{
public class DeleteClassByIdCommand : IRequest<CalendarClassDto>
{
public DeleteClassByIdCommand(int classId)
{
ClassId = classId;
}
public int ClassId { get; }
}
}<file_sep>/Fitverse.CalendarService/Handlers/GetAllTimetablesHandler.cs
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Queries;
using Mapster;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Handlers
{
public class GetAllTimetablesHandler : IRequestHandler<GetAllTimetablesQuery, List<TimetableDto>>
{
private readonly CalendarContext _dbContext;
public GetAllTimetablesHandler(CalendarContext dbContext)
{
_dbContext = dbContext;
}
public async Task<List<TimetableDto>> Handle(GetAllTimetablesQuery request, CancellationToken cancellationToken)
{
var timetablesList = await _dbContext
.Timetables
.ToListAsync(cancellationToken);
var classTypesList = await _dbContext.ClassTypes
.ToListAsync(cancellationToken);
var timetablesDtoList = new List<TimetableDto>();
foreach (var timetable in timetablesList)
{
var timetableDto = timetable.Adapt<TimetableDto>();
timetableDto.ClassTypeName =
classTypesList.FirstOrDefault(x => x.ClassTypeId == timetable.ClassTypeId)?.Name;
timetablesDtoList.Add(timetableDto);
}
return timetablesDtoList;
}
}
}<file_sep>/Fitverse.CalendarService/Controllers/TimetablesController.cs
using System.Threading.Tasks;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Queries;
using MediatR;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
namespace Fitverse.CalendarService.Controllers
{
[Authorize]
[ApiController]
[Route("/api/cs/timetable")]
public class TimetablesController : Controller
{
private readonly IMediator _mediator;
public TimetablesController(IMediator mediator)
{
_mediator = mediator;
}
[HttpGet]
public async Task<IActionResult> GetAllTimetables()
{
var query = new GetAllTimetablesQuery();
var result = await _mediator.Send(query);
return Ok(result);
}
[HttpGet]
[Route("{timetableId}")]
public async Task<IActionResult> GetTimetableById(int timetableId)
{
var query = new GetTimetableByIdQuery(timetableId);
var result = await _mediator.Send(query);
return Ok(result);
}
[HttpPost]
public async Task<IActionResult> AddTimetable([FromBody] TimetableDto timetableDto)
{
var command = new AddTimetableCommand(timetableDto);
var result = await _mediator.Send(command);
return Ok(result);
}
[HttpDelete]
[Route("{timetableId}")]
public async Task<IActionResult> DeleteTimetable([FromRoute]int timetableId)
{
var command = new DeleteTimetableByIdCommand(timetableId);
var result = await _mediator.Send(command);
return Ok($"Timetable [timetableId: {result.TimetableId}] has been deleted");
}
}
}<file_sep>/Fitverse.CalendarService/Controllers/ClassesController.cs
using System;
using System.Threading.Tasks;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Helpers;
using Fitverse.CalendarService.Queries;
using MediatR;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
namespace Fitverse.CalendarService.Controllers
{
[Authorize]
[ApiController]
[Route("/api/cs/classes")]
public class ClassesController : Controller
{
private readonly IMediator _mediator;
public ClassesController(IMediator mediator)
{
_mediator = mediator;
}
[HttpGet]
public async Task<IActionResult> GetClassByDate([FromQuery] DateRange dateRange)
{
var query = new GetClassByDateQuery(dateRange);
var result = await _mediator.Send(query);
return Ok(result);
}
[HttpGet]
[Route("{classId}")]
public async Task<IActionResult> GetClassById([FromRoute] int classId)
{
var query = new GetClassByIdQuery(classId);
var result = await _mediator.Send(query);
return Ok(result);
}
[HttpDelete]
[Route("{classId}")]
public async Task<IActionResult> DeleteClassById([FromRoute] int classId)
{
var command = new DeleteClassByIdCommand(classId);
var result = await _mediator.Send(command);
return Ok($"Class [ClassId: {result.ClassId}] has been deleted");
}
[HttpGet]
[Route("{classId}/reservation")]
public async Task<IActionResult> GetReservationsByClassId([FromRoute] int classId)
{
var command = new GetReservationsByClassIdCommand(classId);
var result = await _mediator.Send(command);
return Ok(result);
}
[HttpPost]
[Route("reservation")]
public async Task<IActionResult> SignUpForClass([FromBody] ReservationDtoSetter reservation)
{
var command = new SignUpForClassCommand(reservation);
var result = await _mediator.Send(command);
return Ok(result);
}
[HttpDelete]
[Route("reservation/{reservationId}")]
public async Task<IActionResult> SignOutOfClass([FromRoute] int reservationId)
{
var command = new SignOutOfClassCommand(reservationId);
var result = await _mediator.Send(command);
return Ok(result);
}
}
}<file_sep>/Fitverse.CalendarService/Helpers/DayOfTheWeek.cs
namespace Fitverse.CalendarService.Helpers
{
public enum DayOfTheWeek
{
Monday = 0,
Tuesday = 1,
Wednesday = 2,
Thursday = 3,
Friday = 4,
Saturday = 5,
Sunday = 6
}
}<file_sep>/Fitverse.CalendarService/Queries/GetAllClassTypesQuery.cs
using System.Collections.Generic;
using Fitverse.CalendarService.Dtos;
using MediatR;
namespace Fitverse.CalendarService.Queries
{
public class GetAllClassTypesQuery : IRequest<List<ClassTypeDto>>
{
}
}<file_sep>/Fitverse.CalendarService/Handlers/DeleteClassTypeHandler.cs
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Helpers;
using Mapster;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Handlers
{
public class DeleteClassTypeHandler : IRequestHandler<DeleteClassTypeCommand, ClassTypeDto>
{
private readonly CalendarContext _dbContext;
public DeleteClassTypeHandler(CalendarContext dbContext)
{
_dbContext = dbContext;
}
public async Task<ClassTypeDto> Handle(DeleteClassTypeCommand request, CancellationToken cancellationToken)
{
var classTypeEntity = await _dbContext
.ClassTypes
.SingleOrDefaultAsync(m => m.ClassTypeId == request.ClassTypeId, cancellationToken);
classTypeEntity.IsDeleted = true;
_ = await _dbContext.SaveChangesAsync(cancellationToken);
var timetablesList = await _dbContext
.Timetables
.Where(x => x.ClassTypeId == request.ClassTypeId && x.EndingDate > DateTime.Today)
.ToListAsync(cancellationToken);
var classGenerator = new ClassGenerator(_dbContext);
foreach (var timetable in timetablesList)
{
await classGenerator.DeleteAllFutureClassesByTimetableIdAsync(timetable, cancellationToken);
timetable.EndingDate = DateTime.Today;
}
await _dbContext.SaveChangesAsync(cancellationToken);
var classTypeDto = classTypeEntity.Adapt<ClassTypeDto>();
return classTypeDto;
}
}
}<file_sep>/Fitverse.CalendarService/Validators/SignOutOfClassCommandValidator.cs
using System.Linq;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using FluentValidation;
namespace Fitverse.CalendarService.Validators
{
public class SignOutOfClassCommandValidator : AbstractValidator<SignOutOfClassCommand>
{
public SignOutOfClassCommandValidator(CalendarContext dbContext)
{
RuleFor(x => x.ReservationId)
.GreaterThan(0);
RuleFor(x => x.ReservationId)
.Must(id => dbContext.Reservations.Any(m => m.ReservationId == id))
.WithMessage(x => $"Reservation [ReservationId: {x.ReservationId}] not found.");
}
}
}<file_sep>/Fitverse.CalendarService/Handlers/GetAllMembersHandler.cs
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Queries;
using Mapster;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Handlers
{
public class GetAllMembersHandler : IRequestHandler<GetAllMembersQuery, List<MemberDto>>
{
private readonly CalendarContext _dbContext;
public GetAllMembersHandler(CalendarContext dbContext)
{
_dbContext = dbContext;
}
public async Task<List<MemberDto>> Handle(GetAllMembersQuery request, CancellationToken cancellationToken)
{
var membersList = await _dbContext.Members.ToListAsync(cancellationToken);
return membersList.Select(member => member.Adapt<MemberDto>())
.OrderBy(x => x.SurName).ToList();
}
}
}<file_sep>/Fitverse.CalendarService/Commands/AddClassTypeCommand.cs
using Fitverse.CalendarService.Dtos;
using MediatR;
namespace Fitverse.CalendarService.Commands
{
public class AddClassTypeCommand : IRequest<ClassTypeDto>
{
public AddClassTypeCommand(ClassTypeDto classType)
{
NewClassType = classType;
}
public ClassTypeDto NewClassType { get; }
}
}<file_sep>/Fitverse.CalendarService/Commands/DeleteClassTypeCommand.cs
using Fitverse.CalendarService.Dtos;
using MediatR;
namespace Fitverse.CalendarService.Commands
{
public class DeleteClassTypeCommand : IRequest<ClassTypeDto>
{
public DeleteClassTypeCommand(int classTypeId)
{
ClassTypeId = classTypeId;
}
public int ClassTypeId { get; }
}
}<file_sep>/Fitverse.CalendarService/Validators/AddClassTypeCommandValidator.cs
using System.Data;
using System.Linq;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using FluentValidation;
namespace Fitverse.CalendarService.Validators
{
public class AddClassTypeCommandValidator : AbstractValidator<AddClassTypeCommand>
{
public AddClassTypeCommandValidator(CalendarContext dbContext)
{
RuleFor(x => x.NewClassType.Name)
.NotEmpty()
.MinimumLength(3)
.MaximumLength(30);
RuleFor(x => x.NewClassType.Name)
.Must(n => !dbContext.ClassTypes.Any(c => c.Name == n))
.WithMessage(x => $"Class type name [ClassTypeName: {x.NewClassType.Name}] already in use.");
RuleFor(x => x.NewClassType.Description)
.NotEmpty()
.MinimumLength(3)
.MaximumLength(255);
RuleFor(x => x.NewClassType.Limit)
.NotEmpty()
.GreaterThan(0);
RuleFor(x => x.NewClassType.Room)
.MinimumLength(3)
.MaximumLength(30);
RuleFor(x => x.NewClassType.Duration)
.NotEmpty()
.GreaterThan(0);
}
}
}<file_sep>/Fitverse.CalendarService/Helpers/ClassGenerator.cs
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Models;
using Fitverse.Shared.Helpers;
using Mapster;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Helpers
{
public class ClassGenerator
{
private readonly CalendarContext _dbContext;
public ClassGenerator(CalendarContext dbContext)
{
_dbContext = dbContext;
}
public async Task AddClassesForTimetableAsync(Timetable timetable, CancellationToken cancellationToken = default)
{
var timetableStartingDate = timetable.StartingDate;
var classTypeEntity = await _dbContext
.ClassTypes
.SingleOrDefaultAsync(m => m.ClassTypeId == timetable.ClassTypeId, cancellationToken);
var classesStartingTime = timetable.ClassesStartingTime;
var classDate = timetableStartingDate;
var classesDto = new CalendarClassDto {Date = classDate};
while (classDate >= timetableStartingDate && classDate <= timetable.EndingDate)
{
if (classDate == timetableStartingDate)
{
classesDto.ClassName = classTypeEntity.Name;
classesDto.ClassTypeId = timetable.ClassTypeId;
classesDto.StartingTime = classesStartingTime;
classesDto.EndingTime = classesStartingTime.AddMinutes(classTypeEntity.Duration);
classesDto.TimetableId = timetable.TimetableId;
}
else
classesDto.Date = classDate;
var classEntity = classesDto.Adapt<CalendarClass>();
_ = await _dbContext.AddAsync(classEntity, cancellationToken);
classDate = CalculateNextClassDate(classDate, timetable);
}
_ = await _dbContext.SaveChangesAsync(cancellationToken);
}
public async Task DeleteAllClassesByTimetableIdAsync(Timetable timetable, CancellationToken cancellationToken = default)
{
var classesList = await _dbContext
.Classes
.Where(m => m.TimetableId == timetable.TimetableId)
.ToListAsync(cancellationToken);
foreach (var calendarClass in classesList)
{
await DeleteReservationsAsync(calendarClass, cancellationToken);
_dbContext.Remove(calendarClass);
}
_ = await _dbContext.SaveChangesAsync(cancellationToken);
}
public async Task DeleteAllFutureClassesByTimetableIdAsync(Timetable timetable, CancellationToken cancellationToken = default)
{
var classesList = await _dbContext
.Classes
.Where(m => m.TimetableId == timetable.TimetableId && m.Date > DateTime.Now)
.ToListAsync(cancellationToken);
foreach (var calendarClass in classesList)
{
await DeleteReservationsAsync(calendarClass, cancellationToken);
_dbContext.Remove(calendarClass);
}
_ = await _dbContext.SaveChangesAsync(cancellationToken);
}
private async Task DeleteReservationsAsync(CalendarClass calendarClass,
CancellationToken cancellationToken = default)
{
var reservationsForClass = await _dbContext
.Reservations
.Where(m => m.ClassId == calendarClass.ClassId)
.ToListAsync(cancellationToken);
foreach (var reservation in reservationsForClass)
_dbContext.Remove(reservation);
_ = await _dbContext.SaveChangesAsync(cancellationToken);
}
private DateTime CalculateNextClassDate(DateTime classDate, Timetable timetable)
{
switch ((PeriodType) timetable.PeriodType)
{
case PeriodType.Day:
return classDate.AddDays(1);
case PeriodType.Month:
return classDate.AddMonths(1);
case PeriodType.Year:
return classDate.AddYears(1);
case PeriodType.Week:
return classDate.AddDays(7);
default:
throw new ArgumentException(
$"Timetable period [period: {timetable.PeriodType}] do not exists.");
}
}
}
}<file_sep>/Fitverse.CalendarService/Migrations/20210315153423_ClassesAdded.cs
using System;
using Microsoft.EntityFrameworkCore.Migrations;
namespace Fitverse.CalendarService.Migrations
{
public partial class ClassesAdded : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "Classes",
columns: table => new
{
ClassId = table.Column<int>(type: "int", nullable: false)
.Annotation("SqlServer:Identity", "1, 1"),
ClassTypeId = table.Column<int>(type: "int", nullable: true),
Date = table.Column<DateTime>(type: "datetime2", nullable: false),
StartingTime = table.Column<DateTime>(type: "datetime2", nullable: false),
EndingTime = table.Column<DateTime>(type: "datetime2", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Classes", x => x.ClassId);
table.ForeignKey(
name: "FK_Classes_ClassTypes_ClassTypeId",
column: x => x.ClassTypeId,
principalTable: "ClassTypes",
principalColumn: "ClassTypeId",
onDelete: ReferentialAction.Restrict);
});
migrationBuilder.CreateIndex(
name: "IX_Classes_ClassTypeId",
table: "Classes",
column: "ClassTypeId");
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "Classes");
}
}
}
<file_sep>/Fitverse.CalendarService/Handlers/SignOutOfClassHandler.cs
using System;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Interfaces;
using Mapster;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Handlers
{
public class SignOutOfClassHandler : IRequestHandler<SignOutOfClassCommand, ReservationDtoSetter>
{
private readonly CalendarContext _dbContext;
private readonly ISignOutOfClassSender _signOutOfClassSender;
public SignOutOfClassHandler(CalendarContext dbContext, ISignOutOfClassSender signOutOfClassSender)
{
_dbContext = dbContext;
_signOutOfClassSender = signOutOfClassSender;
}
public async Task<ReservationDtoSetter> Handle(SignOutOfClassCommand request, CancellationToken cancellationToken)
{
var reservationEntity = await _dbContext
.Reservations
.SingleOrDefaultAsync(m => m.ReservationId == request.ReservationId, cancellationToken);
_ = _dbContext.Remove(reservationEntity);
_ = await _dbContext.SaveChangesAsync(cancellationToken);
_signOutOfClassSender.DeleteReservation(reservationEntity.ReservationId);
var reservationDto = reservationEntity.Adapt<ReservationDtoSetter>();
return reservationDto;
}
}
}<file_sep>/Fitverse.CalendarService/Models/CalendarClass.cs
using System;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace Fitverse.CalendarService.Models
{
public class CalendarClass
{
[Key]
[Required]
[DatabaseGenerated(DatabaseGeneratedOption.Identity)]
public int ClassId { get; set; }
[Required] public int ClassTypeId { get; set; }
[Required] public string ClassName { get; set; }
[Required] public DateTime Date { get; set; }
[Required] public DateTime StartingTime { get; set; }
[Required] public DateTime EndingTime { get; set; }
[Required] public int TimetableId { get; set; }
}
}<file_sep>/Fitverse.CalendarService/Commands/EditClassTypeCommand.cs
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Models;
using MediatR;
using Microsoft.AspNetCore.JsonPatch;
namespace Fitverse.CalendarService.Commands
{
public class EditClassTypeCommand : IRequest<ClassTypeDto>
{
public EditClassTypeCommand(int classTypeId, JsonPatchDocument<ClassType> classTypeEntity)
{
ClassTypeId = classTypeId;
NewClassTypeEntity = classTypeEntity;
}
public int ClassTypeId { get; }
public JsonPatchDocument<ClassType> NewClassTypeEntity { get; }
}
}<file_sep>/Fitverse.CalendarService/Handlers/GetClassTypeByIdHandler.cs
using System;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Queries;
using Mapster;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Handlers
{
public class GetClassTypeByIdHandler : IRequestHandler<GetClassTypeByIdQuery, ClassTypeDto>
{
private readonly CalendarContext _dbContext;
public GetClassTypeByIdHandler(CalendarContext dbContext)
{
_dbContext = dbContext;
}
public async Task<ClassTypeDto> Handle(GetClassTypeByIdQuery request, CancellationToken cancellationToken)
{
var classTypeEntity = await _dbContext
.ClassTypes
.SingleOrDefaultAsync(m => m.ClassTypeId == request.ClassTypeId && !m.IsDeleted, cancellationToken);
if (classTypeEntity is null)
throw new NullReferenceException($"ClassType [ClassTypeId: {request.ClassTypeId} not found]");
var classTypeDto = classTypeEntity.Adapt<ClassTypeDto>();
return classTypeDto;
}
}
}<file_sep>/Fitverse.CalendarService/Models/ClassType.cs
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace Fitverse.CalendarService.Models
{
public class ClassType
{
[Key]
[Required]
[DatabaseGenerated(DatabaseGeneratedOption.Identity)]
public int ClassTypeId { get; set; }
[Required]
[MinLength(3)]
[MaxLength(30)]
public string Name { get; set; }
[Required]
[MaxLength(255)]
public string Description { get; set; }
[Required]
public int Limit { get; set; }
[MinLength(3)]
[MaxLength(30)]
public string Room { get; set; }
[Required]
public int Duration { get; set; }
[Required]
public bool IsDeleted { get; set; }
}
}<file_sep>/Fitverse.CalendarService/Validators/DeleteClassByIdCommandValidator.cs
using System.Linq;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using FluentValidation;
namespace Fitverse.CalendarService.Validators
{
public class DeleteClassByIdCommandValidator : AbstractValidator<DeleteClassByIdCommand>
{
public DeleteClassByIdCommandValidator(CalendarContext dbContext)
{
RuleFor(x => x.ClassId)
.GreaterThan(0);
RuleFor(x => x.ClassId)
.Must(id => dbContext.Classes.Any(m => m.ClassId == id))
.WithMessage(x => $"Class [ClassId: {x.ClassId}] not found.");
}
}
}<file_sep>/Fitverse.CalendarService/Handlers/GetClassByIdHandler.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Queries;
using Mapster;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Handlers
{
public class GetClassByIdHandler : IRequestHandler<GetClassByIdQuery, CalendarClassDto>
{
private readonly CalendarContext _dbContext;
public GetClassByIdHandler(CalendarContext dbContext)
{
_dbContext = dbContext;
}
public async Task<CalendarClassDto> Handle(GetClassByIdQuery request, CancellationToken cancellationToken)
{
var classEntity = await _dbContext
.Classes
.SingleOrDefaultAsync(m => m.ClassId == request.ClassId, cancellationToken);
if (classEntity is null)
throw new NullReferenceException($"Class [ClassId: {request.ClassId} not found]");
var classDto = classEntity.Adapt<CalendarClassDto>();
var classTypeEntity = await _dbContext
.ClassTypes
.SingleOrDefaultAsync(m => m.ClassTypeId == classEntity.ClassTypeId, cancellationToken);
if (classTypeEntity is null)
throw new NullReferenceException($"ClassType [ClassTypeId: {classEntity.ClassTypeId} not found]");
classDto.ClassName = classTypeEntity.Name;
classDto.Description = classTypeEntity.Description;
classDto.Limit = classTypeEntity.Limit;
var reservations = await _dbContext
.Reservations
.Where(m => m.ClassId == classDto.ClassId)
.ToListAsync(cancellationToken);
classDto.Reservations = new List<int>();
foreach (var reservation in reservations)
classDto.Reservations.Add(reservation.ReservationId);
return classDto;
}
}
}<file_sep>/Fitverse.CalendarService/Commands/SignUpForClassCommand.cs
using Fitverse.CalendarService.Dtos;
using MediatR;
namespace Fitverse.CalendarService.Commands
{
public class SignUpForClassCommand : IRequest<ReservationDtoSetter>
{
public SignUpForClassCommand(ReservationDtoSetter reservation)
{
Reservation = reservation;
}
public ReservationDtoSetter Reservation { get; }
}
}<file_sep>/Fitverse.CalendarService/Handlers/GetReservationsByClassIdHandler.cs
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Queries;
using Mapster;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Handlers
{
public class GetReservationsByClassIdHandler : IRequestHandler<GetReservationsByClassIdCommand, List<ReservationDtoGetter>>
{
private readonly CalendarContext _dbContext;
public GetReservationsByClassIdHandler(CalendarContext dbContext)
{
_dbContext = dbContext;
}
public async Task<List<ReservationDtoGetter>> Handle(GetReservationsByClassIdCommand request, CancellationToken cancellationToken)
{
var reservationsList = await _dbContext
.Reservations
.Where(x => x.ClassId == request.ClassId)
.ToListAsync(cancellationToken);
var reservationsDtoList = new List<ReservationDtoGetter>();
foreach (var reservation in reservationsList)
{
var reservationDto = reservation.Adapt<ReservationDtoGetter>();
var member = await _dbContext
.Members
.FirstAsync(x => x.MemberId == reservation.MemberId, cancellationToken);
reservationDto.Member = member.Adapt<MemberDto>();
reservationsDtoList.Add(reservationDto);
}
reservationsDtoList = reservationsDtoList.OrderBy(x => x.Member.SurName).ToList();
return reservationsDtoList;
}
}
}<file_sep>/Fitverse.CalendarService/Validators/DeleteClassTypeCommandValidator.cs
using System.Linq;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using FluentValidation;
namespace Fitverse.CalendarService.Validators
{
public class DeleteClassTypeCommandValidator : AbstractValidator<DeleteClassTypeCommand>
{
public DeleteClassTypeCommandValidator(CalendarContext dbContext)
{
RuleFor(x => x.ClassTypeId)
.GreaterThan(0);
RuleFor(x => x.ClassTypeId)
.Must(id => dbContext.ClassTypes.Any(m => m.ClassTypeId == id))
.WithMessage(x => $"Class type [ClassTypeId: {x.ClassTypeId}] not found");
}
}
}<file_sep>/Fitverse.CalendarService/MessageBus/Senders/SignUpForClassSender.cs
using System;
using Fitverse.CalendarService.Interfaces;
using Fitverse.CalendarService.Models;
using Fitverse.Shared.MessageBus;
using Microsoft.Extensions.Options;
namespace Fitverse.CalendarService.MessageBus.Senders
{
public class SignUpForClassSender : ISignUpForClassSender
{
private readonly IOptions<RabbitMqConfiguration> _rabbitMqOptions;
public SignUpForClassSender(IOptions<RabbitMqConfiguration> rabbitMqOptions)
{
_rabbitMqOptions = rabbitMqOptions;
}
public void AddReservation(Reservation reservation)
{
var exchangeConfig = new Tuple<string, string>("classes", "signUpForClass");
SendEventHandler.SendEvent(reservation, _rabbitMqOptions, exchangeConfig);
}
}
}<file_sep>/Fitverse.CalendarService/Dtos/ClassTypeDto.cs
namespace Fitverse.CalendarService.Dtos
{
public class ClassTypeDto
{
public int ClassTypeId { get; private set; }
public string Name { get; set; }
public string Description { get; set; }
public int Limit { get; set; }
public string Room { get; set; }
public int Duration { get; set; }
}
}<file_sep>/Fitverse.CalendarService/Handlers/AddClassTypeHandler.cs
using System;
using System.Threading;
using System.Threading.Tasks;
using Fitverse.CalendarService.Commands;
using Fitverse.CalendarService.Data;
using Fitverse.CalendarService.Dtos;
using Fitverse.CalendarService.Models;
using Mapster;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace Fitverse.CalendarService.Handlers
{
public class AddClassTypeHandler : IRequestHandler<AddClassTypeCommand, ClassTypeDto>
{
private readonly CalendarContext _dbContext;
public AddClassTypeHandler(CalendarContext dbContext)
{
_dbContext = dbContext;
}
public async Task<ClassTypeDto> Handle(AddClassTypeCommand request, CancellationToken cancellationToken)
{
var name = request.NewClassType.Name;
var classTypeEntity = request.NewClassType.Adapt<ClassType>();
_ = await _dbContext.AddAsync(classTypeEntity, cancellationToken);
_ = await _dbContext.SaveChangesAsync(cancellationToken);
var newClassTypeEntity = await _dbContext
.ClassTypes
.SingleOrDefaultAsync(m => m.Name == name, cancellationToken);
if (newClassTypeEntity is null)
throw new NullReferenceException("Failed to add class type. Try again");
var newClassTypeDto = newClassTypeEntity.Adapt<ClassTypeDto>();
return newClassTypeDto;
}
}
} | e907691bb10506c802c4e1ba7303b4eb0ddc2ed0 | [
"C#"
] | 65 | C# | W0nsu/Fitverse.CalendarService | 5ea4d2422629e6a0e0d89a6ff5dd23eb85f6b132 | 1ee4a6de1e132d782c56d26891bb36516fc560a1 | |
refs/heads/master | <file_sep>#include "threads.h"
/* Initializes the locks for the selected synchronization variant */
void lock_init(){
#ifdef MUTEX
pthread_mutex_init(&lock,NULL);
pthread_mutex_init(&lock_commands,NULL);
#elif RWLOCK
pthread_rwlock_init(&rwlock,NULL);
pthread_rwlock_init(&rwlock_commands,NULL);
#endif
}
/* Destroys the locks created in lock_init */
void lock_destroy(){
#ifdef MUTEX
pthread_mutex_destroy(&lock);
pthread_mutex_destroy(&lock_commands);
#elif RWLOCK
pthread_rwlock_destroy(&rwlock);
pthread_rwlock_destroy(&rwlock_commands);
#endif
}
/* Lock_function chooses between using Mutex_lock
or Rw/Wrlock depending on the executable */
void lock_function(int i, pthread_mutex_t mutex, pthread_rwlock_t rw){
#ifdef MUTEX
pthread_mutex_lock(&mutex);
#elif RWLOCK
if (i) /*If it's !=0, than it locks for write*/
pthread_rwlock_wrlock(&rw);
else /*else, it locks for reading*/
pthread_rwlock_rdlock(&rw);
#endif
}
/* unlock_function chooses between using Mutex_unlock
or Rwlock_unlock depending on the executable */
void unlock_function(pthread_mutex_t mutex, pthread_rwlock_t rw){
#ifdef MUTEX
pthread_mutex_unlock(&mutex);
#elif RWLOCK
pthread_rwlock_unlock(&rw);
#endif
}
<file_sep># Makefile, versao 1
# Sistemas Operativos, DEI/IST/ULisboa 2019-20
SOURCES = main.c fs.c sync.c
SOURCES+= lib/bst.c
OBJS_NOSYNC = $(SOURCES:%.c=%.o)
OBJS_MUTEX = $(SOURCES:%.c=%-mutex.o)
OBJS_RWLOCK = $(SOURCES:%.c=%-rwlock.o)
OBJS = $(OBJS_NOSYNC) $(OBJS_MUTEX) $(OBJS_RWLOCK)
CC = gcc
LD = gcc
CFLAGS =-Wall -std=gnu99 -I../ -g
LDFLAGS=-lm -pthread
TARGETS = tecnicofs-nosync tecnicofs-mutex tecnicofs-rwlock
.PHONY: all clean
all: $(TARGETS)
$(TARGETS):
$(LD) $(CFLAGS) $^ -o $@ $(LDFLAGS)
### no sync ###
lib/bst.o: lib/bst.c lib/bst.h
fs.o: fs.c fs.h lib/bst.h
the.o: threads.c threads.h
main.o: main.c fs.h lib/bst.h threads.h
tecnicofs-nosync: lib/bst.o fs.o the.o main.o
### MUTEX ###
lib/bst-mutex.o: CFLAGS+=-DMUTEX
lib/bst-mutex.o: lib/bst.c lib/bst.h
fs-mutex.o: CFLAGS+=-DMUTEX
fs-mutex.o: fs.c fs.h lib/bst.h
the-mutex.o: CFLAGS+=-DMUTEX
the-mutex.o: threads.c threads.h
main-mutex.o: CFLAGS+=-DMUTEX
main-mutex.o: main.c fs.h lib/bst.h threads.h
tecnicofs-mutex: lib/bst-mutex.o fs-mutex.o the-mutex.o main-mutex.o
### RWLOCK ###
lib/bst-rwlock.o: CFLAGS+=-DRWLOCK
lib/bst-rwlock.o: lib/bst.c lib/bst.h
fs-rwlock.o: CFLAGS+=-DRWLOCK
fs-rwlock.o: fs.c fs.h lib/bst.h
the-rwlock.o: CFLAGS+=-DRWLOCK
the-rwlock.o: threads.c threads.h
main-rwlock.o: CFLAGS+=-DRWLOCK
main-rwlock.o: main.c fs.h lib/bst.h threads.h
tecnicofs-rwlock: lib/bst-rwlock.o fs-rwlock.o the-rwlock.o main-rwlock.o
%.o:
$(CC) $(CFLAGS) -c -o $@ $<
clean:
@echo Cleaning...
rm -f $(OBJS) $(TARGETS)
<file_sep># SO Project
>CHECK WHETHER THE FILE IS OPEN AND FOR WHAT
## Open
```
List of inodes kept per session (Max: 5)
```
Errors:
+ Check that the file exists (lookup)
+ Check that there are free positions we can write to
+ ...
## Write
```
Write the buffer contents into the inode
```
Errors:
+ Check that the file exists (lookup)
+ Check that the file is open in the table (or is in the Table)
+ ...
## Read
```
Read the inode contents into the buffer
```
Errors:
+ Check that the file exists (lookup)
+ Check that the file is open in the table (or is in the Table)
+ ...
## Close
```
Remove the entry from the list
```
Errors:
+ Check that the file exists (lookup)
+ Check that the file is open in the table (or is in the Table)
+ ... (a minimal sketch of this per-session open-file table follows below)
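The Open/Close bookkeeping above amounts to a small fixed-size table kept for each client session. Below is a minimal sketch of such a table in plain C; it assumes the 5-entry limit noted above, and the `session_open`/`session_close` names, the `open_file_t` struct and the integer `mode` field are illustrative assumptions, not the project's actual API.
```c
#include <stdio.h>

#define MAX_OPEN_FILES 5          /* per-session limit (assumed, as stated above) */

/* One slot of the per-session open-file table. */
typedef struct {
    int iNumber;                  /* -1 marks a free slot */
    int mode;                     /* mode the file was opened with */
} open_file_t;

typedef struct {
    open_file_t files[MAX_OPEN_FILES];
} session_t;

/* Marks every slot as free. */
static void session_init(session_t *s) {
    for (int i = 0; i < MAX_OPEN_FILES; i++)
        s->files[i].iNumber = -1;
}

/* Open: returns the slot index used as a file descriptor, or -1 when the table is full. */
static int session_open(session_t *s, int iNumber, int mode) {
    for (int i = 0; i < MAX_OPEN_FILES; i++) {
        if (s->files[i].iNumber == -1) {
            s->files[i].iNumber = iNumber;
            s->files[i].mode = mode;
            return i;
        }
    }
    return -1;                    /* no free position */
}

/* Close: frees the slot; returns 0 on success, -1 for an invalid or unused fd. */
static int session_close(session_t *s, int fd) {
    if (fd < 0 || fd >= MAX_OPEN_FILES || s->files[fd].iNumber == -1)
        return -1;
    s->files[fd].iNumber = -1;
    return 0;
}

int main(void) {
    session_t s;
    session_init(&s);
    int fd = session_open(&s, 42, 3);   /* e.g. inode 42 opened for read/write */
    printf("opened fd=%d\n", fd);
    printf("close result=%d\n", session_close(&s, fd));
    return 0;
}
```
The Write/Read checks rely on the same idea: a valid fd is just an index into this table whose slot is in use.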
# Questions
+ In the server, should we use mutexes or rwlocks? (a toy comparison sketch follows below)
+
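On the mutex vs. rwlock question: a single mutex serialises every operation, while a rwlock lets concurrent lookups share the lock and only serialises writers. A small self-contained sketch of that behaviour (a hypothetical toy program, not the project's `sync.h` interface; build with `-pthread`):
```c
#include <pthread.h>
#include <stdio.h>

/* Shared counter standing in for the FS structure; all names here are illustrative only. */
static int shared_value = 0;
static pthread_rwlock_t rwlock;

/* Readers (lookups) may hold the lock simultaneously. */
static void *reader(void *arg) {
    (void)arg;
    pthread_rwlock_rdlock(&rwlock);
    printf("read %d\n", shared_value);
    pthread_rwlock_unlock(&rwlock);
    return NULL;
}

/* Writers (create/delete/rename) get exclusive access. */
static void *writer(void *arg) {
    (void)arg;
    pthread_rwlock_wrlock(&rwlock);
    shared_value++;
    pthread_rwlock_unlock(&rwlock);
    return NULL;
}

int main(void) {
    pthread_t t[4];
    pthread_rwlock_init(&rwlock, NULL);
    pthread_create(&t[0], NULL, writer, NULL);
    pthread_create(&t[1], NULL, reader, NULL);
    pthread_create(&t[2], NULL, reader, NULL);
    pthread_create(&t[3], NULL, writer, NULL);
    for (int i = 0; i < 4; i++)
        pthread_join(t[i], NULL);
    pthread_rwlock_destroy(&rwlock);
    printf("final %d\n", shared_value);
    return 0;
}
```
Whether the extra complexity pays off depends on how read-heavy the command mix is.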
----
# What is still missing
+ Add locks everywhere they are still missing
+ Figure out which free() is still missing
+ pthread_signal
+ Ver multiplos clientes, clientes iguais da erro de sessao repetida...
<file_sep>#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/un.h>
#define UNIXSTR_PATH "/tmp/s.unixstr"
#define UNIXDG_PATH "/tmp/s.unixdgx"
#define UNIXDG_TMP "/tmp/dgXXXXXXX"
int mount(int argc , char *argv[]){
	struct sockaddr_un serv_addr;
int sockfd;
if ((sockfd = socket(AF_UNIX,SOCK_STREAM,0) ) < 0)
puts("server: can't open stream socket");
bzero((char *) &serv_addr, sizeof(serv_addr));
serv_addr.sun_family = AF_UNIX;
strcpy(serv_addr.sun_path, argv[1]);
int servlen = strlen(serv_addr.sun_path) + sizeof(serv_addr.sun_family);
if(connect(sockfd, (struct sockaddr *) &serv_addr, servlen) < 0)
puts("client: can't connect to server");
char buf[10];
read(STDIN_FILENO, buf, sizeof(buf));
write(sockfd, buf, sizeof(buf));
return 0;
}
int main(int argc , char *argv[]){
mount(argc,argv);
return 0;
}
<file_sep>/**
* Sistemas Operativos, DEI/IST/ULisboa 2019-20
* Modified by <NAME> Nelson, group 22
*/
#include "fs.h"
#include "lib/bst.h"
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include "sync.h"
tecnicofs* new_tecnicofs(int max){
tecnicofs*fs = malloc(sizeof(tecnicofs));
if (!fs) {
perror("failed to allocate tecnicofs");
exit(EXIT_FAILURE);
}
fs->hashMax=max;
fs->nextINumber = 0;
fs->bstRoot = (node **) malloc(sizeof(node)*max);
fs->bstLock = (syncMech *) malloc(sizeof(syncMech)*max);
for (int i=0;i < max;i++){
fs->bstRoot[i] = NULL;
sync_init(&(fs->bstLock[i]));
}
return fs;
}
void free_tecnicofs(tecnicofs* fs){
for (int i=0;i < fs->hashMax;i++){
free_tree(fs->bstRoot[i]);
sync_destroy(&(fs->bstLock[i]));
}
free(fs->bstRoot);
free(fs->bstLock);
free(fs);
}
void create(tecnicofs* fs, char *name, int inumber, int flag){ /* if flag==0 then it locks*/
int hashcode=hash(name,fs->hashMax);
if (!flag) sync_wrlock(&(fs->bstLock[hashcode]));
fs->bstRoot[hashcode] = insert(fs->bstRoot[hashcode], name, inumber);
if (!flag) sync_unlock(&(fs->bstLock[hashcode]));
}
void delete(tecnicofs* fs, char *name, int flag){ /* if flag==0 then it locks*/
int hashcode=hash(name,fs->hashMax);
if (!flag) sync_wrlock(&(fs->bstLock[hashcode]));
fs->bstRoot[hashcode] = remove_item(fs->bstRoot[hashcode], name);
if (!flag) sync_unlock(&(fs->bstLock[hashcode]));
}
int lookup(tecnicofs* fs, char *name){
int hashcode=hash(name,fs->hashMax);
sync_rdlock(&(fs->bstLock[hashcode]));
int inumber = -1;
node* searchNode = search(fs->bstRoot[hashcode], name);
if ( searchNode ) {
inumber = searchNode->inumber;
}
sync_unlock(&(fs->bstLock[hashcode]));
return inumber;
}
void renameFile(char* oldName,char* newName,tecnicofs *fs) {
int locknew = hash(newName,fs->hashMax);
int lockold = hash(oldName, fs->hashMax);
int iNumberOld = lookup(fs,oldName);
int iNumberNew = lookup(fs,newName);
if (iNumberNew==-1){
if(iNumberOld!=-1){
if (locknew==lockold){
sync_wrlock(&(fs->bstLock[locknew]));
delete(fs,oldName,1);
create(fs,newName,iNumberOld,1);
sync_unlock(&(fs->bstLock[locknew]));
return;
}
else
while(1){
sync_wrlock(&(fs->bstLock[lockold]));
int err = syncMech_try_lock(&(fs->bstLock[locknew]));
if (!err){
delete(fs,oldName,1);
create(fs,newName,iNumberOld,1);
sync_unlock(&(fs->bstLock[locknew]));
sync_unlock(&(fs->bstLock[lockold]));
return;
}
sync_unlock(&(fs->bstLock[lockold]));
}
}
else {printf("%s doesn't existes!\n",oldName);return;} //Erro de nao existir
}
else {printf("%s already existes!\n",newName);return;} //Erro de ja existir
}
void print_tecnicofs_tree(FILE * fp, tecnicofs *fs){
for (int i=0; i < fs->hashMax ;i++)
if (fs->bstRoot[i]!=NULL)
print_tree(fp, fs->bstRoot[i]);
}
<file_sep>#!/bin/bash
#Modified by <NAME> Nelson, group 22
#Checks if there are 4 elements after the executable
if [ ! $# -eq 4 ];then
echo "Wrong format!"
echo "Correct form: ./runTests.sh inputFolder outputFolder numThreads numHash"
exit 1
fi
#Inputs
inputdir=$1
outputdir=$2
maxthreads=$3
numbuckets=$4
#Scrpit
if [ ! -d "tecnicofs-*" ]; then
make all | grep !""
fi
mkdir -p $2
for inputFile in "$inputdir"/*
do
echo "InputFile = ${inputFile/"$inputdir/"/""}" "NumThreads = 1"
auxFile=${inputFile/"$inputdir"/$outputdir}
auxFile=${auxFile%.*}
outFile="$auxFile-1.txt"
./tecnicofs-nosync $inputFile $outFile 1 1 | grep "TecnicoFS completed in"
for thread in $(seq 2 $maxthreads)
do
echo "InputFile = ${inputFile/"$inputdir/"/""}" "NumThreads = $thread"
auxFile=${inputFile/"$inputdir"/$outputdir}
auxFile=${auxFile%.*}
outFile="$auxFile-$thread.txt"
./tecnicofs-mutex $inputFile $outFile $thread $numbuckets | grep "TecnicoFS completed in"
done
done
#make clean | grep !""
exit 0
<file_sep>/*
First Project for Operating systems.
Modified by <NAME> and <NAME>,
ist191593 and ist193743, Group 22.
*/
#include <stdio.h>
#include <stdlib.h>
#include <getopt.h>
#include <string.h>
#include <ctype.h>
#include <sys/time.h>
#include "threads.h"
#define MAX_COMMANDS 150000
#define MAX_INPUT_SIZE 100
int numberThreads = 0;
tecnicofs* fs;
char inputCommands[MAX_COMMANDS][MAX_INPUT_SIZE];
int numberCommands = 0;
int headQueue = 0;
static void displayUsage (const char* appName){
printf("Usage: %s\n", appName);
exit(EXIT_FAILURE);
}
static void parseArgs (long argc, char* const argv[]){
if (argc != 4) {
fprintf(stderr, "Invalid format:\n");
displayUsage(argv[0]);
}
    if (atoi(argv[3]) <= 0){
        fprintf(stderr, "Invalid number of threads\n");
        exit(EXIT_FAILURE);
    }
}
int insertCommand(char* data) {
if(numberCommands != MAX_COMMANDS) {
strcpy(inputCommands[numberCommands++], data);
return 1;
}
return 0;
}
char* removeCommand() {
lock_function(1,lock_commands, rwlock_commands);
if(numberCommands > 0){
numberCommands--;
unlock_function(lock_commands, rwlock_commands);
return inputCommands[headQueue++];
}
unlock_function(lock_commands, rwlock_commands);
return NULL;
}
void errorParse(){
fprintf(stderr, "Error: command invalid\n");
//exit(EXIT_FAILURE);
}
void processInput(char* f_in){
FILE *fin = fopen(f_in, "r");
if (fin==NULL){
fprintf(stderr, "Error: Not existing input file\n");
exit(EXIT_FAILURE);
}
char line[MAX_INPUT_SIZE];
while (fgets(line, sizeof(line)/sizeof(char), fin)) {
char token;
char name[MAX_INPUT_SIZE];
int numTokens = sscanf(line, "%c %s", &token, name);
/* perform minimal validation */
if (numTokens < 1) {
continue;
}
switch (token) {
case 'c':
case 'l':
case 'd':
if(numTokens != 2)
errorParse();
if(insertCommand(line))
break;
return;
case '#':
break;
default: { /* error */
errorParse();
}
}
}
fclose(fin);
}
void* applyCommands(void *args){
while(numberCommands > 0){
const char* command = removeCommand();
if (command == NULL){
continue;
}
char token;
char name[MAX_INPUT_SIZE];
int numTokens = sscanf(command, "%c %s", &token, name);
if (numTokens != 2) {
fprintf(stderr, "Error: invalid command in Queue\n");
exit(EXIT_FAILURE);
}
int searchResult;
int iNumber;
switch (token) {
case 'c':
iNumber = obtainNewInumber(fs);
create(fs, name, iNumber);
break;
case 'l':
searchResult = lookup(fs, name);
if(!searchResult)
printf("%s not found\n", name);
else
printf("%s found with inumber %d\n", name, searchResult);
break;
case 'd':
delete(fs, name);
break;
default: { /* error */
fprintf(stderr, "Error: command to apply\n");
exit(EXIT_FAILURE);
}
}
}
return NULL;
}
void apply_command_main(int maxThreads){
#if defined(MUTEX) || defined(RWLOCK)
for (int i=0;i<maxThreads;i++)
        if (pthread_create(&tid[i],NULL,applyCommands,NULL))
fprintf(stderr, "Error: pthread_create failed to execute\n");
for (int i=0;i<maxThreads;i++)
        if (pthread_join(tid[i],NULL))
fprintf(stderr, "Error: pthread_join failed to execute\n");
#else
applyCommands(NULL);
#endif
}
int main(int argc, char* argv[]) {
parseArgs(argc, argv);
FILE *fout;
double time_taken=0;
struct timeval start, end;
lock_init();
fs = new_tecnicofs();
processInput(argv[1]);
gettimeofday(&start, NULL); /*Start clock*/
apply_command_main(atoi(argv[3]));
gettimeofday(&end, NULL); /*Ends clock*/
fout = fopen(argv[2],"w");
print_tecnicofs_tree(fout, fs);
fclose(fout);
lock_destroy();
free_tecnicofs(fs);
/*Execution Time*/
time_taken = (end.tv_sec - start.tv_sec) * 1e6; /*Seconds*/
time_taken += (end.tv_usec - start.tv_usec) * 1e-6; /*Micro-Seconds*/
printf("TecnicoFS completed in %.04f seconds.\n", time_taken);
exit(EXIT_SUCCESS);
}
<file_sep>#!/bin/bash
echo Nesta aula vamos aprender a
echo desenvolver os nossos primeiros scripts em bash!
<file_sep>#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/un.h>
#define UNIXSTR_PATH "/tmp/s.unixstr"
#define UNIXDG_PATH "/tmp/s.unixdgx"
#define UNIXDG_TMP "/tmp/dgXXXXXXX"
int main(int argc , char *argv[]){
struct sockaddr_un serv_addr, cli_addr;
int sockfd;
FILE* fout=fopen(argv[2],"w");
sockfd = socket(AF_UNIX,SOCK_STREAM,0);
if (sockfd < 0)
puts("server: can't open stream socket");
unlink(argv[1]);
bzero((char *)&serv_addr, sizeof(serv_addr));
serv_addr.sun_family = AF_UNIX;
strcpy(serv_addr.sun_path, argv[1]);
int servlen = strlen(serv_addr.sun_path) + sizeof(serv_addr.sun_family);
if (bind(sockfd, (struct sockaddr *) &serv_addr, servlen) < 0)
puts("server, can't bind local address");
listen(sockfd, 5);
	socklen_t len = sizeof(cli_addr);
	puts("Waiting..");
	int newsockfd = accept(sockfd, (struct sockaddr *) &cli_addr, &len);
	if (newsockfd < 0) puts("server: accept error");
	char recvline[11];
	int n = read(newsockfd, recvline, sizeof(recvline) - 1);
	if (n < 0) n = 0;
	recvline[n] = 0;
fputs(recvline, fout);
fclose(fout);
}
<file_sep>if [ $1 -eq 1 ];then
echo nosync;
for i in $(seq 1 $2)
do
./tecnicofs-nosync inputs/test1.txt outF 1 1;
cat outF
echo ---
done
elif [ $1 -eq 2 ];then
echo mutex;
for i in $(seq 1 $2)
do
./tecnicofs-mutex inputs/test1.txt outF 4 4;
cat outF;
echo ---
done
fi
<file_sep>/*
First Project for Operating systems.
Modified by <NAME> and <NAME>,
ist191593 and ist193743, Group 22.
*/
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include "fs.h"
int obtainNewInumber(tecnicofs* fs) {
lock_function(1, lock, rwlock);
int newInumber = ++(fs->nextINumber);
unlock_function(lock,rwlock);
return newInumber;
}
tecnicofs* new_tecnicofs(){
tecnicofs*fs = malloc(sizeof(tecnicofs));
if (!fs) {
perror("failed to allocate tecnicofs");
exit(EXIT_FAILURE);
}
fs->bstRoot = NULL;
fs->nextINumber = 0;
return fs;
}
void free_tecnicofs(tecnicofs* fs){
free_tree(fs->bstRoot);
free(fs);
}
void create(tecnicofs* fs, char *name, int inumber){
lock_function(1, lock, rwlock);
fs->bstRoot = insert(fs->bstRoot, name, inumber);
unlock_function(lock,rwlock);
}
void delete(tecnicofs* fs, char *name){
lock_function(1, lock, rwlock);
fs->bstRoot = remove_item(fs->bstRoot, name);
unlock_function(lock,rwlock);
}
int lookup(tecnicofs* fs, char *name){
lock_function(0, lock, rwlock);
node* searchNode = search(fs->bstRoot, name);
unlock_function(lock,rwlock);
if ( searchNode ) return searchNode->inumber;
return 0;
}
void print_tecnicofs_tree(FILE * fp, tecnicofs *fs){
print_tree(fp, fs->bstRoot);
}
<file_sep>/**
* Sistemas Operativos, DEI/IST/ULisboa 2019-20
* Created by <NAME> Nelson, group 22
*/
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/un.h>
#include "tecnicofs-client-api.h"
int sockfd;
int tfsCreate(char *filename, permission ownerPermissions, permission othersPermissions) {
char buff[MAX_INPUT]="";
dprintf(sockfd, "c %s %d", filename, (ownerPermissions*10+othersPermissions));
read(sockfd,&buff,sizeof(buff));
return atoi(buff);
}
int tfsDelete(char *filename){
char buff[MAX_INPUT]="";
dprintf(sockfd, "d %s", filename);
read(sockfd,&buff,sizeof(buff));
return atoi(buff);
}
int tfsRename(char *filenameOld, char *filenameNew){
char buff[MAX_INPUT]="";
dprintf(sockfd, "r %s %s", filenameOld, filenameNew);
read(sockfd,&buff,sizeof(buff));
return atoi(buff);
}
int tfsOpen(char *filename, permission mode){
char buff[MAX_INPUT]="";
dprintf(sockfd, "o %s %d", filename, mode);
read(sockfd,&buff,sizeof(buff));
return atoi(buff);
}
int tfsClose(int fd){
char buff[MAX_INPUT]="";
dprintf(sockfd, "x %d", fd);
read(sockfd,&buff,sizeof(buff));
return atoi(buff);
}
int tfsRead(int fd, char *buffer, int len){
	char buff[MAX_INPUT]="";
	dprintf(sockfd, "l %d %d", fd, len);
	int n = read(sockfd, buff, sizeof(buff) - 1);   /* leave room for the terminator */
	if (n < 0) n = 0;
	buff[n] = '\0';
	sscanf(buff, "%s %d", buffer, &len);
	return len;
}
int tfsWrite(int fd, char *buffer, int len){
char buff[MAX_INPUT]="";
dprintf(sockfd, "w %d %s", fd,buffer);
read(sockfd,&buff,sizeof(buff));
return atoi(buff);
}
int tfsMount(char * address){
struct sockaddr_un serv_addr;
sockfd = socket(AF_UNIX,SOCK_STREAM,0);
if (sockfd < 0)
puts("server: can't open stream socket");
bzero((char *) &serv_addr, sizeof(serv_addr));
serv_addr.sun_family = AF_UNIX;
strcpy(serv_addr.sun_path, address);
int servlen = strlen(serv_addr.sun_path) + sizeof(serv_addr.sun_family);
if(connect(sockfd, (struct sockaddr *) &serv_addr, servlen) < 0){
puts("client: can't connect to server");
exit(TECNICOFS_ERROR_CONNECTION_ERROR);
	}
	return 0;
}
int tfsUnmount(){
dprintf(sockfd, "e");
	close(sockfd);
	return 0;
}
<file_sep># Makefile, versao 1
# Sistemas Operativos, DEI/IST/ULisboa 2019-20
SOURCES = main.c fs.c sync.c
SOURCES+= lib/bst.c lib/hash.c
OBJS_NOSYNC = $(SOURCES:%.c=%.o)
OBJS_MUTEX = $(SOURCES:%.c=%-mutex.o)
OBJS_RWLOCK = $(SOURCES:%.c=%-rwlock.o)
OBJS = $(OBJS_NOSYNC) $(OBJS_MUTEX) $(OBJS_RWLOCK)
CC = gcc
LD = gcc
CFLAGS =-Wall -std=gnu99 -I../ -g
LDFLAGS=-lm -pthread
TARGETS = tecnicofs-nosync tecnicofs-mutex tecnicofs-rwlock
.PHONY: all clean
all: $(TARGETS)
$(TARGETS):
$(LD) $(CFLAGS) $^ -o $@ $(LDFLAGS)
### no sync ###
lib/bst.o: lib/bst.c lib/bst.h
lib/hash.o: lib/hash.c lib/hash.h
fs.o: fs.c fs.h lib/bst.h
sync.o: sync.c sync.h constants.h
main.o: main.c fs.h lib/bst.h constants.h lib/timer.h sync.h lib/hash.h
tecnicofs-nosync: lib/bst.o fs.o sync.o main.o lib/hash.o
### MUTEX ###
lib/bst-mutex.o: CFLAGS+=-DMUTEX
lib/bst-mutex.o: lib/bst.c lib/bst.h
lib/hash-mutex.o: CFLAGS+=-DMUTEX
lib/hash-mutex.o: lib/hash.c lib/hash.h
fs-mutex.o: CFLAGS+=-DMUTEX
fs-mutex.o: fs.c fs.h lib/bst.h
sync-mutex.o: CFLAGS+=-DMUTEX
sync-mutex.o: sync.c sync.h constants.h
main-mutex.o: CFLAGS+=-DMUTEX
main-mutex.o: main.c fs.h lib/bst.h constants.h lib/timer.h sync.h lib/hash.h
tecnicofs-mutex: lib/bst-mutex.o lib/hash-mutex.o fs-mutex.o sync-mutex.o main-mutex.o
### RWLOCK ###
lib/bst-rwlock.o: CFLAGS+=-DRWLOCK
lib/bst-rwlock.o: lib/bst.c lib/bst.h
lib/hash-rwlock.o: CFLAGS+=-DRWLOCK
lib/hash-rwlock.o: lib/hash.c lib/hash.h
fs-rwlock.o: CFLAGS+=-DRWLOCK
fs-rwlock.o: fs.c fs.h lib/bst.h
sync-rwlock.o: CFLAGS+=-DRWLOCK
sync-rwlock.o: sync.c sync.h constants.h
main-rwlock.o: CFLAGS+=-DRWLOCK
main-rwlock.o: main.c fs.h lib/bst.h constants.h lib/timer.h sync.h lib/hash.h
tecnicofs-rwlock: lib/bst-rwlock.o lib/hash-rwlock.o fs-rwlock.o sync-rwlock.o main-rwlock.o
%.o:
$(CC) $(CFLAGS) -c -o $@ $<
clean:
@echo Cleaning...
rm -f $(OBJS) $(TARGETS)
<file_sep>void renameFile(char* oldName,char* newName,tecnicofs *fs) {
int hashNew = hash(newName,fs->hashMax);
int hashOld = hash(oldName, fs->hashMax);
while (!lookup(fs,newName)) {
int iNumber = lookup(fs,oldName);
if (lookup(fs,oldName)) {
if (!lookup(fs,newName)) {
if (hashNew == hashOld) {
if (!syncMech_try_lock(&(fs->bstLock[hashNew]))) {
delete(fs,oldName,1);
create(fs,newName,iNumber,1);
sync_unlock(&(fs->bstLock[hashNew]));
return;
} else {
sync_unlock(&(fs->bstLock[hashNew]));
}
} else {
if (!syncMech_try_lock(&(fs->bstLock[hashOld]))) {
delete(fs,oldName,1);
sync_unlock(&(fs->bstLock[hashOld]));
}
if (!syncMech_try_lock(&(fs->bstLock[hashNew]))) {
create(fs,newName,iNumber,1);
sync_unlock(&(fs->bstLock[hashNew]));
}
}
}
}
}
return;
}<file_sep>#!/bin/bash
outdir=out/;
for outFile in "$outdir"/*
do
echo »» $outFile:
cat $outFile
echo
done<file_sep>#include "../tecnicofs-api-constants.h"
#include "../tecnicofs-client-api.h"
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <assert.h>
#include <string.h>
int main(int argc, char* argv[]) {
assert(tfsMount(argv[1]) == 0);
printf("Test: create file sucess\n");
assert(tfsCreate("abc", RW, READ) == 0);
sleep(25);
assert(tfsUnmount() == 0);
exit( 0);
}<file_sep>/* Sistemas Operativos, DEI/IST/ULisboa 2019-20 */
/* Modified by <NAME> Nelson, group 22 */
#define _GNU_SOURCE
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/un.h>
#include <pthread.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <signal.h>
#include "fs.h"
#include "sync.h"
#include "constants.h"
#include "lib/timer.h"
#include "lib/inodes.h"
int numBuckets;
char *nomeSocket, *global_outputFile;
int sockfd, newsockfd;
sigset_t set;
tecnicofs *fs;
pthread_t vector_threads[MAX_CLIENTS];
int flag_end=0;
struct ucred ucred;
TIMER_T startTime, stopTime;
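/* Per-client open-file table entry: the inode number of the open file and the
 * mode it was opened with; a free slot is marked with iNumber == -1. */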
struct file {
int iNumber;
enum permission mode;
};
static void displayUsage (const char* appName){
printf("Usage: %s input_filepath output_filepath threads_number\n", appName);
exit(EXIT_FAILURE);
}
static void parseArgs (long argc, char* const argv[]){
if (argc != 4) {
fprintf(stderr, "Invalid format:\n");
displayUsage(argv[0]);
}
nomeSocket = argv[1];
global_outputFile = argv[2];
numBuckets = atoi(argv[3]);
if (!numBuckets) {
fprintf(stderr, "Invalid number of buckets\n");
displayUsage(argv[0]);
}
}
/**
*
* * Checks if the permission passed as argument is sufficient
*
* */
int isPermitted(permission othersPermission, enum permission perm) {
int ret;
switch (othersPermission) {
case 1:
if (perm == WRITE || perm == RW) ret = 0;
else ret = -1;
break;
case 2:
if (perm == READ || perm == RW) ret = 0;
else ret = -1;
break;
case 3:
ret = 0;
break;
case 0:
default:
ret = -1;
break;
}
return ret;
}
/*
* Verifies if the user can do the operation desired
*/
int user_allowed(int userid, int fd, struct file *files, enum permission perm) {
permission ownerPermission;
permission othersPermission;
uid_t creatorId;
inode_get(files[fd].iNumber,&creatorId,&ownerPermission,&othersPermission,NULL,0);
if (isPermitted(perm,files[fd].mode) == 0) {
if (userid != creatorId) {
if (isPermitted(othersPermission,perm) == 0) return 0;
}
else if (isPermitted(ownerPermission,files[fd].mode) == 0) return 0;
return TECNICOFS_ERROR_PERMISSION_DENIED;
}
return TECNICOFS_ERROR_INVALID_MODE;
}
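/* Request handlers: each apply_* function parses one request from buff and
 * returns 0 (or, for open, the table index) on success, or a
 * TECNICOFS_ERROR_* code on failure. */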
int apply_create(uid_t userid, char *buff){
int iNumber=0;
int permissions;
char token,name[MAX_INPUT_SIZE];
permission ownerPermissions,otherPermissions;
sscanf(buff, "%s %s %d", &token, name, &permissions);
if (lookup(fs,name)==-1){
otherPermissions = permissions%10;
ownerPermissions = permissions/10;
iNumber = inode_create(userid,ownerPermissions,otherPermissions);
create(fs, name, iNumber,1);
return 0;
}
return TECNICOFS_ERROR_FILE_ALREADY_EXISTS;
}
int apply_delete(uid_t userid, char *buff){
uid_t owner;
int iNumber=0;
char token,name[MAX_INPUT_SIZE];
sscanf(buff, "%s %s", &token, name);
iNumber=lookup(fs,name);
if (iNumber==-1) return TECNICOFS_ERROR_FILE_NOT_FOUND;
inode_get(iNumber,&owner,NULL,NULL,NULL,0);
if(userid==owner){
inode_delete(iNumber);
delete(fs, name,0);
return 0;
}
return TECNICOFS_ERROR_PERMISSION_DENIED;
}
int apply_rename(uid_t userid, char *buff){
uid_t ownerold;
int iNumberold=0, iNumbernew=0;
char token,nameold[MAX_INPUT_SIZE], namenew[MAX_INPUT_SIZE];
permission ownerPermissions,otherPermissions;
sscanf(buff, "%s %s %s", &token, nameold, namenew);
iNumberold=lookup(fs,nameold);
iNumbernew=lookup(fs,namenew);
if (iNumberold==-1) return TECNICOFS_ERROR_FILE_NOT_FOUND;
if (iNumbernew!=-1) return TECNICOFS_ERROR_FILE_ALREADY_EXISTS;
inode_get(iNumberold,&ownerold,&ownerPermissions,&otherPermissions,NULL,0);
if (ownerold==userid){
renameFile(nameold, namenew, fs);
return 0;
}
return TECNICOFS_ERROR_PERMISSION_DENIED;
}
int apply_open(uid_t userid, char* buff,struct file *files){
int iNumber=0;
uid_t owner;
char token,name[MAX_INPUT_SIZE];
enum permission mode, ownerperm,otherperm;
sscanf(buff, "%s %s %u", &token, name, &mode);
iNumber = lookup(fs,name);
if (iNumber == -1) return TECNICOFS_ERROR_FILE_NOT_FOUND;
inode_get(iNumber,&owner,&ownerperm,&otherperm,NULL,0);
for(int i=0; i<5; i++)
if (files[i].iNumber==iNumber) return TECNICOFS_ERROR_FILE_IS_OPEN;
for(int i=0; i<5; i++)
if (files[i].iNumber==-1){
files[i].iNumber=iNumber;
files[i].mode=mode;
return i;
}
return TECNICOFS_ERROR_MAXED_OPEN_FILES;
}
int apply_close(uid_t userid, char* buff,struct file *files){
int fileDescriptor=-1;
char token;
sscanf(buff, "%s %d",&token, &fileDescriptor);
if (fileDescriptor>=5 || fileDescriptor<0) return TECNICOFS_ERROR_OTHER; /* valid descriptors are 0..4 */
files[fileDescriptor].iNumber=-1;
files[fileDescriptor].mode=0;
return 0;
}
int apply_write(uid_t userid, char* buff,struct file *files){
int len, fd=-1;
char token,name[MAX_INPUT_SIZE];
sscanf(buff, "%s %d %s", &token, &fd, name);
if (fd>=5 || fd<0) return TECNICOFS_ERROR_FILE_ALREADY_EXISTS;
if (files[fd].iNumber == -1) return TECNICOFS_ERROR_FILE_NOT_OPEN;
int rc = user_allowed(userid,fd,files,WRITE);
if (!rc) {
len = inode_set(files[fd].iNumber, name, strlen(name));
return len;
}
return rc;
}
int apply_read(int socket, uid_t userid, char* buff,struct file *files){
char token;
int len=0, fd=-1;
sscanf(buff, "%s %d %d", &token, &fd, &len);
char content[len];
memset(content, '\0', len);
if (fd>=5 || fd<0){
dprintf(socket, "%s %d", "e", TECNICOFS_ERROR_FILE_ALREADY_EXISTS);
return TECNICOFS_ERROR_FILE_ALREADY_EXISTS;
}
if (files[fd].iNumber == -1) {
dprintf(socket, "%s %d", "e", TECNICOFS_ERROR_FILE_NOT_OPEN);
return TECNICOFS_ERROR_FILE_NOT_OPEN;
}
int rc=user_allowed(userid,fd,files,READ);
if (!rc) {
inode_get(files[fd].iNumber,NULL,NULL,NULL,content,len-1);
dprintf(socket, "%s %ld", content, strlen(content));
return len;
}
dprintf(socket, "%s %d", "e", rc);
return rc;
}
void* applyComands(void *args){
	int userid = *(int*) args; /* client socket fd, heap-allocated per connection */
	free(args);
struct ucred owner;
socklen_t len = sizeof(struct ucred);
getsockopt(userid, SOL_SOCKET, SO_PEERCRED, &owner, &len);
pthread_sigmask(SIG_BLOCK, &set, NULL);
char buff[MAX_INPUT_SIZE];
struct file files[MAX_TABLE_SIZE];
for (int i=0;i<MAX_TABLE_SIZE;i++){
files[i].iNumber = -1;
files[i].mode = 0;
}
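	/* Each request is a single text message whose first character selects the
	 * operation: c=create, d=delete, r=rename, o=open, x=close, l=read,
	 * w=write, e=end session. The result code is sent back on the same socket. */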
while(1){
		bzero(buff, MAX_INPUT_SIZE);
		int n = read(userid, buff, sizeof(buff)-1);
		if (n <= 0) { close(userid); return NULL; } /* client closed the connection or read failed */
		buff[n] = '\0';
int rc=0;
char token = buff[0];
switch (token){
case 'c':
rc = apply_create(owner.uid, buff);
dprintf(userid,"%d",rc);
break;
case 'd':
rc = apply_delete(owner.uid, buff);
dprintf(userid,"%d",rc);
break;
case 'r':
rc = apply_rename(owner.uid, buff);
dprintf(userid,"%d",rc);
break;
case 'o':
rc = apply_open(owner.uid, buff,files);
dprintf(userid,"%d",rc);
break;
case 'x':
rc = apply_close(owner.uid, buff, files);
dprintf(userid,"%d",rc);
break;
case 'l':
rc = apply_read(userid, owner.uid, buff, files);
break;
case 'w':
rc = apply_write(owner.uid, buff, files);
dprintf(userid,"%d",rc);
break;
case 'e':
				close(userid); /* end of session: release the client socket */
				return NULL;
}
}
return NULL;
}
int socket_create(){
int i=0;
struct sockaddr_un serv_addr, cli_addr;
sockfd = socket(AF_UNIX,SOCK_STREAM,0);
if (sockfd < 0){
puts("server: can't open stream socket");
return TECNICOFS_ERROR_CONNECTION_ERROR;
}
unlink(nomeSocket);
bzero((char *)&serv_addr, sizeof(serv_addr));
serv_addr.sun_family = AF_UNIX;
strcpy(serv_addr.sun_path, nomeSocket);
int servlen = strlen(serv_addr.sun_path) + sizeof(serv_addr.sun_family);
if (bind(sockfd, (struct sockaddr *) &serv_addr, servlen) < 0){
puts("server, can't bind local address");
return TECNICOFS_ERROR_CONNECTION_ERROR;
}
listen(sockfd, 5);
TIMER_READ(startTime);
for (;;){
socklen_t len = sizeof(cli_addr);
if (!flag_end) {
			/* Give each client thread its own copy of the socket fd: sharing
			 * a single &newsockfd between threads would be a data race. */
			int *client_fd = malloc(sizeof(int));
			*client_fd = accept(sockfd,(struct sockaddr *) &cli_addr, &len);
			newsockfd = *client_fd;
			if (newsockfd < 0) {
				puts("server: accept error");
				free(client_fd);
				return TECNICOFS_ERROR_CONNECTION_ERROR;
			}
			if (pthread_create(&vector_threads[i++], NULL, applyComands, client_fd) != 0){
				puts("server: could not create client thread");
				free(client_fd);
				return TECNICOFS_ERROR_CONNECTION_ERROR;
			}
}
else return TECNICOFS_ERROR_OTHER;
}
}
void end_server(){
FILE*out = fopen(global_outputFile, "w");
flag_end=-1;
	for(int i=0;i<MAX_CLIENTS;i++)
		if (vector_threads[i] != 0) /* join only the threads that were actually created */
			pthread_join(vector_threads[i],NULL);
TIMER_READ(stopTime);
fprintf(out, "TecnicoFS completed in %.4f seconds.\n", TIMER_DIFF_SECONDS(startTime, stopTime));
print_tecnicofs_tree(out,fs);
fflush(out);
fclose(out);
unlink(nomeSocket);
inode_table_destroy();
free_tecnicofs(fs);
exit(EXIT_SUCCESS);
}
int main(int argc, char* argv[]) {
parseArgs(argc,argv);
signal(SIGINT, end_server);
sigemptyset(&set);
sigaddset(&set, SIGINT);
for(int i=0;i<MAX_CLIENTS;i++) vector_threads[i]=0;
fs = new_tecnicofs(numBuckets);
inode_table_init();
socket_create();
}
<file_sep>#ifndef THREADS_H
#define THREADS_H
#include "fs.h"
#include <pthread.h> /* For creating threads */
#define MAX_THREADS 100
/*Global Variables (locks)*/
pthread_t tid[MAX_THREADS];
pthread_mutex_t lock_commands,lock;
pthread_rwlock_t rwlock_commands,rwlock;
void lock_init();
void lock_destroy();
void lock_function(int i, pthread_mutex_t mutex, pthread_rwlock_t rw);
void unlock_function(pthread_mutex_t mutex, pthread_rwlock_t rw);
#endif<file_sep>//Example code: A simple server side code, which echos back the received message.
//Handle multiple socket connections with select and fd_set on Linux
#include <stdio.h>
#include <string.h> //strlen
#include <stdlib.h>
#include <errno.h>
#include <unistd.h> //close
#include <arpa/inet.h> //close
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <sys/time.h> //FD_SET, FD_ISSET, FD_ZERO macros
#define PORT 8080
int main(int argc , char *argv[]){
	int master_socket , addrlen , new_socket , client_socket[30] = {0} ,
		max_clients = 30 , activity, i , valread , sd;
int max_sd;
fd_set readfds;
char buffer[1025]; //data buffer of 1K
struct sockaddr_in address;
//create a master socket
	master_socket = socket(AF_INET, SOCK_STREAM , 0); /* TCP socket, to match the sockaddr_in address below */
	if( master_socket < 0){
perror("socket failed");
exit(EXIT_FAILURE);
}
//type of socket created
address.sin_family = AF_INET;
	address.sin_addr.s_addr = inet_addr(argv[1]); /* address string passed on the command line, e.g. "127.0.0.1" */
address.sin_port = htons( PORT );
//bind the socket to localhost port 8888
if (bind(master_socket, (struct sockaddr *)&address, sizeof(address))<0) {
perror("bind failed");
exit(EXIT_FAILURE);
}
//try to specify maximum of 5 pending connections for the master socket
if (listen(master_socket, 5) < 0){
perror("listen");
exit(EXIT_FAILURE);
}
//accept the incoming connection
addrlen = sizeof(address);
puts("Waiting for connections ...");
while(1){
FD_ZERO(&readfds);
FD_SET(master_socket, &readfds);
max_sd = master_socket;
for ( i = 0 ; i < max_clients ; i++){
sd = client_socket[i];
if(sd > 0) FD_SET( sd , &readfds);
if(sd > max_sd) max_sd = sd;
}
activity = select( max_sd + 1 , &readfds , NULL , NULL , NULL);
if (FD_ISSET(master_socket, &readfds)){
if ((new_socket = accept(master_socket,(struct sockaddr *)&address,
(socklen_t*)&addrlen))<0){
perror("accept");
exit(EXIT_FAILURE);
}
for (i = 0; i < max_clients; i++)
if( client_socket[i] == 0 ){
client_socket[i] = new_socket;
printf("Adding to list of sockets as %d\n" , i);
break;
}
}
		for (i = 0; i < max_clients; i++){
			sd = client_socket[i];
			if (FD_ISSET( sd , &readfds)){
				if ((valread = read( sd , buffer, 1024)) == 0){
					/* client disconnected: free its slot */
					close( sd );
					client_socket[i] = 0;
				}else{
					/* echo the received message back to the client */
					buffer[valread] = '\0';
					send(sd , buffer , strlen(buffer) , 0 );
				}
			}
		}
}
return 0;
}
<file_sep>#!/bin/bash
#Created by <NAME> Matheus
#Checks if there are 3 arguments after the executable
if [ ! $# -eq 3 ];then
	echo "Wrong format!";
	echo "Correct: ./runTests.sh inputFolder outputFolder numThreads";
exit 1;
fi
#Inputs
inputFolder=$1;
outputFolder=$2;
numThreads=$3;
#Script
# Build the executables if they have not been compiled yet
if [ ! -e tecnicofs-nosync ]; then
	make all > /dev/null;
fi
if [ ! -d "$outputFolder" ]; then
	mkdir "$outputFolder";
fi
for inputFile in "$1"/*
do
echo "InputFile= $inputFile NumThreads= 1"
./tecnicofs-nosync $inputFile ${inputFile/"inputs"/$2} 1 | grep "TecnicoFS completed in";
for i in $(seq 2 $numThreads)
do
echo "InputFile= $inputFile NumThreads= $i"
auxFile=${inputFile/"inputs"/$2}
outFile=${auxFile/".txt"/"-$i.txt"}
./tecnicofs-mutex $inputFile $outFile $i | grep "TecnicoFS completed in";
done
done
make clean | grep !"";
exit 0;<file_sep>// In main.c
// In processInput
case 'r':
// In applyCommands
case 'r':
renameFile(name,hashcode,fs,hashMax);
break;
// In fs.c
#include "lib/hash.h"
void renameFile(char* names,int hashCode,tecnicofs *fs,int hashMax) {
char *oldName,*newName;
	oldName = malloc(MAX_INPUT_SIZE * sizeof(char)); /* a 1-byte buffer would overflow on sscanf; assumes MAX_INPUT_SIZE from constants.h */
	newName = malloc(MAX_INPUT_SIZE * sizeof(char));
int iNumber,newHash;
sscanf(names,"%s %s",oldName,newName);
iNumber = lookup(fs,oldName,hashCode);
delete(fs,oldName,hashCode);
newHash = hash(newName,hashMax);
create(fs,newName,iNumber,newHash);
free(oldName);
free(newName);
}
// In fs.h
void renameFile(char* names,int hashCode,tecnicofs *fs,int hashMax);
<file_sep># SO_2019-2020
Operating Systems 2019/2020
## Project 1
* [Project](Project_1/docs/projeto-ex1-SO-19-20.pdf)
* [Files](Project_1/)
**Grade:** *13.40* / 20.0
## Project 2
* [Project](Project_2/docs/projeto-ex2-SO-19-20.pdf)
* [Files](Project_2/)
**Grade:** *15.40* / 20.0
## Project 3
* [Project](Project_3/docs/projeto-ex3-SO-19-20.pdf)
* [Files](Project_3/)
**Grade:** *10.35* / 20.0
<file_sep>/* Sistemas Operativos, DEI/IST/ULisboa 2019-20 */
/* Modified by <NAME> Nelson, group 22 */
#ifndef FS_H
#define FS_H
#include "lib/bst.h"
#include "lib/hash.h"
#include "sync.h"
typedef struct tecnicofs {
node** bstRoot;
int nextINumber;
int hashMax;
syncMech* bstLock;
} tecnicofs;
int obtainNewInumber(tecnicofs* fs);
tecnicofs* new_tecnicofs(int max);
void free_tecnicofs(tecnicofs* fs);
void create(tecnicofs* fs, char *name, int inumber, int flag);
void delete(tecnicofs* fs, char *name, int flag);
int lookup(tecnicofs* fs, char *name);
void renameFile(char* oldName,char* newName,tecnicofs *fs);
void print_tecnicofs_tree(FILE * fp, tecnicofs *fs);
#endif /* FS_H */
<file_sep>#ifndef HASH_H
#define HASH_H
int hash(char* name, int n);
#endif
<file_sep>#include "../tecnicofs-api-constants.h"
#include "../tecnicofs-client-api.h"
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <assert.h>
#include <string.h>
int main(int argc, char* argv[]) {
int fd = -1;
char readBuffer[4] = {0};
assert(tfsMount(argv[1]) == 0);
assert((fd = tfsOpen("abc", WRITE)) == 0);
assert(tfsRead(fd,readBuffer,5) == TECNICOFS_ERROR_INVALID_MODE);
assert(tfsRename("abc","bc") == TECNICOFS_ERROR_PERMISSION_DENIED);
assert(tfsClose(fd) == 0);
assert(tfsUnmount()==0);
exit(0);
}<file_sep>/* Sistemas Operativos, DEI/IST/ULisboa 2019-20 */
/* Modified by <NAME> Nelson, group 22 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <pthread.h>
#include "fs.h"
#include "constants.h"
#include "lib/timer.h"
#include "sync.h"
/*GLOBAL VARIABLES*/
char* global_inputFile = NULL;
char* global_outputFile = NULL;
int hashMax = 0;
int numberThreads = 0;
pthread_mutex_t commandsLock;
tecnicofs* fs;
sem_t sem_prod, sem_cons;
char inputCommands[MAX_COMMANDS][MAX_INPUT_SIZE];
int numberCommands = 0;
int headQueue = 0;
int sleepTime=0;
static void displayUsage (const char* appName){
printf("Usage: %s input_filepath output_filepath threads_number\n", appName);
exit(EXIT_FAILURE);
}
static void parseArgs (long argc, char* const argv[]){
if (argc != 5 && argc!=6) {
fprintf(stderr, "Invalid format:\n");
displayUsage(argv[0]);
}
global_inputFile = argv[1];
global_outputFile = argv[2];
numberThreads = atoi(argv[3]);
if (!numberThreads) {
fprintf(stderr, "Invalid number of threads\n");
displayUsage(argv[0]);
}
hashMax=atoi(argv[4]);
if(!hashMax){
fprintf(stderr, "Invalid number of Hash Size\n");
displayUsage(argv[0]);
}
sleepTime = (argc==6)? atoi(argv[5]): 0;
}
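/* Commands are exchanged through a circular producer/consumer buffer:
 * sem_prod counts free slots, sem_cons counts queued commands and
 * commandsLock protects the shared indices. */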
int insertCommand(char* data) {
sem_wait_err(&sem_prod, "Producer");
mutex_lock(&commandsLock);
strcpy(inputCommands[(numberCommands++)%MAX_COMMANDS], data);
mutex_unlock(&commandsLock);
sem_post_err(&sem_cons, "Consumer");
return 1;
}
char* removeCommand() {
return inputCommands[(headQueue++)%MAX_COMMANDS];
}
void errorParse(int lineNumber){
fprintf(stderr, "Error: line %d invalid\n", lineNumber);
exit(EXIT_FAILURE);
}
void* processInput(void *args){
FILE* inputFile;
inputFile = fopen(global_inputFile, "r");
if(!inputFile){
fprintf(stderr, "Error: Could not read %s\n", global_inputFile);
exit(EXIT_FAILURE);
}
int lineNumber = 0;
char line[MAX_INPUT_SIZE];
while (fgets(line, sizeof(line)/sizeof(char), inputFile)) {
char token;
char name[MAX_INPUT_SIZE];
char name2[MAX_INPUT_SIZE];
lineNumber++;
int numTokens = sscanf(line, "%c %s %s", &token, name, name2);
/* perform minimal validation */
if (numTokens < 1) { continue; }
switch (token){
case 'r':
if (numTokens != 3) errorParse(lineNumber);
if(insertCommand(line))
break;
case 'c':
case 'l':
case 'd':
if(numTokens != 2) errorParse(lineNumber);
if(insertCommand(line))
break;
case '#':
break;
default: { /* error */
errorParse(lineNumber);
}
}
}
for (int i=0;i<numberThreads;i++)
insertCommand("e"); //Command e: end a Consumer thread
fclose(inputFile);
return NULL;
}
FILE * openOutputFile() {
FILE *fp;
fp = fopen(global_outputFile, "w");
if (fp == NULL) {
perror("Error opening output file");
exit(EXIT_FAILURE);
}
return fp;
}
void* applyCommands(){
while(1){
sem_wait_err(&sem_cons, "Consumer");
mutex_lock(&commandsLock);
const char* command = removeCommand();
char token;
char name[MAX_INPUT_SIZE],name2[MAX_INPUT_SIZE];
sscanf(command, "%c %s", &token, name);
int iNumber;
switch (token) {
case 'c':
iNumber = obtainNewInumber(fs);
mutex_unlock(&commandsLock);
sem_post_err(&sem_prod,"Producer");
create(fs, name, iNumber,0);
break;
case 'l':
mutex_unlock(&commandsLock);
sem_post_err(&sem_prod,"Producer");
int searchResult = lookup(fs, name);
if(!searchResult)
printf("%s not found\n", name);
else
printf("%s found with inumber %d\n", name, searchResult);
break;
case 'd':
mutex_unlock(&commandsLock);
sem_post_err(&sem_prod,"Producer");
delete(fs, name,0);
break;
case 'r':
sscanf(command, "%c %s %s", &token, name, name2);
mutex_unlock(&commandsLock);
sem_post_err(&sem_prod,"Producer");
renameFile(name,name2,fs);
break;
case 'e':
mutex_unlock(&commandsLock);
//sem_post_err(&sem_prod,"Producer");
return NULL;
break;
default: { /* error */
mutex_unlock(&commandsLock);
fprintf(stderr, "Error: commands to apply\n");
exit(EXIT_FAILURE);
}
}
}
return NULL;
}
void runThreads(FILE* timeFp){
TIMER_T startTime, stopTime;
pthread_t producer_th;
pthread_t* workers = (pthread_t*) malloc((numberThreads) * sizeof(pthread_t));
TIMER_READ(startTime);
if (pthread_create(&producer_th, NULL, processInput, NULL)!= 0){
perror("Can't create thread: Producer");
exit(EXIT_FAILURE);
}
for(int i = 0; i < numberThreads; i++){
if (pthread_create(&workers[i], NULL, applyCommands, NULL)!= 0){
perror("Can't create thread: Consumers");
exit(EXIT_FAILURE);
}
}
for(int i = 0; i < numberThreads; i++)
if(pthread_join(workers[i], NULL))
perror("Can't join thread: Consumers");
if(pthread_join(producer_th, NULL))
perror("Can't join thread: Producer");
TIMER_READ(stopTime);
fprintf(timeFp, "TecnicoFS completed in %.4f seconds.\n", TIMER_DIFF_SECONDS(startTime, stopTime));
free(workers);
}
void init_variables(){
mutex_init(&commandsLock);
sem_init(&sem_prod,0,MAX_COMMANDS);
sem_init(&sem_cons,0,0);
}
void destroy_variables(){
mutex_destroy(&commandsLock);
sem_destroy(&sem_cons);
sem_destroy(&sem_prod);
}
int main(int argc, char* argv[]) {
parseArgs(argc, argv);
FILE * outputFp = openOutputFile();
init_variables();
fs = new_tecnicofs(hashMax);
printf("Sleeping..\n");
sleep(sleepTime);
printf("Awake..\n");
runThreads(stdout);
print_tecnicofs_tree(outputFp, fs);
fflush(outputFp);
fclose(outputFp);
destroy_variables();
free_tecnicofs(fs);
exit(EXIT_SUCCESS);
}
| aecbd21417a12fd03ac6b959135d847001458406 | [
"Markdown",
"C",
"Makefile",
"Shell"
] | 26 | C | nelsontr/SO_2019-2020 | 41b1c410be3a13c74e34ca45cdf888ba8b163f93 | 3fc083306d96521c1e4cfcab04ae6ba55dc33e8e | |
refs/heads/master | <repo_name>JSDiez/ThemePRO<file_sep>/js/onetone.js
//top menu
jQuery(".site-navbar,.home-navbar").click(function(){
jQuery(".top-nav").toggle();
});
jQuery('.top-nav ul li').hover(function(){
jQuery(this).find('ul:first').slideDown(100);
jQuery(this).addClass("hover");
},function(){
jQuery(this).find('ul').css('display','none');
jQuery(this).removeClass("hover");
});
jQuery('.top-nav li ul li:has(ul)').find("a:first").append(" <span class='menu_more'>»</span> ");
jQuery(".top-nav > ul > li,.main-nav > li").click(function(){
jQuery(".top-nav > ul > li,.main-nav > li").removeClass("active");
jQuery(this).addClass("active");
});
//
////
var windowWidth = jQuery(window).width();
if(windowWidth > 939){
if(jQuery(".site-main .sidebar").height() > jQuery(".site-main .main-content").height()){
jQuery(".site-main .main-content").css("height",(jQuery(".site-main .sidebar").height()+140)+"px");
}
}else{
jQuery(".site-main .main-content").css("height","auto");
}
jQuery(window).resize(function() {
var windowWidth = jQuery(window).width();
if(windowWidth > 939){
if(jQuery(".site-main .sidebar").height() > jQuery(".site-main .main-content").height()){
jQuery(".site-main .main-content").css("height",(jQuery(".site-main .sidebar").height()+140)+"px");
}
} else{
jQuery(".site-main .main-content").css("height","auto");
}
if(windowWidth > 919){
jQuery(".top-nav").show();
}else{
jQuery(".top-nav").hide();
}
});
// sticky menu
(function($){
$.fn.sticky = function( options ) {
// adding a class to users div
$(this).addClass('sticky-header');
var settings = $.extend({
'scrollSpeed ' : 500
}, options);
////// get homepage sections
var sections = [];
jQuery(".top-nav .onetone-menuitem > a").each(function() {
linkHref = $(this).attr('href').split('#')[1];
$target = $('#' + linkHref);
if($target.length) {
topPos = $target.offset().top;
sections[linkHref] = Math.round(topPos);
}
});
//////////
return $('.sticky-header .home-navigation ul li.onetone-menuitem a').each( function() {
if ( settings.scrollSpeed ) {
var scrollSpeed = settings.scrollSpeed
}
if( $("body.admin-bar").length){
if( $(window).width() < 765) {
stickyTop = 46;
} else {
stickyTop = 32;
}
}
else{
stickyTop = 0;
}
$(this).css({'top':stickyTop});
var stickyMenu = function(){
var scrollTop = $(window).scrollTop();
if (scrollTop > 111) {
$('.sticky-header').css({ 'position': 'fixed'}).addClass('fxd');
} else {
$('.sticky-header').css({ 'position': 'static' }).removeClass('fxd');
}
//// set nav menu active status
var returnValue = null;
var windowHeight = Math.round($(window).height() * 0.3);
for(var section in sections) {
if((sections[section] - windowHeight) < scrollTop) {
position = section;
}
}
if( typeof position !== "undefined" && position !== null ) {
jQuery(".home-navigation .onetone-menuitem ").removeClass("current");
jQuery(".home-navigation .onetone-menuitem ").find('a[href$="#' + position + '"]').parent().addClass("current");;
}
////
};
stickyMenu();
$(window).scroll(function() {
stickyMenu();
});
$(this).on('click', function(e){
var selectorHeight = $('.sticky-header').height();
e.preventDefault();
var id = $(this).attr('href');
if(typeof $('section'+ id).offset() !== 'undefined'){
if( $("header").css("position") === "static")
goTo = $(id).offset().top - 2*selectorHeight;
else
goTo = $(id).offset().top - selectorHeight;
$("html, body").animate({ scrollTop: goTo }, scrollSpeed);
}
});
});
}
})(jQuery);
jQuery(document).ready(function($){
//slider
if(jQuery("section.homepage-slider .item").length >1 ){
jQuery("#onetone-owl-slider").owlCarousel({
navigation : false, // Show next and prev buttons
slideSpeed : 300,
items:1,
autoplay:true,
margin:0,
loop:true,
paginationSpeed : 400,
singleItem:true,
autoplayTimeout:parseInt(onetone_params.slideSpeed)
});
}
if(jQuery("section.homepage-slider .item").length ==1 ){
jQuery("section.homepage-slider .owl-carousel").show();
}
$(".site-nav-toggle").click(function(){
$(".site-nav").toggle();
});
// retina logo
if( window.devicePixelRatio > 1 ){
if($('.normal_logo').length && $('.retina_logo').length){
$('.normal_logo').hide();
$('.retina_logo').show();
}
//
$('.page-title-bar').addClass('page-title-bar-retina');
}
//video background
var myPlayer;
$(function () {
myPlayer = $("#onetone-youtube-video").YTPlayer();
});
// BACK TO TOP
$(window).scroll(function(){
if($(window).scrollTop() > 300){
$("#back-to-top").fadeIn(200);
} else{
$("#back-to-top").fadeOut(200);
}
});
$('#back-to-top, .back-to-top').click(function() {
$('html, body').animate({ scrollTop:0 }, '800');
return false;
});
/* ------------------------------------------------------------------------ */
/* parallax background image */
/* ------------------------------------------------------------------------ */
$('.onetone-parallax').parallax("50%", 0.1);
// parallax scrolling
if( $('.parallax-scrolling').length ){
$('.parallax-scrolling').parallax({speed : 0.15});
}
//woocommerce
$(document).on('click','.onetone-quantity .minus',function(){
var qtyWrap = $(this).parent('.quantity');
var quantity = parseInt(qtyWrap.find('.qty').val());
var min_num = parseInt(qtyWrap.find('.qty').attr('min'));
var max_num = parseInt(qtyWrap.find('.qty').attr('max'));
var step = parseInt(qtyWrap.find('.qty').attr('step'));
$('input[name="update_cart"]').removeAttr("disabled");
if( quantity > min_num){
quantity = quantity - step;
if( quantity > 0 )
qtyWrap.find('.qty').val(quantity);
}
});
$(document).on('click','.onetone-quantity .plus',function(){
var qtyWrap = $(this).parent('.quantity');
var quantity = parseInt(qtyWrap.find('.qty').val());
var min_num = parseInt(qtyWrap.find('.qty').attr('min'));
var max_num = parseInt(qtyWrap.find('.qty').attr('max'));
var step = parseInt(qtyWrap.find('.qty').attr('step'));
$('input[name="update_cart"]').removeAttr("disabled");
if( max_num ){
if( quantity < max_num ){
quantity = quantity + step;
qtyWrap.find('.qty').val(quantity);
}
}else{
quantity = quantity + step;
qtyWrap.find('.qty').val(quantity);
}
});
$('.variations_form .single_add_to_cart_button').prepend('<i class="fa fa-shopping-cart"></i> ');
/* ------------------------------------------------------------------------ */
/* sticky header */
/* ------------------------------------------------------------------------ */
jQuery(window).scroll(function(){
if( jQuery("body.admin-bar").length ){
if( jQuery(window).width() < 765 ) {
stickyTop = 46;
} else {
stickyTop = 32;
}
}else{
stickyTop = 0;
}
var scrollTop = $(window).scrollTop();
if (scrollTop > 200) {
$('.fxd-header').css({'top':stickyTop}).show();
$('header').addClass('fixed-header');
}else{
$('.fxd-header').hide();
$('header').removeClass('fixed-header');
}
});
// scheme
if( typeof onetone_params.primary_color !== 'undefined' && onetone_params.primary_color !== '' ){
less.modifyVars({
'@color-main': onetone_params.primary_color
});
}
/* ------------------------------------------------------------------------ */
/* sticky header */
/* ------------------------------------------------------------------------ */
$(document).on('click',"header .main-header .site-nav ul a[href^='#'],a.scroll,.onetone-nav a[href^='#']", function(e){
if($("body.admin-bar").length){
if($(window).width() < 765) {
stickyTop = 46;
} else {
stickyTop = 32;
}
}
else{
stickyTop = 0;
}
var selectorHeight = 0;
if( $('.fxd-header').length )
var selectorHeight = $('.fxd-header').height();
if($(window).width() <= 919) {
$(".site-nav").hide();
}
var scrollTop = $(window).scrollTop();
e.preventDefault();
var id = $(this).attr('href');
if(typeof $(id).offset() !== 'undefined'){
var goTo = $(id).offset().top - 2*selectorHeight - stickyTop + 1;
$("html, body").animate({ scrollTop: goTo }, 1000);
}
});
$('header .fxd-header .site-nav ul').onePageNav({filter: 'a[href^="#"]',scrollThreshold:0.3});
/* ------------------------------------------------------------------------ */
/* smooth scrolling btn */
/* ------------------------------------------------------------------------ */
$("div.page a[href^='#'],div.post a[href^='#'],div.home-wrapper a[href^='#'],.banner-scroll a[href^='#'], .go-to-item").on('click', function(e){
var selectorHeight = $('header').height();
var scrollTop = $(window).scrollTop();
e.preventDefault();
var id = $(this).attr('href');
if(typeof $(id).offset() !== 'undefined'){
var goTo = $(id).offset().top - selectorHeight;
$("html, body").animate({ scrollTop: goTo }, 1000);
}
});
//portfolio carousel
if($("#related-portfolio").length){
$("#related-portfolio").owlCarousel({
navigation : false, // Show next and prev buttons
pagination: false,
items:4,
slideSpeed : 300,
paginationSpeed : 400,
singleItem:false,
autoPlay:parseInt(onetone_params.slideSpeed)
});
}
// portfolio filter
jQuery(function ($) {
var filterList = {
init: function () {
// MixItUp plugin
// http://mixitup.io
$('.portfolio-list-filter .portfolio-list-items').mixitup({
targetSelector: '.portfolio-box-wrap',
filterSelector: '.filter',
effects: ['fade'],
easing: 'snap',
// call the hover effect
onMixEnd: filterList.hoverEffect()
});
},
hoverEffect: function () {
}
};
// Run the show!
filterList.init();
});
$('iframe').each(function(){
if( typeof $(this).attr('width') !=='undefined' && typeof $(this).attr('height') !=='undefined'){
if( $(this).attr('width') > $(this).outerWidth() ){
var iframe_height = $(this).attr('height')*$(this).outerWidth()/$(this).attr('width');
$(this).css({'height':iframe_height});
}
}
});
//shop carousel
if($(".woocommerce.single-product .thumbnails").length){
$(".woocommerce.single-product .thumbnails").owlCarousel({
navigation : true, // Show next and prev buttons
pagination: false,
items:4,
navigationText : ['<i class="fa fa-angle-double-left"></i>', '<i class="fa fa-angle-double-right"></i>'],
slideSpeed : 300,
paginationSpeed : 400,
singleItem:false
});
}
//woo
$(".product-image").each(function() {
$(this).hover(function() {
if($(this).find('.product-image-back img').length){
$(this).find('.product-image-front').css({'opacity':'0'});
}
},
function() {
$(this).find('.product-image-front').css({'opacity':'1'});
});
});
//masonry
// portfolio
$('.onetone-masonry').masonry({
// options
itemSelector : '.portfolio-box-wrap'
});
// blog
$('.blog-grid').masonry({
// options
itemSelector : '.entry-box-wrap'
});
  var timeline_row_width = 0;
  $('.magee-blog .blog-timeline-wrap .entry-box-wrap').each(function(){
	  var wrap_width = $(this).parents('.blog-timeline-wrap').innerWidth();
	  var timeline_item_width = $(this).outerWidth();
	  timeline_row_width = timeline_row_width + timeline_item_width;
	  // entries wider than half of the wrap go to the right column
	  if( 2*timeline_item_width >= wrap_width){
$(this).removeClass('timeline-left').addClass('timeline-right');
}else{
$(this).removeClass('timeline-right').addClass('timeline-left');
}
});
//prettyPhoto
$("a[rel^='portfolio-image']").prettyPhoto();
// gallery lightbox
$(".gallery .gallery-item a").prettyPhoto({animation_speed:'fast',slideshow:10000, hideflash: true});
/* ------------------------------------------------------------------------ */
/* parallax background image */
/* ------------------------------------------------------------------------ */
$('.onetone-parallax').parallax("50%", 0.1);
/* ------------------------------------------------------------------------ */
/* Section Heading Color */
/* ------------------------------------------------------------------------ */
$('section').each(function(){
var headingcolor = $(this).data("headingcolor");
if(headingcolor != ""){
$(this).find("h1,h2,h3,h4,h5,h6").css("color",headingcolor);
}
});
$(".section-banner").each(function(){
var videoHeight =$(window).height();
if( typeof onetone_params.header_cover_video_background !== 'undefined' && onetone_params.header_cover_video_background == '0'){
var videoHeight = videoHeight-$('.sticky-header').height();
}
if( typeof onetone_video !== 'undefined' && typeof onetone_video.header_cover_video_background_html5 !== 'undefined' && onetone_video.header_cover_video_background_html5 == '0'){
var videoHeight = videoHeight-$('.sticky-header').height();
$(this).find("#big-video-wrap").css({"position":"absolute"});
}
$(this).css({"min-height":videoHeight});
$(this).find("#tubular-container,#big-video-vid").css({"height":videoHeight});
});
//
if($(window).width() <1200){
newPercentage = (($(window).width() / 1200) * 100) + "%";
$(".home-banner .heading-inner").css({"font-size": newPercentage});
}
$(window).on("resize", function (){
if($(window).width() <1200){
newPercentage = (($(window).width() / 1200) * 100) + "%";
$(".home-banner .heading-inner").css({"font-size": newPercentage});
}else{
$(".home-banner .heading-inner").css({"font-size": "100%"});
}
});
// section fullheight
var win_height = $(window).height();
$("section.fullheight").each(function(){
var section_height = $(this).height();
if($(this).is('#home-header')) {
var win_height = $(window).height();
var navBar_height = $('.top-wrap').height();
var win_height = win_height - navBar_height;
}
$(this).css({'height':section_height,'min-height':win_height});
});
});
if(jQuery().waypoint && jQuery(window).width() > 919 ) {
jQuery('.onetone-animated').each(function(){
if(jQuery(this).data('imageanimation')==="yes"){
jQuery(this).find("img,i.fa").css("visibility","hidden");
}
else{
jQuery(this).css("visibility","hidden");
}
});
}
/* ------------------------------------------------------------------------ */
/* home page animation */
/* ------------------------------------------------------------------------ */
function onetone_animation(e){
e.css({'visibility':'visible'});
e.find("img,i.fa").css({'visibility':'visible'});
// this code is executed for each appeared element
var animation_type = e.data('animationtype');
var animation_duration = e.data('animationduration');
var image_animation = e.data('imageanimation');
if(image_animation === "yes"){
e.find("img,i.fa").addClass("animated "+animation_type);
if(animation_duration) {
e.find("img,i.fa").css('-moz-animation-duration', animation_duration+'s');
e.find("img,i.fa").css('-webkit-animation-duration', animation_duration+'s');
e.find("img,i.fa").css('-ms-animation-duration', animation_duration+'s');
e.find("img,i.fa").css('-o-animation-duration', animation_duration+'s');
e.find("img,i.fa").css('animation-duration', animation_duration+'s');
}
}else{
e.addClass("animated "+animation_type);
if(animation_duration) {
e.css('-moz-animation-duration', animation_duration+'s');
e.css('-webkit-animation-duration', animation_duration+'s');
e.css('-ms-animation-duration', animation_duration+'s');
e.css('-o-animation-duration', animation_duration+'s');
e.css('animation-duration', animation_duration+'s');
}
}
}
jQuery(window).load(function (){
jQuery('.onetone-animated').each(function(){
if( jQuery(window).height() > jQuery(this).offset().top){
onetone_animation(jQuery(this));
}
});
});
var animated = false;
jQuery(window).scroll(function () {
if(jQuery(window).width() > 919 ){
if(jQuery().waypoint && animated == false ) {
jQuery('.onetone-animated').waypoint(function() {onetone_animation(jQuery(this));},{ triggerOnce: true, offset: '90%' });
}
animated = true;
}
});
/*
Plugin: jQuery Parallax
Version 1.1.3
Author: <NAME>
Twitter: @IanLunn
Author URL: http://www.ianlunn.co.uk/
Plugin URL: http://www.ianlunn.co.uk/plugins/jquery-parallax/
Dual licensed under the MIT and GPL licenses:
http://www.opensource.org/licenses/mit-license.php
http://www.gnu.org/licenses/gpl.html
*/
(function( $ ){
var $window = $(window);
var windowHeight = $window.height();
$window.resize(function () {
windowHeight = $window.height();
});
$.fn.parallax = function(xpos, speedFactor, outerHeight) {
var $this = $(this);
var getHeight;
var firstTop;
var paddingTop = 0;
//get the starting position of each element to have parallax applied to it
$this.each(function(){
firstTop = $this.offset().top;
});
if (outerHeight) {
getHeight = function(jqo) {
return jqo.outerHeight(true);
};
} else {
getHeight = function(jqo) {
return jqo.height();
};
}
// setup defaults if arguments aren't specified
if (arguments.length < 1 || xpos === null) xpos = "50%";
if (arguments.length < 2 || speedFactor === null) speedFactor = 0.1;
if (arguments.length < 3 || outerHeight === null) outerHeight = true;
// function to be called whenever the window is scrolled or resized
function update(){
var pos = $window.scrollTop();
$this.each(function(){
var $element = $(this);
var top = $element.offset().top;
var height = getHeight($element);
// Check if totally above or totally below viewport
if (top + height < pos || top > pos + windowHeight) {
return;
}
$this.css('backgroundPosition', xpos + " " + Math.round((firstTop - pos) * speedFactor) + "px");
});
}
$window.bind('scroll', update).resize(update);
update();
};
/* var ua = navigator.userAgent;
var vi = jQuery("#big-video-vid_html5_api");
if(vi.length){
if (ua.indexOf("iPhone") > 0) {
setTimeout(function(){
vi[0].play();
},1000);
}
else if (ua.indexOf("Android") > 0) {
vi[0].play();
if ( vi[0].currentTime){
vi[0].pause();
setTimeout(function(){
vi[0].play();
},1000)
}
}
}*/
// Detect the province selected on the Jobs page
function filterProvince(provinceVal) {
$(".job-item, .offers-list .clear").hide();
$(".job-item").each(function(){
if($(this).hasClass(provinceVal)){
$(this).fadeIn();
}
});
}
$("#location-job-filter select").change(function() {
var provinceVal = $("#location-job-filter :selected").val();
if(!provinceVal == "") {
filterProvince(provinceVal);
}
});
})(jQuery);
<file_sep>/includes/theme-setup.php
<?php
function onetone_setup(){
global $content_width;
$lang = get_template_directory(). '/languages';
load_theme_textdomain('onetone', $lang);
add_theme_support( 'post-thumbnails' );
$args = array();
$header_args = array(
'default-image' => '',
'default-repeat' => 'repeat',
'default-text-color' => 'CC9966',
'width' => 1120,
'height' => 80,
'flex-height' => true
);
// Enable support for Post Formats.
add_theme_support( 'post-formats', array( 'aside', 'image', 'video', 'quote', 'link', 'gallery', 'status', 'audio' ) );
add_theme_support( 'custom-background', $args );
add_theme_support( 'custom-header', $header_args );
add_theme_support( 'automatic-feed-links' );
add_theme_support('nav_menus');
add_theme_support( "title-tag" );
register_nav_menus( array(
'primary' => __( 'Primary Menu', 'onetone' ),
'home_menu' => __( 'Home Page Header Menu', 'onetone' ),
'top_bar_menu' => __( 'Top Bar Menu', 'onetone' ),
));
/*
* Switch default core markup for search form, comment form, and comments
* to output valid HTML5.
*/
add_theme_support( 'html5', array(
'search-form', 'comment-form', 'comment-list', 'gallery', 'caption',
) );
// Woocommerce Support
add_theme_support( 'woocommerce' );
add_editor_style("editor-style.css");
if ( ! isset( $content_width ) ) $content_width = 1120;
}
add_action( 'after_setup_theme', 'onetone_setup' );
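/* Enqueues the theme's fonts and stylesheets and builds the inline CSS
 * generated from the theme options and the per-page meta settings. */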
function onetone_custom_scripts(){
global $page_meta,$post,$shop_style;
if($post){
$page_meta = get_post_meta( $post->ID ,'_onetone_post_meta');
}
if( isset($page_meta[0]) && $page_meta[0]!='' )
$page_meta = @json_decode( $page_meta[0],true );
$theme_info = wp_get_theme();
$detect = new Mobile_Detect;
$body_font = str_replace(''','\'', esc_attr(onetone_option('body_font')));
$standard_body_font = str_replace(''','\'',esc_attr(onetone_option('standard_body_font')));
$menu_font = str_replace(''','\'',esc_attr(onetone_option('menu_font')));
$standard_menu_font = str_replace(''','\'',esc_attr(onetone_option('standard_menu_font')));
$headings_font = str_replace(''','\'',esc_attr(onetone_option('headings_font')));
$standard_headings_font = str_replace(''','\'',esc_attr(onetone_option('standard_headings_font')));
$footer_headings_font = str_replace(''','\'',esc_attr(onetone_option('headings_font')));
$standard_footer_headings_font = str_replace(''','\'',esc_attr(onetone_option('standard_footer_headings_font')));
$button_font = str_replace(''','\'',esc_attr(onetone_option('button_font')));
$standard_button_font = str_replace(''','\'',esc_attr(onetone_option('standard_button_font')));
$shop_style = absint(onetone_option('shop_style',1));
if( $body_font ){
$body_font_url = str_replace(', ','|',$body_font);
wp_enqueue_style( 'google-fonts-'.sanitize_title($body_font), esc_url('//fonts.googleapis.com/css?family=' . str_replace(' ','+',$body_font_url)), array(), '' );
}
if( $menu_font ){
$menu_font_url = str_replace(', ','|',$menu_font);
wp_enqueue_style( 'google-fonts-'.sanitize_title($menu_font), esc_url('//fonts.googleapis.com/css?family=' . str_replace(' ','+',$menu_font_url)), array(), '' );
}
if( $headings_font ){
$headings_font_url = str_replace(', ','|',$headings_font);
wp_enqueue_style( 'google-fonts-'.sanitize_title($headings_font), esc_url('//fonts.googleapis.com/css?family=' . str_replace(' ','+',$headings_font_url)), array(), '' );
}
if( $footer_headings_font ){
$footer_headings_font_url = str_replace(', ','|',$footer_headings_font);
wp_enqueue_style( 'google-fonts-'.sanitize_title($footer_headings_font), esc_url('//fonts.googleapis.com/css?family=' . str_replace(' ','+',$footer_headings_font_url)), array(), '' );
}
if( $button_font ){
$button_font_url = str_replace(', ','|',$button_font);
wp_enqueue_style( 'google-fonts-'.sanitize_title($button_font), esc_url('//fonts.googleapis.com/css?family=' . str_replace(' ','+',$button_font_url)), array(), '' );
}
$google_fonts = onetone_option('google_fonts','');
if( trim($google_fonts) !='' ){
$google_fonts = str_replace(' ','+',trim($google_fonts));
wp_enqueue_style('google-fonts', esc_url('//fonts.googleapis.com/css?family='.$google_fonts), false, '', false );
}
wp_enqueue_style('font-awesome', get_template_directory_uri() .'/plugins/font-awesome/css/font-awesome.min.css', false, '4.3.0', false);
wp_enqueue_style('bootstrap', get_template_directory_uri() .'/plugins/bootstrap/css/bootstrap.min.css', false, '3.3.4', false);
wp_enqueue_style( 'owl.carousel', get_template_directory_uri() .'/plugins/owl-carousel/assets/owl.carousel.css', false, '2.0.0', false );
wp_enqueue_style('owl-theme', get_template_directory_uri() .'/css/owl.theme.css', false, '1.3.3', false);
wp_enqueue_style('bigvideo', get_template_directory_uri() .'/css/bigvideo.css', false, '1.3.3', false);
wp_enqueue_style('prettyPhoto', get_template_directory_uri() .'/css/prettyPhoto.css', false, '3.1.5', false);
wp_enqueue_style( 'onetone-main', get_stylesheet_uri(), array(), $theme_info->get( 'Version' ) );
wp_enqueue_style('onetone-onetone-shortcodes', get_template_directory_uri() .'/css/onetone-shortcodes.css', false, $theme_info->get( 'Version' ), false);
wp_enqueue_style('onetone-onetone', get_template_directory_uri() .'/css/onetone.css', false, $theme_info->get( 'Version' ), false);
if ( class_exists( 'WooCommerce' ) ) {
wp_enqueue_style('onetone-woocommerce', get_template_directory_uri() .'/css/woo'.$shop_style.'.css', false, $theme_info->get( 'Version' ), false);
}
wp_enqueue_style('onetone-ms', get_template_directory_uri() .'/css/onetone-ms.css', false, $theme_info->get( 'Version' ), false);
// Jesús: Disable less theme file
//wp_enqueue_style('onetone-scheme', get_template_directory_uri() .'/css/scheme.less', array('magee-shortcode'), $theme_info->get( 'Version' ), false);
wp_enqueue_style('onetone-home', get_template_directory_uri() .'/css/home.css', false, $theme_info->get( 'Version' ), false);
if ( is_rtl() ) {
wp_enqueue_style('onetone-rtl', get_template_directory_uri() .'/rtl.css', false, $theme_info->get( 'Version' ), false);
}
$background_array = onetone_option("page_background");
$background = onetone_get_background($background_array);
$header_image = get_header_image();
$onetone_custom_css = "";
if (isset($header_image) && ! empty( $header_image )) {
$onetone_custom_css .= ".home-header{background:url(".$header_image. ") repeat;}\n";
}
if ( 'blank' != get_header_textcolor() && '' != get_header_textcolor() ){
$header_color = ' color:#' . get_header_textcolor() . ';';
$onetone_custom_css .= 'header .site-name,header .site-description,header .site-tagline{'.$header_color.'}';
}
else{
$onetone_custom_css .= 'header .site-name,header .site-description,header .site-tagline{display:none;}';
}
$custom_css = onetone_option("custom_css");
$onetone_custom_css .= '.site{'.$background.'}';
$links_color = onetone_option( 'links_color','#963');
//scheme
$primary_color = esc_attr(onetone_option('primary_color',$links_color));
$links_color = onetone_option( 'links_color');
//if($links_color == "" || $links_color == null)
//$links_color = "#963";
if($links_color )
$onetone_custom_css .= '.entry-content a,.home-section-content a{color:'.$links_color.' ;}';
$top_menu_font_color = onetone_option( 'font_color');
if($top_menu_font_color !="" && $top_menu_font_color!=null){
$onetone_custom_css .= 'header .site-nav > ul > li > a {color:'.$top_menu_font_color.'}';
}
// header
$sticky_header_background_color = esc_attr(onetone_option('sticky_header_background_color',''));
$sticky_header_background_opacity = esc_attr(onetone_option('sticky_header_background_opacity','1'));
$header_background_color = esc_attr(onetone_option('header_background_color',''));
$header_background_opacity = esc_attr(onetone_option('header_background_opacity','1'));
$header_border_color = esc_attr(onetone_option('header_border_color',''));
$page_title_bar_background_color = esc_attr(onetone_option('page_title_bar_background_color',''));
$page_title_bar_borders_color = esc_attr(onetone_option('page_title_bar_borders_color',''));
// sticky header background
if($sticky_header_background_color){
$rgb = onetone_hex2rgb( $sticky_header_background_color );
$onetone_custom_css .= ".fxd-header {
background-color: rgba(".$rgb[0].",".$rgb[1].",".$rgb[2].",".$sticky_header_background_opacity.");
}";
}
// main header background
if( $header_background_color ){
$rgb = onetone_hex2rgb( $header_background_color );
$onetone_custom_css .= ".main-header {
background-color: rgba(".$rgb[0].",".$rgb[1].",".$rgb[2].",".$header_background_opacity.");
}";
}
// sticky header
$sticky_header_opacity = onetone_option('sticky_header_background_opacity','1');
$sticky_header_menu_item_padding = onetone_option('sticky_header_menu_item_padding','');
$sticky_header_navigation_font_size = onetone_option('sticky_header_navigation_font_size','');
$sticky_header_logo_width = onetone_option('sticky_header_logo_width','');
$logo_left_margin = onetone_option('logo_left_margin','');
$logo_right_margin = onetone_option('logo_right_margin','');
$logo_top_margin = onetone_option('logo_top_margin','');
$logo_bottom_margin = onetone_option('logo_bottom_margin','');
if( $sticky_header_background_color ){
$rgb = onetone_hex2rgb( $sticky_header_background_color );
$onetone_custom_css .= ".fxd-header{background-color: rgba(".$rgb[0].",".$rgb[1].",".$rgb[2].",".esc_attr($sticky_header_opacity).");}\r\n";
}
if( $sticky_header_menu_item_padding )
$onetone_custom_css .= ".fxd-header .site-nav > ul > li > a {padding:".absint($sticky_header_menu_item_padding)."px;}\r\n";
if( $sticky_header_navigation_font_size )
$onetone_custom_css .= ".fxd-header .site-nav > ul > li > a {font-size:".absint($sticky_header_navigation_font_size)."px;}\r\n";
if( $sticky_header_logo_width )
$onetone_custom_css .= ".fxd-header img.site-logo{ width:".absint($sticky_header_logo_width)."px;}\r\n";
if( $logo_left_margin )
$onetone_custom_css .= ".fxd-header img.site-logo{ margin-left:".absint($logo_left_margin)."px;}\r\n";
if( $logo_right_margin )
$onetone_custom_css .= ".fxd-header img.site-logo{ margin-right:".absint($logo_right_margin)."px;}\r\n";
if( $logo_top_margin )
$onetone_custom_css .= ".fxd-header img.site-logo{ margin-top:".absint($logo_top_margin)."px;}\r\n";
if( $logo_bottom_margin )
$onetone_custom_css .= ".fxd-header img.site-logo{ margin-bottom:".absint($logo_bottom_margin)."px;}\r\n";
// top bar
$display_top_bar = onetone_option('display_top_bar','yes');
$top_bar_background_color = onetone_option('top_bar_background_color','');
$top_bar_info_color = onetone_option('top_bar_info_color','');
$top_bar_menu_color = onetone_option('top_bar_menu_color','');
if( $top_bar_background_color )
$onetone_custom_css .= ".top-bar{background-color:".$top_bar_background_color.";}";
if( $display_top_bar == 'yes' )
$onetone_custom_css .= ".top-bar{display:block;}";
if( $top_bar_info_color )
$onetone_custom_css .= ".top-bar-info{color:".$top_bar_info_color.";}";
if( $top_bar_menu_color )
$onetone_custom_css .= ".top-bar ul li a{color:".$top_bar_menu_color.";}";
// Header background
$header_background_image = onetone_option('header_background_image','');
$header_background_full = onetone_option('header_background_full','');
$header_background_repeat = onetone_option('header_background_repeat','');
$header_background_parallax = onetone_option('header_background_parallax','');
$header_background = '';
if( $header_background_image ){
$header_background .= "header .main-header{\r\n";
$header_background .= "background-image: url(".esc_url($header_background_image).");\r\n";
if( $header_background_full == 'yes' )
$header_background .= "-webkit-background-size: cover;
-moz-background-size: cover;
-o-background-size: cover;
background-size: cover;\r\n";
if( $header_background_parallax == 'no' )
$header_background .= "background-repeat:".$header_background_repeat.";";
if( $header_background_parallax == 'yes' )
$header_background .= "background-attachment: fixed;
background-position:top center;
background-repeat: no-repeat;";
$header_background .= "}\r\n";
}
$onetone_custom_css .= $header_background;
// Header Padding
$header_top_padding = onetone_option('header_top_padding','');
$header_bottom_padding = onetone_option('header_bottom_padding','');
if( $header_top_padding )
$onetone_custom_css .= ".site-nav > ul > li > a{padding-top:".$header_top_padding."}";
if( $header_bottom_padding )
$onetone_custom_css .= ".site-nav > ul > li > a{padding-bottom:".$header_bottom_padding."}";
// page title bar
$page_title_bar_top_padding = esc_attr(onetone_option('page_title_bar_top_padding','210px'));
$page_title_bar_bottom_padding = esc_attr(onetone_option('page_title_bar_bottom_padding','160px'));
$page_title_bar_mobile_top_padding = esc_attr(onetone_option('page_title_bar_mobile_top_padding','70px'));
$page_title_bar_mobile_bottom_padding = esc_attr(onetone_option('page_title_bar_mobile_bottom_padding','50px'));
$page_title_bar_background_img = esc_url(onetone_option('page_title_bar_background',''));
$page_title_bar_retina_background = esc_url(onetone_option('page_title_bar_retina_background',''));
$page_title_bg_full = esc_attr(onetone_option('page_title_bg_full','no'));
$page_title_bg_parallax = esc_attr(onetone_option('page_title_bg_parallax','no'));
$page_title_bar_background = '';
if( $page_title_bar_background_img ){
$page_title_bar_background .= ".page-title-bar{\r\n";
$page_title_bar_background .= "background-image: url(".esc_url($page_title_bar_background_img).");\r\n";
if( $page_title_bg_full == 'yes' )
$page_title_bar_background .= "-webkit-background-size: cover;
-moz-background-size: cover;
-o-background-size: cover;
background-size: cover;\r\n";
if( $header_background_parallax == 'no' )
$page_title_bar_background .= "background-repeat:".$header_background_repeat.";";
if( $page_title_bg_parallax == 'yes' )
$page_title_bar_background .= "background-attachment: fixed;
background-position:top center;
background-repeat: no-repeat;";
$page_title_bar_background .= "}\r\n";
}
$onetone_custom_css .= $page_title_bar_background ;
$page_title_bar_background = '';
if( $page_title_bar_retina_background ){
$page_title_bar_background .= ".page-title-bar-retina{\r\n";
$page_title_bar_background .= "background-image: url(".esc_url($page_title_bar_retina_background).") !important;\r\n";
if( $page_title_bg_full == 'yes' )
$page_title_bar_background .= "-webkit-background-size: cover;
-moz-background-size: cover;
-o-background-size: cover;
background-size: cover;\r\n";
if( $header_background_parallax == 'no' )
$page_title_bar_background .= "background-repeat:".$header_background_repeat.";";
if( $page_title_bg_parallax == 'yes' )
$page_title_bar_background .= "background-attachment: fixed;
background-position:top center;
background-repeat: no-repeat;";
$page_title_bar_background .= "}\r\n";
}
$onetone_custom_css .= $page_title_bar_background ;
if( $detect->isMobile() ){
$onetone_custom_css .= ".page-title-bar{
padding-top:".$page_title_bar_mobile_top_padding .";
padding-bottom:".$page_title_bar_mobile_bottom_padding .";
}";
}else{
$onetone_custom_css .= ".page-title-bar{
padding-top:".$page_title_bar_top_padding .";
padding-bottom:".$page_title_bar_bottom_padding .";
}";
}
//background
$content_background_color = esc_attr(onetone_option('content_background_color',''));
$sidebar_background_color = esc_attr(onetone_option('sidebar_background_color',''));
$footer_background_color = esc_attr(onetone_option('footer_background_color',''));
$copyright_background_color = esc_attr(onetone_option('copyright_background_color',''));
// content backgroud color
if( $content_background_color )
$onetone_custom_css .= ".col-main {background-color:".$content_background_color.";}";
if( $sidebar_background_color )
$onetone_custom_css .= ".col-aside-left,.col-aside-right {background-color:".$sidebar_background_color.";}";
//footer background
if( $footer_background_color )
$onetone_custom_css .= "footer .footer-widget-area{background-color:".$footer_background_color.";}";
if( $copyright_background_color )
$onetone_custom_css .= "footer .footer-info-area{background-color:".$copyright_background_color."}";
// Element Colors
$form_background_color = esc_attr(onetone_option('form_background_color',''));
$form_text_color = esc_attr(onetone_option('form_text_color',''));
$form_border_color = esc_attr(onetone_option('form_border_color',''));
if( $form_background_color )
$onetone_custom_css .= "footer input,footer textarea{background-color:".$form_background_color.";}";
if( $form_text_color )
$onetone_custom_css .= "footer input,footer textarea{color:".$form_text_color.";}";
if( $form_border_color )
$onetone_custom_css .= "footer input,footer textarea{border-color:".$form_border_color.";}";
// body font
if( $body_font ){
$onetone_custom_css .= "body{
font-family:".$body_font.";
}\r\n";
}else{
if( $standard_body_font ){
$onetone_custom_css .= "body{
font-family:".$standard_body_font.";
}\r\n";
}
}
// menu font
if( $menu_font ){
$onetone_custom_css .= "#menu-main li a span{
font-family:".$menu_font.";
}\r\n";
}else{
if( $standard_menu_font ){
$onetone_custom_css .= "#menu-main li a span{
font-family:".$standard_menu_font.";
}\r\n";
}
}
// headings font
if( $headings_font ){
$onetone_custom_css .= "h1,h2,h3,h4,h5,h6{
font-family:".$headings_font.";
}\r\n";
}else{
if( $standard_headings_font ){
$onetone_custom_css .= "h1,h2,h3,h4,h5,h6{
font-family:".$standard_headings_font.";
}\r\n";
}
}
// footer headings font
if( $headings_font ){
$onetone_custom_css .= "footer h1,footer h2,footer h3,footer h4,footer h5,footer h6{
font-family:".$headings_font.";
}\r\n";
}else{
if( $standard_headings_font ){
$onetone_custom_css .= "footer h1,footer h2,footer h3,footer h4,footer h5,footer h6{
font-family:".$standard_headings_font.";
}\r\n";
}
}
// button font
if( $button_font ){
$onetone_custom_css .= "a.btn-normal{
font-family:".$button_font.";
}\r\n";
}else{
if( $standard_button_font ){
$onetone_custom_css .= "a.btn-normal{
font-family:".$standard_button_font.";
}\r\n";
}
}
//Layout Options
$page_content_top_padding = esc_attr(onetone_option('page_content_top_padding',''));
$page_content_bottom_padding = esc_attr(onetone_option('page_content_bottom_padding',''));
$hundredp_padding = esc_attr(onetone_option('hundredp_padding',''));
$sidebar_padding = esc_attr(onetone_option('sidebar_padding',''));
$column_top_margin = esc_attr(onetone_option('column_top_margin',''));
$column_bottom_margin = esc_attr(onetone_option('column_bottom_margin',''));
if( $page_content_top_padding )
$onetone_custom_css .= ".post-inner,.page-inner{padding-top:".$page_content_top_padding.";}";
if( $page_content_bottom_padding )
$onetone_custom_css .= ".post-inner,.page-inner{padding-bottom:".$page_content_bottom_padding.";}";
if( isset($page_meta['padding_top']) && $page_meta['padding_top'] !='' )
$onetone_custom_css .= ".post-inner,.page-inner{padding-top:".esc_attr($page_meta['padding_top']).";}";
if( isset($page_meta['padding_bottom']) && $page_meta['padding_bottom'] !='' )
$onetone_custom_css .= ".post-inner,.page-inner{padding-bottom:".esc_attr($page_meta['padding_bottom']).";}";
if( $sidebar_padding )
$onetone_custom_css .= ".col-aside-left,.col-aside-right{padding:".$sidebar_padding.";}";
if( $column_top_margin )
$onetone_custom_css .= ".col-lg-1, .col-lg-10, .col-lg-11, .col-lg-12, .col-lg-2, .col-lg-3, .col-lg-4, .col-lg-5, .col-lg-6, .col-lg-7, .col-lg-8, .col-lg-9, .col-md-1, .col-md-10, .col-md-11, .col-md-12, .col-md-2, .col-md-3, .col-md-4, .col-md-5, .col-md-6, .col-md-7, .col-md-8, .col-md-9, .col-sm-1, .col-sm-10, .col-sm-11, .col-sm-12, .col-sm-2, .col-sm-3, .col-sm-4, .col-sm-5, .col-sm-6, .col-sm-7, .col-sm-8, .col-sm-9, .col-xs-1, .col-xs-10, .col-xs-11, .col-xs-12, .col-xs-2, .col-xs-3, .col-xs-4, .col-xs-5, .col-xs-6, .col-xs-7, .col-xs-8, .col-xs-9{margin-top:".$column_top_margin.";}";
if( $column_bottom_margin )
$onetone_custom_css .= ".col-lg-1, .col-lg-10, .col-lg-11, .col-lg-12, .col-lg-2, .col-lg-3, .col-lg-4, .col-lg-5, .col-lg-6, .col-lg-7, .col-lg-8, .col-lg-9, .col-md-1, .col-md-10, .col-md-11, .col-md-12, .col-md-2, .col-md-3, .col-md-4, .col-md-5, .col-md-6, .col-md-7, .col-md-8, .col-md-9, .col-sm-1, .col-sm-10, .col-sm-11, .col-sm-12, .col-sm-2, .col-sm-3, .col-sm-4, .col-sm-5, .col-sm-6, .col-sm-7, .col-sm-8, .col-sm-9, .col-xs-1, .col-xs-10, .col-xs-11, .col-xs-12, .col-xs-2, .col-xs-3, .col-xs-4, .col-xs-5, .col-xs-6, .col-xs-7, .col-xs-8, .col-xs-9{margin-bottom:".$column_bottom_margin.";}";
//fonts color
$header_tagline_color = esc_attr(onetone_option('header_tagline_color',''));
$page_title_color = esc_attr(onetone_option('page_title_color',''));
$h1_color = esc_attr(onetone_option('h1_color',''));
$h2_color = esc_attr(onetone_option('h2_color',''));
$h3_color = esc_attr(onetone_option('h3_color',''));
$h4_color = esc_attr(onetone_option('h4_color',''));
$h5_color = esc_attr(onetone_option('h5_color',''));
$h6_color = esc_attr(onetone_option('h6_color',''));
$body_text_color = esc_attr(onetone_option('body_text_color',''));
$link_color = esc_attr(onetone_option('link_color',''));
$breadcrumbs_text_color = esc_attr(onetone_option('breadcrumbs_text_color',''));
$sidebar_widget_headings_color = esc_attr(onetone_option('sidebar_widget_headings_color',''));
$footer_headings_color = esc_attr(onetone_option('footer_headings_color',''));
$footer_text_color = esc_attr(onetone_option('footer_text_color',''));
$footer_link_color = esc_attr(onetone_option('footer_link_color',''));
if( $header_tagline_color )
$onetone_custom_css .= ".site-tagline{color:".$header_tagline_color.";}";
if( $page_title_color )
$onetone_custom_css .= ".page-title h1{color:".$page_title_color.";}";
if( $h1_color )
$onetone_custom_css .= "h1{color:".$h1_color.";}";
if( $h2_color )
$onetone_custom_css .= "h2{color:".$h2_color.";}";
if( $h3_color )
$onetone_custom_css .= "h3{color:".$h3_color.";}";
if( $h4_color )
$onetone_custom_css .= "h4{color:".$h4_color.";}";
if( $h5_color )
$onetone_custom_css .= "h5{color:".$h5_color.";}";
if( $h6_color )
$onetone_custom_css .= "h6{color:".$h6_color.";}";
if( $body_text_color )
$onetone_custom_css .= ".entry-content,.entry-content p{color:".$body_text_color.";}";
if( $link_color )
$onetone_custom_css .= ".entry-summary a, .entry-content a{color:".$link_color.";}";
if( $breadcrumbs_text_color )
$onetone_custom_css .= ".breadcrumb-nav span,.breadcrumb-nav a{color:".$breadcrumbs_text_color.";}";
if( $sidebar_widget_headings_color )
$onetone_custom_css .= ".col-aside-left .widget-title,.col-aside-right .widget-title{color:".$sidebar_widget_headings_color.";}";
if( $footer_headings_color )
$onetone_custom_css .= ".footer-widget-area .widget-title{color:".$footer_headings_color.";}";
if( $footer_text_color )
$onetone_custom_css .= ".footer-widget-area,.footer-widget-area p,.footer-widget-area span{color:".$footer_text_color.";}";
if( $footer_link_color )
$onetone_custom_css .= ".footer-widget-area a{color:".$footer_link_color.";}";
//Main Menu Colors
$main_menu_background_color_1 = esc_attr(onetone_option('main_menu_background_color_1',''));
$main_menu_font_color_1 = esc_attr(onetone_option('main_menu_font_color_1',''));
$main_menu_font_hover_color_1 = esc_attr(onetone_option('main_menu_font_hover_color_1',''));
$main_menu_background_color_2 = esc_attr(onetone_option('main_menu_background_color_2',''));
$main_menu_font_color_2 = esc_attr(onetone_option('main_menu_font_color_2',''));
$main_menu_font_hover_color_2 = esc_attr(onetone_option('main_menu_font_hover_color_2',''));
$main_menu_separator_color_2 = esc_attr(onetone_option('main_menu_separator_color_2',''));
$woo_cart_menu_background_color = esc_attr(onetone_option('woo_cart_menu_background_color',''));
if( $main_menu_background_color_1 )
$onetone_custom_css .= ".main-header{background-color:".$main_menu_background_color_1.";}";
if( $main_menu_font_color_1 )
$onetone_custom_css .= "#menu-main > li > a {color:".$main_menu_font_color_1.";}";
if( $main_menu_font_hover_color_1 )
$onetone_custom_css .= "#menu-main > li > a:hover,#menu-main > li.current > a{color:".$main_menu_font_hover_color_1.";}";
if( $main_menu_background_color_2 )
$onetone_custom_css .= ".main-header .sub-menu{background-color:".$main_menu_background_color_2.";}";
if( $main_menu_font_color_2 )
$onetone_custom_css .= "#menu-main li li a{color:".$main_menu_font_color_2.";}";
if( $main_menu_font_hover_color_2 )
$onetone_custom_css .= "#menu-main li li a:hover{color:".$main_menu_font_hover_color_2.";}";
if( $main_menu_separator_color_2 )
$onetone_custom_css .= ".site-nav ul li li a{border-color:".$main_menu_separator_color_2." !important;}";
// footer
$footer_background_image = onetone_option('footer_background_image','');
$footer_bg_full = onetone_option('footer_bg_full','yes');
$footer_background_repeat = onetone_option('footer_background_repeat','');
$footer_background_position = onetone_option('footer_background_position','');
$footer_top_padding = onetone_option('footer_top_padding','');
$footer_bottom_padding = onetone_option('footer_bottom_padding','');
$copyright_top_padding = onetone_option('copyright_top_padding','');
$copyright_bottom_padding = onetone_option('copyright_bottom_padding','');
$footer_background = "";
if( $footer_background_image ){
$footer_background .= ".footer-widget-area{\r\n";
$footer_background .= "background-image: url(".esc_url($footer_background_image).");\r\n";
if( $footer_bg_full == 'yes' )
$footer_background .= "-webkit-background-size: cover;
-moz-background-size: cover;
-o-background-size: cover;
background-size: cover;\r\n";
$footer_background .= "background-repeat:".esc_attr($footer_background_repeat).";";
$footer_background .= "background-position:".esc_attr($footer_background_position).";";
$footer_background .= "}\r\n";
}
$onetone_custom_css .= $footer_background ;
$onetone_custom_css .= ".footer-widget-area{\r\n
padding-top:".$footer_top_padding.";\r\n
padding-bottom:".$footer_bottom_padding.";\r\n
}" ;
$onetone_custom_css .= ".footer-info-area{\r\n
padding-top:".$copyright_top_padding.";\r\n
padding-bottom:".$copyright_bottom_padding.";\r\n
}" ;
// home page sections
$section_title_css = '';
$section_content_css = '';
$video_background_section = onetone_option( 'video_background_section' );
for($i=0;$i<15;$i++):
$section_css = '';
$section_background = onetone_option( 'section_background_'.$i );
$background_size = onetone_option( 'background_size_'.$i );
$section_padding = onetone_option( 'section_padding_'.$i ,$i == 0?'':'50px 0');
$text_align = onetone_option( 'text_align_'.$i);
$parallax_scrolling = onetone_option( 'parallax_scrolling_'.$i );
$section_title_typography = onetone_option( 'section_title_typography_'.$i);
$title_typography = onetone_get_typography( $section_title_typography );
$section_content_typography = onetone_option( 'section_content_typography_'.$i);
$content_typography = onetone_get_typography( $section_content_typography );
if( $parallax_scrolling == "yes" || $parallax_scrolling == "1" ){
$section_css .= "background-attachment:fixed;background-position:50% 0;background-repeat:repeat;\r\n";
}
if( $background_size == "yes" ){
$section_css .= "-webkit-background-size: cover;-moz-background-size: cover;-o-background-size: cover;background-size: cover;\r\n";
}
if( $section_padding ){
$section_css .= "padding:".$section_padding.";\r\n";;
}
if( $video_background_section != ($i+1) || $detect->isMobile() || $detect->isTablet() )
$section_css .= onetone_get_background( $section_background );
$section_title_css .= "section.home-section-".($i+1)." .section-title{text-align:center ;}\r\n";
if( $title_typography )
$section_title_css .= "section.home-section-".($i+1)." .section-title{".$title_typography."}\r\n";
if( $content_typography )
$section_content_css .= "section.home-section-".($i+1)." .home-section-content,section.home-section-".($i+1)." .home-section-content p{".$content_typography."}\r\n";
if( $text_align )
$section_content_css .= "section.home-section-".($i+1)." .home-section-content{text-align:".$text_align."}\r\n";
$section_content_css .= "section.home-section-".($i+1)." {".$section_css."}\r\n";
endfor;
$onetone_custom_css .= $section_title_css;
$onetone_custom_css .= $section_content_css;
$onetone_custom_css .= $custom_css;
wp_add_inline_style( 'onetone-main', $onetone_custom_css );
wp_enqueue_style( 'onetone-bigvideo', get_template_directory_uri().'/plugins/YTPlayer/css/jquery.mb.YTPlayer.min.css', array(), '', 'all' ); // fifth argument of wp_enqueue_style() is $media, not an "in footer" flag
wp_enqueue_script( 'onetone-bigvideo', get_template_directory_uri().'/plugins/YTPlayer/jquery.mb.YTPlayer.js', array( 'jquery' ), '', true );
wp_enqueue_script( 'modernizr', get_template_directory_uri().'/plugins/modernizr.custom.js', array( 'jquery' ), '2.8.2', false );
wp_enqueue_script( 'bootstrap', get_template_directory_uri().'/plugins/bootstrap/js/bootstrap.min.js', array( 'jquery' ), '3.3.4', false );
wp_enqueue_script( 'nav', get_template_directory_uri().'/plugins/jquery.nav.js', array( 'jquery' ), '1.4.14', false );
wp_enqueue_script( 'scrollTo', get_template_directory_uri().'/plugins/jquery.scrollTo.js', array( 'jquery' ), '1.4.14', false );
wp_enqueue_script( 'parallax', get_template_directory_uri().'/plugins/jquery.parallax-1.1.3.js', array( 'jquery' ), '1.1.3', true );
wp_enqueue_script( 'respond', get_template_directory_uri().'/plugins/respond.min.js', array( 'jquery' ), '', true );
wp_enqueue_script( 'less', get_template_directory_uri().'/plugins/less.min.js', array( 'jquery' ), '2.5.1', true );
wp_enqueue_script( 'prettyPhoto', get_template_directory_uri().'/plugins/jquery.prettyPhoto.js', array( 'jquery' ), '3.1.5', true );
wp_enqueue_script( 'masonry', get_template_directory_uri() . '/plugins/jquery-masonry/jquery.masonry.min.js', array( 'jquery' ), null, true );
wp_enqueue_script( 'waypoints', get_template_directory_uri() . '/plugins/jquery.waypoints.js', array( 'jquery' ), null, true );
wp_enqueue_script( 'easing', get_template_directory_uri() . '/plugins/jquery.easing.min.js' , array( 'jquery' ), null, true);
wp_enqueue_script( 'owl.carousel', get_template_directory_uri() . '/plugins/owl-carousel/owl.carousel.min.js', array( 'jquery' ), null, true );
wp_enqueue_script( 'smoothscroll', get_template_directory_uri() . '/plugins/smoothscroll.js', array( 'jquery' ), '0.9.9', true );
wp_enqueue_script( 'mixitup', get_template_directory_uri() . '/plugins/jquery.mixitup.min.js', array( 'jquery' ), '', true );
wp_enqueue_script( 'onetone-default', get_template_directory_uri().'/js/onetone.js', array( 'jquery' ),$theme_info->get( 'Version' ), true );
if ( is_singular() && comments_open() && get_option( 'thread_comments' ) ){wp_enqueue_script( 'comment-reply' );}
$slide_time = onetone_option("slide_time");
$slide_time = is_numeric($slide_time)?$slide_time:"5000";
$isMobile = 0;
if( $detect->isMobile() && !$detect->isTablet() ){
$isMobile = 1;
}
$sticky_header = esc_attr(onetone_option('enable_sticky_header','yes'));
wp_localize_script( 'onetone-default', 'onetone_params', array(
'ajaxurl' => admin_url('admin-ajax.php'),
'themeurl' => get_template_directory_uri(),
'slideSpeed' => $slide_time,
'sticky_header' => $sticky_header,
'isMobile' =>$isMobile,
'primary_color' => $primary_color,
) );
}
function onetone_admin_scripts(){
global $pagenow ;
$theme_info = wp_get_theme();
wp_enqueue_script('media-upload');
wp_enqueue_script('thickbox');
wp_enqueue_style('thickbox');
wp_enqueue_style( 'onetone-admin', get_template_directory_uri().'/css/admin.css', false, $theme_info->get( 'Version' ), false);
wp_enqueue_style('magnific-popup', get_template_directory_uri() .'/css/magnific-popup.css', false, '0.9.9', false);
if( $pagenow == "post.php" || $pagenow == "post-new.php" || (isset($_GET['page']) && $_GET['page'] == "onetone-options") ):
wp_enqueue_style('font-awesome', get_template_directory_uri() .'/plugins/font-awesome/css/font-awesome.min.css', false, '4.4.0', false);
wp_enqueue_style('onetone-options', get_template_directory_uri() .'/css/options.css', false, $theme_info->get( 'Version' ), false);
endif;
wp_enqueue_script( 'magnific-popup', get_template_directory_uri().'/plugins/jquery.magnific-popup.min.js', array( 'jquery' ), '0.9.9', true );
wp_enqueue_script( 'onetone-admin', get_template_directory_uri().'/js/admin.js', array( 'jquery' ), $theme_info->get( 'Version' ), false );
wp_localize_script( 'onetone-admin', 'onetone_params', array(
'ajaxurl' => admin_url('admin-ajax.php'),
'themeurl' => get_template_directory_uri(),
) );
}
add_action( 'wp_enqueue_scripts', 'onetone_custom_scripts' );
add_action( 'admin_enqueue_scripts', 'onetone_admin_scripts' );
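/**
 * Retrieve the saved theme options array.
 *
 * If no options have been saved yet, the defaults declared in
 * includes/admin-options.php are sanitized and used instead. Returns $default
 * (false by default) when no options can be resolved.
 */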
function onetone_of_get_options($default = false) {
global $options_saved;
$options_saved = false;
//$optionsframework_settings = get_option(ONETONE_OPTIONS_PREFIXED.'optionsframework');
// Gets the unique option id
//$option_name = $optionsframework_settings['id'];
$option_name = optionsframework_option_name();
if ( get_option($option_name) ) {
$options = get_option($option_name);
$options_saved = true;
}
else{
$location = apply_filters( 'options_framework_location', array('includes/admin-options.php') );
if ( $optionsfile = locate_template( $location ) ) {
$maybe_options = require_once $optionsfile;
if ( is_array( $maybe_options ) ) {
$options = $maybe_options;
} else if ( function_exists( 'optionsframework_options' ) ) {
$options = optionsframework_options();
}
}
$options = apply_filters( 'of_options', $options );
$config = $options;
foreach ( (array) $config as $option ) {
if ( ! isset( $option['id'] ) ) {
continue;
}
if ( ! isset( $option['std'] ) ) {
continue;
}
if ( ! isset( $option['type'] ) ) {
continue;
}
$output[$option['id']] = apply_filters( 'of_sanitize_' . $option['type'], $option['std'], $option );
}
$options = $output;
}
if ( isset($options) ) {
return $options;
} else {
return $default;
}
}
global $onetone_options, $options_saved;
$onetone_options = onetone_of_get_options();
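/**
 * Get a single theme option by name, falling back to $default when it is not set.
 */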
function onetone_option($name,$default=''){
global $onetone_options;
if(isset($onetone_options[$name]))
return $onetone_options[$name];
else
return $default;
}
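/**
 * Runs on after_setup_theme / after_switch_theme: migrates options saved by the
 * lite version (the 'onetone' option) into this theme's option name when needed,
 * then refreshes the global $onetone_options cache.
 */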
function onetone_on_switch_theme(){
global $onetone_options;
$option_name = optionsframework_option_name();
// Import lite version options to pro version
if(!get_option( $option_name ) && get_option('onetone')){
add_option( $option_name ,get_option('onetone'));
}
/* if(!get_option($option_name)){
$config = array();
$output = array();
$location = apply_filters( 'options_framework_location', array('admin-options.php') );
if ( $optionsfile = locate_template( $location ) ) {
$maybe_options = require_once $optionsfile;
if ( is_array( $maybe_options ) ) {
$options = $maybe_options;
} else if ( function_exists( 'optionsframework_options' ) ) {
$options = optionsframework_options();
}
}
$options = apply_filters( 'of_options', $options );
$config = $options;
foreach ( (array) $config as $option ) {
if ( ! isset( $option['id'] ) ) {
continue;
}
if ( ! isset( $option['std'] ) ) {
continue;
}
if ( ! isset( $option['type'] ) ) {
continue;
}
$output[$option['id']] = apply_filters( 'of_sanitize_' . $option['type'], $option['std'], $option );
}
add_option($option_name,$output);
}
*/
//
$onetone_options = onetone_of_get_options();
}
add_action( 'after_setup_theme', 'onetone_on_switch_theme' );
add_action('after_switch_theme', 'onetone_on_switch_theme');
/*
* This is an example of how to add custom scripts to the options panel.
* This one shows/hides an option when a checkbox is clicked.
*/
add_action('optionsframework_custom_scripts', 'onetone_optionsframework_custom_scripts');
function onetone_optionsframework_custom_scripts() {
}
add_filter('options_framework_location','onetone_options_framework_location_override');
function onetone_options_framework_location_override() {
return array('includes/admin-options.php');
}
function onetone_optionscheck_options_menu_params( $menu ) {
$menu['page_title'] = __( 'Onetone Options', 'onetone');
$menu['menu_title'] = __( 'Onetone Options', 'onetone');
$menu['menu_slug'] = 'onetone-options';
return $menu;
}
add_filter( 'optionsframework_menu', 'onetone_optionscheck_options_menu_params' );
/*
function onetone_wp_title( $title, $sep ) {
global $paged, $page;
if ( is_feed() )
return $title;
// Add the site name.
$title .= get_bloginfo( 'name' );
// Add the site description for the home/front page.
$site_description = get_bloginfo( 'description', 'display' );
if ( $site_description && ( is_home() || is_front_page() ) )
$title = "$title $sep $site_description";
// Add a page number if necessary.
if ( $paged >= 2 || $page >= 2 )
$title = "$title $sep " . sprintf( __( ' Page %s ', 'onetone' ), max( $paged, $page ) );
return $title;
}
add_filter( 'wp_title', 'onetone_wp_title', 10, 2 );
*/
function onetone_title( $title ) {
if ( $title == '' ) {
return __( 'Untitled', 'onetone');
} else {
return $title;
}
}
add_filter( 'the_title', 'onetone_title' );<file_sep>/includes/theme-widget.php
<?php
// global $wp_registered_sidebars;
#########################################
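/**
 * Register the theme's widget areas (default, per-template and footer sidebars).
 */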
function onetone_widgets_init() {
global $sidebars ;
$sidebars = array(
'' => __( 'No Sidebar', 'onetone' ),
'default_sidebar' => __( 'Default Sidebar', 'onetone' ),
'displayed_everywhere' => __( 'Displayed Everywhere', 'onetone' ),
'post' => __( 'Post Sidebar', 'onetone' ),
'post_category' => __( 'Post Category Sidebar', 'onetone' ),
'portfolio' => __( 'Portfolio Sidebar', 'onetone' ),
'portfolio_category' => __( 'Portfolio Category Sidebar', 'onetone' ),
'shop' => __( 'Shop Sidebar', 'onetone' ),
'sidebar-1' => __( 'Sidebar 1', 'onetone' ),
'sidebar-2' => __( 'Sidebar 2', 'onetone' ),
'sidebar-3' => __( 'Sidebar 3', 'onetone' ),
'sidebar-4' => __( 'Sidebar 4', 'onetone' ),
'sidebar-5' => __( 'Sidebar 5', 'onetone' ),
'sidebar-6' => __( 'Sidebar 6', 'onetone' ),
'sidebar-7' => __( 'Sidebar 7', 'onetone' ),
'sidebar-8' => __( 'Sidebar 8', 'onetone' ),
'footer_widget_1' => __( 'Footer Area One', 'onetone' ),
'footer_widget_2' => __( 'Footer Area Two', 'onetone' ),
'footer_widget_3' => __( 'Footer Area Three', 'onetone' ),
'footer_widget_4' => __( 'Footer Area Four', 'onetone' ),
);
foreach( $sidebars as $k => $v ){
if( $k !='' ){
register_sidebar(array(
'name' => $v,
'id' => $k,
'before_widget' => '<div id="%1$s" class="widget widget-box %2$s">',
'after_widget' => '<span class="seperator extralight-border"></span></div>',
'before_title' => '<h2 class="widget-title">',
'after_title' => '</h2>'
));
}
}
}
add_action( 'widgets_init', 'onetone_widgets_init' );<file_sep>/home-sections/section-youtube-video.php
<?php
$video_background_section = onetone_option( 'video_background_section' );
$i = $video_background_section-1 ;
$video_controls = onetone_option( 'video_controls' );
$section_background_video = onetone_option( 'section_background_video_0' );
$youtube_bg_type = onetone_option("youtube_bg_type");
$youtube_bg_type = is_numeric($youtube_bg_type)?$youtube_bg_type:"1";
$display_video_mobile = onetone_option("display_video_mobile","no");
$start_play = onetone_option("section_youtube_start",3);
$youtube_autoplay = onetone_option("youtube_autoplay");
$youtube_loop = onetone_option("youtube_loop");
$youtube_mute = onetone_option("youtube_mute");
if( $youtube_autoplay == '1' )
$youtube_autoplay = 'true';
else
$youtube_autoplay = 'false';
if( $youtube_loop == '1' )
$youtube_loop = 'true';
else
$youtube_loop = 'false';
if( $youtube_mute == '1' )
$youtube_mute = 'true';
else
$youtube_mute = 'false';
$containment = '.onetone-youtube-section';
if( $youtube_bg_type == '1')
$containment = 'body';
?>
<section class="section home-section-<?php echo $video_background_section;?> onetone-youtube-section video-section">
<div id="onetone-youtube-video" class="onetone-player" data-property="{videoURL:'<?php echo $section_background_video;?>',containment:'<?php echo $containment;?>', showControls:false, autoPlay:<?php echo $youtube_autoplay;?>, loop:<?php echo $youtube_loop;?>, mute:<?php echo $youtube_mute;?>, startAt:<?php echo $start_play;?>, opacity:1, addRaster:true, quality:'default'}"></div>
<div class="bg-video"></div>
<?php get_template_part('home-sections/section',$video_background_section);?>
<div class="clear"></div>
<?php
if( $video_controls == 1 ){
$detect = new Mobile_Detect;
if( !$detect->isMobile() && !$detect->isTablet() ){
if( $youtube_autoplay == 'true' )
$play_btn_icon = 'pause';
else
$play_btn_icon = 'play';
if( $youtube_mute == 'true' )
$mute_btn_icon = 'volume-off';
else
$mute_btn_icon = 'volume-up';
echo '<script>function changeLabel(state){
if( state == 1 )
jQuery("#togglePlay i").removeClass("fa-play").addClass("fa-pause");
else
jQuery("#togglePlay i").removeClass("fa-pause").addClass("fa-play");
}
function toggleVolume(){
var volume =jQuery(\'#onetone-youtube-video\').YTPToggleVolume();
if( volume == true )
jQuery(".youtube-volume i").removeClass("fa-volume-off").addClass("fa-volume-up");
else
jQuery(".youtube-volume i").removeClass("fa-volume-up").addClass("fa-volume-off");
}
</script>
<div class="black-65" id="video-controls">
<a class="youtube-pause command" id="togglePlay" href="javascript:;" onclick="jQuery(\'#onetone-youtube-video\').YTPTogglePlay(changeLabel)"><i class="fa fa-'.$play_btn_icon.'"></i></a>
<a class="youtube-volume" href="javascript:;" onclick="toggleVolume();"><i class="fa fa-'.$mute_btn_icon.' "></i></a>
</div>';
}
}
?>
</section><file_sep>/sidebar-woo_products_right.php
<?php
$right_sidebar = esc_attr(onetone_option('right_sidebar_woo_products',''));
if ( $right_sidebar && is_active_sidebar( $right_sidebar ) ){
dynamic_sidebar( $right_sidebar );
}
elseif( is_active_sidebar( 'default_sidebar' ) ) {
dynamic_sidebar('default_sidebar');
}<file_sep>/includes/metabox-options.php
<?php
/**
* Calls the class on the post edit screen.
*/
function onetone_call_metaboxClass() {
new onetone_metaboxClass();
}
if ( is_admin() ) {
add_action( 'load-post.php', 'onetone_call_metaboxClass' );
add_action( 'load-post-new.php', 'onetone_call_metaboxClass' );
}
/**
* The Class.
*/
class onetone_metaboxClass {
/**
* Hook into the appropriate actions when the class is constructed.
*/
public function __construct() {
add_action( 'add_meta_boxes', array( $this, 'onetone_add_meta_box' ) );
add_action( 'save_post', array( $this, 'onetone_save' ) );
}
/**
* Adds the meta box container.
*/
public function onetone_add_meta_box( $post_type ) {
$post_types = array( 'page'); //limit meta box to certain post types
if ( in_array( $post_type, $post_types )) {
add_meta_box(
'onetone_page_meta_box'
,__( 'Onetone Metabox Options', 'onetone' )
,array( $this, 'onetone_render_meta_box_content' )
,$post_type
,'advanced'
,'high'
);
}
}
// Get onetone sliders from the Magee Shortcodes plugin
public static function onetone_sliders_meta(){
$onetone_sliders[] = array(
'label' => __( 'Select a slider', 'onetone' ),
'value' => ''
);
$onetone_custom_slider = new WP_Query( array( 'post_type' => 'magee_slider', 'post_status'=>'publish', 'posts_per_page' => -1 ) );
while ( $onetone_custom_slider->have_posts() ) {
$onetone_custom_slider->the_post();
$onetone_sliders[] = array(
'label' => get_the_title(),
'value' => get_the_ID()
);
}
wp_reset_postdata();
return $onetone_sliders;
}
/**
* Save the meta when the post is saved.
*
* @param int $post_id The ID of the post being saved.
*/
public function onetone_save( $post_id ) {
/*
* We need to verify this came from our screen and with proper authorization,
* because save_post can be triggered at other times.
*/
// Check if our nonce is set.
if ( ! isset( $_POST['onetone_inner_custom_box_nonce'] ) )
return $post_id;
$nonce = $_POST['onetone_inner_custom_box_nonce'];
// Verify that the nonce is valid.
if ( ! wp_verify_nonce( $nonce, 'onetone_inner_custom_box' ) )
return $post_id;
// If this is an autosave, our form has not been submitted,
// so we don't want to do anything.
if ( defined( 'DOING_AUTOSAVE' ) && DOING_AUTOSAVE )
return $post_id;
// Check the user's permissions.
if ( 'page' == $_POST['post_type'] ) {
if ( ! current_user_can( 'edit_page', $post_id ) )
return $post_id;
} else {
if ( ! current_user_can( 'edit_post', $post_id ) )
return $post_id;
}
/* OK, it's safe for us to save the data now. */
// Sanitize the user input.
//$show_breadcrumb = sanitize_text_field( $_POST['onetone_show_breadcrumb'] );
//$onetone_layout = sanitize_text_field( $_POST['onetone_layout'] );
if( isset($_POST) && $_POST ){
$post_metas = array();
$post_metas['header_position'] = isset($_POST['header_position'])?$_POST['header_position']:'top';
$post_metas['full_width'] = isset($_POST['full_width'])?$_POST['full_width']:'no';
$post_metas['padding_top'] = isset($_POST['padding_top'])?$_POST['padding_top']:'';
$post_metas['padding_bottom'] = isset($_POST['padding_bottom'])?$_POST['padding_bottom']:'';
$post_metas['display_breadcrumb'] = isset($_POST['display_breadcrumb'])?$_POST['display_breadcrumb']:'';
$post_metas['nav_menu'] = isset($_POST['nav_menu'])?$_POST['nav_menu']:'';
$post_metas['page_layout'] = isset($_POST['page_layout'])?$_POST['page_layout']:'none';
$post_metas['left_sidebar'] = isset($_POST['left_sidebar'])?$_POST['left_sidebar']:'';
$post_metas['right_sidebar'] = isset($_POST['right_sidebar'])?$_POST['right_sidebar']:'';
$post_metas['slider_banner'] = isset($_POST['slider_banner'])?$_POST['slider_banner']:'0';
$post_metas['banner_position'] = isset($_POST['banner_position'])?$_POST['banner_position']:'1';
$post_metas['magee_slider'] = isset($_POST['magee_slider'])?$_POST['magee_slider']:'';
$post_metas['display_title'] = isset($_POST['display_title'])?$_POST['display_title']:'yes';
$post_metas['display_title_bar'] = isset($_POST['display_title_bar'])?$_POST['display_title_bar']:'0';
$onetone_post_meta = json_encode( $post_metas );
// Update the meta field.
update_post_meta( $post_id, '_onetone_post_meta', $onetone_post_meta );
}
}
/**
* Render Meta Box content.
*
* @param WP_Post $post The post object.
*/
public function onetone_render_meta_box_content( $post ) {
global $wp_registered_sidebars;
// $magee_sliders = self::onetone_sliders_meta();
// Add a nonce field so we can check for it later.
wp_nonce_field( 'onetone_inner_custom_box', 'onetone_inner_custom_box_nonce' );
// Use get_post_meta to retrieve an existing value from the database.
$page_meta = get_post_meta( $post->ID ,'_onetone_post_meta',true);
$page_metas = @json_decode( $page_meta,true );
if( $page_metas )
extract( $page_metas );
/************ get nav menus*************/
$nav_menus[] = array(
'label' => __( 'Default', 'onetone' ),
'value' => ''
);
$menus = get_registered_nav_menus();
foreach ( $menus as $location => $description ) {
$nav_menus[] = array(
'label' => $description,
'value' => $location
);
}
/* sidebars */
$sidebars[] = array(
'label' => __( 'None', 'onetone' ),
'value' => ''
);
foreach( $wp_registered_sidebars as $key => $value){
$sidebars[] = array(
'label' => $value['name'],
'value' => $value['id'],
);
}
// Display the form, using the current value.
$full_width = isset( $full_width )? $full_width:'no';
$page_layout = isset( $page_layout )? $page_layout:'none';
$left_sidebar = isset( $left_sidebar )? $left_sidebar:'';
$right_sidebar = isset( $right_sidebar )? $right_sidebar:'';
$display_breadcrumb = isset( $display_breadcrumb )? $display_breadcrumb:'';
$display_title = isset( $display_title )? $display_title:'yes';
$display_title_bar = isset( $display_title_bar )? $display_title_bar:'';
$padding_top = isset( $padding_top )? $padding_top:'50px';
$padding_bottom = isset( $padding_bottom )? $padding_bottom:'50px';
echo '<p class="meta-options"><label for="full_width" style="display: inline-block;width: 150px;">';
_e( 'Content Full Width', 'onetone' );
echo '</label> ';
echo '<select name="full_width" id="full_width">
<option '.selected($full_width,'no',false).' value="no">'.__("No","onetone").'</option>
<option '.selected($full_width,'yes',false).' value="yes">'.__("Yes","onetone").'</option>
</select></p>';
echo '<p class="meta-options"><label for="padding_top" style="display: inline-block;width: 150px;">';
_e( 'Padding Top', 'onetone' );
echo '</label> ';
echo '<input name="padding_top" type="text" value="'.$padding_top.'" />';
echo '</p>';
echo '<p class="meta-options"><label for="padding_bottom" style="display: inline-block;width: 150px;">';
_e( 'Padding Bottom', 'onetone' );
echo '</label> ';
echo '<input name="padding_bottom" type="text" value="'.$padding_bottom.'" />';
echo '</p>';
echo '<p class="meta-options"><label for="display_breadcrumb" style="display: inline-block;width: 150px;">';
_e( 'Display Title Bar', 'onetone' );
echo '</label> ';
echo '<select name="display_title_bar" id="display_title_bar">
<option '.selected($display_title_bar,'',false).' value="" selected>'.__("Default","onetone").'</option>
<option '.selected($display_title_bar,'yes',false).' value="yes">'.__("Yes","onetone").'</option>
<option '.selected($display_title_bar,'no',false).' value="no">'.__("No","onetone").'</option>
</select></p>';
echo '<p class="meta-options"><label for="display_breadcrumb" style="display: inline-block;width: 150px;">';
_e( 'Display Breadcrumb', 'onetone' );
echo '</label> ';
echo '<select name="display_breadcrumb" id="display_breadcrumb">
<option '.selected($display_breadcrumb,'',false).' value="">'.__("Default","onetone").'</option>
<option '.selected($display_breadcrumb,'yes',false).' value="yes">'.__("Yes","onetone").'</option>
<option '.selected($display_breadcrumb,'no',false).' value="no">'.__("No","onetone").'</option>
</select></p>';
echo '<p class="meta-options"><label for="page_layout" style="display: inline-block;width: 150px;">';
_e( 'Page Layout', 'onetone' );
echo '</label> ';
echo '<select name="page_layout" id="page_layout">
<option '.selected($page_layout,'none',false).' value="none">'.__("No Sidebar","onetone").'</option>
<option '.selected($page_layout,'left',false).' value="left">'.__("Left Sidebar","onetone").'</option>
<option '.selected($page_layout,'right',false).' value="right">'.__("Right Sidebar","onetone").'</option>
<option '.selected($page_layout,'both',false).' value="both">'.__("Both Sidebar","onetone").'</option>
</select></p>';
echo '<p class="meta-options"><label for="left_sidebar" style="display: inline-block;width: 150px;">';
_e( 'Select Left Sidebar', 'onetone' );
echo '</label> ';
echo '<select name="left_sidebar" id="left_sidebar">';
foreach( $sidebars as $sidebar ){
echo '<option '.selected($left_sidebar,$sidebar['value'],false).' value="'.$sidebar['value'].'">'.$sidebar['label'].'</option>';
}
echo '</select></p>';
echo '<p class="meta-options"><label for="right_sidebar" style="display: inline-block;width: 150px;">';
_e( 'Select Right Sidebar', 'onetone' );
echo '</label> ';
echo '<select name="right_sidebar" id="right_sidebar">';
foreach( $sidebars as $sidebar ){
echo '<option '.selected($right_sidebar,$sidebar['value'],false).' value="'.$sidebar['value'].'">'.$sidebar['label'].'</option>';
}
echo '</select></p>';
}
}<file_sep>/woocommerce/config.php
<?php
define( "WOO_IMAGES", get_template_directory_uri() . "/woocommerce/images" );
remove_action( 'woocommerce_before_main_content','woocommerce_breadcrumb', 20, 0);
//add_action( 'onetone_before_content_wrap','woocommerce_breadcrumb');
remove_action( 'woocommerce_sidebar', 'woocommerce_get_sidebar', 10);
remove_action( 'woocommerce_after_main_content', 'woocommerce_output_content_wrapper_end', 10);
remove_action( 'woocommerce_before_main_content', 'woocommerce_output_content_wrapper', 10);
add_action( 'woocommerce_before_main_content', 'onetone_woocommerce_output_content_wrapper', 10);
add_action( 'woocommerce_after_main_content', 'onetone_woocommerce_output_content_wrapper_end', 10);
add_action( 'woocommerce_before_shop_loop', 'onetone_woocommerce_before_shop_loop', 0,0);
add_action( 'woocommerce_before_shop_loop', 'onetone_woocommerce_after_shop_loop', 40);
remove_action( 'woocommerce_before_single_product_summary', 'woocommerce_show_product_sale_flash', 10);
add_action( 'woocommerce_product_thumbnails', 'woocommerce_show_product_sale_flash', 10);
//add_action( 'onetone_woocommerce_after_catalog_ordering', 'onetone_after_catalog_ordering', 40);
//remove_action( 'woocommerce_before_shop_loop_item_title', 'woocommerce_template_loop_product_thumbnail', 10);
//remove_action( 'woocommerce_after_shop_loop_item_title', 'woocommerce_template_loop_price', 10);
//remove_action( 'woocommerce_after_shop_loop_item_title', 'woocommerce_template_loop_rating' ,5);
add_action('woocommerce_sidebar','onetone_woocommerce_sidebar');
add_action( 'woocommerce_after_product_loop','onetone_get_portfolio_share_icons');
add_action('woocommerce_share','onetone_single_product_sharing');
add_action('onetone_header_shopping_cart','onetone_header_shopping_cart');
add_action('woocommerce_before_shop_loop_item_title','onetone_before_shop_loop_item_title');
add_action('woocommerce_after_shop_loop_item','onetone_after_shop_loop_item');
function onetone_woocommerce_sidebar(){
}
function onetone_before_shop_loop_item_title(){
echo '<div class="product-info text-center">';
}
function onetone_after_shop_loop_item(){
echo '</div>';
}
function onetone_after_catalog_ordering(){
}
function onetone_woocommerce_before_shop_loop(){
echo "<div class='product-page-title-container'>";
}
function onetone_woocommerce_after_shop_loop(){
echo "</div>";
}
function onetone_single_product_sharing(){
}
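/**
 * Open the WooCommerce content wrapper: prints the page title bar, breadcrumb and
 * the layout containers, with CSS classes chosen from the configured shop sidebars.
 */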
function onetone_woocommerce_output_content_wrapper() {
global $woocommerce;
$sidebar ='';
if(is_single())
{
$left_sidebar = onetone_option('left_sidebar_woo_products','');
$right_sidebar = onetone_option('right_sidebar_woo_products','');
}else {
$left_sidebar = onetone_option('left_sidebar_woo_archive','');
$right_sidebar = onetone_option('right_sidebar_woo_archive','');
}
if( $left_sidebar )
$sidebar = 'left';
if( $right_sidebar )
$sidebar = 'right';
if( $left_sidebar && $right_sidebar )
$sidebar = 'both';
$aside = 'no-aside';
if( $left_sidebar !='' )
$aside = 'left-aside';
if( $right_sidebar !='' )
$aside = 'right-aside';
if( $left_sidebar !='' && $right_sidebar !='' )
$aside = 'both-aside';
echo '<div class="col-md-12">';
do_action("onetone_before_content_wrap");
echo '</div>';
?>
<section class="page-title-bar title-left no-subtitle" style="">
<div class="container">
<hgroup class="page-title">
<?php if(is_shop()):?>
<h1><?php woocommerce_page_title(); ?></h1>
<?php elseif( is_product_category() || is_product_tag() ):?>
<h1><?php single_term_title();?></h1>
<?php else:?>
<h1><?php the_title(); ?></h1>
<?php endif; ?>
</hgroup>
<?php woocommerce_breadcrumb(array("before"=>"","after"=>"","wrap_before"=>"<div class='breadcrumb-nav breadcrumbs' itemprop='breadcrumb'>","delimiter"=>' / ','wrap_after'=>'</div>'));?>
<div class="clearfix"></div>
</div>
</section>
<div class="post-wrap">
<div class="container">
<div class="post-inner row <?php echo $aside; ?>">
<div class="col-main">
<section class="post-main" role="main" id="content">
<div class="woo-product clearfix">
<div class="post-attributes">
<?php
}
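/**
 * Close the WooCommerce content wrapper opened above and render the configured
 * left/right shop sidebars.
 */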
function onetone_woocommerce_output_content_wrapper_end() {
if(is_single())
{
$left_sidebar = onetone_option('left_sidebar_woo_products','');
$right_sidebar = onetone_option('right_sidebar_woo_products','');
$sidebar_l = 'woo_products_left';
$sidebar_r = 'woo_products_right';
}else {
$left_sidebar = onetone_option('left_sidebar_woo_archive','');
$right_sidebar = onetone_option('right_sidebar_woo_archive','');
$sidebar_l = 'woo_archive_left';
$sidebar_r = 'woo_archive_right';
}
echo '</div>
</div></section></div>';
if( $left_sidebar !='' ):?>
<div class="col-aside-left">
<aside class="blog-side left text-left">
<div class="widget-area">
<?php get_sidebar( $sidebar_l );?>
</div>
</aside>
</div>
<?php endif; ?>
<?php if( $right_sidebar !='' ):?>
<div class="col-aside-right">
<?php get_sidebar( $sidebar_r );?>
</div>
<?php endif;
echo '</div></div></div>';
}
function onetone_woocommerce_styles(){
if(is_admin() || 'wp-login.php' == basename($_SERVER['PHP_SELF'])){
return;
}
//wp_enqueue_style('onetone-woocommerce', get_template_directory_uri().'/style/woocommerce.css', false, false, 'all');
}
add_action('wp_print_styles', 'onetone_woocommerce_styles',12);
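/**
 * Keep the header mini-cart link in sync via WooCommerce AJAX cart fragments so the
 * item count and cart total update without a page reload.
 */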
add_filter('add_to_cart_fragments', 'woocommerce_header_add_to_cart_fragment');
function woocommerce_header_add_to_cart_fragment( $fragments ) {
global $woocommerce;
ob_start();
?>
<a class="cart-contents" href="<?php echo $woocommerce->cart->get_cart_url(); ?>" title="<?php _e('View your shopping cart', 'woothemes'); ?>"><?php echo sprintf(_n('%d item', '%d items', $woocommerce->cart->cart_contents_count, 'woothemes'), $woocommerce->cart->cart_contents_count);?> - <?php echo $woocommerce->cart->get_cart_total(); ?></a>
<?php
$fragments['a.cart-contents'] = ob_get_clean();
return $fragments;
}
/*--------------------------------------------------------------------------------------------------
PRODUCTS PAGE - FILTER IMAGE
--------------------------------------------------------------------------------------------------*/
if ( ! function_exists( 'woocommerce_template_loop_product_thumbnail' ) ) {
function woocommerce_template_loop_product_thumbnail() {
echo woocommerce_get_product_thumbnail();
}
}
if ( ! function_exists( 'woocommerce_get_product_thumbnail' ) ) {
function woocommerce_get_product_thumbnail( $size = 'shop_catalog', $placeholder_width = 500, $placeholder_height = 500 ) {
global $post, $woocommerce, $shop_style;
$items_in_cart = array();
if ( $woocommerce->cart->get_cart() && is_array( $woocommerce->cart->get_cart() ) ) {
foreach ( $woocommerce->cart->get_cart() as $cart ) {
$items_in_cart[] = $cart['product_id'];
}
}
$id = get_the_ID();
$in_cart = in_array( $id, $items_in_cart );
$size = 'shop_catalog';
$gallery = get_post_meta( $id, '_product_image_gallery', true );
$attachment_image = '';
if ( ! empty( $gallery ) ) {
$gallery = explode( ',', $gallery );
$first_image_id = $gallery[0];
$attachment_image = wp_get_attachment_image( $first_image_id, $size, false, array( 'class' => 'hover-image' ) );
}
if( $shop_style == 1 ):
$output = '<div class="product-image">';
if ( has_post_thumbnail() ) {
$thumb = get_the_post_thumbnail( get_the_ID() , "shop_catalog" );
$output .= '<div class="product-image-front">
'.$thumb.'
</div>
<div class="product-image-back">
'.$attachment_image.'
</div>';
} else {
$output .= '<img src="'. woocommerce_placeholder_img_src() .'" alt="Placeholder" width="'.$placeholder_width.'" height="'.$placeholder_height.'" />';
}
$output .= '<div class="product-image-overlay"></div>
</div>';
else:
if ( has_post_thumbnail() ) {
$thumb = get_the_post_thumbnail( get_the_ID() , "shop_catalog" );
$image = '<div class="product-image-front">
'.$thumb.'
</div>
<div class="product-image-back">
'.$attachment_image.'
</div>';
} else {
$image = '<img src="'. woocommerce_placeholder_img_src() .'" alt="Placeholder" width="'.$placeholder_width.'" height="'.$placeholder_height.'" />';
}
$output = '<div class="product-image"><a href="'.get_permalink().'">'.$image.'
<div class="product-image-overlay"></div>
</a>
<div class="product-action"><a href="#">
</a>';
// $output .= '<a href="#" class="add_to_cart_button"><i class="fa fa-shopping-cart"></i> ADD TO CART</a>';
global $product;
$icon = $product->is_purchasable() && $product->is_in_stock() ? '<i class="fa fa-shopping-cart"></i>':'';
$output .= apply_filters( 'woocommerce_loop_add_to_cart_link',
sprintf( '<a href="%s" rel="nofollow" data-product_id="%s" data-product_sku="%s" data-quantity="%s" class="button %s product_type_%s">%s</a>',
esc_url( $product->add_to_cart_url() ),
esc_attr( $product->id ),
esc_attr( $product->get_sku() ),
esc_attr( isset( $quantity ) ? $quantity : 1 ),
$product->is_purchasable() && $product->is_in_stock() ? 'add_to_cart_button' : '',
esc_attr( $product->product_type ),
$icon.' '.esc_html( $product->add_to_cart_text() )
),
$product );
$output .= '</div></div>';
endif;
return $output;
}
}
function onetone_short_desc_filter($content){
$content = str_replace ('<p>','<p class="desc">',$content);
return $content;
}
add_filter('woocommerce_short_description', 'onetone_short_desc_filter');
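/**
 * Set (or overwrite) a single query-string parameter on the given URL and return
 * the rebuilt URL.
 */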
function onetone_url_set_value($url,$key,$value)
{
$a=explode('?',$url);
$url_f=$a[0];
$query=isset($a[1])?$a[1]:"";
parse_str($query,$arr);
$arr[$key]=$value;
return $url_f.'?'.http_build_query($arr);
}
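/**
 * Rebuild the full URL of the current request (protocol, host, port and request URI)
 * from the $_SERVER superglobal.
 */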
function onetone_get_self_url(){
if (isset($_SERVER['REQUEST_URI']))
{
$serverrequri = $_SERVER['REQUEST_URI'];
}
else
{
if (isset($_SERVER['argv']))
{
$serverrequri = $_SERVER['PHP_SELF'] .'?'. $_SERVER['argv'][0];
}
else if(isset($_SERVER['QUERY_STRING']))
{
$serverrequri = $_SERVER['PHP_SELF'] .'?'. $_SERVER['QUERY_STRING'];
}
}
$s = empty($_SERVER["HTTPS"]) ? '' : ($_SERVER["HTTPS"] == "on") ? "s" : "";
$protocol = strstr(strtolower($_SERVER["SERVER_PROTOCOL"]), "/",true).$s;
$port = ($_SERVER["SERVER_PORT"] == "80") ? "" : (":".$_SERVER["SERVER_PORT"]);
return $protocol."://".$_SERVER['SERVER_NAME'].$port.$serverrequri;
}
function onetone_header_shopping_cart(){
global $woocommerce;
}
/**
 * Display recently viewed products using WooCommerce's default cookie.
 *
 * Based on the [woocommerce_recently_viewed_products per_page="5"] shortcode;
 * here it is hooked to woocommerce_cart_is_empty, so the "per_page" attribute is
 * not available and the number of items falls back to its default of 4.
 *
 * @access public
 * @since 1.0
 */
function onetone_woocommerce_recently_viewed_products( ) {
// Get WooCommerce Global
global $woocommerce;
// Get recently viewed product cookies data
$viewed_products = ! empty( $_COOKIE['woocommerce_recently_viewed'] ) ? (array) explode( '|', $_COOKIE['woocommerce_recently_viewed'] ) : array();
$viewed_products = array_filter( array_map( 'absint', $viewed_products ) );
// If no data, quit
if ( empty( $viewed_products ) )
return __( 'You have not viewed any product yet!', 'onetone' );
// Create the object
// Get products per page ($per_page is not defined in this action callback, so default to 4)
$number = isset( $per_page ) ? $per_page : 4;
// Create query arguments array
$query_args = array(
'posts_per_page' => $number,
'no_found_rows' => 1,
'post_status' => 'publish',
'post_type' => 'product',
'post__in' => $viewed_products,
'orderby' => 'rand'
);
// Add meta_query to query args
$query_args['meta_query'] = array();
// Check products stock status
$query_args['meta_query'][] = $woocommerce->query->stock_status_meta_query();
// Create a new query
$r = new WP_Query($query_args);
// If query return results
if ( $r->have_posts() ) {
woocommerce_product_loop_start();
// Start the loop
while ( $r->have_posts()) {
$r->the_post();
wc_get_template_part( 'content', 'product' );
}
woocommerce_product_loop_end();
}
wp_reset_query();
}
add_action("woocommerce_cart_is_empty", "onetone_woocommerce_recently_viewed_products");
function onetone_before_cart(){}
add_action("woocommerce_before_cart", "onetone_before_cart",0);
function onetone_before_checkout_form(){}
add_action("woocommerce_before_checkout_form", "onetone_before_checkout_form",20);
function onetone_before_thankyou(){}
add_action("woocommerce_before_thankyou", "onetone_before_thankyou");
function onetone_after_nav_menu(){}
add_action("onetone_before_nav_menu", "onetone_after_nav_menu");<file_sep>/content-article.php
<div class="entry-box-wrap" id="post-<?php the_ID(); ?>">
<article class="entry-box" role="article">
<?php if ( has_post_thumbnail() ): ?>
<div class="feature-img-box">
<div class="img-box figcaption-middle text-center from-top fade-in">
<a href="<?php the_permalink();?>">
<?php the_post_thumbnail();?>
<div class="img-overlay dark">
<div class="img-overlay-container">
<div class="img-overlay-content">
<i class="fa fa-link"></i>
</div>
</div>
</div>
</a>
</div>
</div>
<?php endif;?>
<div class="entry-main">
<div class="entry-header">
<a href="<?php the_permalink();?>"><h1 class="entry-title"><?php the_title();?></h1></a>
<ul class="entry-meta">
<li class="entry-date"><i class="fa fa-calendar"></i><a href="<?php echo get_month_link(get_the_time('Y'), get_the_time('m'));?>"><?php echo get_the_date("M d, Y");?></a></li>
<li class="entry-author"><i class="fa fa-user"></i><?php echo get_the_author_link();?></li>
<li class="entry-catagory"><i class="fa fa-file-o"></i><?php the_category(', '); ?></li>
<li class="entry-comments"><i class="fa fa-comment"></i><a href="<?php the_permalink();?>#comments"><?php comments_popup_link( __('No comments yet','onetone'), __('1 comment','onetone'), __('% comments','onetone'), 'comments-link', '');?></a></li>
</ul>
</div>
<div class="entry-summary">
<?php the_excerpt();?>
</div>
<div class="entry-footer">
<a href="<?php the_permalink();?>" class="entry-more pull-right"><?php _e("Read More","onetone");?> >></a>
</div>
</div>
</article>
</div><file_sep>/functions.php
<?php
//Jesus: remove autoformatting in pages
function custom_wpautop($content) {
if (is_attachment() || is_single()) {
return wpautop($content);
} else {
return $content;
}
}
remove_filter('the_content', 'wpautop');
add_filter('the_content', 'custom_wpautop');
//Jesus: remove admin bar in front
function my_function_admin_bar(){ return false; }
add_filter( 'show_admin_bar' , 'my_function_admin_bar');
define( 'ONETONE_THEME_BASE_URL', get_template_directory_uri());
define( 'ONETONE_OPTIONS_FRAMEWORK', get_template_directory().'/admin/' );
define( 'ONETONE_OPTIONS_FRAMEWORK_URI', ONETONE_THEME_BASE_URL. '/admin/');
define( 'ONETONE_OPTIONS_PREFIXED' ,'onetone_' );
define( 'OPTIONS_FRAMEWORK_DIRECTORY', get_template_directory_uri() . '/admin/' );
require_once dirname( __FILE__ ) . '/admin/options-framework.php';
/**
* google fonts
**/
load_template( trailingslashit( get_template_directory() ) . 'includes/google-fonts.php' );
require_once get_template_directory() . '/includes/admin-options.php';
/**
* Theme Functions
**/
load_template( trailingslashit( get_template_directory() ) . 'includes/theme-functions.php' );
/**
* Required: include options framework.
**/
load_template( trailingslashit( get_template_directory() ) . 'admin/options-framework.php' );
/**
* Mobile Detect Library
**/
if(!class_exists("Mobile_Detect")){
load_template( trailingslashit( get_template_directory() ) . 'includes/Mobile_Detect.php' );
}
/**
* Theme setup
**/
load_template( trailingslashit( get_template_directory() ) . 'includes/theme-setup.php' );
/**
* Onetone Shortcodes
**/
load_template( trailingslashit( get_template_directory() ) . 'includes/shortcodes.php' );
/**
* Theme breadcrumb
*/
load_template( trailingslashit( get_template_directory() ) . 'includes/breadcrumb-trail.php');
/**
* Theme widget
**/
load_template( trailingslashit( get_template_directory() ) . 'includes/theme-widget.php' );
/**
* Meta box
**/
load_template( trailingslashit( get_template_directory() ) . 'includes/metabox-options.php' );
/**
* Woocommerce template
**/
if (class_exists('WooCommerce')) {
require_once ( get_template_directory() .'/woocommerce/config.php' );
}
/**
* Magee Importer
*/
require get_template_directory() . '/lib/importer/importer.php';
/**
* Magee shortcodes
*/
if( ! class_exists( 'Magee_Core' ) )
require get_template_directory() . '/lib/magee-shortcodes-pro/Magee.php';
add_filter('widget_text', 'do_shortcode');
function onetone_deactivate_plugin_conditional() {
if ( is_plugin_active('magee-shortcodes/Magee.php') ) {
deactivate_plugins('magee-shortcodes/Magee.php');
}
if ( is_plugin_active('magee-shortcodes-pro/Magee.php') ) {
deactivate_plugins('magee-shortcodes-pro/Magee.php');
}
}
add_action( 'admin_init', 'onetone_deactivate_plugin_conditional' );<file_sep>/home-sections/section-html5-video.php
<?php
$video_background_section = onetone_option( 'video_background_section' );
$i = $video_background_section-1 ;
$section_title = onetone_option( 'section_title_'.$i );
$section_menu = onetone_option( 'menu_title_'.$i );
$parallax_scrolling = onetone_option( 'parallax_scrolling_'.$i );
$section_css_class = onetone_option( 'section_css_class_'.$i );
$section_content = onetone_option( 'section_content_'.$i );
$full_width = onetone_option( 'full_width_'.$i );
$section_subtitle = onetone_option( 'section_subtitle_'.$i );
$mp4_video_url = onetone_option( 'mp4_video_url' );
$ogv_video_url = onetone_option( 'ogv_video_url' );
$webm_video_url = onetone_option( 'webm_video_url' );
$poster_url = onetone_option( 'poster_url' );
$video_loop = onetone_option( 'video_loop' );
$video_volume = onetone_option( 'video_volume' );
$video_volume = $video_volume == "" ? 0.8 : $video_volume ;
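// Fall back to the 'sction_content_' key below — presumably a legacy, misspelled option name kept for backward compatibility.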
if( !isset($section_content) || $section_content=="" )
$section_content = onetone_option( 'sction_content_'.$i );
$section_id = sanitize_title( onetone_option( 'menu_slug_'.$i ,'section-'.($i+1) ) );
if( $section_id == '' )
$section_id = 'section-'.($i+1);
$container_class = "container";
if( $full_width == "yes" ){
$container_class = "";
}
if( $parallax_scrolling == "yes" || $parallax_scrolling == "1" ){
$section_css_class .= ' onetone-parallax';
}
?>
<section id="<?php echo $section_id; ?>" class="section home-section-<?php echo $video_background_section;?> <?php echo $section_css_class;?> onetone-html5-section video-section">
<?php get_template_part('home-sections/section',$video_background_section);?>
<div class="clear"></div>
<?php
$header_cover_video_background_html5 = onetone_option( "header_cover_video_background_html5" ,1);
if( $video_loop == 1 ){
$video_loop = "true";
}
else{
$video_loop = "false";
}
echo '<script type="text/javascript" src="'.get_template_directory_uri().'/plugins/jquery-ui.min.js"></script>';
echo '<script type="text/javascript" src="'.get_template_directory_uri().'/plugins/video.js"></script>';
echo '<script type="text/javascript" src="'.get_template_directory_uri().'/plugins/bigvideo.js"></script>';
echo '<script type="text/javascript" >
var BV;
var BV = new jQuery.BigVideo({
useFlashForFirefox:false,
forceAutoplay:true,
controls:false,
doLoop:'.$video_loop.',
';
if( $header_cover_video_background_html5 == '0'){
echo 'container:jQuery(".onetone-html5-section")';
}
echo '});
BV.init();';
// echo 'if (Modernizr.touch) { BV.show("'.$poster_url.'"); } else {';
echo 'BV.show(
[
{ type: "video/mp4", src: "'.$mp4_video_url.'" },
{ type: "video/webm", src: "'.$webm_video_url.'" },
{ type: "video/ogg", src: "'.$ogv_video_url.'" }
],{ambient:'.$video_loop.'});
BV.getPlayer().volume('.$video_volume.');
BV.getPlayer().on("durationchange",function(){jQuery("#big-video-wrap").fadeIn();});';
//echo '}';
echo '</script>';
wp_localize_script( 'onetone-default', 'onetone_video',array('header_cover_video_background_html5'=>$header_cover_video_background_html5));
?>
</section><file_sep>/includes/theme-functions.php
<?php
/*
* get background
* ---------------------------------------------------------------------
*/
function onetone_get_background($args){
$background = "";
if (is_array($args)) {
if (isset($args['image']) && $args['image']!="") {
// Jesús: Changed because the original combined background rule didn't work well
//$background .= "background:url(".$args['image']. ") ".$args['repeat']." ".$args['position']." ".$args['attachment'].";";
// New code
$background .= "background-image:url(".$args['image']. ");";
if( !$parallax_scrolling == "yes" || !$parallax_scrolling == "1" ){
$background .= "background-repeat:".$args['repeat']. ";";
$background .= "background-position:".$args['position']. ";";
$background .= "background-attachment:".$args['attachment']. ";";
}
//
}
if(isset($args['color']) && $args['color'] !=""){
$background .= "background-color:".$args['color'].";";
}
}
return $background;
}
/*
* send email
* ---------------------------------------------------------------------
*/
function onetone_contact(){
if(trim($_POST['Name']) === '') {
$Error = __('Please enter your name.','onetone');
$hasError = true;
} else {
$name = trim($_POST['Name']);
}
if(trim($_POST['Email']) === '') {
$Error = __('Please enter your email address.','onetone');
$hasError = true;
} else if (!preg_match("/^[[:alnum:]][a-z0-9_.-]*@[a-z0-9.-]+\.[a-z]{2,4}$/i", trim($_POST['Email']))) {
$Error = __('You entered an invalid email address.','onetone');
$hasError = true;
} else {
$email = trim($_POST['Email']);
}
if(trim($_POST['Message']) === '') {
$Error = __('Please enter a message.','onetone');
$hasError = true;
} else {
if(function_exists('stripslashes')) {
$message = stripslashes(trim($_POST['Message']));
} else {
$message = trim($_POST['Message']);
}
}
if(!isset($hasError)) {
if (isset($_POST['sendto']) && preg_match("/^[[:alnum:]][a-z0-9_.-]*@[a-z0-9.-]+\.[a-z]{2,4}$/i", trim(base64_decode($_POST['sendto'])))) {
$emailTo = base64_decode($_POST['sendto']);
}
else{
$emailTo = get_option('admin_email');
}
if($emailTo !=""){
$subject = 'From '.$name;
$body = "Name: $name \n\nEmail: $email \n\nMessage: $message";
$headers = 'From: '.$name.' <'.$emailTo.'>' . "\r\n" . 'Reply-To: ' . $email;
wp_mail($emailTo, $subject, $body, $headers);
$emailSent = true;
}
echo json_encode(array("msg"=>__("Your message has been successfully sent!","onetone"),"error"=>0));
}
else
{
echo json_encode(array("msg"=>$Error,"error"=>1));
}
die() ;
}
add_action('wp_ajax_onetone_contact', 'onetone_contact');
add_action('wp_ajax_nopriv_onetone_contact', 'onetone_contact');
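/**
 * AJAX handler for the advanced contact form: serialized form fields arrive in
 * $_POST['values'], are turned into the e-mail body, and the result is returned as JSON.
 */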
function onetone_contact_advanced(){
$body = '';
$email = '';
if (isset($_POST['sendto']) && preg_match("/^[[:alnum:]][a-z0-9_.-]*@[a-z0-9.-]+\.[a-z]{2,4}$/i", trim(base64_decode($_POST['sendto'])))) {
$emailTo = base64_decode($_POST['sendto']);
}
else{
$emailTo = get_option('admin_email');
}
if($emailTo !=""){
$subject = 'From '.get_bloginfo('name');
parse_str($_POST['values'], $values);
if( is_array($values) ){
foreach( $values as $k => $v ){
//$body .= str_replace('_',' ',$k).': '.$v.' <br/><br/>';
$body .= str_replace('_',' ',$k).': '.utf8_encode(htmlentities($v)).' <br/><br/>';
if( strpos(strtolower($k),'email') && $v != "" ){
$email = $v;
}
}
}
$headers = 'From: '.get_bloginfo('name').' <'.$emailTo.'>' . "\r\n" . 'Reply-To: ' . $email . "\r\n";
$headers .= "MIME-Version: 1.0\r\n";
$headers .= "Content-Type: text/html; charset=ISO-8859-1\r\n";
wp_mail($emailTo, $subject, $body, $headers);
$emailSent = true;
}
echo json_encode(array("msg"=>__("Your message has been successfully sent!","onetone"),"error"=>0));
die() ;
}
add_action('wp_ajax_onetone_contact_advanced', 'onetone_contact_advanced');
add_action('wp_ajax_nopriv_onetone_contact_advanced', 'onetone_contact_advanced');
// get breadcrumbs
function onetone_get_breadcrumb( $options = array()){
global $post,$wp_query ;
$postid = isset($post->ID)?$post->ID:"";
$show_breadcrumb = "";
if ( 'page' == get_option( 'show_on_front' ) && ( '' != get_option( 'page_for_posts' ) ) && $wp_query->get_queried_object_id() == get_option( 'page_for_posts' ) ) {
$postid = $wp_query->get_queried_object_id();
}
if(isset($postid) && is_numeric($postid)){
$show_breadcrumb = get_post_meta( $postid, '_onetone_show_breadcrumb', true );
}
if($show_breadcrumb == 'yes' || $show_breadcrumb==""){
onetone_breadcrumb_trail( $options);
}
}
/*
* page navigation
*
*/
function onetone_native_pagenavi($echo,$wp_query){
if(!$wp_query){global $wp_query;}
global $wp_rewrite;
$wp_query->query_vars['paged'] > 1 ? $current = $wp_query->query_vars['paged'] : $current = 1;
$pagination = array(
'base' => @add_query_arg('paged','%#%'),
'format' => '',
'total' => $wp_query->max_num_pages,
'current' => $current,
'prev_text' => '<i class="fa fa-angle-double-left"></i>',
'next_text' => '<i class="fa fa-angle-double-right"></i>',
'type' => 'list',
);
if( $wp_rewrite->using_permalinks() )
$pagination['base'] = user_trailingslashit( trailingslashit( remove_query_arg('s',get_pagenum_link(1) ) ) . 'page/%#%/', 'paged');
if( !empty($wp_query->query_vars['s']) )
$pagination['add_args'] = array('s'=>get_query_var('s'));
if($echo == "echo"){
echo '<div class="page_navi text-center">'.paginate_links($pagination).'</div>';
}else
{
return '<div class="page_navi text-center">'.paginate_links($pagination).'</div>';
}
}
//// Custom comments list
function onetone_comment($comment, $args, $depth) {
$GLOBALS['comment'] = $comment; ?>
<li <?php comment_class(); ?> id="li-comment-<?php comment_ID() ;?>">
<div id="comment-<?php comment_ID(); ?>">
<div class="comment media-comment media">
<div class="media-avatar media-left">
<?php echo get_avatar($comment,'52','' ); ?>
</div>
<div class="media-body">
<div class="media-inner">
<h4 class="media-heading clearfix">
<?php echo get_comment_author_link();?> - <a href="<?php echo htmlspecialchars( get_comment_link( $comment->comment_ID ) ) ;?>">
<?php printf(__('%1$s at %2$s','onetone'), get_comment_date(), get_comment_time()) ;?></a>
<?php edit_comment_link(__('(Edit)','onetone'),' ','') ;?>
<?php comment_reply_link(array_merge( $args, array('reply_text' => '<i class="fa fa-reply"></i> '. __('Reply','onetone'), 'depth' => $depth, 'max_depth' => $args['max_depth']))) ;?>
</h4>
<?php if ($comment->comment_approved == '0') : ?>
<em><?php _e('Your comment is awaiting moderation.','onetone') ;?></em>
<br />
<?php endif; ?>
<?php comment_text() ;?>
</div>
</div>
</div>
<div class="clear"></div>
</div>
<?php
}
function onetone_get_default_slider(){
$sanitize_title = "home";
$section_menu = onetone_option( 'menu_title_0' );
$section_slug = onetone_option( 'menu_slug_0' );
if( $section_menu != "" ){
$sanitize_title = sanitize_title($section_menu );
if( trim($section_slug) !="" ){
$sanitize_title = sanitize_title($section_slug);
}
}
$return = '<section id="'.$sanitize_title.'" class="section homepage-slider onetone-'.$sanitize_title.'"><div id="onetone-owl-slider" class="owl-carousel owl-theme">';
for($i=1;$i<=5;$i++){
$active = '';
$text = onetone_option('onetone_slide_text_'.$i);
$image = onetone_option('onetone_slide_image_'.$i);
if( $image != "" ){
$return .= '<div class="item"><img src="'.$image.'" alt=""><div class="inner">'. do_shortcode($text) .'</div></div>';
}
}
$return .= '</div></section>';
return $return;
}
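/*
 * Illustrative usage: echo onetone_get_default_slider();
 * Builds the homepage slider section from the onetone_slide_image_1..5 /
 * onetone_slide_text_1..5 theme options; slides without an image are skipped.
 */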
/**
* onetone admin panel menu
*/
add_action( 'optionsframework_page_title_after','onetone_options_page_title' );
function onetone_options_page_title() { ?>
<ul class="options-links">
<li><a href="<?php echo esc_url( 'http://www.mageewp.com/wordpress-themes' ); ?>" target="_blank"><?php _e( 'MageeWP Themes', 'onetone' ); ?></a></li>
<li><a href="<?php echo esc_url( 'http://www.mageewp.com/manuals/theme-guide-onetone.html' ); ?>" target="_blank"><?php _e( 'Manual', 'onetone' ); ?></a></li>
<li><a href="<?php echo esc_url( 'http://www.mageewp.com/documents/faq/' ); ?>" target="_blank"><?php _e( 'FAQ', 'onetone' ); ?></a></li>
<li><a href="<?php echo esc_url( 'http://www.mageewp.com/knowledges/' ); ?>" target="_blank"><?php _e( 'Knowledge', 'onetone' ); ?></a></li>
<li><a href="<?php echo esc_url( 'http://www.mageewp.com/forums/onetone/' ); ?>" target="_blank"><?php _e( 'Support Forums', 'onetone' ); ?></a></li>
</ul>
<?php
}
if ( ! function_exists( '_wp_render_title_tag' ) ) {
function onetone_wp_title( $title, $sep ) {
global $paged, $page;
if ( is_feed() )
return $title;
// Add the site name.
$title .= get_bloginfo( 'name' );
// Add the site description for the home/front page.
$site_description = get_bloginfo( 'description', 'display' );
if ( $site_description && ( is_home() || is_front_page() ) )
$title = "$title $sep $site_description";
// Add a page number if necessary.
if ( $paged >= 2 || $page >= 2 )
$title = "$title $sep " . sprintf( __( ' Page %s ', 'onetone' ), max( $paged, $page ) );
return $title;
}
add_filter( 'wp_title', 'onetone_wp_title', 10, 2 );
}
if ( ! function_exists( '_wp_render_title_tag' ) ) {
function onetone_slug_render_title() {
?>
<title><?php wp_title( '|', true, 'right' ); ?></title>
<?php
}
add_action( 'wp_head', 'onetone_slug_render_title' );
}
/**
* back to top
*/
function onetone_back_to_top(){
$back_to_top_btn = onetone_option("back_to_top_btn");
if( $back_to_top_btn != "hide" ){
echo '<a href="javascript:;">
<div id="back-to-top">
<span class="fa fa-arrow-up"></span>
<span>'.__("TOP","onetone").'</span>
</div>
</a>';
}
}
add_action( 'wp_footer', 'onetone_back_to_top' );
// get social icon
function onetone_get_social( $position, $class = 'top-bar-sns',$placement='top',$target='_blank'){
global $social_icons;
$return = '';
$rel = '';
$social_links_nofollow = onetone_option( 'social_links_nofollow','no' );
$social_new_window = onetone_option( 'social_new_window','yes' );
if( $social_new_window == 'no')
$target = '_self';
if( $social_links_nofollow == 'yes' )
$rel = 'nofollow';
if(is_array($social_icons) && !empty($social_icons)):
$return .= '<ul class="'.esc_attr($class).'">';
$i = 1;
foreach($social_icons as $sns_list_item){
$icon = onetone_option( $position.'_social_icon_'.$i,'' );
$title = onetone_option( $position.'_social_title_'.$i,'' );
$link = onetone_option( $position.'_social_link_'.$i,'' );
if( $icon !="" ){
$return .= '<li><a target="'.esc_attr($target).'" rel="'.$rel.'" href="'.esc_url($link).'" data-placement="'.esc_attr($placement).'" data-toggle="tooltip" title="'.esc_attr( $title).'"><i class="fa fa-'.esc_attr( $icon).'"></i></a></li>';
}
$i++;
}
$return .= '</ul>';
endif;
return $return ;
}
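/*
 * Illustrative usage: echo onetone_get_social( 'header', 'top-bar-sns', 'bottom' );
 * Returns a <ul> of icon links built from the {position}_social_icon_N,
 * {position}_social_title_N and {position}_social_link_N theme options.
 */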
// get top bar content
function onetone_get_topbar_content( $type =''){
switch( $type ){
case "info":
echo '<div class="top-bar-info">';
echo onetone_option('top_bar_info_content');
echo '</div>';
break;
case "sns":
$tooltip_position = onetone_option('top_social_tooltip_position','bottom');
echo onetone_get_social('header','top-bar-sns',$tooltip_position);
break;
case "menu":
echo '<nav class="top-bar-menu">';
wp_nav_menu(array('theme_location'=>'top_bar_menu','depth'=>1,'fallback_cb' =>false,'container'=>'','container_class'=>'','menu_id'=>'','menu_class'=>'','link_before' => '<span>', 'link_after' => '</span>','items_wrap'=> '<ul id="%1$s" class="%2$s">%3$s</ul>'));
echo '</nav>';
break;
case "none":
break;
}
}
/**
* Convert Hex Code to RGB
* @param string $hex Color Hex Code
* @return array RGB values
*/
function onetone_hex2rgb( $hex ) {
if ( strpos( $hex,'rgb' ) !== FALSE ) {
$rgb_part = strstr( $hex, '(' );
$rgb_part = trim($rgb_part, '(' );
$rgb_part = rtrim($rgb_part, ')' );
$rgb_part = explode( ',', $rgb_part );
$rgb = array( $rgb_part[0], $rgb_part[1], $rgb_part[2], isset( $rgb_part[3] ) ? $rgb_part[3] : '1' );
} elseif( $hex == 'transparent' ) {
$rgb = array( '255', '255', '255', '0' );
} else {
$hex = str_replace( '#', '', $hex );
if( strlen( $hex ) == 3 ) {
$r = hexdec( substr( $hex, 0, 1 ) . substr( $hex, 0, 1 ) );
$g = hexdec( substr( $hex, 1, 1 ) . substr( $hex, 1, 1 ) );
$b = hexdec( substr( $hex, 2, 1 ) . substr( $hex, 2, 1 ) );
} else {
$r = hexdec( substr( $hex, 0, 2 ) );
$g = hexdec( substr( $hex, 2, 2 ) );
$b = hexdec( substr( $hex, 4, 2 ) );
}
$rgb = array( $r, $g, $b );
}
return $rgb; // returns an array with the rgb values
}
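/*
 * Illustrative examples:
 *   onetone_hex2rgb( '#336699' )         => array( 51, 102, 153 )
 *   onetone_hex2rgb( '#369' )            => array( 51, 102, 153 )
 *   onetone_hex2rgb( 'rgba(0,0,0,0.5)' ) => array( '0', '0', '0', '0.5' )
 *   onetone_hex2rgb( 'transparent' )     => array( '255', '255', '255', '0' )
 */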
/**
* load less
*/
function onetone_enqueue_less_styles($tag, $handle) {
global $wp_styles;
$match_pattern = '/\.less$/U';
if ( preg_match( $match_pattern, $wp_styles->registered[$handle]->src ) ) {
$handle = $wp_styles->registered[$handle]->handle;
$media = $wp_styles->registered[$handle]->args;
$href = $wp_styles->registered[$handle]->src . '?ver=' . $wp_styles->registered[$handle]->ver;
$rel = isset($wp_styles->registered[$handle]->extra['alt']) && $wp_styles->registered[$handle]->extra['alt'] ? 'alternate stylesheet' : 'stylesheet';
$title = isset($wp_styles->registered[$handle]->extra['title']) ? "title='" . esc_attr( $wp_styles->registered[$handle]->extra['title'] ) . "'" : '';
$tag = "<link rel='stylesheet' id='$handle' $title href='$href' type='text/less' media='$media' />\n";
}
return $tag;
}
add_filter( 'style_loader_tag', 'onetone_enqueue_less_styles', 5, 2);
// get related posts
function onetone_get_related_posts($post_id, $number_posts = -1,$post_type = 'post',$taxonomies='category') {
//$query = new WP_Query();
$categories = array();
$terms = wp_get_object_terms( $post_id, $taxonomies );
if ( ! empty( $terms ) ) {
if ( ! is_wp_error( $terms ) ) {
foreach( $terms as $term ) {
$categories[] = $term->term_id;
}
}
}
if( $post_type == 'post' )
$args = array('category__in' => $categories);
else
$args = array('tax_query' => array(
array(
'taxonomy' => $taxonomies,
'field' => 'term_id',
'terms' => $categories,
),
),);
if($number_posts == 0) {
$query = new WP_Query();
return $query;
}
$args = wp_parse_args($args, array(
'posts_per_page' => $number_posts,
'post__not_in' => array($post_id),
'ignore_sticky_posts' => 0,
'meta_key' => '_thumbnail_id',
'post_type' =>$post_type,
'operator' => 'IN'
));
$query = new WP_Query($args);
wp_reset_postdata();
return $query;
}
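/*
 * Illustrative usage: $related = onetone_get_related_posts( get_the_ID(), 3 );
 * Returns a WP_Query of up to 3 posts that share a category (or taxonomy term)
 * with the given post, excluding the post itself and requiring a featured image.
 */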
if ( ! function_exists( 'onetone_paging_nav' ) ) :
/**
* Display navigation to next/previous set of posts when applicable.
*/
function onetone_paging_nav($echo='echo',$wp_query='') {
if(!$wp_query){global $wp_query;}
global $wp_rewrite;
$current = ( $wp_query->query_vars['paged'] > 1 ) ? $wp_query->query_vars['paged'] : 1;
$pagination = array(
'base' => @add_query_arg('paged','%#%'),
'format' => '?page=%#%',
'total' => $wp_query->max_num_pages,
'current' => $current,
'show_all' => false,
'end_size' => 1,
'mid_size' => 2,
'prev_next' => true,
'prev_text' => __(' Prev', 'onetone'),
'next_text' => __('Next ', 'onetone'),
'type' => 'list',
'add_args' => false,
'add_fragment' => '',
'before_page_number' => '',
'after_page_number' => ''
);
if( $wp_rewrite->using_permalinks() )
$pagination['base'] = user_trailingslashit( trailingslashit( remove_query_arg('s',get_pagenum_link(1) ) ) . 'page/%#%/', 'paged');
if( !empty($wp_query->query_vars['s']) )
$pagination['add_args'] = array('s'=>get_query_var('s'));
if( $wp_query->max_num_pages > 1 ){
if($echo == "echo"){
echo '<nav class="post-list-pagination" role="navigation">
<div class="post-pagination-decoration text-center">
'.paginate_links($pagination).'</div></nav>';
}else
{
return '<nav class="post-list-pagination" role="navigation">
<div class="post-pagination-decoration text-center">'.paginate_links($pagination).'</div></nav>';
}
}
}
endif;
/**
* Display navigation to next/previous post when applicable.
*/
if ( ! function_exists( 'onetone_post_nav' ) ) :
function onetone_post_nav() {
// Don't print empty markup if there's nowhere to navigate.
$previous = ( is_attachment() ) ? get_post( get_post()->post_parent ) : get_adjacent_post( false, '', true );
$next = get_adjacent_post( false, '', false );
if ( ! $next && ! $previous ) {
return;
}
?>
<nav class="post-pagination" role="navigation">
<ul class="clearfix">
<?php
previous_post_link( '<li class="pull-left">%link</li>', '%title' );
next_post_link( '<li class="pull-right">%link</li>', '%title' );
?>
</ul>
</nav>
<!-- .navigation -->
<?php
}
endif;
// get post content css class
function onetone_get_content_class( $sidebar = '' ){
if( $sidebar == 'left' )
return 'left-aside';
if( $sidebar == 'right' )
return 'right-aside';
if( $sidebar == 'both' )
return 'both-aside';
if( $sidebar == 'none' )
return 'no-aside';
return 'no-aside';
}
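// Illustrative mapping: 'left' => 'left-aside', 'right' => 'right-aside',
// 'both' => 'both-aside', anything else => 'no-aside'.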
// remove woocommerce page title
function onetone_woocommerce_show_page_title(){
return false;
}
add_filter('woocommerce_show_page_title','onetone_woocommerce_show_page_title');
// fix shortcode
function onetone_fix_shortcodes($content){
$replace_tags_from_to = array (
'<p>[' => '[',
']</p>' => ']',
']<br />' => ']',
']<br>' => ']',
']\r\n' => ']',
']\n' => ']',
']\r' => ']',
'\r\n[' => '[',
);
return strtr( $content, $replace_tags_from_to );
}
function onetone_the_content_filter($content) {
$content = onetone_fix_shortcodes($content);
return $content;
}
add_filter( 'the_content', 'onetone_the_content_filter' );
// cover excerpt length
function onetone_get_excerpt($count,$postid){
$permalink = get_permalink($postid);
$excerpt = get_the_content();
$excerpt = strip_tags($excerpt);
$excerpt = substr($excerpt, 0, $count);
$excerpt = substr($excerpt, 0, strripos($excerpt, " "));
$excerpt = $excerpt.'[...]';
return $excerpt;
}
// cover content length
function onetone_cover_content($count,$content){
$content = substr($content, 0, $count);
$content = substr($content, 0, strripos($content, " "));
$content = $content.'[...]';
return $content;
}
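// Illustrative example: onetone_cover_content( 100, $text ) keeps the text up to
// the last space before the 100th character and appends '[...]'.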
function onetone_tinymce_config( $init ) {
// IFRAME
$valid_iframe = 'iframe[id|class|title|style|align|frameborder|height|longdesc|marginheight|marginwidth|name|scrolling|src|width],span[class|id|title|style],a[href],ul[id|class|style],br,p';
// Add to extended_valid_elements if it already exists
if ( isset( $init['extended_valid_elements'] ) ) {
$init['extended_valid_elements'] .= ',' . $valid_iframe;
} else {
$init['extended_valid_elements'] = $valid_iframe;
}
// Pass $init back to WordPress
return $init;
}
//add_filter('tiny_mce_before_init', 'onetone_tinymce_config');
// ################################## fonts family
/**
* Returns an array of system fonts
*/
function onetone_options_typography_get_os_fonts() {
// OS Font Defaults
$os_faces = array(
'Arial, sans-serif' => 'Arial',
// '"Avant Garde", sans-serif' => 'Avant Garde',
'Cambria, Georgia, serif' => 'Cambria',
'Calibri,sans-serif' => 'Calibri' ,
'Copse, sans-serif' => 'Copse',
'Garamond, "Hoefler Text", Times New Roman, Times, serif' => 'Garamond',
'Georgia, serif' => 'Georgia',
'"Helvetica Neue", Helvetica, sans-serif' => 'Helvetica Neue',
'Tahoma, Geneva, sans-serif' => 'Tahoma'
);
return $os_faces;
}
/**
* Returns a select list of Google fonts
* Feel free to edit this, update the fallbacks, etc.
*/
function onetone_options_typography_get_google_fonts() {
// Google Font Defaults
global $google_fonts_json;
$googleFontArray = array();
$fontArray = json_decode($google_fonts_json, true);
foreach($fontArray['items'] as $index => $value){
$_family = strtolower( str_replace(' ','_',$value['family']) );
$googleFontArray[$_family]['family'] = $value['family'];
$googleFontArray[$_family]['variants'] = $value['variants'];
$googleFontArray[$_family]['subsets'] = $value['subsets'];
$category = '';
if( isset($value['category']) ) $category = ', '.$value['category'];
$googleFontArray['onetone_of_family'][$value['family'].$category] = $value['family'];
}
return $googleFontArray;
}
/*
* get typography
*
*/
function onetone_options_typography_enqueue_google_font($font) {
$googleFontArray = onetone_options_typography_get_google_fonts() ;
$googleFontFamilyArray = array() ;
foreach($googleFontArray['onetone_of_family'] as $k => $v){
$googleFontFamilyArray[] = $k;
}
if( in_array( $font , $googleFontFamilyArray ) ){
$font = explode(',', $font);
$font = $font[0];
$_font_add_string = '';
$_family = strtolower( str_replace(' ','_',$font) );
if(count( $googleFontArray[$_family]['variants'] )) $_font_add_string = ":". implode(',', $googleFontArray[$_family]['variants']);
$font = str_replace(" ", "+", $font);
wp_enqueue_style( "onetone-typography-$_family", esc_url("//fonts.googleapis.com/css?family=$font" . $_font_add_string), false, null, 'all' );
}
}
function onetone_get_typography( $option= array() ){
$return = "";
if( $option && is_array($option) ){
if( !empty($option['face']) ){
$return .= 'font-family:'.$option['face'].';' ;
onetone_options_typography_enqueue_google_font($option['face']);
}
if( isset($option['size']) && !is_array($option['size']) )
$return .= 'font-size:'.$option['size'].';' ;
if( !empty($option['style']) )
$return .= 'font-weight:'.$option['style'].';' ;
if( !empty($option['color']) )
$return .= 'color:'.$option['color'].';' ;
}
return $return ;
}
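/*
 * Illustrative example:
 *   onetone_get_typography( array( 'face' => 'Arial, sans-serif', 'size' => '14px', 'style' => '700', 'color' => '#333333' ) )
 *   => 'font-family:Arial, sans-serif;font-size:14px;font-weight:700;color:#333333;'
 * If the face matches a Google font it is also enqueued as a side effect.
 */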
function onetone_of_recognized_font_styles() {
$default = array(
'normal' => 'normal',
'italic' => 'italic',
'bold' => 'bold',
'bold italic' => 'bold italic',
'100' => '100',
'200' => '200',
'300' => '300',
'400' => '400',
'500' => '500',
'600' => '600',
'700' => '700',
'800' => '800',
'900' => '900'
);
return apply_filters( 'onetone_of_recognized_font_styles', $default );
}
add_filter( 'of_recognized_font_styles', 'onetone_of_recognized_font_styles' );
//###################################
function onetone_admin_notice(){
if ( class_exists('Magee_Core') ) {
echo '<div class="updated"><p>After upgrading Onetone from the free version to Onetone Pro, please make sure the free <strong>Magee Shortcodes</strong> plugin is deactivated under Plugins &rarr; Installed Plugins. Onetone Pro ships with <strong>Magee Shortcodes</strong> Pro built in, so the free version is no longer needed.
</p></div>';
}
}
//add_action('admin_notices', 'onetone_admin_notice');
// footer tracking code
function onetone_tracking_code(){
$tracking_code = onetone_option('tracking_code');
echo $tracking_code;
}
add_action('wp_footer', 'onetone_tracking_code');
// Space before </head>
function onetone_space_before_head(){
$space_before_head = onetone_option('space_before_head');
echo $space_before_head;
}
add_action('wp_head', 'onetone_space_before_head');
// Space before </body>
function onetone_space_before_body(){
$space_before_body = onetone_option('space_before_body');
echo $space_before_body;
}
add_action('wp_footer', 'onetone_space_before_body');
add_action('init', 'onetone_html_tags_code', 10);
function onetone_html_tags_code() {
global $allowedposttags;
$allowedposttags["javascript"] = array("src" => array(),"type" => array());
$allowedposttags["style"] = array("type" => array());
$allowedposttags["link"] = array("rel" => array(),"href" => array(),"id" => array(),"type" => array(),"media" => array());
}
// get summary
function onetone_get_summary(){
$excerpt_or_content = onetone_option('excerpt_or_content','excerpt');
$excerpt_length = onetone_option('excerpt_length','55');
if( $excerpt_or_content == 'full_content' ){
$output = get_the_content();
}
else{
$output = get_the_excerpt();
if( is_numeric($excerpt_length) && $excerpt_length !=0 )
$output = onetone_content_length($output, $excerpt_length );
}
return $output;
}
function onetone_content_length($content, $limit) {
$excerpt = explode(' ', $content, $limit);
if (count($excerpt)>=$limit) {
array_pop($excerpt);
$excerpt = implode(" ",$excerpt).'...';
} else {
$excerpt = implode(" ",$excerpt);
}
$excerpt = preg_replace('`\[[^\]]*\]`','',$excerpt);
return $excerpt;
}
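// Illustrative example: onetone_content_length( $text, 20 ) returns roughly the first
// 19 words of $text followed by '...'; shortcode-like [tags] are stripped from the result.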
if ( ! function_exists( 'onetone_posted_on' ) ) :
/**
* Prints HTML with meta information for the current post-date/time and author.
*/
function onetone_posted_on() {
$return = '';
$display_post_meta = onetone_option('display_post_meta','yes');
if( $display_post_meta == 'yes' ){
$display_meta_author = onetone_option('display_meta_author','yes');
$display_meta_date = onetone_option('display_meta_date','yes');
$display_meta_categories = onetone_option('display_meta_categories','yes');
$display_meta_comments = onetone_option('display_meta_comments','yes');
$display_meta_readmore = onetone_option('display_meta_readmore','yes');
$display_meta_tags = onetone_option('display_meta_tags','yes');
$date_format = onetone_option('date_format','M d, Y');
$return .= '<ul class="entry-meta">';
if( $display_meta_date == 'yes' )
$return .= '<li class="entry-date"><i class="fa fa-calendar"></i>'. get_the_date( $date_format ).'</li>';
if( $display_meta_author == 'yes' )
$return .= '<li class="entry-author"><i class="fa fa-user"></i>'.get_the_author_link().'</li>';
if( $display_meta_categories == 'yes' )
$return .= '<li class="entry-catagory"><i class="fa fa-file-o"></i>'.get_the_category_list(', ').'</li>';
if( $display_meta_comments == 'yes' )
$return .= '<li class="entry-comments pull-right">'.onetone_get_comments_popup_link('', __( '<i class="fa fa-comment"></i> 1 ', 'onetone'), __( '<i class="fa fa-comment"></i> % ', 'onetone'), 'read-comments', '').'</li>';
$return .= '</ul>';
}
echo $return;
}
endif;
/**
* Modifies WordPress's built-in comments_popup_link() function to return a string instead of echo comment results
*/
function onetone_get_comments_popup_link( $zero = false, $one = false, $more = false, $css_class = '', $none = false ) {
global $wpcommentspopupfile, $wpcommentsjavascript;
$id = get_the_ID();
if ( false === $zero ) $zero = __( 'No Comments', 'onetone');
if ( false === $one ) $one = __( '1 Comment', 'onetone');
if ( false === $more ) $more = __( '% Comments', 'onetone');
if ( false === $none ) $none = __( 'Comments Off', 'onetone');
$number = get_comments_number( $id );
$str = '';
if ( 0 == $number && !comments_open() && !pings_open() ) {
$str = '<span' . ((!empty($css_class)) ? ' class="' . esc_attr( $css_class ) . '"' : '') . '>' . $none . '</span>';
return $str;
}
if ( post_password_required() ) {
return '';
}
$str = '<a href="';
if ( $wpcommentsjavascript ) {
if ( empty( $wpcommentspopupfile ) )
$home = home_url();
else
$home = get_option('siteurl');
$str .= $home . '/' . $wpcommentspopupfile . '?comments_popup=' . $id;
$str .= '" onclick="wpopen(this.href); return false"';
} else { // if comments_popup_script() is not in the template, display simple comment link
if ( 0 == $number )
$str .= get_permalink() . '#respond';
else
$str .= get_comments_link();
$str .= '"';
}
if ( !empty( $css_class ) ) {
$str .= ' class="'.$css_class.'" ';
}
$title = the_title_attribute( array('echo' => 0 ) );
$str .= apply_filters( 'comments_popup_link_attributes', '' );
$str .= ' title="' . esc_attr( sprintf( __('Comment on %s', 'onetone'), $title ) ) . '">';
$str .= onetone_get_comments_number_str( $zero, $one, $more );
$str .= '</a>';
return $str;
}
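// Illustrative usage (inside the loop):
// echo onetone_get_comments_popup_link( __( 'No Comments', 'onetone'), __( '1 Comment', 'onetone'), __( '% Comments', 'onetone') );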
/**
* Modifies WordPress's built-in comments_number() function to return string instead of echo
*/
function onetone_get_comments_number_str( $zero = false, $one = false, $more = false, $deprecated = '' ) {
if ( !empty( $deprecated ) )
_deprecated_argument( __FUNCTION__, '1.3' );
$number = get_comments_number();
if ( $number > 1 )
$output = str_replace('%', number_format_i18n($number), ( false === $more ) ? __('% Comments', 'onetone') : $more);
elseif ( $number == 0 )
$output = ( false === $zero ) ? __('No Comments', 'onetone') : $zero;
else // must be one
$output = ( false === $one ) ? __('1 Comment', 'onetone') : $one;
return apply_filters('comments_number', $output, $number);
}
function onetone_array_sort($array,$keys,$type='asc'){
if(!isset($array) || !is_array($array) || empty($array)){
return '';
}
if(!isset($keys) || trim($keys)==''){
return '';
}
if(!isset($type) || $type=='' || !in_array(strtolower($type),array('asc','desc'))){
return '';
}
$keysvalue=array();
foreach($array as $key=>$val){
$val[$keys] = str_replace( array( '-', ' ', ':' ), '', $val[$keys] );
$keysvalue[] = $val[$keys];
}
asort($keysvalue);
reset($keysvalue);
$keysort = array();
foreach($keysvalue as $key=>$vals) {
$keysort[] = $key;
}
$keysvalue = array();
$count=count($keysort);
if(strtolower($type) != 'asc'){
for($i=$count-1; $i>=0; $i--) {
$keysvalue[] = $array[$keysort[$i]];
}
}else{
for($i=0; $i<$count; $i++){
if(isset($array[$keysort[$i]]))
$keysvalue[] = $array[$keysort[$i]];
}
}
return $keysvalue;
}
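/*
 * Illustrative usage: onetone_array_sort( $items, 'date', 'desc' );
 * Sorts an array of associative arrays by the given key (dashes, spaces and colons
 * are stripped from the key's value before comparison) and returns the re-ordered
 * array, or '' on invalid input.
 */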
/**
* Change the Shop archive page title.
* @param string $title
* @return string
*/
function onetone_custom_shop_archive_title( $title ) {
if ( function_exists('is_shop') && function_exists('woocommerce_page_title') && is_shop() ) {
return str_replace( __( 'Products', 'onetone' ), woocommerce_page_title(false), $title );
}
return $title;
}
add_filter( 'wp_title', 'onetone_custom_shop_archive_title' );
function onetone_is_plugin_active( $plugin ) {
return in_array( $plugin, (array) get_option( 'active_plugins', array() ) );
}
// Onetone guide tips
/* $option_name = optionsframework_option_name();
$options_saved = false;
if ( get_option($option_name) ) {
$options_saved = true;
}
if( (!isset($_GET['page']) || ($_GET['page'] !='onetone-options' && $_GET['page'] !='import-demos' && $_GET['page'] !='onetone' ) ) && $options_saved == false )
add_action('admin_menu', 'onetone_guide_submenu_page');
function onetone_guide_submenu_page() {
// add_theme_page(__('Import Onetone Demos', 'onetone' ),__('Import Onetone Demos', 'onetone' ), 'edit_theme_options', 'import-demos', 'onetone_import_demos');
add_theme_page( __('Onetone step 2', 'onetone' ), '<div class="onetone-step-2-text"><h2>'.__('Customize Content for Homepage', 'onetone' ).'</h2>
<p>'.__('Open this page to edit content for homepage and customize styles of the site.', 'onetone' ).'</p><div id="onetone-step-1-text" style=" display:none;"><div class="onetone-step-1"><div class="onetone-step-1-text"><h2>'.__('Customize Content for Homepage', 'onetone' ).'</h2><p>'.__('Open this page to edit content for homepage and customize styles of the site.', 'onetone' ).'</p></div></div></div></div>', 'edit_theme_options', 'themes.php?page=onetone-options', '' );
}
*/
if( isset($_GET['page']) && $_GET['page'] =='onetone-options' )
add_action('admin_footer', 'onetone_admin_footer_function');
function onetone_admin_footer_function() {
echo '<div class="onetone-admin-footer" style="height: 60px; display:none;background: rgba(0,0,0,0.5); position:fixed;bottom:0px;padding:15px 0;z-index:99;cursor: pointer;width: 100%;"><span style=" margin-left:180px; font-size:16px;color:#fff;">'.__('To save any changes on onetone options, click on the <strong style="color:orange;">Save All Options</strong> button on the right side.', 'onetone' ).'</span><button class="button-primary" id="onetone-save-options" style=" float:right;margin-right:50px;">'.__('Save All Options', 'onetone' ).'</button></div>
<div class="options-saved"><i class="fa fa-check"></i>'.__('Options Updated', 'onetone' ).'</div>
<div class="options-saving"><i class="fa fa-spinner fa-spin"></i>'.__('Options Saving', 'onetone' ).'</div>
';
}
function onetone_tinymce_init() {
// Hook to tinymce plugins filter
add_filter( 'mce_external_plugins', 'onetone_tinymce_plugin' );
}
add_action('init', 'onetone_tinymce_init');
function onetone_tinymce_plugin($init) {
// Register the keyup_event TinyMCE plugin and point it to the theme's JS file
// so the editor content can be synced back to the textarea on every keyup.
$init['keyup_event'] = get_template_directory_uri() . '/js/keyup_event.js';
return $init;
}
add_filter( 'wp_kses_allowed_html', 'onetone_allowedposttags_filter',1,1 );
function onetone_allowedposttags_filter( $allowedposttags ) {
$allowedposttags['i'] = array ( 'class' => 1,'style' => 1);
$allowedposttags['input'] = array ( 'class' => 1, 'id'=> 1, 'style' => 1, 'type' => 1, 'name'=>1,'value' => 1 ,'placeholder'=> 1,'size'=> 1,'tabindex'=> 1,'aria-required'=> 1);
$allowedposttags['iframe'] = array(
'align' => true,
'width' => true,
'height' => true,
'frameborder' => true,
'name' => true,
'src' => true,
'id' => true,
'class' => true,
'style' => true,
'scrolling' => true,
'marginwidth' => true,
'marginheight' => true,
);
return $allowedposttags;
}
<file_sep>/js/keyup_event.js
jQuery(document).ready(function($) {
// Create 'keyup_event' tinymce plugin
tinymce.PluginManager.add('keyup_event', function(editor, url) {
// Create keyup event
editor.on('keyup', function(e) {
// Get the editor content (html)
var get_ed_content = editor.getContent();
// The editor id matches the id of the underlying textarea
var id = editor.id;
// Sync the content back to the textarea (run do_stuff_here() function)
do_stuff_here(id,get_ed_content);
});
});
// This function allows the script to run from both locations (visual and text)
function do_stuff_here(id,content) {
// Now, you can further process the data in a single function
$('#'+id).val(content);
}
});<file_sep>/pageprofileempresa.php
<?php /* Template Name: Empresa Template Profile */
get_header('pagedefault');
?>
<link href='https://fonts.googleapis.com/css?family=Droid+Serif|Open+Sans:400,700' rel='stylesheet' type='text/css'>
<?php
echo '<link rel="stylesheet" href="'.get_template_directory_uri().'/css/vertical-timeline-style.css">';
echo '<script src="'.get_template_directory_uri().'/js/vertical-timeline-modernizr.js"></script>';
?>
<script type="text/javascript">
jQuery(document).ready(function () {
jQuery('#anchor_empresa').addClass('active');
jQuery('.expanded-view').hide();
jQuery('.interstitial').click(function (){
if (jQuery(this).hasClass('col-lg-6')) {
jQuery('.interstitial').addClass('col-lg-3');
jQuery('.interstitial').removeClass('col-lg-2');
jQuery('.interstitial').removeClass('col-lg-6');
jQuery('.expanded-view').hide();
jQuery(this).children('.chopped-view').show();
}else{
jQuery('.expanded-view').find('.text-overlay').hide();
jQuery('.interstitial').addClass('col-lg-2');
jQuery('.interstitial').removeClass('col-lg-3');
jQuery('.interstitial').removeClass('col-lg-6');
jQuery(this).addClass('col-lg-6');
jQuery(this).removeClass('col-lg-2');
jQuery('.chopped-view').show();
jQuery('.expanded-view').hide();
jQuery(this).children('.chopped-view').hide();
jQuery(this).children('.expanded-view').show('100',function (){
jQuery(this).find('.text-overlay').fadeIn( "slow");
});
}
});
});
</script>
<article id="post-<?php the_ID(); ?>" <?php post_class(); ?>>
<div class="entry-content">
<?php $url = wp_get_attachment_url(get_post_thumbnail_id(get_the_ID ()));
$bkg = ($url) ? 'style="background-image:url('.$url.')"' : '';
?>
<div class="header-page header-empresa" <?=$bkg?>>
<div><h1><?php echo the_title();?></h1></div>
</div>
<?php $post = get_post();
echo $post->post_content;
?>
</div>
<div class="container">
<div class="row">
<div class="col-md-12">
<section class="cd-horizontal-timeline">
<div class="timeline">
<div class="events-wrapper">
<div class="events">
<ol>
<?php // fetch the history entries (custom post type "histories")
query_posts(array(
'post_type' => 'histories',
'order'=>'ASC',
'orderby'=> 'title',
'posts_per_page' => -1
));
$i=0;
$content='';
while (have_posts()) {
the_post();
$title = get_the_title();
$i++;
$clase = ($i == 1) ? 'class="selected"' : '';
echo '<li><a href="#0" data-date="'.$title.'" '.$clase.'>'.substr($title, -4).'</a></li>';
$content.= '<li '.$clase.' data-date="'.$title.'">'.get_the_content().'</li>';
}
?>
</ol>
<span class="filling-line" aria-hidden="true"></span>
</div> <!-- .events -->
</div> <!-- .events-wrapper -->
<ul class="cd-timeline-navigation">
<li><a href="#0" class="prev inactive">Prev</a></li>
<li><a href="#0" class="next">Next</a></li>
</ul> <!-- .cd-timeline-navigation -->
</div> <!-- .timeline -->
<div class="events-content">
<ol>
<?=$content;?>
</ol>
</div> <!-- .events-content -->
</section>
<?php // fetch the history entries again for the responsive vertical timeline
query_posts(array(
'post_type' => 'histories',
'order'=>'ASC',
'orderby'=> 'title',
'posts_per_page' => -1
));
$i=0;
$content_2='';
while (have_posts()) {
the_post();
$title = get_the_title();
$i++;
$content_2.= ' <div class="cd-timeline-vertical-block">
<div class="cd-timeline-vertical-content">
<h3>'.substr($title, -4).'</h3>
<p>'.get_the_content().'</p>
</div> <!-- cd-timeline-content -->
</div> <!-- cd-timeline-vertical-block -->';
}
?>
<section id="cd-timeline-responsive" class="cd-container">
<?=$content_2;?>
</section> <!-- cd-timeline -->
</div>
</div>
</div>
</article>
<?php
echo '<script type="text/javascript" src="'.get_template_directory_uri().'/js/timeline.js"></script>';
echo '<script src="'.get_template_directory_uri().'/js/vertical-timeline-main.js"></script>';
get_footer();
?><file_sep>/pageprofilejobs.php
<?php /* Template Name: Profile: Jobs Template */
get_header();
global $page_meta;
$detect = new Mobile_Detect;
$enable_page_title_bar = onetone_option('enable_page_title_bar');
$page_title_bg_parallax = esc_attr(onetone_option('page_title_bg_parallax','no'));
$page_title_bg_parallax = $page_title_bg_parallax=="yes"?"parallax-scrolling":"";
$display_breadcrumb = esc_attr(onetone_option('display_breadcrumb','yes'));
$breadcrumbs_on_mobile = esc_attr(onetone_option('breadcrumbs_on_mobile_devices','yes'));
$breadcrumb_menu_prefix = esc_attr(onetone_option('breadcrumb_menu_prefix',''));
$breadcrumb_menu_separator = esc_attr(onetone_option('breadcrumb_menu_separator','/'));
$sidebar = isset($page_meta['page_layout'])?$page_meta['page_layout']:'none';
$left_sidebar = isset($page_meta['left_sidebar'])?$page_meta['left_sidebar']:'';
$right_sidebar = isset($page_meta['right_sidebar'])?$page_meta['right_sidebar']:'';
$full_width = isset($page_meta['full_width'])?$page_meta['full_width']:'no';
$display_breadcrumb = isset($page_meta['display_breadcrumb'])?$page_meta['display_breadcrumb']:$display_breadcrumb;
$display_title = isset($page_meta['display_title'])?$page_meta['display_title']:'yes';
$padding_top = isset($page_meta['padding_top'])?$page_meta['padding_top']:'';
$padding_bottom = isset($page_meta['padding_bottom'])?$page_meta['padding_bottom']:'';
$enable_page_title_bar = (isset($page_meta['display_title_bar']) && $page_meta['display_title_bar']!='')?$page_meta['display_title_bar']:$enable_page_title_bar;
if( $full_width == 'no' )
$container = 'container';
else
$container = 'container-fullwidth';
$aside = 'no-aside';
if( $sidebar =='left' )
$aside = 'left-aside';
if( $sidebar =='right' )
$aside = 'right-aside';
if( $sidebar =='both' )
$aside = 'both-aside';
$container_css = '';
if( $padding_top )
$container_css .= 'padding-top:'.$padding_top.';';
if( $padding_bottom )
$container_css .= 'padding-bottom:'.$padding_bottom.';';
?>
<article id="post-<?php the_ID(); ?>" <?php post_class(); ?> role="article">
<?php if ( has_post_thumbnail() ): ?>
<div class="feature-img-box">
<div class="img-box">
<?php the_post_thumbnail();?>
</div>
</div>
<?php endif;?>
<?php if( $enable_page_title_bar == 'yes' ):?>
<section class="page-title-bar title-left no-subtitle" style="">
<div class="container">
<div class="page-title">
<h1 class="text-center"><?php the_title();?></h1>
</div>
<?php if( $display_breadcrumb == 'yes' && !$detect->isMobile() ):?>
<?php onetone_get_breadcrumb(array("before"=>"<div class=''>".$breadcrumb_menu_prefix,"after"=>"</div>","show_browse"=>false,"separator"=>$breadcrumb_menu_separator,'container'=>'div'));?>
<?php endif;?>
<?php if( $breadcrumbs_on_mobile == 'yes' && $detect->isMobile()):?>
<?php onetone_get_breadcrumb(array("before"=>"<div class=''>".$breadcrumb_menu_prefix,"after"=>"</div>","show_browse"=>false,"separator"=>$breadcrumb_menu_separator,'container'=>'div'));?>
<?php endif;?>
<div class="clearfix"></div>
</div>
</section>
<?php endif;?>
<div class="post-wrap">
<div class="<?php echo $container;?>">
<div class="post-inner row <?php echo $aside; ?>" style=" <?php echo $container_css;?>">
<div class="col-main">
<section class="post-main" role="main" id="content">
<?php while ( have_posts() ) : the_post(); ?>
<article class="post type-post" id="" role="article">
<div class="entry-main">
<div class="entry-content">
<?php the_content();?>
<?php
wp_link_pages( array( 'before' => '<div class="page-links"><span class="page-links-title">' . __( 'Pages:', 'onetone' ) . '</span>', 'after' => '</div>', 'link_before' => '<span>', 'link_after' => '</span>' ) );
?>
<section class="job-container pvm">
<div class="container">
<h1 class="content-title"><?php _e("[:es]Ofertas de trabajo[:en]Job offers"); ?></h1>
<div class="row mbs">
<h2 class="title-tagline col-sm-6 col-xs-12"><?php _e("[:es]Éstas son nuestras vacantes actuales[:en]These are our current vacancies."); ?></h2>
<div id="location-job-filter" class="input-group pull-right">
<div class="input-group-addon"><i class="fa fa-map-marker"></i></div>
<select class="form-control">
<option value="">Filtra por localización</option>
<option value="barcelona">Barcelona</option>
<option value="madrid">Madrid</option>
<option value="sevilla">Sevilla</option>
</select>
</div>
</div>
<?php
function CallAPI($url) {
$curl = curl_init();
$client_id = 'ead3a0c7c59f4d1d81143fa9d2277147';
$client_secret = '<KEY>';
curl_setopt($curl, CURLOPT_HTTPAUTH, CURLAUTH_BASIC);
curl_setopt($curl, CURLOPT_USERPWD, $client_id . ':' . $client_secret);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_RETURNTRANSFER, 1);
$result = curl_exec($curl);
curl_close($curl);
return $result;
}
$array = json_decode(CallAPI('https://api.infojobs.net/api/1/offer?employerId=9664852452652250253012575553'))->offers;
//print_r($array);
//print_r($array[0]);
$urls = array();
for($i=0; $i<count($array); $i++){
array_push($urls, 'https://api.infojobs.net/api/1/offer/' . $array[$i]->id);
}
$url_count = count($urls);
$curl_array = array();
$ch = curl_multi_init();
foreach($urls as $count => $url) {
$curl_array[$count] = curl_init($url);
$client_id = 'ead3a0c7c59f4d1d81143fa9d2277147';
$client_secret = '<KEY>';
curl_setopt($curl_array[$count], CURLOPT_HTTPAUTH, CURLAUTH_BASIC);
curl_setopt($curl_array[$count], CURLOPT_USERPWD, $client_id . ':' . $client_secret);
curl_setopt($curl_array[$count], CURLOPT_RETURNTRANSFER, 1);
curl_multi_add_handle($ch, $curl_array[$count]);
}
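// Simple busy-wait: keep calling curl_multi_exec() until every queued request has finished.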
do {
curl_multi_exec($ch, $exec);
} while($exec > 0);
/* Dynamic select with job´s provinces
echo '<select class="country">
<option value="">Filtra por porvincia</option>';
foreach($urls as $count => $url) {
$returned = json_decode(curl_multi_getcontent($curl_array[$count]));
$provinceArray = array();
$provinceValue = $returned->province->value;
if (isset($provinceArray[$provinceValue])) {echo $provinceValue;
} else {
echo '<option value="'.strtolower($returned->province->value).'">'.$returned->province->value.'</option>';
};
}
echo '</select>';
*/
echo '<div class="offers-list grid-divider">';
foreach($urls as $count => $url) {
if( $count % 2 == 0) {
$jobPos = 'item-left';
} else {
$jobPos = 'item-right';
}
$returned = json_decode(curl_multi_getcontent($curl_array[$count]));
echo '<a href="' . $returned->link . '" target="_blank" id="job-offer-'.$count.'" class="job-item col-sm-6 col-xs-12 ' . strtolower($returned->province->value) .' '.$jobPos.'">
<div class="col-padding">
<div class="job-location"><i class="fa fa-map-marker"></i>' . $returned->city . ' (' . $returned->province->value . ')</div>
<h3>' . $returned->title . '</h3>
<span class="job-extra"> Jornada ' . $returned->journey->value;
if ($returned->contractType->value == "(Seleccionar)") { // if no contract type has been set in InfoJobs
echo '</span>';
}
else{
echo ' / Contrato ' . $returned->contractType->value . '</span>';
}
echo '<p class="job-description">' . preg_replace('/[\x00-\x08\x10\x0B\x0C\x0E-\x19\x7F]'.
'|[\x00-\x7F][\x80-\xBF]+'.
'|([\xC0\xC1]|[\xF0-\xFF])[\x80-\xBF]*'.
'|[\xC2-\xDF]((?![\x80-\xBF])|[\x80-\xBF]{2,})'.
'|[\xE0-\xEF](([\x80-\xBF](?![\x80-\xBF]))|(?![\x80-\xBF]{2})|[\x80-\xBF]{3,})/S',
'?', substr($returned->description, 0, 155)) . '...</p>'.
'<span class="btn btn-ghost">';
_e('[:es]Más información[:en]More info');
echo '</span>';
echo '</div>';
echo '</a>';
if( ($count+1) % 2 == 0) {
echo '<p class="clear"></p>';
}
}
echo '</div>';
?>
</div>
</section>
<section id="job-form" class="bg-light-grey pvm brd2">
<div class="container">
<?php echo do_shortcode('[contact-form-7 id="203" title="Job form general"]') ?>
</div>
</section>
</div>
</div>
</div>
</article>
<div class="post-attributes">
<!--Comments Area-->
<div class="comments-area text-left">
<?php
// If comments are open or we have at least one comment, load up the comment template
if ( comments_open() ) :
comments_template();
endif;
?>
</div>
<!--Comments End-->
</div>
<?php endwhile; // end of the loop. ?>
</section>
</div>
</div>
</div>
</div>
</article>
<?php get_footer(); ?><file_sep>/lib/importer/importer.php
<?php
defined( 'ABSPATH' ) or die( 'You cannot access this script directly' );
// Don't resize images
function onetone_filter_image_sizes( $sizes ) {
return array();
}
// Hook importer into admin init
add_action( 'wp_ajax_magee_import_demo_data', 'magee_importer' );
function magee_importer() {
global $wpdb;
if ( current_user_can( 'manage_options' ) ) {
if ( !defined('WP_LOAD_IMPORTERS') ) define('WP_LOAD_IMPORTERS', true); // we are loading importers
if ( ! class_exists( 'WP_Importer' ) ) { // if main importer class doesn't exist
$wp_importer = ABSPATH . 'wp-admin/includes/class-wp-importer.php';
include $wp_importer;
}
if ( ! class_exists('WP_Import') ) { // if WP importer doesn't exist
$wp_import = get_template_directory() . '/lib/importer/wordpress-importer.php';
include $wp_import;
}
if ( class_exists( 'WP_Importer' ) && class_exists( 'WP_Import' ) ) { // check for main import class and wp import class
if( ! isset($_POST['demo_type']) || trim($_POST['demo_type']) == '' ) {
$demo_type = 'classic';
} else {
$demo_type = $_POST['demo_type'];
}
switch($demo_type) {
case 'resume':
$shop_demo = false;
$woo_xml = get_template_directory() . '/lib/importer/demos/resume/onetone-pro-resume.xml';
$theme_xml_file = get_template_directory() . '/lib/importer/demos/resume/onetone-pro-resume.xml';
$theme_options_file = get_template_directory() . '/lib/importer/demos/resume/theme_options_resume.txt';
// Sidebar Widgets File
$widgets_file = get_template_directory() . '/lib/importer/demos/resume/widget_data_resume.json';
$layerslider_exists = false;
$layer_directory = get_template_directory() . '/lib/importer/demos/resume/layersliders/';
$revslider_exists = false;
$rev_directory = get_template_directory() . '/lib/importer/demos/resume/revsliders/';
// reading settings
$homepage_title = 'Homepage';
$posts_page_title='Blog';
break;
case 'app':
$shop_demo = false;
$woo_xml = get_template_directory() . '/lib/importer/demos/app/onetone-pro-app.xml';
$theme_xml_file = get_template_directory() . '/lib/importer/demos/app/onetone-pro-app.xml';
$theme_options_file = get_template_directory() . '/lib/importer/demos/app/theme_options_app.txt';
// Sidebar Widgets File
$widgets_file = get_template_directory() . '/lib/importer/demos/app/widget_data_app.json';
$layerslider_exists = false;
$layer_directory = get_template_directory() . '/lib/importer/demos/app/layersliders/';
$revslider_exists = false;
$rev_directory = get_template_directory() . '/lib/importer/demos/app/revsliders/';
// reading settings
$homepage_title = 'Homepage';
$posts_page_title='Blog';
break;
case 'fashion':
$shop_demo = false;
$woo_xml = get_template_directory() . '/lib/importer/demos/fashion/onetone-pro-fashion.xml';
$theme_xml_file = get_template_directory() . '/lib/importer/demos/fashion/onetone-pro-fashion.xml';
$theme_options_file = get_template_directory() . '/lib/importer/demos/fashion/theme_options_fashion.txt';
// Sidebar Widgets File
$widgets_file = get_template_directory() . '/lib/importer/demos/fashion/widget_data_fashion.json';
$layerslider_exists = false;
$layer_directory = get_template_directory() . '/lib/importer/demos/fashion/layersliders/';
$revslider_exists = false;
$rev_directory = get_template_directory() . '/lib/importer/demos/fashion/revsliders/';
// reading settings
$homepage_title = 'Homepage';
$posts_page_title='Blog';
break;
case 'cafe':
$shop_demo = false;
$woo_xml = get_template_directory() . '/lib/importer/demos/cafe/onetone-pro-cafe.xml';
$theme_xml_file = get_template_directory() . '/lib/importer/demos/cafe/onetone-pro-cafe.xml';
$theme_options_file = get_template_directory() . '/lib/importer/demos/cafe/theme_options_cafe.txt';
// Sidebar Widgets File
$widgets_file = get_template_directory() . '/lib/importer/demos/cafe/widget_data_cafe.json';
$layerslider_exists = false;
$layer_directory = get_template_directory() . '/lib/importer/demos/cafe/layersliders/';
$revslider_exists = false;
$rev_directory = get_template_directory() . '/lib/importer/demos/cafe/revsliders/';
// reading settings
$homepage_title = 'Homepage';
$posts_page_title='Blog';
break;
case 'classic':
default:
$shop_demo = true;
$woo_xml = get_template_directory() . '/lib/importer/demos/classic/onetone-pro-classic.xml';
$theme_xml_file = get_template_directory() . '/lib/importer/demos/classic/onetone-pro-classic.xml';
$theme_options_file = get_template_directory() . '/lib/importer/demos/classic/theme_options_classic.txt';
// Sidebar Widgets File
$widgets_file = get_template_directory() . '/lib/importer/demos/classic/widget_data_classic.json';
$layerslider_exists = true;
$layer_directory = get_template_directory() . '/lib/importer/demos/classic/layersliders/';
$revslider_exists = true;
$rev_directory = get_template_directory() . '/lib/importer/demos/classic/revsliders/';
// reading settings
$homepage_title = 'Homepage';
$posts_page_title='Blog';
break;
}
// Import Theme Options
$theme_options_txt = $theme_options_file; // theme options data file
//if( file_exists( $theme_options_txt ) ){
$theme_options_txt = file_get_contents( $theme_options_txt );
$options_data = unserialize( base64_decode( $theme_options_txt ) );
$table_name = $wpdb->prefix . "options";
$onetone_options = $wpdb->get_results( "SELECT * FROM $table_name WHERE option_name = 'onetone_pro'" );
if(!empty($onetone_options)):
$wpdb->update(
$table_name,
array(
'option_value' => base64_decode( $theme_options_txt ) ,
),
array( 'option_name' => 'onetone_pro' ),
array(
'%s',
),
array( '%s' )
);
else:
$wpdb->insert(
$table_name,
array(
'option_name' => 'onetone_pro',
'option_value' => base64_decode( $theme_options_txt ),
'autoload' => 'no'
),
array(
'%s',
'%s',
'%s',
)
);
endif;
add_filter('intermediate_image_sizes_advanced', 'onetone_filter_image_sizes');
/* Import Woocommerce if WooCommerce Exists */
if( class_exists('WooCommerce') && $shop_demo == true ) {
$importer = new WP_Import();
$theme_xml = $woo_xml;
$importer->fetch_attachments = true;
ob_start();
$importer->import($theme_xml);
ob_end_clean();
// Set pages
$woopages = array(
'woocommerce_shop_page_id' => 'Shop',
'woocommerce_cart_page_id' => 'Cart',
'woocommerce_checkout_page_id' => 'Checkout',
'woocommerce_pay_page_id' => 'Checkout → Pay',
'woocommerce_thanks_page_id' => 'Order Received',
'woocommerce_myaccount_page_id' => 'My Account',
'woocommerce_edit_address_page_id' => 'Edit My Address',
'woocommerce_view_order_page_id' => 'View Order',
'woocommerce_change_password_page_id' => 'Change Password',
'woocommerce_logout_page_id' => 'Logout',
'woocommerce_lost_password_page_id' => 'Lost Password'
);
foreach($woopages as $woo_page_name => $woo_page_title) {
$woopage = get_page_by_title( $woo_page_title );
if(isset( $woopage ) && $woopage->ID) {
update_option($woo_page_name, $woopage->ID); // Front Page
}
}
// We no longer need to install pages
delete_option( '_wc_needs_pages' );
delete_transient( '_wc_activation_redirect' );
// Flush rules after install
flush_rewrite_rules();
} else {
$importer = new WP_Import();
/* Import Posts, Pages, Portfolio Content, FAQ, Images, Menus */
$theme_xml = $theme_xml_file;
$importer->fetch_attachments = true;
ob_start();
$importer->import($theme_xml);
ob_end_clean();
flush_rewrite_rules();
}
// Set imported menus to registered theme locations
$locations = get_theme_mod( 'nav_menu_locations' ); // registered menu locations in theme
$menus = wp_get_nav_menus(); // registered menus
if($menus) {
if( $demo_type == 'classic' ) {
$opmenu = get_page_by_title( 'Homepage - One page' );
}
foreach($menus as $menu) { // assign menus to theme locations
if( $demo_type == 'classic' ) {
if( $menu->name == 'All Pages' ) {
$locations['primary'] = $menu->term_id;
} else if( $menu->name == 'home' ) {
$locations['home_menu'] = $menu->term_id;
}
// Assign One Page Menu
/* if(isset( $opmenu ) && $opmenu->ID && $menu->name == 'home_menu') {
update_post_meta($opmenu->ID, 'nav_menu', $menu->term_id);
}*/
}
if( $demo_type == 'resume' ){
if( $menu->name == 'All Pages' ) {
$locations['primary'] = $menu->term_id;
}
}
}
}
}
set_theme_mod( 'nav_menu_locations', $locations ); // set menus to locations
// Add data to widgets
if( isset( $widgets_file ) && $widgets_file ) {
$widgets_json = $widgets_file; // widgets data file
$widgets_json = file_get_contents( $widgets_json );
$widget_data = $widgets_json;
$import_widgets = magee_import_widget_data( $widget_data );
}
// Import Layerslider
if( function_exists( 'layerslider_import_sample_slider' ) && $layerslider_exists == true ) { // if layerslider is activated
// Get importUtil
include WP_PLUGIN_DIR . '/LayerSlider/classes/class.ls.importutil.php';
$layer_files = magee_get_import_files( $layer_directory, 'zip' );
foreach( $layer_files as $layer_file ) { // finally import layer slider
$import = new LS_ImportUtil($layer_file);
}
// Get all sliders
// Table name
$table_name = $wpdb->prefix . "layerslider";
// Get sliders
$sliders = $wpdb->get_results( "SELECT * FROM $table_name
WHERE flag_hidden = '0' AND flag_deleted = '0'
ORDER BY date_c ASC" );
if(!empty($sliders)):
foreach($sliders as $key => $item):
$slides[$item->id] = $item->name;
endforeach;
endif;
if($slides){
foreach($slides as $key => $val){
$slides_array[$val] = $key;
}
}
// Assign LayerSlider
/*if( $demo_type == 'classic' ) {
$lspage = get_page_by_title( 'Layer Slider' );
if(isset( $lspage ) && $lspage->ID && $slides_array['Avada Full Width']) {
update_post_meta($lspage->ID, 'pyre_slider', $slides_array['Avada Full Width']);
}
}*/
}
// Import Revslider
if( class_exists('UniteFunctionsRev') && $revslider_exists == true ) { // if revslider is activated
$rev_files = magee_get_import_files( $rev_directory, 'zip' );
$slider = new RevSlider();
foreach( $rev_files as $rev_file ) { // finally import rev slider data files
$filepath = $rev_file;
$_FILES["import_file"]["tmp_name"] = $filepath;
ob_start();
$slider->importSliderFromPost(true, false, $filepath);
ob_clean();
ob_end_clean();
}
}
// Set reading options
//$homepage = get_page_by_title( $homepage_title );
$posts_page = get_page_by_title( $posts_page_title );
if(isset( $posts_page ) && $posts_page->ID ) {
//update_option('show_on_front', 'page');
//update_option('page_on_front', $homepage->ID); // Front Page
update_option('page_for_posts', $posts_page->ID); // Blog Page
}
echo 'imported';
exit;
}
}
// Parsing Widgets Function
// Thanks to http://wordpress.org/plugins/widget-settings-importexport/
function magee_import_widget_data( $widget_data ) {
$json_data = $widget_data;
$json_data = json_decode( $json_data, true );
$sidebar_data = $json_data[0];
$widget_data = $json_data[1];
foreach ( $widget_data as $widget_data_title => $widget_data_value ) {
$widgets[ $widget_data_title ] = '';
foreach( $widget_data_value as $widget_data_key => $widget_data_array ) {
if( is_int( $widget_data_key ) ) {
$widgets[$widget_data_title][$widget_data_key] = 'on';
}
}
}
unset($widgets[""]);
foreach ( $sidebar_data as $title => $sidebar ) {
$count = count( $sidebar );
for ( $i = 0; $i < $count; $i++ ) {
$widget = array( );
$widget['type'] = trim( substr( $sidebar[$i], 0, strrpos( $sidebar[$i], '-' ) ) );
$widget['type-index'] = trim( substr( $sidebar[$i], strrpos( $sidebar[$i], '-' ) + 1 ) );
if ( !isset( $widgets[$widget['type']][$widget['type-index']] ) ) {
unset( $sidebar_data[$title][$i] );
}
}
$sidebar_data[$title] = array_values( $sidebar_data[$title] );
}
foreach ( $widgets as $widget_title => $widget_value ) {
foreach ( $widget_value as $widget_key => $widget_value ) {
$widgets[$widget_title][$widget_key] = $widget_data[$widget_title][$widget_key];
}
}
$sidebar_data = array( array_filter( $sidebar_data ), $widgets );
magee_parse_import_data( $sidebar_data );
}
function magee_parse_import_data( $import_array ) {
global $wp_registered_sidebars;
$sidebars_data = $import_array[0];
$widget_data = $import_array[1];
$current_sidebars = get_option( 'sidebars_widgets' );
$new_widgets = array( );
foreach ( $sidebars_data as $import_sidebar => $import_widgets ) :
foreach ( $import_widgets as $import_widget ) :
//if the sidebar exists
if ( isset( $wp_registered_sidebars[$import_sidebar] ) ) :
$title = trim( substr( $import_widget, 0, strrpos( $import_widget, '-' ) ) );
$index = trim( substr( $import_widget, strrpos( $import_widget, '-' ) + 1 ) );
$current_widget_data = get_option( 'widget_' . $title );
$new_widget_name = magee_get_new_widget_name( $title, $index );
$new_index = trim( substr( $new_widget_name, strrpos( $new_widget_name, '-' ) + 1 ) );
if ( !empty( $new_widgets[ $title ] ) && is_array( $new_widgets[$title] ) ) {
while ( array_key_exists( $new_index, $new_widgets[$title] ) ) {
$new_index++;
}
}
$current_sidebars[$import_sidebar][] = $title . '-' . $new_index;
if ( array_key_exists( $title, $new_widgets ) ) {
$new_widgets[$title][$new_index] = $widget_data[$title][$index];
$multiwidget = $new_widgets[$title]['_multiwidget'];
unset( $new_widgets[$title]['_multiwidget'] );
$new_widgets[$title]['_multiwidget'] = $multiwidget;
} else {
$current_widget_data[$new_index] = $widget_data[$title][$index];
$current_multiwidget = isset($current_widget_data['_multiwidget']) ? $current_widget_data['_multiwidget'] : false;
$new_multiwidget = isset($widget_data[$title]['_multiwidget']) ? $widget_data[$title]['_multiwidget'] : false;
$multiwidget = ($current_multiwidget != $new_multiwidget) ? $current_multiwidget : 1;
unset( $current_widget_data['_multiwidget'] );
$current_widget_data['_multiwidget'] = $multiwidget;
$new_widgets[$title] = $current_widget_data;
}
endif;
endforeach;
endforeach;
if ( isset( $new_widgets ) && isset( $current_sidebars ) ) {
update_option( 'sidebars_widgets', $current_sidebars );
foreach ( $new_widgets as $title => $content )
update_option( 'widget_' . $title, $content );
return true;
}
return false;
}
function magee_get_new_widget_name( $widget_name, $widget_index ) {
$current_sidebars = get_option( 'sidebars_widgets' );
$all_widget_array = array( );
foreach ( $current_sidebars as $sidebar => $widgets ) {
if ( !empty( $widgets ) && is_array( $widgets ) && $sidebar != 'wp_inactive_widgets' ) {
foreach ( $widgets as $widget ) {
$all_widget_array[] = $widget;
}
}
}
while ( in_array( $widget_name . '-' . $widget_index, $all_widget_array ) ) {
$widget_index++;
}
$new_widget_name = $widget_name . '-' . $widget_index;
return $new_widget_name;
}
if( function_exists( 'layerslider_import_sample_slider' ) ) {
function onetone_import_sample_slider( $layerslider_data ) {
// Base64 encoded, serialized slider export code
$sample_slider = $layerslider_data;
// Iterate over the sliders
foreach($sample_slider as $sliderkey => $slider) {
// Iterate over the layers
foreach($sample_slider[$sliderkey]['layers'] as $layerkey => $layer) {
// Change background images if any
if(!empty($sample_slider[$sliderkey]['layers'][$layerkey]['properties']['background'])) {
$sample_slider[$sliderkey]['layers'][$layerkey]['properties']['background'] = LS_ROOT_URL.'sampleslider/'.basename($layer['properties']['background']);
}
// Change thumbnail images if any
if(!empty($sample_slider[$sliderkey]['layers'][$layerkey]['properties']['thumbnail'])) {
$sample_slider[$sliderkey]['layers'][$layerkey]['properties']['thumbnail'] = LS_ROOT_URL.'sampleslider/'.basename($layer['properties']['thumbnail']);
}
// Iterate over the sublayers
if(isset($layer['sublayers']) && !empty($layer['sublayers'])) {
foreach($layer['sublayers'] as $sublayerkey => $sublayer) {
// Only IMG sublayers
if($sublayer['type'] == 'img') {
$sample_slider[$sliderkey]['layers'][$layerkey]['sublayers'][$sublayerkey]['image'] = LS_ROOT_URL.'sampleslider/'.basename($sublayer['image']);
}
}
}
}
}
// Get WPDB Object
global $wpdb;
// Table name
$table_name = $wpdb->prefix . "layerslider";
// Append duplicate
foreach($sample_slider as $key => $val) {
// Insert the duplicate
$wpdb->query(
$wpdb->prepare("INSERT INTO $table_name
(name, data, date_c, date_m)
VALUES (%s, %s, %d, %d)",
$val['properties']['title'],
json_encode($val),
time(),
time()
)
);
}
}
}
// Rename sidebar
function onetone_name_to_class($name){
$class = str_replace(array(' ',',','.','"',"'",'/',"\\",'+','=',')','(','*','&','^','%','$','#','@','!','~','`','<','>','?','[',']','{','}','|',':',),'',$name);
return $class;
}
/*
* Returns all files in directory with the given filetype. Uses glob() for older
* php versions and recursive directory iterator otherwise.
*
* @param string $directory Directory that should be parsed
* @param string $filetype The file type
*
* @return array $files File names that match the $filetype
*/
function magee_get_import_files( $directory, $filetype ) {
$phpversion = phpversion();
$files = array();
// Check if the php version allows for recursive iterators
if ( version_compare( $phpversion, '5.2.11', '>' ) ) {
if ( $filetype != '*' ) {
$filetype = '/^.*\.' . $filetype . '$/';
} else {
$filetype = '/.+\.[^.]+$/';
}
$directory_iterator = new RecursiveDirectoryIterator( $directory );
$recusive_iterator = new RecursiveIteratorIterator( $directory_iterator );
$regex_iterator = new RegexIterator( $recusive_iterator, $filetype );
foreach( $regex_iterator as $file ) {
$files[] = $file->getPathname();
}
// Fallback to glob() for older php versions
} else {
if ( $filetype != '*' ) {
$filetype = '*.' . $filetype;
}
foreach( glob( $directory . $filetype ) as $filename ) {
$filename = basename( $filename );
$files[] = $directory . $filename;
}
}
return $files;
}
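/*
 * Illustrative usage: magee_get_import_files( $rev_directory, 'zip' );
 * Returns the full paths of every .zip file found (recursively on PHP newer than
 * 5.2.11, otherwise top-level only via glob()).
 */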
// Omit closing PHP tag to avoid "Headers already sent" issues.
<file_sep>/content-portfolio.php
<?php
global $orientation, $overlay_content,$post;
if( !$orientation ) $orientation = 'top';
if( !$overlay_content ) $overlay_content = '1';
//overlay_content: 1 = link/zoom icons, 2 = title, 3 = title & tags, 4 = link icon (light overlay), 5 = link icon with zoom-in (primary overlay)
$taxonomy = 'portfolio_tags';
$tax_terms = wp_get_post_terms($post->ID,$taxonomy);
$tags = array();
if( $tax_terms ){
foreach ($tax_terms as $tax_term) {
$term_link = get_term_link( $tax_term );
// If there was an error, continue to the next term.
if ( is_wp_error( $term_link ) ) {
continue;
}
$tags[] = '<a href="' . esc_url( $term_link ) . '" title="' . sprintf( __( "View all posts in %s" ,'onetone'), $tax_term->name ) . '" ' . '>' . $tax_term->name.'</a>';
}
}
$tags_str = '';
if($tags)
$tags_str = implode(', ',$tags);
$action = 'from-'.$orientation;
if($overlay_content == '5')
$action = 'img-zoom-in';
?>
<article id="portfolio-<?php echo $post->ID;?>" class="portfolio-box" role="article">
<div class="feature-img-box">
<div class="img-box figcaption-middle text-center <?php echo $action;?> fade-in">
<?php if($overlay_content == '2' || $overlay_content == '4' || $overlay_content == '5'):?>
<a href="<?php the_permalink();?>">
<?php endif;?>
<?php
the_post_thumbnail("portfolio-thumb");
$image = wp_get_attachment_image_src( get_post_thumbnail_id( get_the_ID() ), 'large' );
$featured_image = $image[0];
?>
<div class="img-overlay <?php if( $overlay_content == '4' ){echo 'light';} if( $overlay_content == '5' ){ echo 'primary';}?>">
<div class="img-overlay-container">
<div class="img-overlay-content">
<?php if($overlay_content == '1'):?>
<div class="img-overlay-icons">
<a href="<?php the_permalink();?>"><i class="fa fa-link"></i></a>
<a rel="portfolio-image" href="<?php echo $featured_image;?>"><i class="fa fa-search"></i></a>
</div>
<?php endif;?>
<?php if($overlay_content == '2'):?>
<h3 class="img-overlay-title"><?php the_title();?></h3>
<?php endif;?>
<?php if($overlay_content == '3'):?>
<a href="<?php the_permalink();?>"><div class="img-overlay-total-link"></div></a>
<a href="<?php the_permalink();?>"><h3 class="img-overlay-title"><?php the_title();?></h3></a>
<div class="entry-category"><?php echo $tags_str;?></div>
<?php endif;?>
<?php if( $overlay_content == '4' || $overlay_content == '5' ):?>
<i class="fa fa-link"></i>
<?php endif;?>
</div>
</div>
</div>
<?php if( $overlay_content == '2' || $overlay_content == '4' || $overlay_content == '5' ):?>
</a>
<?php endif;?>
</div>
</div>
<div class="entry-main text-center">
<div class="entry-header"> <a href="<?php the_permalink();?>">
<h1 class="entry-title">
<?php the_title();?>
</h1>
</a> </div>
<div class="entry-meta">
<div class="entry-category">
<?php echo $tags_str; ?>
</div>
</div>
</div>
</article>
<file_sep>/sidebar-woo_products_left.php
<?php
$left_sidebar = esc_attr(onetone_option('left_sidebar_woo_products',''));
if ( $left_sidebar && is_active_sidebar( $left_sidebar ) ){
dynamic_sidebar( $left_sidebar );
}
elseif( is_active_sidebar( 'default_sidebar' ) ) {
dynamic_sidebar('default_sidebar');
}<file_sep>/pageprofileabout.php
<?php /* Template Name: About Template Profile */
get_header('pagedefault');
?>
<article id="post-<?php the_ID(); ?>" <?php post_class(); ?>>
<div class="entry-content">
<div class="header-page">
<h1> <?php the_title();?></h1>
</div>
<div class="post-wrap">
<div class="">
<div class="row no-aside">
<div class="col-main">
<div class="spinner-wrap">
<div class="leftside"></div>
<div class="rightside"></div>
<div class="spinner">
<div class="rect1"></div>
<div class="rect2"></div>
<div class="rect3"></div>
<div class="rect4"></div>
<div class="rect5"></div>
</div>
</div>
<section class="main-listing">
<div class="container">
<div class="row">
<div class="col-md-12">
<section id="cd-timeline" class="cd-container">
<div class="cd-timeline-block year-block">
<div class="cd-timeline-year">
<h2>1999</h2>
</div>
<!-- cd-timeline-img -->
</div>
<!-- cd-timeline-block -->
<div class="cd-timeline-block post type-post status-publish format-image has-post-thumbnail sticky hentry category-image tag-image-2 post_format-post-format-image">
<div class="cd-timeline-img">
<h2>11</h2>
<p>Jul</p>
</div>
<!-- cd-timeline-img -->
<div class="cd-timeline-content box-panel selected-box-bordered sticky-post box-panel selected-box-bordered">
<div class="cd-content clearfix">
<!-- <img width="515" height="290" src="http://10.0.1.178/timeliner/wp-content/uploads/2014/07/apx8EPiSnWoYHSEiUENI_14553734675_699b2aa038_o-515x290.jpg" class="attachment-box-item size-box-item wp-post-image" alt="Peace" srcset="http://10.0.1.178/timeliner/wp-content/uploads/2014/07/apx8EPiSnWoYHSEiUENI_14553734675_699b2aa038_o-300x169.jpg 300w, http://10.0.1.178/timeliner/wp-content/uploads/2014/07/apx8EPiSnWoYHSEiUENI_14553734675_699b2aa038_o-1024x576.jpg 1024w, http://10.0.1.178/timeliner/wp-content/uploads/2014/07/apx8EPiSnWoYHSEiUENI_14553734675_699b2aa038_o-800x450.jpg 800w, http://10.0.1.178/timeliner/wp-content/uploads/2014/07/apx8EPiSnWoYHSEiUENI_14553734675_699b2aa038_o-515x289.jpg 515w, http://10.0.1.178/timeliner/wp-content/uploads/2014/07/apx8EPiSnWoYHSEiUENI_14553734675_699b2aa038_o.jpg 1080w" sizes="(max-width: 515px) 100vw, 515px" /> -->
<img width="515" height="290" src="http://www.nubelo.com/blog/wp-content/uploads/2012/11/bornglobal600.jpg">
<div class="content-padding">
<a href="image-post-format/index.html" class="post-title">
<h2>Nacimiento de Profile</h2>
</a>
<div class="post-content">
<p>Profile nace con la voluntad de cubrir necesidades de servicios de outsourcing de Consultoría, Análisis y Programación del ERP JD Edwards en sus dos versiones, One World y World Software.</p>
</div>
<div class="clearfix"></div>
<div class="cd-author">
<img src="http://10.0.1.178/timeliner/wp-content/uploads/2014/09/5KY7898-150x150.png" class="media-object img-responsive" alt="" />
</div>
</div>
</div>
</div>
<!-- cd-timeline-content -->
</div>
<div class="cd-timeline-block post type-post status-publish format-standard hentry category-standard tag-gmap tag-standard-2">
<div class="cd-timeline-img">
<h2>11</h2>
<p>Jul</p>
</div>
<!-- cd-timeline-img -->
<div class="cd-timeline-content box-panel selected-box-bordered ">
<div class="cd-content clearfix">
<div class="embed-responsive embed-responsive-16by9">
<iframe src="https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d889.832160056289!2d2.1588905933821776!3d41.39522664213615!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x12a4a291593588d9%3A0x99247f25b11b0371!2sCarrer+del+Rossell%C3%B3%2C+253%2C+08008+Barcelona!5e0!3m2!1ses!2ses!4v1458059919302" class="embed-responsive-item"></iframe>
</div>
<div class="content-padding">
<a href="standard-post-format-with-gmap/index.html" class="post-title">
<h2>Abrimos las oficinas de Barcelona</h2>
</a>
<div class="post-content">
<p>Nacen las oficinas de Barcelona.</p>
</div>
<div class="clearfix"></div>
<div class="cd-author">
<img src="http://10.0.1.178/timeliner/wp-content/uploads/2014/09/sally-150x150.png" class="media-object img-responsive" alt="" />
</div>
</div>
</div>
</div>
<!-- cd-timeline-content -->
</div>
<div class="cd-timeline-block year-block">
<div class="cd-timeline-year">
<h2>2001</h2>
</div>
<!-- cd-timeline-img -->
</div>
<!-- cd-timeline-block -->
<div class="cd-timeline-block even post type-post status-publish format-video hentry category-video tag-video-2 tag-youtube post_format-post-format-video">
<div class="cd-timeline-img">
<h2>11</h2>
<p>Jul</p>
</div>
<!-- cd-timeline-img -->
<div class="cd-timeline-content box-panel selected-box-bordered ">
<div class="cd-content clearfix">
<div class="embed-responsive embed-responsive-16by9">
<iframe src="http://www.youtube.com/embed/MSMDgMhfM_M?rel=0" class="embed-responsive-item"></iframe>
</div>
<div class="content-padding">
<a href="youtube-video/index.html" class="post-title">
<h2>Ampliación de tecnologías</h2>
</a>
<div class="post-content">
<p>Tras situarse rápidamente como uno de los líderes en este mercado, en el año 2001 inicia su ampliación hacia otro tipo de tecnologías entre las que se encuentran los entornos de SAP, Java, .Net o Cobol.</p>
</div>
<div class="clearfix"></div>
<div class="cd-author">
<img src="http://10.0.1.178/timeliner/wp-content/uploads/2014/09/john-150x150.png" class="media-object img-responsive" alt="" />
</div>
</div>
</div>
</div>
<!-- cd-timeline-content -->
</div>
<!-- cd-timeline-block -->
<div class="cd-timeline-block year-block">
<div class="cd-timeline-year">
<h2>2006</h2>
</div>
<!-- cd-timeline-img -->
</div>
<div class="cd-timeline-block post type-post status-publish format-audio has-post-thumbnail hentry category-audio tag-audio-2 tag-background post_format-post-format-audio">
<div class="cd-timeline-img">
<h2>11</h2>
<p>Jul</p>
</div>
<!-- cd-timeline-img -->
<div class="cd-timeline-content box-panel selected-box-bordered ">
<div class="cd-content clearfix">
<div class="embed-responsive embed-responsive-16by9">
<iframe src="https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d3037.1754128475377!2d-3.6965095840046636!3d40.42711456293062!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0xd42288e41f83c2f%3A0xce1a826d4f6ecb85!2sCalle+de+G%C3%A9nova%2C+11%2C+28004+Madrid!5e0!3m2!1ses!2ses!4v1458060324999" class="embed-responsive-item"></iframe>
</div>
<div class="content-padding">
<a href="standard-post-format-with-gmap/index.html" class="post-title">
<h2>Nos expandimos a Madrid</h2>
</a>
<div class="post-content">
<p>Profile abre sus oficinas en Madrid</p>
</div>
<div class="clearfix"></div>
<div class="cd-author">
<img src="http://10.0.1.178/timeliner/wp-content/uploads/2014/09/sally-150x150.png" class="media-object img-responsive" alt="" />
</div>
</div>
</div>
</div>
<!-- cd-timeline-content -->
</div>
<!-- cd-timeline-block -->
<div class="cd-timeline-block even post type-post status-publish format-link has-post-thumbnail hentry category-link tag-google tag-link-2 post_format-post-format-link">
<div class="cd-timeline-img">
<h2>11</h2>
<p>Jul</p>
</div>
<!-- cd-timeline-img -->
<div class="cd-timeline-content box-panel selected-box-bordered content-bg ">
<div class="bg-image" style="background-image: url( 'http://10.0.1.178/timeliner/wp-content/uploads/2014/07/lSed5VXIQnOw7PMfB9ht_IMG_1642-800x450.jpg' ); background-size: cover;">
<div class="overlay">
<a href="http://www.google.com">
<h2 class="break-word">http://www.profile.es</h2>
</a>
</div>
</div>
<div class="cd-content">
<div class="cd-author">
<img src="http://10.0.1.178/timeliner/wp-content/uploads/2014/09/kdzhOFb-150x150.png" class="media-object img-responsive" alt="" />
</div>
</div>
</div>
</div>
<div class="cd-timeline-block year-block">
<div class="cd-timeline-year">
<h2>2007</h2>
</div>
<!-- cd-timeline-img -->
</div>
<!-- cd-timeline-block -->
<div class="cd-timeline-block post type-post status-publish format-status has-post-thumbnail hentry category-status tag-status-2 tag-twitter post_format-post-format-status">
<div class="cd-timeline-img">
<h2>11</h2>
<p>Jul</p>
</div>
<!-- cd-timeline-img -->
<div class="cd-timeline-content box-panel selected-box-bordered ">
<div class="cd-content clearfix">
<img width="515" height="343" src="http://pymematica.com/wp-content/uploads/2015/08/factores-seo-2016.jpg" class="attachment-box-item size-box-item wp-post-image" alt="Safe"/>
<div class="content-padding">
<a href="status-with-background/index.html" class="post-title">
<h2>Ampliamos perfiles y servicios</h2>
</a>
<div class="post-content">
<p> Profile inicia otra ampliación de perfiles ofreciendo servicios más especializados como son los de Quality Assurance, Software Testing, Front End o SEO.</p>
</div>
<div class="clearfix"></div>
<div class="cd-author">
<img src="http://10.0.1.178/timeliner/wp-content/uploads/2014/09/5KY7898-150x150.png" class="media-object img-responsive" alt="" />
</div>
</div>
</div>
</div>
<!-- cd-timeline-content -->
</div>
<!-- cd-timeline-block -->
<div class="cd-timeline-block year-block">
<div class="cd-timeline-year">
<h2>2011</h2>
</div>
<!-- cd-timeline-img -->
</div>
<div class="cd-timeline-block even post type-post status-publish format-gallery hentry category-galleries tag-gallery tag-slider post_format-post-format-gallery">
<div class="cd-timeline-img">
<h2>11</h2>
<p>Jul</p>
</div>
<!-- cd-timeline-img -->
<div class="cd-timeline-content box-panel selected-box-bordered ">
<div class="cd-content clearfix">
<ul class="list-unstyled post-slider">
<li><img width="515" height="289" src="http://10.0.1.178/timeliner/wp-content/uploads/2014/07/skater-515x289.jpg" class="attachment-gallery-box-item size-gallery-box-item" alt="Fun" srcset="http://10.0.1.178/timeliner/wp-content/uploads/2014/07/skater-800x450.jpg 800w, http://10.0.1.178/timeliner/wp-content/uploads/2014/07/skater-515x289.jpg 515w" sizes="(max-width: 515px) 100vw, 515px" /></li>
<li><img width="515" height="289" src="http://10.0.1.178/timeliner/wp-content/uploads/2014/07/pHyYeNZMRFOIRpYeW7X3_manacloseup-copy-515x289.jpg" class="attachment-gallery-box-item size-gallery-box-item" alt="Amaizing" srcset="http://10.0.1.178/timeliner/wp-content/uploads/2014/07/pHyYeNZMRFOIRpYeW7X3_manacloseup-copy-800x450.jpg 800w, http://10.0.1.178/timeliner/wp-content/uploads/2014/07/pHyYeNZMRFOIRpYeW7X3_manacloseup-copy-515x289.jpg 515w" sizes="(max-width: 515px) 100vw, 515px" /></li>
<li><img width="515" height="289" src="http://10.0.1.178/timeliner/wp-content/uploads/2014/07/gQZ2iaRdRoWKahCTncS1_brooklyn-bridge-515x289.jpg" class="attachment-gallery-box-item size-gallery-box-item" alt="Bridge" srcset="http://10.0.1.178/timeliner/wp-content/uploads/2014/07/gQZ2iaRdRoWKahCTncS1_brooklyn-bridge-800x450.jpg 800w, http://10.0.1.178/timeliner/wp-content/uploads/2014/07/gQZ2iaRdRoWKahCTncS1_brooklyn-bridge-515x289.jpg 515w" sizes="(max-width: 515px) 100vw, 515px" /></li>
<li><img width="515" height="289" src="http://10.0.1.178/timeliner/wp-content/uploads/2014/07/90PZOijCSquhzM1A3cq8_photo-515x289.jpg" class="attachment-gallery-box-item size-gallery-box-item" alt="Empty" srcset="http://10.0.1.178/timeliner/wp-content/uploads/2014/07/90PZOijCSquhzM1A3cq8_photo-800x450.jpg 800w, http://10.0.1.178/timeliner/wp-content/uploads/2014/07/90PZOijCSquhzM1A3cq8_photo-515x289.jpg 515w" sizes="(max-width: 515px) 100vw, 515px" /></li>
</ul>
<div class="content-padding">
<a href="slider-gallery/index.html" class="post-title">
<h2>Nace IROS</h2>
</a>
<div class="post-content">
<p>Los socios de Profile crean, junto con un antiguo empleado, otra empresa, Intelligent Resources Optimization Services (IROS) de la que Profile se convierte en partner comercial. Su misión es dar servicios de optimización de los recursos comerciales de las grandes empresas, mediante el uso de tecnologías avanzadas propias y de terceros.</p>
</div>
<div class="clearfix"></div>
<div class="cd-author">
<img src="http://10.0.1.178/timeliner/wp-content/uploads/2014/09/john-150x150.png" class="media-object img-responsive" alt="" />
</div>
</div>
</div>
</div>
<!-- cd-timeline-content -->
</div>
<div class="cd-timeline-block year-block">
<div class="cd-timeline-year">
<h2>2014</h2>
</div>
<!-- cd-timeline-img -->
</div>
<!-- cd-timeline-block -->
<div class="cd-timeline-block post type-post status-publish format-standard hentry category-standard tag-gmap tag-standard-2">
<div class="cd-timeline-img">
<h2>11</h2>
<p>Jul</p>
</div>
<!-- cd-timeline-img -->
<div class="cd-timeline-content box-panel selected-box-bordered ">
<div class="cd-content clearfix">
<div class="embed-responsive embed-responsive-16by9">
<iframe src="https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d3011.4837381894595!2d0.9211394160049433!3d40.99278422832577!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x12a14060f5eaeaf3%3A0x7ec79747ffd8b96a!2sVia+Augusta%2C+2%2C+43890+L'Hospitalet+de+l'Infant%2C+Tarragona!5e0!3m2!1ses!2ses!4v1458061303700"></iframe>
</div>
<div class="content-padding">
<a href="standard-post-format-with-gmap/index.html" class="post-title">
<h2>Nace el CDA en Tarragona</h2>
</a>
<div class="post-content">
<p>Nace el centro de desarrollo Ágil en Tarragona</p>
</div>
<div class="clearfix"></div>
<div class="cd-author">
<img src="http://10.0.1.178/timeliner/wp-content/uploads/2014/09/sally-150x150.png" class="media-object img-responsive" alt="" />
</div>
</div>
</div>
</div>
</div>
<div class="cd-timeline-block year-block">
<div class="cd-timeline-year">
<h2>2015</h2>
</div>
<!-- cd-timeline-img -->
</div>
<!-- cd-timeline-block -->
<div class="cd-timeline-block even post type-post status-publish format-standard hentry category-standard tag-standard-2">
<div class="cd-timeline-img">
<h2>11</h2>
<p>Jul</p>
</div>
<!-- cd-timeline-img -->
<div class="cd-timeline-content box-panel selected-box-bordered ">
<div class="cd-content clearfix">
<div class="embed-responsive embed-responsive-16by9">
<iframe src="https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d2896.422116127657!2d-3.8780926839516523!3d43.45178387344852!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0xd49485a0100ed05%3A0xf1529d22c236641f!2sUNEATLANTICO+-+Universidad+Europea+del+Atl%C3%A1ntico!5e0!3m2!1ses!2ses!4v1458061450523"></iframe>
</div>
<div class="content-padding">
<a href="standard-post-format-with-gmap/index.html" class="post-title">
<h2><NAME></h2>
</a>
<div class="post-content">
<p><NAME></p>
</div>
<div class="clearfix"></div>
<div class="cd-author">
<img src="http://10.0.1.178/timeliner/wp-content/uploads/2014/09/sally-150x150.png" class="media-object img-responsive" alt="" />
</div>
</div>
</div>
</div>
</div>
<div class="cd-timeline-block even post type-post status-publish format-standard hentry category-standard tag-standard-2">
<div class="cd-timeline-img">
<h2>11</h2>
<p>Jul</p>
</div>
<!-- cd-timeline-img -->
<div class="cd-timeline-content box-panel selected-box-bordered ">
<div class="cd-content clearfix">
<div class="embed-responsive embed-responsive-16by9">
<iframe src="https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d3010.6801597225904!2d0.9069539160052354!3d41.01037322724353!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x12a1409c2176005d%3A0x1cc592b91602373d!2sCarrer+Rivera+Sans%2C+5%2C+43890%2C+Tarragona!5e0!3m2!1ses!2ses!4v1458061549029"></iframe>
</div>
<div class="content-padding">
<a href="standard-post-format-with-gmap/index.html" class="post-title">
<h2>Ampliamos las oficinas del CDA de Tarragona</h2>
</a>
<div class="post-content">
<p>El CDA se amplía y se traslada de oficinas</p>
</div>
<div class="clearfix"></div>
<div class="cd-author">
<img src="http://10.0.1.178/timeliner/wp-content/uploads/2014/09/sally-150x150.png" class="media-object img-responsive" alt="" />
</div>
</div>
</div>
</div>
</div>
<!-- cd-timeline-block -->
<div class="cd-timeline-block load-more-block">
<div class="cd-timeline-year">
<h2><a href="javascript:;" class="load-more" data-next_link="http://demo.djmimi.net/themes/timeliner/page/2/">...</a></h2>
</div>
<!-- cd-timeline-img -->
</div>
<!-- cd-timeline-block -->
</section>
</div>
</div>
</div>
<link rel='stylesheet' id='mediaelement-css' href='<?php echo get_site_url(); ?>/bower_components/mediaelement/build/mediaelementplayer.min.css' type='text/css' media='all' />
<script type='text/javascript' src='<?php echo get_site_url(); ?>/wp-content/themes/onetone/js/responsiveslides.min-ver=4.4.2.js'></script>
<script type='text/javascript' src='<?php echo get_site_url(); ?>/wp-content/themes/onetone/js/modernizr-ver=4.4.2.js'></script>
<script type='text/javascript' src='<?php echo get_site_url(); ?>/wp-content/themes/onetone/js/jquery.magnific-popup.min-ver=4.4.2.js'></script>
<script type='text/javascript' src='<?php echo get_site_url(); ?>/wp-content/themes/onetone/js/custom-ver=4.4.2.js'></script>
<script type='text/javascript' src='<?php echo get_site_url(); ?>/wp-includes/js/wp-embed.min.js'></script>
<script type='text/javascript' src='<?php echo get_site_url(); ?>/bower_components/mediaelement/build/mediaelement-and-player.min.js'></script>
<script type='text/javascript' src='<?php echo get_site_url(); ?>/wp-includes/js/mediaelement/wp-mediaelement.js'></script>
</section>
</div>
</div>
</div>
</div>
</article>
<?php
get_footer(); ?>
<file_sep>/pageprofileservicios.php
<?php /* Template Name: Servicios Template Profile */
get_header('pagedefault');
?>
<article id="post-<?php the_ID(); ?>" <?php post_class(); ?>>
<div class="entry-content">
<div class="header-page header-servicios">
<h1> <?php the_title();?></h1>
</div>
<div class="post-wrap">
<div class="bgcolorf4">
<div class="container servicios">
<div class="row">
<div class="col-md-8">
<h1 class="cprofile">Introducción</h1>
<h3>Texto de entradilla que resume lo que va a contener la página, a modo de conclusión.</h3>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Corrupti dolorem ducimus, saepe aliquam, totam impedit veniam officiis est et aliquid in voluptas necessitatibus nulla non. Quibusdam reprehenderit, mollitia consequuntur totam.</p>
</div>
<div class="col-md-4">
<img class="img-servicios" src="<?= get_bloginfo('template_directory'); ?>/images/ico-40.png">
</div>
</div>
</div>
</div>
<div class="separator my-arrow-servicios ">
<div class="home-container container">
<h1 class="text-center negative">Big Data</h1>
<div class="home-section-content">
</div>
</div>
<div class="clear"></div>
</div>
<div class="container servicios">
<div class="row">
<div class="col-md-4 text-center">
<img class="img-servicios" src="<?= get_bloginfo('template_directory'); ?>/images/servicios/bigdata.png" alt="">
</div>
<div class="testimonial-servicios block clearfix col-md-8">
<div class="description">
La información es el petróleo del siglo XXI, y la analítica Big Data es el motor.
</div>
<div class="author">
<NAME>, Gartner Research
</div>
</div>
</div>
</div>
<div class="bgcolorf4">
<div class="container servicios">
<div class="row">
<div class="col-md-6 col-xs-6">
<div class="col-md-4 col-img"><img src="<?= get_bloginfo('template_directory'); ?>/images/ico-47.png" /></div>
<div class="col-md-8">
<h3>Big Data Open Source</h3>
<h5 class="servicios-apartado-entradilla">Hadoop, Spark, Storm, Kafka, Flume, Impala, Pig, Hive... en instalación local o en cloud.</h5>
<p>El ecosistema open source para análisis de datos es potente y flexible. Las arquitecturas big data basadas en open source permiten análisis de datos distribuido a cualquier escala.</p>
</div>
</div>
<div class="col-md-6 col-xs-6">
<div class="col-md-4 col-img"><img src="<?= get_bloginfo('template_directory'); ?>/images/ico-49.png" /></div>
<div class="col-md-8">
<h3>NoSQL</h3>
<h5 class="servicios-apartado-entradilla">Sistemas de almacenamiento.</h5>
<p>Las bases de datos NoSQL como MongoDB nos permiten gran agilidad durante el desarrollo y una mayor flexibilidad y potencia de explotación que las bases de datos tradicionales.</p>
</div>
</div>
</div>
<div class="row nomargintop">
<div class="col-md-6 col-xs-6">
<div class="col-md-4 col-img"><img src="<?= get_bloginfo('template_directory'); ?>/images/ico-48.png" /></div>
<div class="col-md-8">
<h3>Data-Driven Business Strategy</h3>
<h5 class="servicios-apartado-entradilla">Estrategia de datos, data exploration.</h5>
<p>Como paso inicial hacia convertirse en una compañía con orientación a los datos, es conveniente definir una estrategia alrededor de todo el ciclo de vida de los datos y realizar una exploración inicial para generar ideas alrededor de estos datos.</p>
</div>
</div>
<div class="col-md-6 col-xs-6">
<div class="col-md-4 col-img"><img src="<?= get_bloginfo('template_directory'); ?>/images/ico-50.png" /></div>
<div class="col-md-8">
<h3>Logtrust</h3>
<h5 class="servicios-apartado-entradilla">Análisis en tiempo real</h5>
<p>Logtrust nos permite dar soluciones para análisis de tiempo real bajo un modelo ágil y flexible, desde la ingesta de datos hasta la visualización del análisis y la toma de decisiones.</p>
</div>
</div>
</div>
</div>
</div>
<div class="separator my-arrow-servicios ">
<div class="home-container container">
<h1 class="text-center negative">Cloud Computing</h1>
<div class="home-section-content">
</div>
</div>
<div class="clear"></div>
</div>
<div class="container servicios">
<div class="row">
<div class="col-md-8">
<h1 class="cprofile">Lorem Ipsum</h1>
<h3>Texto de entradilla que resume lo que va a contener la página, a modo de conclusión.</h3>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Corrupti dolorem ducimus, saepe aliquam, totam impedit veniam officiis est et aliquid in voluptas necessitatibus nulla non. Quibusdam reprehenderit, mollitia consequuntur totam.</p>
</div>
<div class="col-md-4">
<img class="img-servicios" src="<?= get_bloginfo('template_directory'); ?>/images/servicios/default.jpg">
</div>
</div>
</div>
<div class="bgcolorf4">
<div class="container">
<div class="row">
<div class="col-md-4">
<div class="col-md-12">
<img class="img-servicios" src="http://placeimg.com/400/200/tech" />
<h3>IaaS</h3>
<label>Infraestructura como servicio</label>
<p class="service-separator">La nube nos ofrece una capacidad de proceso y almacenamiento que hasta ahora estaba sólo al alcance de unos pocos.</p>
<p>Hoy, diseñamos una infraestructura y sólo con unos clicks, conseguimos materializarla y ponerla en funcionamiento.</p>
</div>
</div>
<div class="col-md-4">
<div class="col-md-12">
<img class="img-servicios" src="http://placeimg.com/400/201/tech" />
<h3>PaaS</h3>
<label>Plataformas tecnológicas ágiles</label>
<p class="service-separator">Gracias a los productos PaaS (Platform as a service) podemos centrarnos en lo que hacemos bien: escribir código de alta calidad y ponerlo en marcha.</p>
<p>Empezamos subiendo código al repositorio, creamos un entorno escalable y con alta disponibilidad en cloud, y ponemos la aplicación a correr en minutos.</p>
</div>
</div>
<div class="col-md-4">
<div class="col-md-12">
<img class="img-servicios" src="http://placeimg.com/401/200/tech" />
<h3>SaaS</h3>
<label>Soluciones software flexibles</label>
<p class="service-separator">No tratamos de reinventar la rueda. En ocasiones es mejor utilizar un servicio (Software as a service) que agilice nuestro desarrollo.</p>
<p>Conectamos servicios y construimos aplicaciones a partir de las mejores piezas.</p>
</div>
</div>
</div>
</div>
</div>
<div class="separator my-arrow-servicios ">
<div class="home-container container">
<h1 class="text-center negative">User experience</h1>
<div class="home-section-content">
</div>
</div>
<div class="clear"></div>
</div>
<div class="container servicios">
<div class="row">
<div class="col-md-8">
<h1 class="cprofile">Diseño centrado en el usuario</h1>
<h3>Una experiencia de usuario efectiva es la que consigue una interacción agradable, intuitiva y usable, sencilla pero completa.</h3>
<p>La perfección no consiste en poner, sino en quitar hasta dejar sólo lo imprescindible. Uno de los principios Lean es que lo que no aporta ningún valor al producto, sobra. El diseño de UX se alinea con los objetivos de la empresa para aportar el máximo valor en el menor tiempo posible.</p>
</div>
<div class="col-md-4">
<img class="img-servicios" src="http://localhost/web-profile/wp-content/themes/onetone/images/servicios/default.jpg">
</div>
</div>
</div>
<div class="bgcolorf4">
<div class="container servicios">
<div class="row">
<div class="col-md-7">
<div class="col-md-2">
<img src="http://placeimg.com/200/300/tech" />
</div>
<div class="col-md-10">
<h3>Patrones de diseño</h3>
<h5>Se facilita el uso siguiendo patrones universales y convenciones culturales.</h5>
<p>Se usan affordances para reducir la carga cognitiva y se establecen estrategias para influir en la percepción del usuario mediante la manipulación de los patrones de exploración, el flujo y la jerarquía visual.</p>
</div>
<div class="col-md-2">
<img src="http://placeimg.com/201/300/tech" />
</div>
<div class="col-md-10">
<h3>Tests de usuario</h3>
<h5>Si el producto se adecua al usuario final aumenta su probabilidad de éxito.</h5>
<p>Tanto a través de encuestas, entrevistas y focus groups como con tests A/B y tests de usabilidad. Los tests se realizan en la fase de conceptualización y durante el desarrollo, ya sean presenciales o a distancia, cualitativos o cuantitativos.</p>
</div>
</div>
<div class="col-md-5">
<div class="col-md-3 col-img">
<img src="http://placeimg.com/200/301/tech" />
</div>
<div class="col-md-9">
<h3>Prototipado</h3>
<h5>Mediante la co-creación el equipo suma, a la vez que se consigue empatizar.</h5>
<p>Con la información extraída del análisis de requisitos el equipo puede realizar diferentes prototipos, variando cuáles según la fase y las necesidades del proyecto.</p>
<ul class="list-unordered">
<li>Personas</li>
<li>Escenarios</li>
<li>Flujos de navegación</li>
<li>Storyboards</li>
<li>Paper prototypes</li>
<li>Whiteboards</li>
<li>Wireframes de baja resolución</li>
<li>Wireframes de alta resolución</li>
<li>Prototipos interactivos estáticos</li>
<li>Prototipos interactivos dinámicos</li>
</ul>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</article>
<?php
get_footer(); ?>
<file_sep>/taxonomy-portfolio-category.php
<?php
/**
* The taxonomy template file for portfolio categories.
*
*/
get_header();
$related_number = absint(onetone_option('related_number','8'));
$left_sidebar = onetone_option('left_sidebar_portfolio_archive');
$right_sidebar = onetone_option('right_sidebar_portfolio_archive');
$aside = 'no-aside';
if( $left_sidebar !='' )
$aside = 'left-aside';
if( $right_sidebar !='' )
$aside = 'right-aside';
if( $left_sidebar !='' && $right_sidebar !='' )
$aside = 'both-aside';
?>
<div id="portfolio-cat">
<section class="page-title-bar title-left no-subtitle" style="">
<div class="container">
<hgroup class="page-title">
<h1><?php single_cat_title();?></h1>
</hgroup>
<?php onetone_get_breadcrumb(array("before"=>"<div class=''>","after"=>"</div>","show_browse"=>false,"separator"=>'','container'=>'div'));?>
<div class="clearfix"></div>
</div>
</section>
<div class="post-wrap">
<div class="container">
<div class="post-inner row <?php echo $aside; ?>">
<div class="col-main">
<section class="post-main" role="main" id="content">
<?php if (have_posts()) : ?>
<?php
$items = "" ;
$i = 1 ;
$result = "" ;
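// Build the portfolio grid: buffer each item and wrap every group of three in its own .row container; any leftover items are flushed into a final row after the loop.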
while ( have_posts() ) : the_post();
$portfolio_image = "";
$thumb = "";
if (has_post_thumbnail( get_the_ID()) ):
$thumb = get_the_post_thumbnail( get_the_ID() , "portfolio-grid-thumb" );
//$image = wp_get_attachment_image_src( get_post_thumbnail_id( get_the_ID() ), 'large' );
//$portfolio_image = $image[0];
endif;
$tags = get_the_tags(get_the_ID());
$tags_list = '<ul>';
if(is_array($tags)){
foreach ( $tags as $tag ) {
$tag_link = get_tag_link( $tag->term_id );
$tags_list .= "<li><a href='{$tag_link}' title='{$tag->name}' class='{$tag->slug}'>";
$tags_list .= "{$tag->name}</a></li>";
}
}
$tags_list .= '</ul>';
$items .= '<div class="portfolio-col col-sm-4"><div class="portfolio-box text-center">';
$items .= '<a href="'.get_permalink().'">';
$items .= $thumb;
$items .= '</a>';
$items .= '<div class="portfolio-box-title"><a href="'.get_permalink().'"><h3>'.get_the_title().'</h3></a>
'.$tags_list.'
</div>
</div>
</div>';
if($i%3 == 0){
$result .= '<div class="row">'.$items.'</div>';
$items = "";
}
$i++;
endwhile;
if($items != "")
$result = $result.'<div class="row">'.$items.'</div>';
echo $result;
?>
<div class="list-pagition text-center">
<?php onetone_native_pagenavi("echo",$wp_query);?>
</div>
<?php else:?>
<div style="width:100%; text-align:center; margin-bottom:30px;">
<?php _e("Nothing found.","onetone");?>
</div>
<?php endif; ?>
</section>
</div>
<?php if( $left_sidebar !='' ):?>
<div class="col-aside-left">
<aside class="blog-side left text-left">
<?php get_sidebar('portfolioleft');?>
</aside>
</div>
<?php endif; ?>
<?php if( $right_sidebar !='' ):?>
<div class="col-aside-right">
<?php get_sidebar('portfolioright');?>
</div>
<?php endif; ?>
</div>
</div>
</div>
</div>
<?php get_footer(); ?><file_sep>/readme.txt
Corporate Theme
================
<file_sep>/index.php
<?php
/**
* The main template file.
*
*/
?>
<?php
if ( 'page' == get_option( 'show_on_front' ) && ( '' != get_option( 'page_for_posts' ) ) && $wp_query->get_queried_object_id() == get_option( 'page_for_posts' ) ) :
get_header();
else:
get_header('home');
endif;
?>
<?php
if ( 'page' == get_option( 'show_on_front' ) && ( '' != get_option( 'page_for_posts' ) ) && $wp_query->get_queried_object_id() == get_option( 'page_for_posts' ) ) :
$left_sidebar = onetone_option('left_sidebar_blog_archive','');
$right_sidebar = onetone_option('right_sidebar_blog_archive','');
$aside = 'no-aside';
if( $left_sidebar !='' )
$aside = 'left-aside';
if( $right_sidebar !='' )
$aside = 'right-aside';
if( $left_sidebar !='' && $right_sidebar !='' )
$aside = 'both-aside';
?>
<div class="post-wrap">
<div class="container">
<div class="post-inner row <?php echo $aside; ?>">
<div class="col-main">
<section class="post-main" role="main" id="content">
<article class="page type-page" id="" role="article">
<?php if (have_posts()) :?>
<!--blog list begin-->
<div class="blog-list-wrap">
<?php while ( have_posts() ) : the_post();?>
<?php get_template_part("content",get_post_format()); ?>
<?php endwhile;?>
</div>
<?php endif;?>
<!--blog list end-->
<!--list pagination begin-->
<nav class="post-list-pagination" role="navigation">
<?php if(function_exists("onetone_native_pagenavi")){onetone_native_pagenavi("echo",$wp_query);}?>
</nav>
<!--list pagination end-->
</article>
<div class="post-attributes"></div>
</section>
</div>
<?php if( $left_sidebar !='' ):?>
<div class="col-aside-left">
<aside class="blog-side left text-left">
<div class="widget-area">
<?php get_sidebar('archiveleft');?>
</div>
</aside>
</div>
<?php endif; ?>
<?php if( $right_sidebar !='' ):?>
<div class="col-aside-right">
<?php get_sidebar('archiveright');?>
</div>
<?php endif; ?>
</div>
</div>
</div>
<?php else: ?>
<div class="post-wrap">
<div class="container-fullwidth">
<div class="page-inner row no-aside" style="padding-top: 0; padding-bottom: 0;">
<div class="col-main">
<section class="post-main" role="main" id="content">
<article class="page type-page homepage" id="" role="article">
<?php
global $onetone_options,$onetone_new_version,$onetone_homepage_sections ;
$detect = new Mobile_Detect;
$video_background_section = onetone_option( 'video_background_section' );
$video_background_type = onetone_option( 'video_background_type' );
$video_background_type = $video_background_type == ""?"youtube":$video_background_type;
$section_1_content = onetone_option( 'section_1_content' );
$animated = onetone_option( 'home_animated');
$section_1_content = $section_1_content=='slider'?1:$section_1_content;
if( $animated == '1' )
$onetone_animated = 'onetone-animated';
$sections_num = 15 ;
$new_homepage_section = array();
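// Resolve the homepage section display order: use the saved 'section_order_{i}' option when it is numeric, otherwise fall back to the section's natural index.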
for($i=0;$i<$sections_num;$i++){
$section = onetone_option('section_order_'.$i);
if( is_numeric($section ) )
$new_homepage_section[] = $section;
else
$new_homepage_section[] = $i;
}
$i = 0 ;
foreach( $new_homepage_section as $section_part ):
$hide_section = onetone_option( 'section_hide_'.($section_part-1) );
if( $hide_section != '1' ){
if( $section_part == 1 && $section_1_content == '1'){
get_template_part('home-sections/section','slider');
}else{
//if( $video_background_section >0 && $video_background_section == $section_part && !$detect->isMobile() && !$detect->isTablet() )
if( $video_background_section >0 && $video_background_section == $section_part )
get_template_part('home-sections/section',$video_background_type.'-video');
else
get_template_part('home-sections/section',$section_part);
}
}
$i++;
endforeach;
?>
<div class="clear"></div>
</article>
</section>
</div>
</div>
</div>
</div>
<?php endif;?>
<?php get_footer();?><file_sep>/includes/shortcodes.php
<?php
/**
* Shortcodes
*
*/
//
global $onetone_shortcodes , $portfolio_categories;
function onetone_get_nav_array(){
$menus = get_terms('nav_menu');
$items = array();
foreach($menus as $menu){
$items[$menu->term_id] = $menu->name;
}
return $items;
}
function onetone_get_post_categories(){
$post_categories = array();
$post_type_array = array();
$args = array( 'hide_empty' => 0 );
$terms = get_terms('category', $args);
$post_categories[""] = "All";
$count = count($terms); $i=0;
if ($count > 0) {
foreach ($terms as $term) {
$i++;
if(isset($term->slug) && isset($term->name)){
$post_categories[$term->slug] = $term->name;
}
}
}
return $post_categories;
}
//
$portfolio_categories = array();
$post_type_array = array();
$args = array( 'hide_empty'=>0,'post_type' => 'portfolio','posts_per_page' => -1 );
$terms = get_terms('portfolio-category', $args);
$portfolio_categories[""] = "All";
$count = count($terms); $i=0;
if ($count > 0) {
foreach ($terms as $term) {
$i++;
if(isset($term->slug) && isset($term->name)){
$portfolio_categories[$term->slug] = $term->name;
}
}
}
$onetone_shortcodes = array(
'align' => array(
array("type"=>"select","std"=>"left","id"=>"align","title"=>__("Align",'onetone') ,"desc"=>'',"options"=>array("left"=>"left","right"=>"right","center"=>"center")),
array("type"=>"text","std"=>"","id"=>"css_class","title"=>__("Css Class",'onetone') ,"desc"=>__('Extra CSS class','onetone')),
array("type"=>"textarea","std"=>" Your Content ","id"=>"text_content","title"=>__("Content",'onetone') ,"desc"=>'')
) ,
'animation' => array(
array("type"=>"select","std"=>"1.5","id"=>"animation_speed","title"=>__("Animation Speed",'onetone') ,"desc"=>'',"options"=>array("0.1"=>"0.1","0.2"=>"0.2","0.3"=>"0.3","0.4"=>"0.4","0.5"=>"0.5","0.6"=>"0.6","0.7"=>"0.7","0.8"=>"0.8","0.9"=>"0.9","1"=>"1","1.2"=>"1.2","1.4"=>"1.4","1.6"=>"1.6","1.8"=>"1.8","2"=>"2","2.4"=>"2.4","2.6"=>"2.6","2.8"=>"2.8","3"=>"3")),
array("type"=>"select","std"=>"bounce","id"=>"animation_type","title"=>__("Animation Type",'onetone') ,"desc"=>'',"options"=>array("flash"=>"flash","shake"=>"shake","bounce"=>"bounce","scale"=>"scale","smush"=>"smush","spin"=>"spin","fade"=>"fade","fade-left"=>"fade-left","fade-right"=>"fade-right","fade-up"=>"fade-up","fade-down"=>"fade-down","slide-left"=>"slide-left","slide-right"=>"slide-right","slide-up"=>"slide-up","slide-down"=>"slide-down")),
array("type"=>"select","std"=>"no","id"=>"image_animation","title"=>__("Image Animation",'onetone') ,"desc"=>__('Image animation only','onetone'),"options"=>array("no"=>"no","yes"=>"yes")),
array("type"=>"text","std"=>"","id"=>"css_class","title"=>__("Css Class",'onetone') ,"desc"=>__('Extra CSS class','onetone')),
array("type"=>"textarea","std"=>" Your Content ","id"=>"text_content","title"=>__("Content",'onetone') ,"desc"=>'')
),
'button' => array(
array("type"=>"select","std"=>"normal","id"=>"size","title"=>__("Size",'onetone') ,"desc"=>'',"options"=>array("normal"=>"normal","large"=>"large")),
array("type"=>"select","std"=>"no","id"=>"rounded","title"=>__("Rounded",'onetone') ,"desc"=>'',"options"=>array("no"=>"no","yes"=>"yes")),
array("type"=>"text","std"=>"#","id"=>"link","title"=>__("Button Link",'onetone') ,"desc"=>''),
array("type"=>"select","std"=>"_blank","id"=>"target","title"=>__("Target",'onetone') ,"desc"=>'',"options"=>array("_blank"=>"_blank","_self"=>"_self","_parent"=>"_parent","_top"=>"_top")),
array("type"=>"text","std"=>"#666","id"=>"color","title"=>__("Color",'onetone') ,"desc"=>''),
array("type"=>"textarea","std"=>" Button ","id"=>"text_content","title"=>__("Button Text",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"","id"=>"css_class","title"=>__("Css Class",'onetone') ,"desc"=>__('Extra CSS class','onetone'))
),
'boxed' => array(
// array("type"=>"text","std"=>"","id"=>"width","title"=>__("Box Width",'onetone') ,"desc"=>__('Default 1170','onetone')),
array("type"=>"textarea","std"=>" Your Content ","id"=>"text_content","title"=>__("Content",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"","id"=>"css_class","title"=>__("Css Class",'onetone') ,"desc"=>__('Extra CSS class','onetone'))
),
'column' => array(
array("type"=>"select","std"=>"","id"=>"col_xs","title"=>__("Extra small grid( < 768px)",'onetone') ,"desc"=>__("Select column width. This width will be calculated depend page width.",'onetone'),"options"=>array(""=>"default","1"=>"1/12","2"=>"2/12","3"=>"3/12","4"=>"4/12","5"=>"5/12","6"=>"6/12","7"=>"7/12","8"=>"8/12","9"=>"9/12","10"=>"10/12","11"=>"11/12","12"=>"12/12")),
array("type"=>"select","std"=>"6","id"=>"col_sm","title"=>__("Small grid(≥ 768px)",'onetone') ,"desc"=>'',"options"=>array("1"=>"1/12","2"=>"2/12","3"=>"3/12","4"=>"4/12","5"=>"5/12","6"=>"6/12","7"=>"7/12","8"=>"8/12","9"=>"9/12","10"=>"10/12","11"=>"11/12","12"=>"12/12")),
array("type"=>"select","std"=>"3","id"=>"col_md","title"=>__("Medium grid( ≥ 992px)",'onetone'),"desc"=>'',"options"=>array("1"=>"1/12","2"=>"2/12","3"=>"3/12","4"=>"4/12","5"=>"5/12","6"=>"6/12","7"=>"7/12","8"=>"8/12","9"=>"9/12","10"=>"10/12","11"=>"11/12","12"=>"12/12")),
array("type"=>"select","std"=>"","id"=>"col_lg","title"=>__("Large grid( ≥ 1200px)",'onetone') ,"desc"=>'',"options"=>array(""=>"default","1"=>"1/12","2"=>"2/12","3"=>"3/12","4"=>"4/12","5"=>"5/12","6"=>"6/12","7"=>"7/12","8"=>"8/12","9"=>"9/12","10"=>"10/12","11"=>"11/12","12"=>"12/12")),
array("type"=>"text","std"=>"","id"=>"css_class","title"=>__("Css Class",'onetone') ,"desc"=>__('Extra CSS class','onetone')),
array("type"=>"textarea","std"=>" Your Content ","id"=>"text_content","title"=>__("Content",'onetone') ,"desc"=>'')
),
'divider' => array(
array("type"=>"select","std"=>"","id"=>"style","title"=>__("Style",'onetone') ,"options"=>array(""=>"blank","1"=>"1","2"=>"2","3"=>"3","4"=>"4")),
array("type"=>"text","std"=>"20","id"=>"height","title"=>__("Divider Height",'onetone') ,"desc"=>'px'),
array("type"=>"text","std"=>"","id"=>"css_class","title"=>__("Css Class",'onetone') ,"desc"=>__('Extra CSS class','onetone'))
)
,
'menu' => array(
array("type"=>"select","std"=>"","id"=>"item","title"=>__("Menu",'onetone') ,"options"=>onetone_get_nav_array()) ,
array("type"=>"select","std"=>"static","id"=>"position","title"=>__("Position",'onetone') ,"options"=>array("static"=>"static","fixed"=>"fixed")),
array("type"=>"text","std"=>"","id"=>"css_class","title"=>__("Css Class",'onetone') ,"desc"=>__('Extra CSS class','onetone'))
),
'contact' => array(
array("type"=>"select","std"=>"3","id"=>"style","title"=>__("Style",'onetone') ,"options"=>array("1"=>"1","2"=>"2","3"=>"3")),
array("type"=>"text","std"=>get_option( 'admin_email' ),"id"=>"email","title"=>__("Contact Email",'onetone') ,"desc"=>''),
array("type"=>"textarea","std"=>"\n[form_field type='input' name='YOUR NAME' required='1' options='']\n
[form_field type='input' is_email='1' name='YOUR EMAIL' required='1' options='']\n
[form_field type='input' name='SUBJECT' required='1' options='']\n
[form_field type='select' name='QUESTION TYPE' required='1' options='Type Item One,Type Item Two,Type Item Three']\n
[form_field type='textarea' name='MESSAGE' required='1' options='']\n","id"=>"text_content","title"=>__("List Items",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"","id"=>"css_class","title"=>__("Css Class",'onetone') ,"desc"=>__('Extra CSS class','onetone'))
),
'portfolio' => array(
array("type"=>"text","std"=>"4","id"=>"num","title"=>__("List Num",'onetone') ,"desc"=>''),
array("type"=>"select","std"=>"4","id"=>"columns","title"=>__("Columns",'onetone') ,"options"=>array("2"=>"2","3"=>"3","4"=>"4")),
array("type"=>"select","std"=>"","id"=>"category","title"=>__("Category",'onetone') ,"options"=>$portfolio_categories),
array("type"=>"select","std"=>"0","id"=>"pagenav","title"=>__("Display Pagenav",'onetone') ,"options"=>array("0"=>"no","1"=>"yes")),
array("type"=>"text","std"=>"","id"=>"css_class","title"=>__("Css Class",'onetone') ,"desc"=>__('Extra CSS class','onetone'))
),
'pricing' => array(
array("type"=>"select","std"=>"1","id"=>"style","title"=>__("Style",'onetone') ,"options"=>array("1"=>"1","2"=>"2")),
array("type"=>"text","std"=>"$","id"=>"currency","title"=>__("Currency",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"29","id"=>"price","title"=>__("Price",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"","id"=>"title","title"=>__("Title",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"","id"=>"sub_title","title"=>__("Sub-title",'onetone') ,"desc"=>''),
array("type"=>"select","std"=>"0","id"=>"featured","title"=>__("Featured",'onetone') ,"options"=>array("0"=>"no","1"=>"yes")),
array("type"=>"text","std"=>"","id"=>"btn_text","title"=>__("Button Text",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"#","id"=>"btn_link","title"=>__("Button Link",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"fa-shopping-cart","id"=>"btn_icon","title"=>__("Button Icon",'onetone') ,"desc"=>__('Font Awesome Icon.','onetone')),
array("type"=>"textarea","std"=>"[pricing_item]5 GB Bandwidth[/pricing_item]\n[pricing_item]1 GB[/pricing_item]\n[pricing_item]8 GB Storage[/pricing_item]\n[pricing_item]Limited[/pricing_item]\n[pricing_item]2 Projects[/pricing_item]\n","id"=>"text_content","title"=>__("List Items",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"","id"=>"css_class","title"=>__("Css Class",'onetone') ,"desc"=>__('Extra CSS class','onetone'))
)
,
'pricing_item' => array(
array("type"=>"textarea","std"=>"","id"=>"text_content","title"=>__("Content",'magee') ,"desc"=>'')
),
'row' => array(
array("type"=>"textarea","std"=>" Your Content ","id"=>"text_content","title"=>__("Content",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"","id"=>"css_class","title"=>__("Css Class",'onetone') ,"desc"=>__('Extra CSS class','onetone'))
),
'section' => array(
array("type"=>"text","std"=>"#ffffff","id"=>"background_color","title"=>__("Section Background Color",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"","id"=>"background_image","title"=>__("Section Background Image",'onetone') ,"desc"=>''),
array("type"=>"select","std"=>"","id"=>"background_repeat","title"=>__("Background Repeat",'onetone') ,"options"=>array("repeat"=>"repeat all","no-repeat"=>"no-repeat","repeat-x"=>"repeat-x","repeat-y"=>"repeat-y")),
array("type"=>"select","std"=>"off","id"=>"background_size","title"=>__("100% Background Image",'onetone') ,"options"=>array("off"=>"off","on"=>"on")),
array("type"=>"select","std"=>"","id"=>"parallax","title"=>__("Parallax Scrolling Background Image",'onetone') ,"options"=>array("off"=>"off","on"=>"on")),
array("type"=>"text","std"=>"","id"=>"heading_color","title"=>__("Heading Font Color",'onetone') ,"desc"=>__('h1-h6 color, e.g. #ffffff','onetone')),
array("type"=>"text","std"=>"","id"=>"color","title"=>__("Font Color",'onetone') ,"desc"=>'e.g. #ffffff'),
array("type"=>"text","std"=>"","id"=>"padding","title"=>__("Section Padding",'onetone') ,"desc"=>__('e.g. 60px 0','onetone')),
array("type"=>"textarea","std"=>" Your Content ","id"=>"text_content","title"=>__("Content",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"","id"=>"id","title"=>__("ID",'onetone') ,"desc"=>__('Scrolling to a specified target.','onetone')),
array("type"=>"text","std"=>"","id"=>"css_class","title"=>__("Css Class",'onetone') ,"desc"=>__('Extra CSS class','onetone'))
)
,
'team' => array(
array("type"=>"select","std"=>"1","id"=>"style","title"=>__("Style",'onetone') ,"options"=>array("1"=>"1","2"=>"2","3"=>"3")),
array("type"=>"text","std"=>"","id"=>"name","title"=>__("Member Name",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"","id"=>"byline","title"=>__("Byline",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"","id"=>"avatar","title"=>__("Avatar",'onetone') ,"desc"=>__('Size 238 x 271 px for 3/12 column','onetone')),
array("type"=>"select","std"=>"","id"=>"social_icon_1","title"=>__("Social Icon 1",'onetone') ,"options"=>
array("skype"=>"skype","facebook"=>"facebook","twitter"=>"twitter","google-plus"=>"google+","youtube"=>"youtube","linkedin"=>"linkedin","pinterest"=>"pinterest","email"=>"email","instagram"=>"instagram","deviantart"=>"deviantart","soundcloud"=>"soundcloud","vimeo"=>"vimeo","flickr"=>"flickr")),
array("type"=>"text","std"=>"","id"=>"social_link_1","title"=>__("Social Link 1",'onetone') ,"desc"=>''),
array("type"=>"select","std"=>"","id"=>"social_icon_2","title"=>__("Social Icon 2",'onetone') ,"options"=>array("skype"=>"skype","facebook"=>"facebook","twitter"=>"twitter","google-plus"=>"google+","youtube"=>"youtube","linkedin"=>"linkedin","pinterest"=>"pinterest","email"=>"email","instagram"=>"instagram","deviantart"=>"deviantart","soundcloud"=>"soundcloud","vimeo"=>"vimeo","flickr"=>"flickr")),
array("type"=>"text","std"=>"","id"=>"social_link_2","title"=>__("Social Link 2",'onetone') ,"desc"=>''),
array("type"=>"select","std"=>"","id"=>"social_icon_3","title"=>__("Social Icon 3",'onetone') ,"options"=>array("skype"=>"skype","facebook"=>"facebook","twitter"=>"twitter","google-plus"=>"google+","youtube"=>"youtube","linkedin"=>"linkedin","pinterest"=>"pinterest","email"=>"email","instagram"=>"instagram","deviantart"=>"deviantart","soundcloud"=>"soundcloud","vimeo"=>"vimeo","flickr"=>"flickr")),
array("type"=>"text","std"=>"","id"=>"social_link_3","title"=>__("Social Link 3",'onetone') ,"desc"=>''),
array("type"=>"select","std"=>"","id"=>"social_icon_4","title"=>__("Social Icon 4",'onetone') ,"options"=>array("skype"=>"skype","facebook"=>"facebook","twitter"=>"twitter","google-plus"=>"google+","youtube"=>"youtube","linkedin"=>"linkedin","pinterest"=>"pinterest","email"=>"email","instagram"=>"instagram","deviantart"=>"deviantart","soundcloud"=>"soundcloud","vimeo"=>"vimeo","flickr"=>"flickr")),
array("type"=>"text","std"=>"","id"=>"social_link_4","title"=>__("Social Link 4",'onetone') ,"desc"=>''),
array("type"=>"textarea","std"=>" Your Content ","id"=>"text_content","title"=>__("Description",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"","id"=>"css_class","title"=>__("Css Class",'onetone') ,"desc"=>__('Extra CSS class','onetone'))
),
'service' => array(
array("type"=>"select","std"=>"","id"=>"style","title"=>__("Style",'onetone') ,"options"=>array("1"=>"1","2"=>"2","3"=>"3","4"=>"4")),
array("type"=>"text","std"=>"Our Service","id"=>"title","title"=>__("Title",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"fa-gift","id"=>"icon","title"=>__("Icon",'onetone') ,"desc"=>__('Font Awesome Icon or Image URL','onetone')),
//array("type"=>"text","std"=>"","id"=>"icon_color","title"=>__("Icon Color",'onetone') ,"desc"=>'e.g. #00b7ee'),
array("type"=>"text","std"=>"#","id"=>"link","title"=>__("Link",'onetone') ,"desc"=>__('Read more link.','onetone')),
array("type"=>"textarea","std"=>" Your Content ","id"=>"text_content","title"=>__("Content",'onetone') ,"desc"=>''),
array("type"=>"text","std"=>"","id"=>"css_class","title"=>__("Css Class",'onetone') ,"desc"=>__('Extra CSS class','onetone'))
),
'timeline' => array(
array("type"=>"text","std"=>"3","id"=>"num","title"=>__("Blog List Num",'onetone') ,"desc"=>''),
array("type"=>"select","std"=>"","id"=>"category","title"=>__("Category",'onetone') ,"options"=>onetone_get_post_categories()),
array("type"=>"text","std"=>"60","id"=>"excerpt_length","title"=>__("Excerpt Length",'onetone') ,"desc"=>__('Number of words.','onetone'))
)
);
foreach($onetone_shortcodes as $onetone_shortcode => $std){
add_shortcode($onetone_shortcode,'onetone_'.$onetone_shortcode.'_shortcode');
}
/**
* Shortcode: align
*
* @param array $atts Shortcode attributes
* @return string Output html
* author: quan
*/
function onetone_align_shortcode($atts,$content=NULL){
extract( shortcode_atts( array(
'align' =>'left',
'css_class' => ''
), $atts ) );
$return = '<div class="onetone-shortcode align-'.$align.' '.$css_class.'" style="width:100%;">'.do_shortcode(onetone_fix_shortcodes( $content) ).'</div>';
return $return;
}
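/*
 * Illustrative usage in post/page content (attribute values are examples):
 *
 *   [align align="center" css_class="intro"]Centered content here[/align]
 */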
/**
* Shortcode: css animation
*
* @param array $atts Shortcode attributes
* @return string Output html
* author: quan
*/
function onetone_animation_shortcode($atts,$content=NULL){
extract( shortcode_atts( array(
'css_class' => '',
'animation_speed' => '0.5',
'animation_type' => 'bounce',
'image_animation' =>'no'
), $atts ) );
$animation = 'data-animationduration="'.$animation_speed.'" data-animationtype="'.$animation_type.'" data-imageanimation="'.$image_animation.'"';
$return = '<div class="animated '.$css_class.'" '.$animation.'>'.do_shortcode(onetone_fix_shortcodes( $content) ).'</div>';
return $return;
}
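/*
 * Illustrative usage in post/page content (attribute values are examples):
 *
 *   [animation animation_type="fade-up" animation_speed="1"]Animated block content[/animation]
 */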
/**
* Shortcode: button
*
* @param string $content
* @param array $atts Shortcode attributes
* @return string Output html
* author: quan
*/
function onetone_button_shortcode($atts,$content=NULL){
extract( shortcode_atts( array(
'css_class' => '',
'size' => '',
'rounded' =>'',
'link' => '#',
'target' => '_blank',
'color' => '#666'
), $atts ) );
$css_style = "";
if( $size == "large" ){
$css_class .= " btn-lg";
}
if( $rounded == "yes" ){
$css_class .= " btn-rd";
}
if( $color != "" ){
$css_style .= 'color:'.$color.';border-color:'.$color.';';
}
$return = '<a href="'.$link.'" target="'.$target.'" ><button class="onetone-shortcode btn '.$css_class.'" style="'.$css_style.'">'.do_shortcode( onetone_fix_shortcodes( $content ) ).'</button></a>';
return $return ;
}
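/*
 * Illustrative usage in post/page content (the link, color and text are examples):
 *
 *   [button size="large" rounded="yes" link="http://example.com" target="_self" color="#00b7ee"]Read more[/button]
 */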
/**
* Shortcode: boxed
*
* @param string $content
* @param array $atts Shortcode attributes
* @return string Output html
* author: quan
*/
function onetone_boxed_shortcode($atts,$content=NULL){
extract( shortcode_atts( array(
'css_class' => '',
'width'=> ''
), $atts ) );
if($width != ""){
if(is_numeric($width))
$width = $width."px";
$width = "width:".$width.";";
}
$return = '<div class="onetone-shortcode container '.$css_class.'" style="'.$width.'">'.do_shortcode( onetone_fix_shortcodes( $content ) ).'</div>';
return $return ;
}
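/*
 * Illustrative usage in post/page content (the width value is an example):
 *
 *   [boxed width="960" css_class="intro-box"]Boxed content here[/boxed]
 */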
/**
* Shortcode: column
*
* @param string $content
* @param array $atts Shortcode attributes
* @return string Output html
* author: quan
*/
function onetone_column_shortcode($atts,$content=NULL){
extract( shortcode_atts( array(
'col_xs' => '',
'col_sm' => '',
'col_md' => '',
'col_lg' => '',
'col_sm_offset' =>'',
'col_md_offset' =>'',
'col_lg_offset' =>'',
'col_sm_push' =>'',
'col_md_push' =>'',
'col_lg_push' =>'',
'col_sm_pull' =>'',
'col_md_pull' =>'',
'col_lg_pull' =>'',
'css_class' =>''
), $atts ) );
$col_class = "";
if(trim($col_xs) != "" && is_numeric($col_xs)){ $col_class .= "col-xs-".$col_xs." ";}
if(trim($col_sm) != "" && is_numeric($col_sm)){ $col_class .= "col-sm-".$col_sm." ";}
if(trim($col_md) != "" && is_numeric($col_md)){ $col_class .= "col-md-".$col_md." ";}
if(trim($col_lg) != "" && is_numeric($col_lg)){ $col_class .= "col-lg-".$col_lg." ";}
if(trim($col_sm_offset) != "" && is_numeric($col_sm_offset)){ $col_class .= "col-sm-offset-".$col_sm_offset." ";}
if(trim($col_md_offset) != "" && is_numeric($col_md_offset)){ $col_class .= "col-md-offset-".$col_md_offset." ";}
if(trim($col_lg_offset) != "" && is_numeric($col_lg_offset)){ $col_class .= "col-lg-offset-".$col_lg_offset." ";}
if(trim($col_sm_push) != "" && is_numeric($col_sm_push)){ $col_class .= "col-sm-push-".$col_sm_push." ";}
if(trim($col_md_push) != "" && is_numeric($col_md_push)){ $col_class .= "col-md-push-".$col_md_push." ";}
if(trim($col_lg_push) != "" && is_numeric($col_lg_push)){ $col_class .= "col-lg-push-".$col_lg_push." ";}
if(trim($col_sm_pull) != "" && is_numeric($col_sm_pull)){ $col_class .= "col-sm-pull-".$col_sm_pull." ";}
if(trim($col_md_pull) != "" && is_numeric($col_md_pull)){ $col_class .= "col-md-pull-".$col_md_pull." ";}
if(trim($col_lg_pull) != "" && is_numeric($col_lg_pull)){ $col_class .= "col-lg-pull-".$col_lg_pull." ";}
if(trim($css_class) != ""){ $col_class .= $css_class;}
$return = '<div class="onetone-shortcode '.$col_class.'">';
$return .= do_shortcode(onetone_fix_shortcodes( $content) );
$return .= '<div class="clear"></div>';
$return .= '</div>';
return $return ;
}
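/*
 * Illustrative usage in post/page content -- two half-width columns from the
 * small breakpoint upwards (attribute values are examples):
 *
 *   [row]
 *       [column col_sm="6" col_md="6"]Left column[/column]
 *       [column col_sm="6" col_md="6"]Right column[/column]
 *   [/row]
 */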
/**
* Shortcode: Contact Form
*
* @param string $content
* @param array $atts Shortcode attributes
* @return string Output html
* author: quan
*/
function onetone_form_field_shortcode( $atts,$content=NULL ){
extract( shortcode_atts( array(
'type' => 'input',
'name'=> '',
'required'=>'',
'is_email' =>'',
'options' =>''
), $atts ) );
$input_type = 'text';
if($is_email == '1' || $is_email == 'yes' || $is_email == 'true' )
$input_type = 'email';
$return = "";
$required_str = "";
if($required == '1' || $required == 'yes' || $required == 'true' )
$required_str = ' required="required" aria-required="true" data-required="1" ';
if( $name != "" ){
$field_id = sanitize_title( $name );
switch($type){
case "select":
$options_array = explode(",",$options);
$return .= '<section><select class="'.$field_id.'" '.$required_str.' data-name="'.$name.'" name="'.$field_id.'" id="'.$field_id.'">';
$return .= '<option value="">=='.$name.'==</option>';
foreach( $options_array as $option )
{
$return .= '<option value="'.$option.'">'.$option.'</option>';
}
$return .= '</select></section>';
break;
case "textarea":
$return .= '<section><textarea name="'.$field_id.'" data-name="'.$name.'" '.$required_str.' id="'.$field_id.'" cols="39" rows="5" tabindex="3" placeholder="'.$name.'"></textarea></section>';
break;
case "input":
default:
$return .= '<section><input type="'.$input_type.'" name="'.$field_id.'" data-name="'.$name.'" value="" id="'.$field_id.'" placeholder="'.$name.'" tabindex="1" '.$required_str.'></section>';
break;
}
}
return $return;
}
add_shortcode('form_field','onetone_form_field_shortcode');
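/*
 * Illustrative usage (mirrors the default field list of the [contact]
 * shortcode above; names and options are examples):
 *
 *   [form_field type='input' name='YOUR NAME' required='1' options='']
 *   [form_field type='input' is_email='1' name='YOUR EMAIL' required='1' options='']
 *   [form_field type='select' name='QUESTION TYPE' required='1' options='Option One,Option Two']
 *   [form_field type='textarea' name='MESSAGE' required='1' options='']
 */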
function onetone_contact_shortcode($atts,$content=NULL){
extract( shortcode_atts( array(
'css_class' => '',
'email'=> get_option( 'admin_email' ),
'style' => '1'
), $atts ) );
$return = "";
if( $content != NULL ){
switch($style){
case "1":
$return = '<div class="onetone-shortcode contact-area '.$css_class.'"><form onsubmit="return false;" action="'.esc_url(home_url('/')).'" class="onetone-shortcode contact-form style1 " method="post">
<fieldset>
'.do_shortcode( onetone_fix_shortcodes( $content ) ).'
</fieldset>
<section>
<span class="noticefailed"></span>
<input type="hidden" name="sendto" id="sendto" value="'.base64_encode($email).'">
<input type="button" id="submit" name="submit" value="'.__("SEND","onetone").'" class="contact-submit btn-normal">
<input type="hidden" name="contact-form-ver" class="contact-form-ver" value="2">
<input type="hidden" name="email_error" value="'.__("Please enter valid email.",'onetone').'">
<input type="hidden" name="name_error" value="'.__("Please enter your name.",'onetone').'">
<input type="hidden" name="message_error" value="'.__("Message is required.",'onetone').'">
</section>
</form></div>';
break;
case "2":
default:
$return = '<div class="onetone-shortcode contact-area '.$css_class.'"><form onsubmit="return false;" action="'.esc_url(home_url('/')).'" class="onetone-shortcode contact-form style2 " method="post">
<fieldset>
'.do_shortcode( onetone_fix_shortcodes( $content ) ).'
</fieldset>
<section>
<span class="noticefailed"></span>
<input type="hidden" name="sendto" id="sendto" value="'.base64_encode($email).'">
<input type="button" id="submit" name="submit" value="'.__("SEND","onetone").'" class="contact-submit btn-normal">
<input type="hidden" name="contact-form-ver" class="contact-form-ver" value="2">
<input type="hidden" name="email_error" value="'.__("Please enter valid email.",'onetone').'">
<input type="hidden" name="name_error" value="'.__("Please enter your name.",'onetone').'">
<input type="hidden" name="message_error" value="'.__("Message is required.",'onetone').'">
</section>
</form></div>';
break;
case 3:
$return = '<div class="onetone-shortcode contact-area '.$css_class.'"><form action="'.esc_url(home_url('/')).'" method="post" class="onetone-shortcode contact-form style3 ">
<fieldset>
'.do_shortcode( onetone_fix_shortcodes( $content ) ).'
</fieldset>
<p class="noticefailed"></p>
<input type="hidden" value="'.base64_encode($email).'" id="sendto" name="sendto">
<input type="button" value="'.__("SEND","onetone").'" id="submit" name="submit">
<input type="hidden" name="contact-form-ver" class="contact-form-ver" value="2">
<input type="hidden" name="email_error" value="'.__("Please enter valid email.",'onetone').'">
<input type="hidden" name="name_error" value="'.__("Please enter your name.",'onetone').'">
<input type="hidden" name="message_error" value="'.__("Message is required.",'onetone').'">
</form></div>';
break;
}
return $return ;
}
switch($style){
case "1":
$return = '<div class="onetone-shortcode contact-area '.$css_class.'"><form onsubmit="return false;" action="'.esc_url(home_url('/')).'" class="onetone-shortcode contact-form style1 " method="post">
<fieldset>
<section>
<input type="text" name="name" id="name" placeholder="'.__("YOUR NAME","onetone").'*" tabindex="1" required="required" aria-required="true">
</section>
<section>
<input type="email" name="email" id="email" placeholder="'.__("YOUR E-MAIL","onetone").'*" tabindex="2" required="required" aria-required="true">
</section>
<section>
<textarea name="message" aria-required="true" required="required" id="message" cols="39" rows="5" tabindex="3" placeholder="'.__("YOUR MESSAGE","onetone").'*"></textarea>
</section>
</fieldset>
<section>
<span class="noticefailed"></span>
<input type="hidden" name="sendto" id="sendto" value="'.base64_encode($email).'">
<input type="button" id="submit" name="submit" value="'.__("SEND","onetone").'" class="contact-submit btn-normal">
<input type="hidden" name="email_error" value="'.__("Please enter valid email.",'onetone').'">
<input type="hidden" name="name_error" value="'.__("Please enter your name.",'onetone').'">
<input type="hidden" name="message_error" value="'.__("Message is required.",'onetone').'">
</section>
</form></div>';
break;
case "2":
default:
$return = '<div class="onetone-shortcode contact-area '.$css_class.'"><form onsubmit="return false;" action="'.esc_url(home_url('/')).'" class="onetone-shortcode contact-form style2 " method="post">
<fieldset>
<section>
<i class="fa fa-user fa-fw"></i>
<input type="text" name="name" id="name" placeholder="'.__("Your Name","onetone").'" tabindex="1" required="required" aria-required="true">
</section>
<section>
<i class="fa fa-envelope fa-fw"></i>
<input type="email" name="email" id="email" placeholder="'.__("Your Email","onetone").'" tabindex="2" required="required" aria-required="true">
</section>
<section>
<textarea name="message" aria-required="true" required="required" id="message" cols="39" rows="5" tabindex="3" placeholder="'.__("Message","onetone").'"></textarea>
</section>
</fieldset>
<section>
<span class="noticefailed"></span>
<input type="hidden" name="sendto" id="sendto" value="'.base64_encode($email).'">
<input type="button" id="submit" name="submit" value="'.__("SEND","onetone").'" class="contact-submit btn-normal">
<input type="hidden" name="email_error" value="'.__("Please enter valid email.",'onetone').'">
<input type="hidden" name="name_error" value="'.__("Please enter your name.",'onetone').'">
<input type="hidden" name="message_error" value="'.__("Message is required.",'onetone').'">
</section>
</form></div>';
break;
case 3:
$return = '<div class="onetone-shortcode contact-area '.$css_class.'"><form action="'.esc_url(home_url('/')).'" method="post" class="onetone-shortcode contact-form style3 ">
<fieldset>
<input type="text" aria-required="true" required="required" tabindex="1" size="22" placeholder="'.__("Name","onetone").'" value="" id="name" name="name">
<input type="email" aria-required="true" required="required" tabindex="2" size="22" placeholder="'.__("Email","onetone").'" value="" id="email" name="email">
<textarea placeholder="'.__("Message","onetone").'" required="required" tabindex="4" rows="7" cols="39" id="message" name="message"></textarea>
</fieldset>
<p class="noticefailed"></p>
<input type="hidden" value="'.base64_encode($email).'" id="sendto" name="sendto">
<input type="button" value="'.__("SEND","onetone").'" id="submit" name="submit">
<input type="hidden" name="email_error" value="'.__("Please enter valid email.",'onetone').'">
<input type="hidden" name="name_error" value="'.__("Please enter your name.",'onetone').'">
<input type="hidden" name="message_error" value="'.__("Message is required.",'onetone').'">
</form></div>';
break;
}
return $return ;
}
/**
* Shortcode: divider
*
* @param string $content
* @param array $atts Shortcode attributes
* @return string Output html
* author: quan
*/
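/*
 * Usage sketch (illustrative; the registered tag name is assumed to be [divider]
 * because the add_shortcode() call is not shown in this excerpt):
 * [divider height="30" style="2" css_class="my-divider"]
 */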
function onetone_divider_shortcode($atts,$content=NULL){
extract( shortcode_atts( array(
'css_class' => '',
'height'=> '10',
'style' => ''
), $atts ) );
if($height != ""){
if(is_numeric($height))
$height = $height."px";
$height = "margin-bottom:".$height.";";
}
$return = '<div class="onetone-shortcode divider style'.$style.' '.$css_class.'" style='.$height.'></div>';
return $return ;
}
/**
* Shortcode: menu
*
* @param string $content
* @param array $atts Shortcode attributes
* @return string Output html
* author: quan
*/
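/*
 * Usage sketch (illustrative; tag name assumed to be [menu]; "Main Menu" stands in
 * for any menu created under Appearance > Menus):
 * [menu item="Main Menu" position="static" css_class="top-links"]
 */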
function onetone_menu_shortcode( $atts,$content=NULL ){
extract( shortcode_atts( array(
'item' =>'',
'css_class' => '',
'position' =>'static'
), $atts ) );
$css_style = 'position:'.$position.';';
$return = '<div class="onetone-shortcode onetone-nav '.$css_class.'" style="'.$css_style.'">';
$return .= wp_nav_menu( array( 'echo'=> false,'menu' => $item,'depth'=>0,'fallback_cb' =>false,'container'=>'' ,'items_wrap'=> '<ul class="shortcode-nav">%3$s</ul>') );
$return .= '</div>';
return $return;
}
/**
* Shortcode: portfolio
*
* @param string $content
* @param array $atts Shortcode attributes
* @return string Output html
* author: quan
*/
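/*
 * Usage sketch (illustrative; tag name assumed to be [portfolio]; category accepts
 * either a portfolio-category name or its numeric ID, as handled below):
 * [portfolio columns="3" num="6" category="Web Design" pagenav="yes"]
 */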
function onetone_portfolio_shortcode($atts,$content=NULL){
extract( shortcode_atts( array(
'css_class' => '',
'columns'=> '4',
'num' => '4',
'category' => '',
'pagenav' =>''
), $atts ) );
global $paged;
if(!is_numeric($category)){
$term = get_term_by('name', $category, 'portfolio-category');
}else{
$term = get_term_by('id', $category, 'portfolio-category');
}
$return = '<div class="onetone-shortcode portfolio-wrapper">';
$items = '';
$term_slug = isset($term->slug)?$term->slug:"";
if(!is_numeric($columns) || $columns<2 || $columns>4)
$columns = 4;
$i = 1;
$col = 12/$columns ;
if(!$paged){$paged = (get_query_var('paged')) ? get_query_var('paged') : 1;}
$query = new WP_Query('post_type=portfolio&paged='.$paged.'&orderby=menu_order&post_status=publish&portfolio-category='.$term_slug.'&posts_per_page='.$num);
if($query->have_posts() ):
while ($query->have_posts() ) :
$query->the_post();
$postid = get_the_ID();
$permalink = get_permalink();
$title = get_the_title();
$image = "";
$thumb = "";
if (has_post_thumbnail( $postid) ):
$thumb = get_the_post_thumbnail( $postid , "portfolio-grid-thumb" );
$image = wp_get_attachment_image_src( get_post_thumbnail_id( $postid ), 'large' );
$image = $image[0];
endif;
$tags = get_the_tags(get_the_ID());
$tags_list = '<ul>';
if(is_array($tags)){
foreach ( $tags as $tag ) {
$tag_link = get_tag_link( $tag->term_id );
$tags_list .= "<li><a href='{$tag_link}' title='{$tag->name}' class='{$tag->slug}'>";
$tags_list .= "{$tag->name}</a></li>";
}
}
$tags_list .= '</ul>';
$items .= '<div class="portfolio-col col-sm-6 col-md-'.$col.' '.$css_class.'"><div class="portfolio-box text-center">';
$items .= '<a href="'.get_permalink().'">';
$items .= $thumb;
$items .= '</a>';
$items .= '<div class="portfolio-box-title"><a href="'.$permalink.'"><h3>'.$title.'</h3></a>
'.$tags_list.'
</div>
</div>
</div>';
/* $items .= '<div class="col-md-'.$col.' col-sm-6 '.$css_class.'">
<figure class="portfolio-list-box">
'. $thumb.'
<figcaption>
<a href="'.$permalink.'"><h3>'.$title.'</h3></a>
'.get_the_excerpt().'
</figcaption>
</figure></div>';*/
if($i%$columns == 0)
{
$return .= '<div class="row">'. $items.'</div>';
$items = '';
}
$i++ ;
endwhile;
endif;
if($items != '') $return .= '<div class="row">'. $items.'</div>';
if($pagenav == "yes" || $pagenav == "1"){
$return .= '<div class="list-pagition text-center">'.onetone_native_pagenavi("return",$query).'</div>';
}
$return .= '</div>';
wp_reset_postdata();
return $return ;
}
/**
* Shortcode: pricing
*
* @param string $content
* @param array $atts Shortcode attributes
* @return string Output html
* author: quan
*/
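/*
 * Usage sketch (illustrative; the [pricing] and [pricing_item] tag names are
 * assumed, since their add_shortcode() calls are not shown in this excerpt):
 *
 * [pricing title="Basic" sub_title="For small sites" currency="$" price="19"
 *          btn_text="BUY" btn_link="http://example.com/buy" style="1" featured="1"]
 *   [pricing_item]10 GB storage[/pricing_item]
 *   [pricing_item]Email support[/pricing_item]
 * [/pricing]
 */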
function onetone_pricing_shortcode($atts,$content=NULL){
extract( shortcode_atts( array(
'css_class' => '',
'currency' =>'$',
'price' => '',
'title' => '',
'sub_title' => '',
'color' => '',
'btn_text' => 'BUY',
'btn_link' => '#',
'btn_icon' => 'fa-shopping-cart',
'style' => '1',
'featured' => '0'
), $atts ) );
$css_style = '';
if($color !=""){
$css_style = 'background-color: '.$color.';color: #fff;border-color: '.$color.';';
}
$is_featured = "" ;
if($featured == '1' || $featured == 'yes') $is_featured = " featured";
$css_class .= " style".$style;
$css_class .= $is_featured;
switch($style){
case "2":
$return = '<div class="onetone-shortcode price-box '.$css_class.'">
<ul>
<li class="price-title">
<h3>'.$title.'</h3>
<div class="price-tag">
<sup>'.$currency.'</sup>'.$price.'
</div>
<h4>'.$sub_title.'</h4>
</li>
'.do_shortcode( onetone_fix_shortcodes($content) ).'
<li><a href="'.$btn_link.'" target="_blank"><button class="btn"><i class="fa '.$btn_icon.'"></i> '.$btn_text.'</button></a></li>
</ul>
</div>';
break;
case "1":
default:
$return = '<div class="onetone-shortcode price-box '.$css_class.'">
<div class="price-tag" style="'.$css_style.'">
<sup>'.$currency.'</sup>'.$price.'
</div>
<ul>
<li class="price-title">
<h3>'.$title.'</h3>
<h4>'.$sub_title.'</h4>
</li>
'.do_shortcode( onetone_fix_shortcodes($content) ).'
<li><a href="'.$btn_link.'" target="_blank"><button class="btn" style="'.$css_style.'"><i class="fa '.$btn_icon.'"></i> '.$btn_text.'</button></a></li>
</ul>
</div>';
break;
}
return $return ;
}
function onetone_pricing_item_shortcode($atts,$content=NULL){
extract( shortcode_atts( array(
'css_class' => ''
), $atts ) );
$return = '<li>'.do_shortcode( onetone_fix_shortcodes($content) ).'</li>';
return $return ;
}
/**
* Shortcode: row
*
* @param string $content
* @param array $atts Shortcode attributes
* @return string Output html
* author: quan
*/
function onetone_row_shortcode($atts,$content=NULL)
{
extract( shortcode_atts( array(
'css_class' => ''
), $atts ) );
$return = '<div class="onetone-shortcode row '.$css_class.'">';
$return .= do_shortcode(onetone_fix_shortcodes( $content) );
$return .= '</div>';
return $return ;
}
/**
* Shortcode: section
*
* @param string $content
* @param array $atts Shortcode attributes
* @return string Output html
* author: quan
*/
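/*
 * Usage sketch (illustrative; tag name assumed to be [section]). The content is
 * typically other shortcodes such as rows and columns:
 * [section id="about" background_color="#f5f5f5" background_image="http://example.com/bg.jpg"
 *          background_size="on" parallax="on" padding="60" color="#333"] ... [/section]
 */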
function onetone_section_shortcode($atts,$content=NULL){
extract( shortcode_atts( array(
'css_class' => '',
'background_color' => '',
'background_image' => '',
'background_repeat' => '',
'background_size' => 'off',
'padding' =>'',
'parallax' => 'off',
'color' =>'',
'heading_color'=>'',
'id' => ''
), $atts ) );
$style = "";
$bg_pos = "";
if( $parallax == "on" ){
$css_class .= " onetone-parallax";
}
if($background_color != "")
$style .= 'background-color:'.$background_color.';';
if($background_image != "")
{
if( $parallax == "on" )
$style .= 'background:url('.$background_image.') 50% 0 no-repeat fixed;';
else
$style .= 'background-image:url('.$background_image.');';
}
if( $background_repeat != "" && $parallax != "on" )
$style .= 'background-repeat:'.$background_repeat.' ;';
if( $color != "" )
$style .= 'color:'.$color.' ;';
if( $background_size == "on" )
$style .= '-webkit-background-size: cover;
-moz-background-size: cover;
-o-background-size: cover;
background-size: cover;';
if($padding != ""){
if(is_numeric($padding))
$padding = $padding."px";
$style .= "padding:".$padding.";";
}
$return = '<section id="'.$id.'" data-headingcolor="'.$heading_color.'" class="onetone-shortcode '.$css_class.'" style="'.$style.'">'.do_shortcode( onetone_fix_shortcodes($content) ).'<div class="clear"></div></section>';
return $return ;
}
/**
* Shortcode: service
*
* @param string $content
* @param array $atts Shortcode attributes
* @return string Output html
* author: quan
*/
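/*
 * Usage sketch (illustrative; tag name assumed to be [service]; icon accepts either
 * a Font Awesome class or an image URL, as handled below):
 * [service title="Fast Support" icon="fa-rocket" icon_color="#e74c3c" link="http://example.com/support" style="2"]
 *   We answer within 24 hours.
 * [/service]
 */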
function onetone_service_shortcode($atts,$content=NULL){
extract( shortcode_atts( array(
'css_class' => '',
'title' => '',
'icon' => '',
'link' => '',
'icon_color'=>'',
'style' => '1'
), $atts ) );
if($icon_color != "")
$icon_color = 'style="color:'.$icon_color.'"';
$css_class .= ' style'.$style;
$more_link = '';
switch($style){
case "2":
case "4":
if($link != "")
$more_link = '<div class="text-right"><a href="'.esc_url($link).'" class="text-right">'.__("Read More","onetone").'>></a></div>';
$return = '<div class="onetone-shortcode service-box '.$css_class.' text-left">';
if(strstr($icon,"http")){
$return .= '<h3><img src="'.$icon.'" alt="'.$title.'" />'.$title.'</h3>';
}else{
$return .= '<h3><i class="fa '.$icon.' '.$icon_color.'"></i>'.$title.'</h3>';
}
$return .= '<p>'.do_shortcode(onetone_fix_shortcodes( $content) ).'</p>'.$more_link.'</div>';
break;
case "1":
case "3":
default:
$return = '<div class="onetone-shortcode service-box text-center '.$css_class.'">';
if($icon != ""){
if(strstr($icon,"http")){
$return .= '<img src="'.$icon.'" alt="'.$title.'" />';
}else{
$return .= '<i class="fa '.$icon.'" '.$icon_color.'></i>';
}
}
if($title != "")
$return .= '<h3>'.$title.'</h3>';
$return .= '<p>'.do_shortcode(onetone_fix_shortcodes( $content) ).'</p>';
if($link != "")
$return .= '<a href="'.esc_url($link).'">'.__("Read More","onetone").'>></a>';
$return .= '</div>';
break;
}
return $return ;
}
/**
* Shortcode: blog timeline
*
* @param array $atts Shortcode attributes
* @return string Output html
* author: quan
*/
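/*
 * Usage sketch (illustrative; tag name assumed to be [timeline]; category is a post
 * category slug):
 * [timeline num="5" category="news" excerpt_length="40"]
 */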
function onetone_timeline_shortcode($atts,$content=NULL){
extract( shortcode_atts( array(
'css_class' => '',
'num' => '3',
'category' => '',
'excerpt_length' => '60'
), $atts ) );
global $paged;
$return = '<div class="onetone-shortcode timeline '.$css_class.'"><div class="time-stick"></div>';
// $paged =(get_query_var('paged'))? get_query_var('paged'): 1;
$wp_query = new WP_Query( 'showposts='.$num.'&category_name='.$category.'&post_status=publish&ignore_sticky_posts=1' );
$i = 1 ;
if ($wp_query -> have_posts()) :
while ( $wp_query -> have_posts() ) : $wp_query -> the_post();
$return .= '<div class="time-box">
<a href="'.get_permalink().'"><h2 class="time-title">'.get_the_title().'</h2></a>
<p class="time-content">'.onetone_cover_content( $excerpt_length,get_the_excerpt() ).'
</p>
</div>';
endwhile;
endif;
$return .= '</div>';
wp_reset_postdata();
return $return;
}
/**
* Shortcode: team
*
* @param string $content
* @param array $atts Shortcode attributes
* @return string Output html
* author: quan
*/
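/*
 * Usage sketch (illustrative; tag name assumed to be [team]; social_icon_1..4 take
 * Font Awesome names without the "fa-" prefix because the markup below prepends it):
 * [team name="Jane Doe" byline="Designer" avatar="http://example.com/jane.jpg"
 *       social_icon_1="twitter" social_link_1="http://twitter.com/example" style="2"]
 *   Short bio text goes here.
 * [/team]
 */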
function onetone_team_shortcode($atts,$content=NULL){
extract( shortcode_atts( array(
'css_class' => '',
'name' => '',
'avatar' => '',
'byline' => '',
'social_icon_1' => '',
'social_link_1' => '',
'social_icon_2' => '',
'social_link_2' => '',
'social_icon_3' => '',
'social_link_3' => '',
'social_icon_4' => '',
'social_link_4' => '',
'style' => '1'
), $atts ) );
$css_class .= " style".$style;
switch($style){
case "2":
$return = '<div class="onetone-shortcode team-box '.$css_class.'">
<div class="team-img-box">
<img src="'.esc_url($avatar).'">
</div>
<div class="team-info">
<h4>'.$name.'</h4>
<h5>'.$byline.'</h5>
<p>'.do_shortcode( onetone_fix_shortcodes($content) ).'</p>
<div>
<div class="team-sns">';
for($i = 1; $i<=4 ; $i++){
if(${"social_icon_$i"} != "" && ${"social_link_$i"} != ""){
$return .= '<a href="'.${"social_link_$i"}.'"><i class="fa fa-'.${"social_icon_$i"}.'"></i></a>';
}
}
$return .= ' </div></div></div></div>';
break;
case "3":
$return = '<div class="onetone-shortcode team-box '.$css_class.'">
<div class="team-img-box">
<img src="'.esc_url($avatar).'">
</div>
<div class="team-info">
<h4>'.$name.'</h4>
<h5>'.$byline.'</h5>
<p>'.do_shortcode( onetone_fix_shortcodes($content) ).'</p>
<div>
<div class="team-sns">';
for($i = 1; $i<=4 ; $i++){
if(${"social_icon_$i"} != "" && ${"social_link_$i"} != ""){
$return .= '<a href="'.${"social_link_$i"}.'"><i class="fa fa-'.${"social_icon_$i"}.'"></i></a>';
}
}
$return .= '</div>
</div>
</div>
</div>';
break ;
case "1":
default:
$return = '<div class="onetone-shortcode team-box '.$css_class.'">
<div class="team-img-box">
<img src="'.esc_url($avatar).'">
<div class="team-info">
<h4>'.$name.'</h4>
<h5>'.$byline.'</h5>
<img src="'.esc_url($avatar).'">
<div>
<div class="team-sns">';
for($i = 1; $i<=4 ; $i++){
if(${"social_icon_$i"} != "" && ${"social_link_$i"} != ""){
$return .= '<a href="'.${"social_link_$i"}.'"><i class="fa fa-'.${"social_icon_$i"}.'"></i></a>';
}
}
$return .= '</div>
</div> </div>
</div><p>'.do_shortcode( onetone_fix_shortcodes($content) ).'</p></div>';
break;
}
return $return ;
}
################################################
#
# Onetone Shortcodes Generator
#
################################################
function onetone_form_generator($value){
if(!isset($value['id'])){exit;}
$value['std'] = isset($value['std'])?$value['std']:"";
$value['std'] = str_replace("\r\n",' ', $value['std']);
?>
<div class="onetone-shortcode-attr-container">
<label for="onetone-<?php echo $value['id']; ?>"><h5><?php echo $value['title']; ?></h5></label>
<?php
switch ( $value['type'] ) {
case 'text': ?>
<input name="<?php echo $value['id']; ?>" id="onetone-<?php echo $value['id']; ?>" type="text" value="<?php echo $value['std']; ?>" />
<?php
break;
case 'checkbox':
	 if($value['std']){$checked = "checked=\"checked\""; $checkbox_switch = "on";} else{$checked = "";$checkbox_switch = "off";} ?>
<input class="on-of" type="checkbox" name="onetone-<?php echo $value['id'] ?>" id="<?php echo $value['id'] ?>" value="true" <?php echo $checked; ?> />
<?php
break;
case 'radio':
?>
<div style="float:left; width: 295px;">
<?php foreach ($value['options'] as $key => $option) { ?>
<label style="display:block; margin-bottom:8px;"><input name="<?php echo $value['id']; ?>" id="onetone-<?php echo $value['id']; ?>" type="radio" value="<?php echo $key ?>" <?php if ( $value['id'] == $key) { echo ' checked="checked"' ; } ?>> <?php echo $option; ?></label>
<?php } ?>
</div>
<?php
break;
case 'select':
?>
<select name="<?php echo $value['id']; ?>" id="onetone-<?php echo $value['id']; ?>">
<?php foreach ($value['options'] as $key => $option) { ?>
<option value="<?php echo $key ?>" <?php if ( $value['std'] == $key) { echo ' selected="selected"' ; } ?>><?php echo $option; ?></option>
<?php } ?>
</select>
<?php
break;
case 'textarea':
?>
<textarea name="<?php echo $value['id']; ?>" id="onetone-<?php echo $value['id']; ?>" type="textarea" cols="100%" rows="8" tabindex="4"><?php echo $value['std']; ?></textarea>
<?php
break;
case 'upload':
?>
<input id="<?php echo $value['id']; ?>" class="img-path upload_box" type="text" size="56" style="direction:ltr; text-laign:left" name="<?php echo $value['id']; ?>" value="<?php echo $value['std']; ?>" />
<input id="upload_<?php echo $value['id']; ?>_button" type="button" class="upload_image_button" value="Upload" />
<?php
if(isset($value['std']) && $value['std'] != ""){
$img_preview = '<div id="'.$value['id'].'-preview" class="img-preview"><img src="'.$value['std'].'" alt="" /><a class="del-img" title="Delete"></a></div>';}
?>
<?php
break;
case 'color':
?>
<input type="text" value="<?php echo $value['std'] ; ?>" class="minicolors" data-theme="bootstrap" name="<?php echo $value['id']; ?>" id="<?php echo $value['id']; ?>" />
<?php
break;
}
?>
<?php if( isset( $value['desc'] ) ) : ?><div class="onetone-shortcode-attr-desc"><?php echo $value['desc'] ?></div><?php endif; ?>
<div class="clear"></div>
</div>
<?php
}
/*
* Shortcode generator Form
* ---------------------------------------------------------------------
*/
function onetone_shortcode_form(){
global $onetone_shortcodes ;
$shortcode = $_POST['shortcode'];
if(isset($onetone_shortcodes[$shortcode]) && is_array($onetone_shortcodes[$shortcode])){
foreach($onetone_shortcodes[$shortcode] as $key=>$value){
if(is_array($value)){
array_push($value,array("id"=>$key));
onetone_form_generator($value);
}
}
echo '<input type="hidden" id="onetone-curr-shortcode" value="'.$shortcode.'" />';
echo '<div class="clear"></div>';
}
exit();
}
add_action('wp_ajax_onetone_shortcode_form', 'onetone_shortcode_form');
add_action('wp_ajax_nopriv_onetone_shortcode_form', 'onetone_shortcode_form');
/*
* Shortcode Generator
* ---------------------------------------------------------------------
*/
function onetone_get_shortcode(){
global $onetone_shortcodes ;
$attr = isset($_POST['attr'])?$_POST['attr']:"";
$shortcode = isset($_POST['shortcode'])?$_POST['shortcode']:"";
$content = "";
$result = "";
$shortcodes_attr = array();
if(is_array($attr) && $attr != null && array_key_exists( $shortcode,$onetone_shortcodes))
{
foreach($onetone_shortcodes[$shortcode] as $key=>$value){
$shortcodes_attr[] = isset($value['id']) ? $value['id'] : $key; // fall back to the array key when the spec has no explicit id
}
$result = '['.$shortcode.' ';
foreach($attr as $k=>$v){
if($v["name"] != "content" && $v["name"] != "text_content"){
if($v["value"] !="" && in_array($v["name"],$shortcodes_attr)){
$result .= $v["name"].'=\''.$v["value"].'\' ';
}
}
else{
$content = $v["value"] . '[/'.$shortcode.']';
}
}
$result .= ']';
$result .= $content ;
}
$result = stripslashes($result);
// $result = str_replace("\r\n",' ', $result);
echo $result;
exit();
}
add_action('wp_ajax_onetone_get_shortcode', 'onetone_get_shortcode');
add_action('wp_ajax_nopriv_onetone_get_shortcode', 'onetone_get_shortcode');
//add a button to the content editor, next to the media button
//this button will show a popup that contains inline content
add_action('media_buttons_context', 'onetone_add_my_custom_button');
//add some content to the bottom of the page
//This will be shown in the inline modal
if(is_admin()){
add_action('admin_footer', 'onetone_add_inline_popup_content');
}
//action to add a custom button to the content editor
function onetone_add_my_custom_button($context) {
//path to my icon
$img = get_template_directory_uri() .'/images/shortcode_button.png';
//our popup's title
$title = __('Onetone Shortcodes','onetone');
//append the icon
$context .= "<a class='onetone_shortcodes button' title='{$title}'><img style='margin-bottom:2px' src='{$img}' />".__('Onetone Shortcodes','onetone')."</a>";
return $context;
}
function onetone_add_inline_popup_content() {
global $onetone_shortcodes ;
?>
<div class="white-popup onetone_shortcodes_container mfp-with-anim mfp-hide" id="onetone_shortcodes_container" style="display:none;" >
<form>
<h4><?php _e("Onetone Shortcodes Generator",'onetone');?></h4>
<ul class="onetone_shortcodes_list">
<?php if(is_array($onetone_shortcodes )):foreach($onetone_shortcodes as $key => $val){
if(in_array($key ,array("testimonial_item","pricing_item","testimonial",'tab','accordion'))){continue;}
?>
<li><a class='onetone_shortcode_item <?php //echo $key;?>' title='<?php echo ucwords(str_replace("_"," ",$key));?>' data-shortcode="<?php echo $key;?>" href="javascript:;"><?php echo ucwords(str_replace("_"," ",$key));?></a></li>
<?php } ?>
<?php endif;?>
</ul>
<div id="onetone-shortcodes-settings">
<div id="onetone-generator-breadcrumbs">
<a title="Click to return to the shortcodes list" class="onetone-shortcodes-home" href="javascript:void(0);"><?php _e("All shortcodes",'onetone');?></a> → <span class="current_shortcode"></span>
<div class="clear"></div>
</div>
<div id="onetone-shortcodes-settings-inner"></div>
<input name="onetone-shortcode" type="hidden" id="onetone-shortcode" value="" />
<input name="onetone-shortcode-textarea" type="hidden" id="onetone-shortcode-textarea" value="" />
<div class="onetone-shortcode-actions onetone-shortcode-clearfix">
<!--<a class="button button-secondary button-large onetone-shortcode-preview " href="javascript:void(0);"><?php _e("Preview shortcode",'onetone');?></a>-->
<a class="button button-primary button-large onetone-shortcode-insert " href="javascript:void(0);"><?php _e("Insert shortcode",'onetone');?></a>
</div>
<div class="clear"></div>
</div></form>
<div class="clear"></div>
</div>
<div id="onetone-shortcode-preview" style="display:none;">
</div>
<?php }
################################################
#
# End Onetone Shortcodes Generator
#
################################################<file_sep>/single-portfolio.php
<?php
/**
* The sigle template file.
*
*/
get_header();
$related_number = absint(onetone_option('related_number','8'));
$left_sidebar = onetone_option('left_sidebar_portfolio_posts');
$right_sidebar = onetone_option('right_sidebar_portfolio_posts');
$aside = 'no-aside';
if( $left_sidebar !='' )
$aside = 'left-aside';
if( $right_sidebar !='' )
$aside = 'right-aside';
if( $left_sidebar !='' && $right_sidebar !='' )
$aside = 'both-aside';
?>
<div id="portfolio-<?php the_ID(); ?>" <?php post_class("clear"); ?>>
<section class="page-title-bar title-left no-subtitle" style="">
<div class="container">
<hgroup class="page-title">
<h1><?php the_title();?></h1>
</hgroup>
<?php onetone_get_breadcrumb(array("before"=>"<div class=''>","after"=>"</div>","show_browse"=>false,"separator"=>'','container'=>'div'));?>
<div class="clearfix"></div>
</div>
</section>
<div class="post-wrap">
<div class="container">
<div class="post-inner row <?php echo $aside; ?>">
<div class="col-main">
<section class="post-main" role="main" id="content">
<?php if (have_posts()) :?>
<?php
while ( have_posts() ) : the_post();
?>
<article class="portfolio type-portfolio" id="">
<?php
$galleryArray = get_post_gallery_ids( get_the_ID() );
if( count( $galleryArray ) >0 && $galleryArray[0] != "" ):
?>
<div class="post-slider">
<!--slider-->
<div id="portfolio-carousel" class="carousel slide" data-ride="carousel">
<!-- Wrapper for slides -->
<div class="carousel-inner" role="listbox" style=" ">
<?php
$i = 0 ;
foreach ($galleryArray as $id) {
?>
<div class="item <?php echo $i==0?'active':'';?>">
<img src="<?php echo wp_get_attachment_url( $id ); ?>" alt="" />
</div>
<?php
$i++;
}
?>
</div>
<!-- Controls -->
<div class="multi-carousel-nav style1 nav-bg">
<a class="" href="#portfolio-carousel" role="button" data-slide="prev">
<span class="multi-carousel-nav-prev"></span>
</a>
<a class="" href="#portfolio-carousel" role="button" data-slide="next">
<span class="multi-carousel-nav-next"></span>
</a>
</div>
</div>
<!--slider end-->
</div>
<?php endif ; ?>
<div class="entry-main">
<div class="entry-header">
<h1 class="entry-title"><?php the_title();?></h1>
</div>
<div class="entry-content">
<?php the_content();?>
</div>
<div class="entry-footer">
<?php
$taxonomy = 'portfolio-tag';
$tax_terms = wp_get_post_terms($post->ID,$taxonomy);
if( $tax_terms ){?>
<ul class="entry-tags no-border pull-left">
<?php _e('Tags','onetone');?>:
<?php
foreach ($tax_terms as $tax_term) {
echo '<li><a href="' . esc_attr(get_term_link($tax_term, $taxonomy)) . '" title="' . sprintf( __( "View all posts in %s" ,'onetone'), $tax_term->name ) . '" ' . '>' . $tax_term->name.'</a></li>';
} ?>
</ul>
<?php
}
?>
</div>
</div>
</article>
<?php endwhile;?>
<?php endif;?>
<div class="post-attributes">
<!--Related Projects-->
<?php
$related = onetone_get_related_posts($post->ID, $related_number,'portfolio','portfolio-category');
?>
<?php if($related->have_posts()):
$date_format = onetone_option('date_format','M d, Y');
?>
<div class="related-posts">
<h3><?php _e( 'Related Project', 'onetone' );?></h3>
<div id="related-portfolio" class="multi-carousel onetone-related-posts owl-carousel owl-theme">
<?php while($related->have_posts()): $related->the_post(); ?>
<?php if(has_post_thumbnail()): ?>
<?php $full_image = wp_get_attachment_image_src(get_post_thumbnail_id($post->ID), 'full');
$thumb_image = wp_get_attachment_image_src(get_post_thumbnail_id($post->ID), 'related-post');
?>
<div class="owl-item">
<div class="portfolio-box">
<div class="feature-img-box">
<div class="img-box figcaption-middle text-center from-top fade-in">
<img src="<?php echo $thumb_image[0];?>" class="feature-img"/>
<div class="img-overlay">
<div class="img-overlay-container">
<div class="img-overlay-content">
<div class="img-overlay-icons">
<a href="<?php the_permalink(); ?>"><i class="fa fa-link"></i></a>
<a rel="portfolio-image" href="<?php echo $full_image[0];?>"><i class="fa fa-search"></i></a> </div>
</div>
</div>
</div>
</div>
</div>
<div class="entry-main text-center">
<div class="entry-header">
<a href="<?php the_permalink(); ?>"><h1 class="entry-title"><?php the_title(); ?></h1></a>
</div>
<div class="entry-meta">
<div class="entry-category">
<?php
$taxonomy = 'portfolio-tag';
$tax_terms = wp_get_post_terms($post->ID,$taxonomy);
$tags = array();
if( $tax_terms ){
foreach ( $tax_terms as $tax_term ) {
$tags[] = '<a href="' . esc_attr(get_term_link($tax_term, $taxonomy)) . '" title="' . sprintf( __( "View all posts in %s" ,'onetone'), $tax_term->name ) . '" ' . '>' . $tax_term->name.'</a>';
}
}
$tags = implode(', ',$tags);
echo $tags;
?>
</div>
</div>
</div>
</div>
</div>
<?php endif; endwhile; ?>
</div>
</div>
<?php wp_reset_postdata(); endif; ?>
<!--Related Posts End-->
<!--Comments Area-->
<div class="comments-area text-left">
<?php
// If comments are open or we have at least one comment, load up the comment template
if ( comments_open() ) :
comments_template();
endif;
?>
</div>
<!--Comments End-->
<?php echo onetone_post_nav();?>
</div>
</section>
</div>
<?php if( $left_sidebar !='' ):?>
<div class="col-aside-left">
<aside class="blog-side left text-left">
<?php get_sidebar('portfolioleft');?>
</aside>
</div>
<?php endif; ?>
<?php if( $right_sidebar !='' ):?>
<div class="col-aside-right">
<?php get_sidebar('portfolioright');?>
</div>
<?php endif; ?>
</div>
</div>
</div>
</div>
<?php get_footer(); ?><file_sep>/home-sections/section-11.php
<?php
global $onetone_animated;
$i = 10 ;
$section_title = onetone_option( 'section_title_'.$i );
$section_menu = onetone_option( 'menu_title_'.$i);
$parallax_scrolling = onetone_option( 'parallax_scrolling_'.$i );
$section_css_class = onetone_option( 'section_css_class_'.$i );
$section_content = onetone_option( 'section_content_'.$i );
$full_width = onetone_option( 'full_width_'.$i );
$content_model = onetone_option( 'section_content_model_'.$i,1);
$section_subtitle = onetone_option( 'section_subtitle_'.$i);
$color = onetone_option( 'section_color_'.$i );
if( !isset($section_content) || $section_content=="" )
$section_content = onetone_option( 'sction_content_'.$i );
$section_id = sanitize_title( onetone_option( 'menu_slug_'.$i ,'section-'.($i+1) ) );
if( $section_id == '' )
$section_id = 'section-'.($i+1);
$section_id = strtolower( $section_id );
$container_class = "container";
if( $full_width == "yes" ){
$container_class = "";
}
if( $parallax_scrolling == "yes" || $parallax_scrolling == "1" ){
$section_css_class .= ' onetone-parallax';
}
?>
<section id="<?php echo $section_id; ?>" class="section home-section-<?php echo ($i+1); ?> <?php echo $section_css_class;?>">
<div class="home-container <?php echo $container_class; ?> page_container">
<?php
if( $content_model == '0' ):
?>
<div style="color:<?php echo $color; ?>;">
<?php if( $section_title != '' ):?>
<?php
$section_title_class = '';
if( $section_subtitle == '' )
$section_title_class = 'no-subtitle';
?>
<h1 class="section-title <?php echo $section_title_class; ?>"><?php echo $section_title; ?></h1>
<?php endif;?>
<?php if( $section_subtitle != '' ):?>
<h3 class="section-subtitle"><?php echo do_shortcode($section_subtitle);?></h3>
<?php endif;?>
<div class="<?php echo $onetone_animated;?>" data-animationduration="1.2" data-animationtype="bounceIn" data-imageanimation="no">
<div class="magee-pricing-table row no-margin 4_columns" id="">
<?php
for($j=1;$j<=4;$j++):
$featured = absint(onetone_option('section_featured_'.$i.'_'.$j));
$icon = str_replace('fa-','',esc_attr(onetone_option('section_icon_'.$i.'_'.$j)));
$image = esc_attr(onetone_option('section_image_'.$i.'_'.$j));
$currency = esc_attr(onetone_option('section_currency_'.$i.'_'.$j));
$price = esc_attr(onetone_option('section_price_'.$i.'_'.$j));
$unit = esc_attr(onetone_option('section_unit_'.$i.'_'.$j));
$title = esc_attr(onetone_option('section_title_'.$i.'_'.$j));
$features = onetone_option('section_features_'.$i.'_'.$j);
$button_text = onetone_option('section_button_text_'.$i.'_'.$j);
$button_link = onetone_option('section_button_link_'.$i.'_'.$j);
$button_target = onetone_option('section_button_target_'.$i.'_'.$j);
?>
<div class="magee-pricing-box-wrap col-md-3 no-padding">
<div class="panel panel-default text-center magee-pricing-box <?php echo $featured=='1'?'featured':'';?>">
<div class="panel-heading">
<div class="pricing-top-icon">
<?php if( $image !="" ):?>
<img src="<?php echo $image ;?>" alt=""/>
<?php else:?>
<i class="fa fa-<?php echo $icon ;?>"></i>
<?php endif;?>
</div>
<h3 class="panel-title prcing-title"><?php echo $title ;?></h3>
</div>
<div class="panel-body">
<div class="pricing-tag">
<span class="currency"><?php echo $currency ;?></span><span class="price"><?php echo $price ;?></span><span class="unit"><?php echo $unit?'/ '.$unit:'' ;?></span>
</div>
<ul class="pricing-list">
<?php
if( $features ){
$features = explode("\n",$features);
foreach( $features as $feature ){
if( $feature != '' ){
echo '<li>'.$feature.'</li>';
}
}
}
?>
</ul>
</div>
<div class="panel-footer">
<a href="<?php echo $button_link ;?>" target="<?php echo $button_target ;?>" class="magee-btn-normal"><i class="fa fa-shopping-cart"></i> <?php echo $button_text ;?></a>
</div>
</div>
</div>
<?php endfor;?>
</div></div>
</div>
<?php
else:
?>
<?php if( $section_title != '' ):?>
<h2 class="section-title"><?php echo do_shortcode($section_title);?></h2>
<?php endif;?>
<div class="home-section-content">
<?php
if(function_exists('Form_maker_fornt_end_main'))
{
$section_content = Form_maker_fornt_end_main($section_content);
}
echo do_shortcode($section_content);
?>
</div>
<?php
endif;
?>
</div>
<div class="clear"></div>
</section> | 559b217a527bdb9cdc828fe76e8b8c2f09b46c66 | [
"JavaScript",
"Text",
"PHP"
] | 25 | JavaScript | JSDiez/ThemePRO | 991b1e59ef3d1bc92e7036cdf06ab08c1df38e14 | 4614f0d18e6e3f0f2a3dc0a7a862442e8189066d | |
refs/heads/master | <repo_name>JonnoFTW/OpenCL-Support-Plugin<file_sep>/src/com/jonathanmackenzie/opencl/plugin/psi/OpenClElementType.java
package com.jonathanmackenzie.opencl.plugin.psi;
import com.intellij.psi.tree.IElementType;
import com.jonathanmackenzie.opencl.plugin.OpenClLanguage;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
/**
* Created by mack0242 on 3/03/17.
*/
public class OpenClElementType extends IElementType {
public OpenClElementType(@NotNull @NonNls String debugName) {
super(debugName, OpenClLanguage.INSTANCE);
}
}
<file_sep>/src/com/jonathanmackenzie/opencl/plugin/OpenClStructureViewModel.java
package com.jonathanmackenzie.opencl.plugin;
import com.intellij.ide.structureView.*;
import com.intellij.ide.util.treeView.smartTree.Sorter;
import com.intellij.psi.PsiFile;
import com.jonathanmackenzie.opencl.plugin.psi.OpenClFile;
import org.jetbrains.annotations.NotNull;
public class OpenClStructureViewModel extends StructureViewModelBase implements
StructureViewModel.ElementInfoProvider {
public OpenClStructureViewModel(PsiFile psiFile) {
super(psiFile, new OpenClStructureViewElement(psiFile));
}
@NotNull
public Sorter[] getSorters() {
return new Sorter[]{Sorter.ALPHA_SORTER};
}
@Override
public boolean isAlwaysShowsPlus(StructureViewTreeElement element) {
return false;
}
@Override
public boolean isAlwaysLeaf(StructureViewTreeElement element) {
return element instanceof OpenClFile;
}
}<file_sep>/src/com/jonathanmackenzie/opencl/plugin/psi/impl/OpenClPsiImplUtil.java
package com.jonathanmackenzie.opencl.plugin.psi.impl;
import com.intellij.lang.ASTNode;
import com.intellij.navigation.ItemPresentation;
import com.intellij.psi.*;
import com.jonathanmackenzie.opencl.plugin.OpenClIcons;
import com.jonathanmackenzie.opencl.plugin.psi.*;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
public class OpenClPsiImplUtil {
public static String getKey(OpenClProperty element) {
ASTNode keyNode = element.getNode().findChildByType(OpenClTypes.KEY);
if (keyNode != null) {
// IMPORTANT: Convert embedded escaped spaces to simple spaces
return keyNode.getText().replaceAll("\\\\ ", " ");
} else {
return null;
}
}
public static String getValue(OpenClProperty element) {
ASTNode valueNode = element.getNode().findChildByType(OpenClTypes.VALUE);
if (valueNode != null) {
return valueNode.getText();
} else {
return null;
}
}
public static String getName(OpenClProperty element) {
return getKey(element);
}
public static PsiElement setName(OpenClProperty element, String newName) {
ASTNode keyNode = element.getNode().findChildByType(OpenClTypes.KEY);
if (keyNode != null) {
OpenClProperty property = OpenClElementFactory.createProperty(element.getProject(), newName);
ASTNode newKeyNode = property.getFirstChild().getNode();
element.getNode().replaceChild(keyNode, newKeyNode);
}
return element;
}
public static PsiElement getNameIdentifier(OpenClProperty element) {
ASTNode keyNode = element.getNode().findChildByType(OpenClTypes.KEY);
if (keyNode != null) {
return keyNode.getPsi();
} else {
return null;
}
}
public static ItemPresentation getPresentation(final OpenClProperty element) {
return new ItemPresentation() {
@Nullable
@Override
public String getPresentableText() {
return element.getKey();
}
@Nullable
@Override
public String getLocationString() {
PsiFile containingFile = element.getContainingFile();
return containingFile == null ? null : containingFile.getName();
}
@Nullable
@Override
public Icon getIcon(boolean unused) {
return OpenClIcons.FILE;
}
};
}
}<file_sep>/src/com/jonathanmackenzie/opencl/plugin/OpenClLanguage.java
package com.jonathanmackenzie.opencl.plugin;
import com.intellij.lang.Language;
/**
*
*/
public class OpenClLanguage extends Language {
public static final OpenClLanguage INSTANCE = new OpenClLanguage();
private OpenClLanguage() {
super("OpenCl");
}
}
<file_sep>/src/com/jonathanmackenzie/opencl/plugin/psi/OpenClTokenType.java
package com.jonathanmackenzie.opencl.plugin.psi;
import com.intellij.lang.Language;
import com.intellij.psi.tree.IElementType;
import com.jonathanmackenzie.opencl.plugin.OpenClLanguage;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* Created by mack0242 on 3/03/17.
*/
public class OpenClTokenType extends IElementType {
public OpenClTokenType(@NotNull @NonNls String debugName) {
super(debugName, OpenClLanguage.INSTANCE);
}
@Override
public String toString() {
        return OpenClTokenType.class.getSimpleName() + "." + super.toString();
}
}
<file_sep>/README.md
# OpenCL Plugin
A plugin for the IntelliJ family of IDEs that adds support for OpenCL files. Provides:
* Syntax highlighting
* Error highlighting
* Lint and other compiler output
# Configuration
* Build options that are applied to each kernel file in your project
* Path to the OpenCL compiler
<file_sep>/src/com/jonathanmackenzie/opencl/plugin/OpenClTodoIndexer.java
package com.jonathanmackenzie.opencl.plugin;
import com.intellij.lexer.Lexer;
import com.intellij.psi.impl.cache.impl.OccurrenceConsumer;
import com.intellij.psi.impl.cache.impl.todo.LexerBasedTodoIndexer;
public class OpenClTodoIndexer extends LexerBasedTodoIndexer {
@Override
public Lexer createLexer(OccurrenceConsumer consumer) {
return OpenClIdIndexer.createIndexingLexer(consumer);
}
}
| e36353159955b7801e94b190339c8e7a583c38c8 | [
"Markdown",
"Java"
] | 7 | Java | JonnoFTW/OpenCL-Support-Plugin | 4b025d8848de3ad67ffcb189140114516383e109 | ed588b09e550b769557e1dcd5cf6d0a4dfa22a4e | |
refs/heads/master | <repo_name>readytowork-org/deepar-example-reactnative<file_sep>/src/screens/HomeScreen.js
import * as React from 'react';
import {
Alert,
Dimensions,
Image,
PermissionsAndroid,
Platform,
StyleSheet,
Text,
TouchableOpacity,
View,
} from 'react-native';
import DeepARView from './../components/DeepARView';
import {effectsData} from '../effectsData';
import {slideTransitionDefinition} from '../components/simplenavigator/TransitionDefinitions';
class HomeScreen extends React.Component {
constructor(props) {
super(props);
this.state = {
permissionsGranted: Platform.OS === 'ios',
currentEffectIndex: 0,
switchCameraInProgress: false,
};
}
componentDidMount() {
if (Platform.OS === 'android') {
PermissionsAndroid.requestMultiple([
PermissionsAndroid.PERMISSIONS.CAMERA,
PermissionsAndroid.PERMISSIONS.WRITE_EXTERNAL_STORAGE,
PermissionsAndroid.PERMISSIONS.RECORD_AUDIO,
]).then(result => {
if (
result['android.permission.CAMERA'] === 'granted' &&
result['android.permission.WRITE_EXTERNAL_STORAGE'] === 'granted' &&
result['android.permission.RECORD_AUDIO'] === 'granted'
) {
this.setState({permissionsGranted: true, showPermsAlert: false});
} else {
this.setState({permissionsGranted: false, showPermsAlert: true});
}
});
}
}
didAppear() {
if (this.deepARView) {
this.deepARView.resume();
}
}
willDisappear() {
if (this.deepARView) {
this.deepARView.pause();
}
}
onEventSent = event => {
if (event.type === 'cameraSwitch') {
this.setState({switchCameraInProgress: false});
} else if (event.type === 'initialized') {
} else if (event.type === 'didStartVideoRecording') {
} else if (event.type === 'didFinishVideoRecording') {
} else if (event.type === 'recordingFailedWithError') {
} else if (event.type === 'screenshotTaken') {
this.screenshotTaken(event.value);
} else if (event.type === 'didSwitchEffect') {
} else if (event.type === 'imageVisibilityChanged') {
}
};
onChangeEffect = direction => {
if (!this.deepARView) {
return;
}
const {currentEffectIndex} = this.state;
var newIndex =
direction > 0 ? currentEffectIndex + 1 : currentEffectIndex - 1;
if (newIndex >= effectsData.length) {
newIndex = 0;
}
if (newIndex < 0) {
newIndex = effectsData.length - 1;
}
const newEffect = effectsData[newIndex];
console.log('new effect', newEffect.name);
this.deepARView.switchEffect(newEffect.name, 'effect');
this.setState({currentEffectIndex: newIndex});
};
takeScreenshot = () => {
if (this.deepARView) {
this.deepARView.takeScreenshot();
}
};
screenshotTaken = screenshotPath => {
const path = 'file://' + screenshotPath;
const transition = slideTransitionDefinition({
isVertical: true,
direction: 1,
duration: 200,
});
this.props.push('preview', transition, {screenshotPath: path});
};
switchCamera = () => {
const {switchCameraInProgress} = this.state;
if (!switchCameraInProgress && this.deepARView) {
this.setState({switchCameraInProgress: true});
this.deepARView.switchCamera();
}
};
render() {
const {permissionsGranted, currentEffectIndex} = this.state;
const {width} = Dimensions.get('window');
const effect = effectsData[currentEffectIndex];
const screenshotImg = require('../../assets/images/screenshot.png');
const cameraSwitchImg = require('../../assets/images/camera.png');
return (
<View style={styles.container}>
{permissionsGranted ? (
<DeepARView
onEventSent={this.onEventSent}
ref={ref => (this.deepARView = ref)}
style={{width: width, height: '100%'}}
/>
) : null}
<TouchableOpacity
style={styles.cameraSwitchContainer}
onPress={() => this.switchCamera()}>
<Image style={styles.camera} source={cameraSwitchImg} />
</TouchableOpacity>
<View style={styles.bottomBtnContainer}>
<TouchableOpacity
style={{flex: 1, alignItems: 'center'}}
onPress={() => this.onChangeEffect(-1)}>
<View style={styles.prevContainer}>
<Text style={styles.prev}>Previous</Text>
</View>
</TouchableOpacity>
<TouchableOpacity
style={{flex: 1, alignItems: 'center'}}
onPress={() => this.takeScreenshot()}>
<View style={styles.screenshotContainer}>
<Image style={styles.screenshot} source={screenshotImg} />
</View>
</TouchableOpacity>
<TouchableOpacity
style={{flex: 1, alignItems: 'center'}}
onPress={() => this.onChangeEffect(1)}>
<View style={styles.nextContainer}>
<Text style={styles.next}>Next</Text>
</View>
</TouchableOpacity>
</View>
</View>
);
}
}
const styles = StyleSheet.create({
container: {
flex: 1,
justifyContent: 'center',
alignItems: 'center',
backgroundColor: 'white',
},
deepARView: {
position: 'absolute',
width: '100%',
height: '100%',
},
titleContainer: {
position: 'absolute',
top: 100,
width: '50%',
backgroundColor: 'white',
borderRadius: 4,
},
title: {
flex: 1,
textAlign: 'center',
fontSize: 20,
},
bottomBtnContainer: {
position: 'absolute',
flexDirection: 'row',
alignItems: 'center',
justifyContent: 'center',
width: '100%',
bottom: 100,
height: 50,
},
nextContainer: {
flex: 1,
width: '100%',
alignItems: 'center',
justifyContent: 'center',
borderRadius: 4,
backgroundColor: 'white',
},
prevContainer: {
flex: 1,
width: '100%',
alignItems: 'center',
justifyContent: 'center',
borderRadius: 4,
backgroundColor: 'white',
},
next: {
textAlign: 'center',
fontSize: 28,
},
prev: {
textAlign: 'center',
fontSize: 28,
},
screenshotContainer: {},
screenshot: {
width: 70,
height: 70,
},
cameraSwitchContainer: {
position: 'absolute',
width: 50,
height: 40,
right: 20,
top: 50,
justifyContent: 'center',
alignItems: 'center',
},
camera: {
width: '100%',
height: '100%',
},
});
export default HomeScreen;
<file_sep>/README.md
# React Native DeepAR SDK example
**[This example is outdated since it uses an older DeepAR SDK and does not run out of the box. We've kept it here as a reference for how one could use RN native components to integrate DeepAR. We accept pull requests for updating it to the latest DeepAR version.]**
To run the example
* In the root folder:
- First install all the dependencies with: npm install
  - Install pods for iOS: cd ios; pod update; pod install; cd ..;
* Download the SDK from https://developer.deepar.ai for both iOS and Android and:
- Drag and drop the DeepAR.framework to your native iOS project. In the project settings General->Frameworks, Libraries and Embedded Content make sure the DeepAR.framework is listed and Embed option is set to "Embed & Sign". For any issues consult native iOS integration documentation.
- For Android follow the instructions on how to embed the DeepAR SDK into native apps. The workflow is the same:
+ Open the example-reactnative/android project in Android Studio
+ File->Project Structure->Dependencies->Add new (+ sign)->Import .JAR/.AAR Package
+ Select deepar.aar and finish the import
+ Sync gradle to make sure everything is ok
* Go to https://developer.deepar.ai, sign up, create the project and the Android and iOS apps. Copy the license key and paste it to:
- Android - RNTDeepAR.java (instead of your_licence_key_here string). Additionally change your applicationId in the application build.gradle file to the one you've set in the developer portal for Android app
  - iOS - RNTDeepAR.m (instead of your_licence_key_here string). Additionally change your bundle identifier in the project settings Signing & Capabilities section to the one you've set in the developer portal for iOS app
* To run on iOS:
- Connect your phone to your Mac.
- Open ios/deeparRNExample.xcworkspace
  - Run the project in Xcode
* To run on Android:
- Connect Android phone to the computer
- In the root folder run 'react-native run-android'
* This example was built with RN version 0.61.2. Depending on your RN version some things may differ slightly - for example, if you are running on RN < 0.60 you will need to link the dependencies manually. Make sure to understand RN native component management for both iOS and Android platforms:
- https://reactnative.dev/docs/native-components-ios
- https://reactnative.dev/docs/native-components-android
| 8114c9f866fa09647220c21e058062fa3c55ecd8 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | readytowork-org/deepar-example-reactnative | c87e252659772cc55e1781425e7577746695bee7 | a7c6a8c14fed2bc0b9cd27515f35c439ba90e944 | |
refs/heads/master | <repo_name>binghe17/bulk<file_sep>/v2/originAjax.js
//------------------------------------- JS definitions
// Build the full request URL and run it (async)
function originAjax(url, data, callback){
url = originUrl(url, data);
    $.getScript(url , callback);// run asynchronously
}
// Build the URL: base url + query-string params
function originUrl(url, data){
if(data == null) return url;
else {
if(url.indexOf('?') > 0) return url + '&'+ serialize(data);
else return url +'?'+ serialize(data);
}
}
// Convert an object into URL query-string parameters
function serialize(obj, prefix) {
var str = [], p;
for (p in obj) {
if (obj.hasOwnProperty(p)) {
var k = prefix ? prefix + "[" + p + "]" : p,
v = obj[p];
str.push((v !== null && typeof v === "object") ? serialize(v, k) : encodeURIComponent(k) + "=" + encodeURIComponent(v));
}
}
// return decodeURIComponent(str.join("&"));
return str.join("&");
}
//-----------------
//origin : the output is JS code
//Problem: a site restricted to its own origin cannot exchange data with an external site ($.get, $.post, $.ajax, $.load cannot be used) // so JS is injected instead
//Solution: send all data to the server as GET parameters; the server returns the result as JS code, which the client executes immediately
//Note: the global variables must not be declared with let.
//Usage 1: $("#resultBox").html('<script src="bulkdata.php?m=3" ></script>');// inject via a script tag
//Usage 2: $.getScript("bulkdata.php?m=3");// jQuery async request
//originAjax('bulkdata.php?m=5', {mode:'searchId', id:'leecojo'})
//originAjax('savefile_origin.php', {mode:'searchId', id:'leecoo'}, function(){ console.log(res); });
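// Illustrative note (added, not part of the original comments): the server answers with
// plain JavaScript that assigns to a global variable (see originResult() in
// savefile_origin.php), e.g. it echoes something like
//   res = {"data":"YES","lastTime":"..."};
// which is why the callbacks above can simply read the global `res`.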
<file_sep>/v1/savefile.php
<?php
$filename = './datas/bulkdata.json';
if($_GET['mode'] == 'searchId'){
if($_POST['id'] == null){
echo 'NO_ID';
}else{
if(file_exists($filename)){
$data = file_get_contents($filename);
$data = json_decode($data, 1);
// print_r($data);
$id = $_POST['id'];
if(isset($data['data'][$id]) && $data['data'][$id] >= $_POST['maxPrice']) {
$data['data'] = 'YES';
}else {
$data['data'] = 'NO';
}
echo json_encode($data, JSON_UNESCAPED_UNICODE + JSON_PRETTY_PRINT);
}
}
}
else if($_GET['mode'] == 'getTextFile'){
if(file_exists($filename)){
echo file_get_contents($filename);
}else echo 'NOFILE';
}
else if($_GET['mode'] == 'saveTextFile'){
if($_POST != null){
// print_r($_POST);
$data = json_encode($_POST, JSON_UNESCAPED_UNICODE + JSON_PRETTY_PRINT);
if(file_put_contents($filename , $data)) echo $_POST['lastTime'];
}
}
// // settings
// $uploads_dir = './datas';
// $allowed_ext = array('csv');// allowed file extensions
// //save upload file
// else if($_GET['mode'] == 'upload'){
// $error = $_FILES['myfile']['error'];
// $name = $_FILES['myfile']['name'];
// $ext = array_pop(explode('.', $name));
// if( $error != UPLOAD_ERR_OK ) { // check for upload errors
// switch( $error ) {
// case UPLOAD_ERR_INI_SIZE:
// case UPLOAD_ERR_FORM_SIZE:
// echo "파일이 너무 큽니다. ($error)";
// break;
// case UPLOAD_ERR_NO_FILE:
// echo "파일이 첨부되지 않았습니다. ($error)";
// break;
// default:
// echo "파일이 제대로 업로드되지 않았습니다. ($error)";
// }
// exit;
// }
// if( !in_array($ext, $allowed_ext) ) {// check the file extension
// echo "That file extension is not allowed.";
// exit;
// }
// move_uploaded_file( $_FILES['myfile']['tmp_name'], $uploads_dir .'/'. $name);// move the uploaded file
// // print file information
// echo "<h2>File information</h2>
// <ul>
// <li>File name: $name</li>
// <li>Extension: $ext</li>
// <li>File type: {$_FILES['myfile']['type']}</li>
// <li>File size: {$_FILES['myfile']['size']} bytes</li>
// </ul>";
// }
// //save string to file
// else if($_GET['mode'] == 'string'){
// $filename = $uploads_dir .'/'. $_POST['filename'];
// $data = $_POST['data'];
// file_put_contents($filename , $data);
// }<file_sep>/v2/origin_test.php
<?php
//origin : output is js code
//Problem: a site restricted to its own origin cannot exchange data with an external site ($.get, $.post, $.ajax, $.load cannot be used) // so JS is injected instead
//Solution: send all data to the server as GET parameters; the server returns the result as JS code, which the client executes immediately
//Note: the global variables must not be declared with let.
//Usage 1: $("#resultBox").html('<script src="https://waterplay.kr/addonshop_event/bulk/datas/bulkdata.php?m=3" ></script>');// inject via a script tag
//Usage 2: $.getScript("https://waterplay.kr/addonshop_event/bulk/datas/bulkdata.php?m=3");// jQuery async request
//originAjax('https://waterplay.kr/addonshop_event/bulk/datas/bulkdata.php?m=5', {mode:'searchId', id:'leecojo'})
//originAjax('https://waterplay.kr/addonshop_event/bulk/savefile_origin.php', {mode:'searchId', id:'leecoo'}, function(){ console.log(res); });
//Examples
// echo <<<xxxxxxxxxx
// //JSCODE
// xxxxxxxxxx;
if(isset($_GET['m'])){
if($_GET['m'] == 1){//------------- test output
echo <<<xxxxxxxxxx
res = 111;
console.log(res, '-----test m=1----');
xxxxxxxxxx;
die();
}
else if($_GET['m'] == 2){//-------------- test output (including a PHP variable)
echo <<<xxxxxxxxxx
console.log("bbb222", {$_GET['m']});
xxxxxxxxxx;
die();
}
else if($_GET['m'] == 3){//--------------\\r, \\n, \t, \\t, \', \\', \", \\", \\\, every kind of character can be transmitted
$filename = basename(__FILE__);
echo <<<xxxxxxxxxx
var param = serialize({
m:4,
foo: "hi th bar['blah'] eraaa+aa&bar=bb 한글,中文测试,abc ,/, -\\\-, -\\n-, =\\t= \\' \\" ---++++=== ",
bar: {
blah: 123,
quux: [1, 2, {a:"1111"}]
}
})
data = '{$filename}?'+ param;
$.getScript(data);// run asynchronously
// console.log(data);
// var b =originUrl('originAjax.php', {m:2});
// console.log(b);
xxxxxxxxxx;
}
else if($_GET['m'] == 4){//-------------- print all the data (object) sent by the client
$json = json_encode( $_GET, JSON_UNESCAPED_UNICODE + JSON_PRETTY_PRINT);
echo <<<xxxxxxxxxx
console.log(`$json`);
xxxxxxxxxx;
die();
}
else if($_GET['m'] == 5){//-------------- view the returned value after the page redirect completes
unset($_GET['m']);
$json = json_encode( $_GET, JSON_UNESCAPED_UNICODE);
echo <<<xxxxxxxxxx
// var param = serialize($json);
// url = 'https://waterplay.kr/addonshop_event/bulk/savefile_origin.php?'+ param;
// // console.log(url)
// $.getScript(url , function(){
// console.log(res );// global variable res
// });// run asynchronously
originAjax('https://waterplay.kr/addonshop_event/bulk/savefile_origin.php', $json, function(){
console.log(res ) // global variable res
});
xxxxxxxxxx;
die();
}
}
//----------include
echo <<<xxxxxxxxxx
//------------------------------------- JS definitions
// Build the full request URL and run it (async)
function originAjax(url, data, callback){
url = originUrl(url, data);
    $.getScript(url , callback);// run asynchronously
}
// Build the URL: base url + query-string params
function originUrl(url, data){
if(data == null) return url;
else {
if(url.indexOf('?') > 0) return url + '&'+ serialize(data);
else return url +'?'+ serialize(data);
}
}
// Convert an object into URL query-string parameters
function serialize(obj, prefix) {
var str = [], p;
for (p in obj) {
if (obj.hasOwnProperty(p)) {
var k = prefix ? prefix + "[" + p + "]" : p,
v = obj[p];
str.push((v !== null && typeof v === "object") ? serialize(v, k) : encodeURIComponent(k) + "=" + encodeURIComponent(v));
}
}
// return decodeURIComponent(str.join("&"));
return str.join("&");
}
//---------------------------------------//
xxxxxxxxxx;
<file_sep>/v2/savefile_origin.php
<?php
$filename = './datas/bulkdata.json';
if($_GET['mode'] == 'searchId'){
if($_GET['id'] == null){
$data = "NO_ID";
}else{
if(file_exists($filename)){
$data = file_get_contents($filename);
$data = json_decode($data, 1);
// print_r($data);
$id = $_GET['id'];
if(isset($data['data'][$id]) && $data['data'][$id] >= $_GET['maxPrice']) {
$data['data'] = 'YES';
}else {
$data['data'] = 'NO';
}
}else $data = 'NO_DATA';
}
echo originResult($data);
die();
}
else if($_GET['mode'] == 'getTextFile'){
if(file_exists($filename)){
echo originResult( file_get_contents($filename), '','json');
}else echo originResult('NOFILE');
}
else if($_GET['mode'] == 'saveTextFile'){
unset($_GET['mode']);
// echo 'res = ';
// echo json_encode($_GET['data']);
if($_GET != null){
// print_r($_GET);
$data = json_encode($_GET, JSON_UNESCAPED_UNICODE + JSON_PRETTY_PRINT);
file_put_contents($filename , $data);
// echo $data;
echo originResult( $_GET['lastTime'] );
}
}
//--------ORIGIN : print js code
function originResult($data, $keyname='res', $type='text'){
if($keyname == '') $keyname = 'res';
 if($type == 'text') return $keyname .' = '. json_encode($data, JSON_UNESCAPED_UNICODE + JSON_PRETTY_PRINT) ;// when $data is a string or array (anything that is not already JSON)
 else return $keyname .' = '. $data ;// when $data is already a JSON string
}
| ca60b8553908baae421b3070469d92c0cd4c0a77 | [
"JavaScript",
"PHP"
] | 4 | JavaScript | binghe17/bulk | c098ead0b6260e81625ed481a939bca19756e5ea | 5a346071d6364e783befd649f9e15d0ee06236f9 | |
refs/heads/master | <repo_name>synergy2411/gep-b2<file_sep>/playground/app.ts
// Arrow function
// var user = {
// firstName : "Foo",
// lastName : "Bar",
// getFullName : function(){
// // var that = this;
// // function nestedFunc(){
// // console.log(this); // ?
// // return "Hello " + that.firstName + " " + this.lastName
// // }
// // return nestedFunc();
// var nestedFunc = () => "Hello " + this.firstName + " " +this.lastName;
// return nestedFunc();
// }
// }
// console.log(user.getFullName()); // ?
// Block Scopes
// const username = "Foo";
// username = "Bar";
// const user = {
// name : "Foo"
// }
// user = {
// name : "Bar"
// }
// // user.name = "Bar";
// console.log(user);
// Destructuring : Array & Objects
// let arr = ["Foo", "bar", "bas"];
// let [arr1, arr3] = arr;
// console.log(arr3); // ?
// let drawObj = {
// drawCircle : r => console.log(Math.PI * r * r),
// drawTest : text => console.log("DRawing " + text),
// test : true
// }
// let {drawTest, drawCircle, test} = drawObj;
// test = false;
// console.log(drawObj.test); // ?
// drawCircle(3);
// let arr = [3, 4, 5];
// let newArr = [1, 2, arr, 6, 7];
// console.log(newArr); // [3,4,5]
// let newArr2 = [1,2,...arr,6,7];
// console.log(newArr2); // 3
// let obj = {
// fname : "Foo"
// }
// let newObj = {
// ...obj,
// lname : "Bar",
// fname : "Baz"
// }
// console.log(newObj); // ?
function demo(name, ...args){
console.log(args)
}
// demo("foo")
// demo("foo", "<EMAIL>")
demo("foo", "<EMAIL>", true)
| a99ae12f731444f178f5db9c796fa0f87064606d | [
"TypeScript"
] | 1 | TypeScript | synergy2411/gep-b2 | 670b1392dbe068ed55a37e1c1f211fcc32077ab2 | 7a422890e11672815fbf00d22ded9321242fc021 | |
refs/heads/master | <repo_name>Larry-Volz/connect-four<file_sep>/connect4.js
/*
IMPROVEMENTS TO ORIGINAL
- Used setTimeout for a few microseconds delay to fix winning subroutine to place piece FIRST so player can SEE
the four in a row before the win sequence
- Used a reversed gradient to highlight the winning pieces
- refactored the code to check for 4 in a row into functions so I could use them to acces the coordinates
needed to visually highlight the win
- used set interval to increase/decrease size and transition in CSS to make it smooth
- Do Play again prompt refresh to restart the game (on no go to my portfolio)
- make table responsive for smaller devices
- create animation subroutine for dropping pieces
*/
/** Connect Four
*
* Player 1 and 2 alternate turns. On each turn, a piece is dropped down a
* column until a player gets four-in-a-row (horiz, vert, or diag) or until
* board fills (tie)
*/
const PORTFOLIO = "https://www.larry-volz.com/software-development-portfolio";
const COLOR = ["","red", "blue"];
const WIDTH = 7;
const HEIGHT = 6;
let currPlayer = 1; // active player: 1 or 2
let board = []; // array of rows, each row is array of cells (board[y][x])
let winningFour = [[]];
/** makeBoard: create in-JS board structure:
* board = array of rows, each row is array of cells (board[y][x])
*/
makeBoard = ()=> {
for (let y = 0; y < HEIGHT; y++) {
board.push(Array.from({ length: WIDTH }));
}
}
/** makeHtmlBoard: make HTML table and row of column tops. */
makeHtmlBoard = () => {
htmlBoard = document.querySelector("#board");
// TODO: Create top row & listener
let top = document.createElement("tr");
top.setAttribute("id", "column-top");
top.addEventListener("click", handleClick);
for (let x = 0; x < WIDTH; x++) {
let headCell = document.createElement("td");
headCell.setAttribute("id", x);
top.append(headCell);
}
htmlBoard.append(top);
// Creates grid
for (let y = 0; y < HEIGHT; y++) {
const row = document.createElement("tr");
for (let x = 0; x < WIDTH; x++) {
const cell = document.createElement("td");
cell.setAttribute("id", `${y}-${x}`);
row.append(cell);
}
htmlBoard.append(row);
}
}
/** findSpotForCol: given column x, return top empty y (null if filled) */
findSpotForCol = (x) => {
// finds the empty vertical slot for a given x value
for (let y = HEIGHT-1; y >=0; y--){
if (!board[y][x]) return y;
}
return null;
}
/** placeInTable: update DOM to place piece into HTML table of board */
placeInTable = (y, x) => {
// Makes a div and inserts into correct table cell
let div = document.createElement("div");
div.classList.add("piece");
div.classList.add(`p${currPlayer}`);
// div.style.backgroundColor = COLOR[currPlayer-1];
let cell = document.getElementById(`${y}-${x}`);
cell.append(div);
console.log("in placeInTable");
}
//functions to check for 4 in a row
let getHoriz = (y,x) => [[y, x], [y, x + 1], [y, x + 2], [y, x + 3]];
let getVert = (y,x) => [[y, x], [y + 1, x], [y + 2, x], [y + 3, x]];
let getDiagDR = (y,x) => [[y, x], [y + 1, x + 1], [y + 2, x + 2], [y + 3, x + 3]];
let getDiagDL = (y,x) => [[y, x], [y + 1, x - 1], [y + 2, x - 2], [y + 3, x - 3]];
/** endGame: announce game end */
endGame = (msg) => {
//visually highlights the disks that are 4 in a row
for (let disk = 0; disk < 4; disk++) {
let y1 = winningFour[disk][0];
let x1 = winningFour[disk][1]
highlight = document.getElementById(`${y1}-${x1}`);
highlight.classList.add(`p${currPlayer}Win`);
}
// Pops up winning alert message
//used setTimeout because the alert was popping up before the screen had the chance
//to re-draw the piece which was VERY unsatisfying to the players
setTimeout(() => {
let playAgain = confirm(msg);
if(playAgain) {location.reload()}
else (window.location.replace(PORTFOLIO));
}, 2);
}
/* ---------------------------------- MAIN GAME LOOP from EventListener -----------------------------------------*/
/** handleClick: handle click of column top to play piece */
handleClick = (evt) => {
// get x from ID of clicked cell
let x = +evt.target.id;
// get next spot in column (if none, ignore click)
let y = findSpotForCol(x);
if (y === null) {
return;
}
// place piece in board and add to HTML table
placeInTable(y, x);
// TODO: add line to update in-memory board
board[y][x] = currPlayer;
// check for win
if (checkForWin()) {
return endGame(`${COLOR[currPlayer].toUpperCase()} Wins!\nWant to play again?`);
}
// check for tie
// TODO: check if all cells in board are filled; if so call, call endGame
// switch players
// currPlayer 1 <-> 2
if (currPlayer === 1) { currPlayer = 2} else { currPlayer = 1};
}
/** checkForWin: check board cell-by-cell for "does a win start here?" */
checkForWin = () => {
_win = (cells) => {
// Check four cells to see if they're all color of current player
// cells: list of four (y, x) cells
// returns true IF ALL are legal coordinates...
return cells.every(
([y, x]) =>
y >= 0 &&
y < HEIGHT &&
x >= 0 &&
x < WIDTH &&
// AND all match currPlayer (all the same color)
board[y][x] === currPlayer
);
}
//Create all the sequences of 4 on the board and make into arrays of coordinates
for (let y = 0; y < HEIGHT; y++) {
for (let x = 0; x < WIDTH; x++) {
//for each column (x) check and see if there are 4 in a row horizontally
//make each check into a 2d array
let horiz = getHoriz(y,x);
//then vertically
let vert = getVert(y,x);
//then for each diagonal direction
let diagDR = getDiagDR(y,x);
let diagDL = getDiagDL(y,x);
//then send through _win to see if any of those are legal sequences of four
if (_win(horiz) || _win(vert) || _win(diagDR) || _win(diagDL)) {
if (_win(horiz)) {
winningFour = getHoriz(y,x);
} else if (_win(vert)){
winningFour = getVert(y,x);
} else if (_win(diagDR)){
winningFour = getDiagDR(y,x)
} else {
winningFour = getDiagDL(y,x);
}
//return true if a win
return true;
}
}
}
}
makeBoard();
makeHtmlBoard();
| 2299335f1582b0be2523d00c22ecb9ceed8b40e2 | [
"JavaScript"
] | 1 | JavaScript | Larry-Volz/connect-four | cde437061f89c55ceedc26ea745b0187c5c60fba | 11064202a00a88795d828449e4be959548c7c0ce | |
refs/heads/master | <file_sep>Rails.application.routes.draw do
resources :departamentos
root to: "produtos#index"
resources :produtos, only: [:new,:create,:destroy,:edit,:update]
get "produtos/busca", to: "produtos#busca", as: :busca_produto
end
<file_sep>class ProdutosController < ApplicationController
before_action :set_produto, only:[:edit,:update,:destroy]
def index
@produtos = Produto.order(nome: :asc)
@produto_com_desconto = Produto.order(preco: :asc).limit 1
end
def new
@produto = Produto.new
@departamento = Departamento.all
end
def create
@produto = Produto.new produto_params
if @produto.save
flash[:notice] = "Produto cadastrado com sucesso!"
redirect_to root_path
else
renderiza :new
end
end
def destroy
@produto.destroy
flash[:notice] = "Produto removido com sucesso!"
redirect_to root_path
end
def busca
@nome = params[:nome]
@produtos = Produto.where "nome like ?", "%#{@nome}%"
end
def edit
renderiza :edit
end
def update
if @produto.update produto_params
flash[:notice] = "Produto atualizado com sucesso!"
redirect_to root_path
else
renderiza :edit
end
end
private
def produto_params
params.require(:produto).permit(:nome,:descricao,:preco,:quantidade,:departamento_id)
end
def set_produto
@produto = Produto.find(params[:id])
end
def renderiza(view)
@departamento = Departamento.all
render view
end
end
| c570ef125ea60c2f0f5ef60e0ad4dd9c16324850 | [
"Ruby"
] | 2 | Ruby | guilhermeparente17/sistemadeprodutos | cb68d41de35e9c297477ba8fcb804d991ed875a1 | a66784e434dcb8995e4121480ae20f5d3b8907ae | |
refs/heads/master | <repo_name>mdk194/mem_prometheus_exporter<file_sep>/proc/proc.go
package proc
import (
"fmt"
"os"
"strconv"
)
type Proc struct {
PID int
}
// AllProcs returns a list of all currently available processes.
func AllProcs() ([]Proc, error) {
d, err := os.Open("/proc")
if err != nil {
return []Proc{}, err
}
defer d.Close()
names, err := d.Readdirnames(-1)
if err != nil {
return []Proc{}, fmt.Errorf("could not read %s: %s", d.Name(), err)
}
p := []Proc{}
for _, n := range names {
pid, err := strconv.ParseInt(n, 10, 64)
if err != nil {
continue
}
p = append(p, Proc{PID: int(pid)})
}
return p, nil
}
<file_sep>/main.go
package main
import (
"net/http"
"github.com/prometheus/client_golang/prometheus/promhttp"
"go.uber.org/zap"
)
func main() {
logger, _ := zap.NewProduction()
defer logger.Sync()
http.Handle("/metrics", promhttp.Handler())
logger.Info("Starting exporter at :8080/metrics")
if err := http.ListenAndServe(":8080", nil); err != nil {
logger.Fatal("Failed to start", zap.Error(err))
}
}
<file_sep>/go.mod
module github.com/mdk194/mem_prometheus_exporter
go 1.13
require (
github.com/prometheus/client_golang v1.4.0
go.uber.org/zap v1.13.0
)
<file_sep>/collector.go
package main
import (
"fmt"
"strconv"
"sync"
"github.com/prometheus/client_golang/prometheus"
"github.com/mdk194/mem_prometheus_exporter/proc"
)
var _ prometheus.Collector = &collector{}
type collector struct {
ProcessMemory *prometheus.Desc
stats func() ([]proc.ProcStatus, error)
}
func init() {
c := newCollector(stats)
prometheus.MustRegister(c)
}
func stats() ([]proc.ProcStatus, error) {
procList, err := proc.AllProcs()
if err != nil {
return nil, fmt.Errorf("Failed to list processes %v", err)
}
var wg sync.WaitGroup
var out []proc.ProcStatus
errChan := make(chan error)
done := make(chan interface{})
for _, p := range procList {
wg.Add(1)
// Parallel read status
go func(pid int) {
defer wg.Done()
ps, err := proc.NewStatus(pid, fmt.Sprintf("/proc/%d/status", pid))
if err != nil {
errChan <- err
}
out = append(out, ps)
}(p.PID)
}
go func() {
wg.Wait()
close(done)
}()
select {
case <-done:
case err := <-errChan:
return nil, fmt.Errorf("Failed to read status %v", err)
}
return out, nil
}
func newCollector(stats func() ([]proc.ProcStatus, error)) prometheus.Collector {
return &collector{
ProcessMemory: prometheus.NewDesc(
"process_memory_rss_bytes",
"Size of memory resient set size of process read from /proc/[pid]/status",
[]string{"pid", "name"},
nil,
),
stats: stats,
}
}
// Describe implements prometheus.Collector
func (c *collector) Describe(ch chan<- *prometheus.Desc) {
ds := []*prometheus.Desc{
c.ProcessMemory,
}
for _, d := range ds {
ch <- d
}
}
// Collect implements prometheus.Collector
func (c *collector) Collect(ch chan<- prometheus.Metric) {
stats, err := c.stats()
if err != nil {
ch <- prometheus.NewInvalidMetric(c.ProcessMemory, err)
}
for _, s := range stats {
ch <- prometheus.MustNewConstMetric(
c.ProcessMemory,
prometheus.GaugeValue,
float64(s.VmRSS),
strconv.Itoa(s.PID), s.Name,
)
}
}
<file_sep>/proc/proc_status_test.go
package proc
import (
"io/ioutil"
"os"
"testing"
)
var mockStatus = `Name: systemd
Umask: 0000
State: S (sleeping)
Tgid: 1
Ngid: 0
Pid: 1
PPid: 0
TracerPid: 0
Uid: 0 0 0 0
Gid: 0 0 0 0
FDSize: 128
Groups:
NStgid: 1
NSpid: 1
NSpgid: 1
NSsid: 1
VmPeak: 238376 kB
VmSize: 172840 kB
VmLck: 0 kB
VmPin: 0 kB
VmHWM: 10612 kB
VmRSS: 10612 kB
RssAnon: 2556 kB
RssFile: 8056 kB
RssShmem: 0 kB
VmData: 25464 kB
VmStk: 132 kB
VmExe: 908 kB
VmLib: 7996 kB
VmPTE: 96 kB
VmSwap: 0 kB
HugetlbPages: 0 kB
CoreDumping: 0
Threads: 1
SigQ: 0/62811
SigPnd: 0000000000000000
ShdPnd: 0000000000000000
SigBlk: 7be3c0fe28014a03
SigIgn: 0000000000001000
SigCgt: 00000001800004ec
CapInh: 0000000000000000
CapPrm: 0000003fffffffff
CapEff: 0000003fffffffff
CapBnd: 0000003fffffffff
CapAmb: 0000000000000000
NoNewPrivs: 0
Seccomp: 0
Speculation_Store_Bypass: thread vulnerable
Cpus_allowed: ff
Cpus_allowed_list: 0-7
Mems_allowed: 00000001
Mems_allowed_list: 0
voluntary_ctxt_switches: 120965
nonvoluntary_ctxt_switches: 6455`
func TestProcStatus(t *testing.T) {
tmpfile, err := ioutil.TempFile("", "mockStatus")
if err != nil {
t.Fatal(err)
}
defer os.Remove(tmpfile.Name()) // clean up
if _, err := tmpfile.Write([]byte(mockStatus)); err != nil {
t.Fatal(err)
}
if err := tmpfile.Close(); err != nil {
t.Fatal(err)
}
s, err := NewStatus(1, tmpfile.Name())
if err != nil {
t.Fatal(err)
}
for _, test := range []struct {
name string
want int
have int
}{
{name: "Pid", want: 1, have: s.PID},
{name: "Tgid", want: 1, have: s.TGID},
{name: "VmPeak", want: 238376 * 1024, have: int(s.VmPeak)},
{name: "VmSize", want: 172840 * 1024, have: int(s.VmSize)},
{name: "VmLck", want: 0 * 1024, have: int(s.VmLck)},
{name: "VmPin", want: 0 * 1024, have: int(s.VmPin)},
{name: "VmHWM", want: 10612 * 1024, have: int(s.VmHWM)},
{name: "VmRSS", want: 10612 * 1024, have: int(s.VmRSS)},
{name: "RssAnon", want: 2556 * 1024, have: int(s.RssAnon)},
{name: "RssFile", want: 8056 * 1024, have: int(s.RssFile)},
{name: "RssShmem", want: 0 * 1024, have: int(s.RssShmem)},
{name: "VmData", want: 25464 * 1024, have: int(s.VmData)},
{name: "VmStk", want: 132 * 1024, have: int(s.VmStk)},
{name: "VmExe", want: 908 * 1024, have: int(s.VmExe)},
{name: "VmLib", want: 7996 * 1024, have: int(s.VmLib)},
{name: "VmPTE", want: 96 * 1024, have: int(s.VmPTE)},
{name: "VmPMD", want: 0 * 1024, have: int(s.VmPMD)},
{name: "VmSwap", want: 0 * 1024, have: int(s.VmSwap)},
{name: "HugetlbPages", want: 0 * 1024, have: int(s.HugetlbPages)},
{name: "VoluntaryCtxtSwitches", want: 120965, have: int(s.VoluntaryCtxtSwitches)},
{name: "NonVoluntaryCtxtSwitches", want: 6455, have: int(s.NonVoluntaryCtxtSwitches)},
} {
if test.want != test.have {
t.Errorf("want %s %d, have %d", test.name, test.want, test.have)
}
}
}
<file_sep>/Makefile
SVC=mem_prometheus_exporter
LDFLAGS=-ldflags '-s -w -extldflags "-static"'
.PHONY: default
default: bin
.PHONY: test
test:
GO111MODULE=on go test ./... -count=1
.PHONY: bin
bin: test
GO111MODULE=on CGO_ENABLED=0 GOARCH=amd64 GOOS=linux go build ${LDFLAGS} -o bin/${SVC}-amd64-linux
.PHONY: install
install: test
go install ./...
.PHONY: clean
rm -rf bin
<file_sep>/README.md
This is an example of prometheus exporter that:
- list current running processes in linux host
- read /proc/[pid]/status
- export VM RSS of those processes to prometheus format at :8080/metrics
# Build
Run:
make
# Run
go run *.go
curl localhost:8080/metrics
Should have output like below:
# HELP process_memory_rss_bytes Size of memory resient set size of process read from /proc/[pid]/status
# TYPE process_memory_rss_bytes gauge
process_memory_rss_bytes{name="(sd-pam)",pid="758"} 2.92864e+06
process_memory_rss_bytes{name="NetworkManager",pid="691"} 2.0226048e+07
process_memory_rss_bytes{name="Xorg",pid="801"} 2.7267072e+07
process_memory_rss_bytes{name="acpi_thermal_pm",pid="127"} 0
process_memory_rss_bytes{name="at-spi-bus-laun",pid="834"} 5.967872e+06
process_memory_rss_bytes{name="at-spi2-registr",pid="932"} 5.976064e+06
process_memory_rss_bytes{name="auditd",pid="59"} 0
process_memory_rss_bytes{name="avahi-daemon",pid="688"} 4.067328e+06
process_memory_rss_bytes{name="avahi-daemon",pid="699"} 348160
process_memory_rss_bytes{name="blockd",pid="70"} 0
process_memory_rss_bytes{name="bluetoothd",pid="689"} 5.967872e+06
| 5d00fac8a3a1d844bcdaea7b0bb3cc4c7543864c | [
"Makefile",
"Go Module",
"Go",
"Markdown"
] | 7 | Go | mdk194/mem_prometheus_exporter | 0bc193e1023a2ed1304c07429f6854e6da5828c9 | 4287a0d5f9ec217806189f1b4475e7a4b565a9e4 | |
refs/heads/master | <file_sep>
#include <libftprintf.h>
#include <stdio.h>
//int main(int ac, char **av)
int main(void)
{
char *str1 = "bonjour les amis ravis de vous revoir";
int str3 = 1234;
int ret;
ret = ft_printf("");
ft_putnbr(ret);
ft_putchar('\n');
ret = ft_printf("");
ft_putnbr(ret);
// ft_putchar('\n');
// setlocale(LC_ALL, "");
// printf("😆 😎 😵 😗 😈\n");
}
<file_sep>#ifndef LIBFTPRINTF_H
#define LIBFTPRINTF_H
#include <unistd.h>
#include <libft.h>
#include <stdarg.h>
/*
typedef struct s_ft_printf_flags;
{
int minus;
int plus;
int zero;
int dies;
int escape;
} t_ft_printf_flags;
typedef struct s_ft_printf_special_conversion;
{
int h;
int hh;
int l;
int ll;
int j;
int z;
} t_ft_printf_special_conversion;
typedef struct s_ft_printf_conversion;
{
int c;
int C;
int d;
int D;
int i;
int o;
int O;
int p;
int s;
int S;
int u;
int U;
int x;
int X;
} t_ft_printf_conversion;
typedef struct s_ft_printf_precision;
{
int precision;
} t_ft_printf_precision;
*/
int ft_printf(char *premier, ...);
#endif
<file_sep># **************************************************************************** #
# #
# ::: :::::::: #
# Makefile :+: :+: :+: #
# +:+ +:+ +:+ #
# By: lgatibel <<EMAIL>> +#+ +:+ +#+ #
# +#+#+#+#+#+ +#+ #
# Created: 2016/02/10 10:00:24 by lgatibel #+# #+# #
# Updated: 2016/09/16 13:42:49 by lgatibel ### ########.fr #
# #
# **************************************************************************** #
NAME = ft_printf
SRC_NAME = main.c ft_printf.c
OBJ_NAME = $(SRC_NAME:.c=.o)
SRC_PATH = ./srcs/
OBJ_PATH = ./objs/
SRC = $(addprefix $(SRC_PATH),$(SRC_NAME))
OBJ = $(addprefix $(OBJ_PATH),$(OBJ_NAME))
INC = -I./headers/
INC_LIBFT = -I./Libft/includes/
LIBFT_PATH = ./Libft/
LIBFT = -L./$(LIBFT_PATH)/ -lft
CC = clang
CFLAGS = -Wall -Wextra -Werror
all: $(NAME)
$(NAME): $(OBJ)
@echo "\nlibft"
@make -C Libft
$(CC) $(OBJ) $(INC) $(INC_LIBFT) $(LIBFT) -o $(NAME)
@echo "\n"
#ne pas ooublier les flags
$(addprefix $(OBJ_PATH),%.o) : $(addprefix $(SRC_PATH),%.c)
@mkdir -p $(OBJ_PATH) 2> /dev/null
@$(CC) -c $< $(INC) $(INC_LIBFT) -o $@
echo "!!!!!!!!!!!!!!!!!!!!!!!!ne pas ooublier les flags"
$(LIB):
make -C $(LIBFT_PATH)
clean:
rm -rf $(OBJ_PATH)
fclean: clean
rm $(NAME)
re: fclean all
r: fclean $(OBJ)
$(CC) $(OBJ) $(INC) $(INC_LIBFT) $(LIBFT) -o $(NAME)
norme:
norminette $(SRC)
norminette $(INC_PATH)
| 8f77d1c26bb7109af3c09a79c6675eaca07f8f68 | [
"C",
"Makefile"
] | 3 | C | lgatibel/Ft_printf | b40a3e9eb96a1f2b256900f4e9ad55ca2ce9f690 | cc44c9dd0679310a2eb488b8ec9a4698ec7bf916 | |
refs/heads/master | <repo_name>Valmos22/CRUD---PHP-MYSQl<file_sep>/pagina_inicio.php
<?php include("db.php"); ?>
<?php include('includes/header.php'); ?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link rel="stylesheet" href="estilos.css">
<title>Inicio</title>
</head>
<body>
<main class="container p-4">
<div class="row">
<div class="col-md-12" style="height: 100%; ">
<!-- <div>
<h5>Esto es la pagina inicial</h5>
</div> -->
<div>
<div class="row">
<div class="col-sm-6">
<div class="card" style="height: 100%; border-radius: 20px; margin-top:30px; box-shadow: 4px 6px 8px grey;">
<div class="card-body" >
<h5 class="card-title" style="color: #ff0000b5;text-shadow: 7px 3px 6px #222;">Especialízate ahora mismo</h5>
<p class="card-text">With supporting text below as a natural lead-in to additional
content.</p>
<a href="#" style="box-shadow: 4px 6px 8px grey;" class="btn btn-outline-dark rounded-pill">Conoce mas</a>
</div>
</div>
</div>
<div class="col-sm-6">
<div class="card" style="height: 100%; border-radius: 20px; margin-top:30px; box-shadow: 4px 6px 8px grey;">
<div class="card-body">
<h5 class="card-title" style="color: #ff0000b5;text-shadow: 7px 3px 6px #222;">Especialízate ahora mismo</h5>
<p class="card-text">With supporting text below as a natural lead-in to additional
content.</p>
<a href="#" style="box-shadow: 4px 6px 8px grey;" class="btn btn-outline-dark rounded-pill">Conoce mas</a>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="col-md-12 container" style="height: 100%; border-radius: 20px; margin-top:80px;">
<p>
<h2 style="text-align: center; text-shadow: 2px 2px 5px black;">Estudia desde cero o especialízate en las áreas con mayor demanda laboral</h2>
</p>
<div style="display: flex; justify-content: center; align-items: center;">
<div class="row row-cols-3" style="border-radius: 20px; margin-top:30px;">
<div class="col">
<div class="card" style="border-radius: 20px; box-shadow: 4px 6px 8px grey; background-color: rgba(78, 77, 80, 0.5);">
<div class="card-body">
<h6 class="card-title">Estudia:</h6>
<a href="#" style="color: #ff0000b5;text-shadow: 7px 3px 6px #222;" class="card-link">Finanzas Personales</a>
</div>
</div>
</div>
<div class="col">
<div class="card" style="border-radius: 20px; background-color: rgba(78, 77, 80, 0.5); box-shadow: 4px 6px 8px grey;">
<div class="card-body">
<h6 class="card-title">Estudia:</h6>
<a href="#" style="color: #ff0000b5;text-shadow: 7px 3px 6px #222;" class="card-link">Periodismo Digital</a>
</div>
</div>
</div>
<div class="col">
<div class="card" style="border-radius: 20px; background-color: rgba(78, 77, 80, 0.5); box-shadow: 4px 6px 8px grey;">
<div class="card-body">
<h6 class="card-title">Estudia:</h6>
<a href="#" style="color: #ff0000b5;text-shadow: 7px 3px 6px #222;" class="card-link">Produccion Audiovisual</a>
</div>
</div>
</div>
<div class="col" style="border-radius: 20px; margin-top:30px;">
<div class="card" style="border-radius: 20px; background-color: rgba(78, 77, 80, 0.5); box-shadow: 4px 6px 8px grey;">
<div class="card-body">
<h6 class="card-title">Estudia:</h6>
<a href="#" style="color: #ff0000b5;text-shadow: 7px 3px 6px #222;" class="card-link">Negocios</a>
</div>
</div>
</div>
<div class="col" style="border-radius: 20px; margin-top:30px;">
<div class="card" style="border-radius: 20px; background-color: rgba(78, 77, 80, 0.5); box-shadow: 4px 6px 8px grey;">
<div class="card-body">
<h6 class="card-title">Estudia:</h6>
<a href="#" style="color: #ff0000b5;text-shadow: 7px 3px 6px #222;" class="card-link">Diseño Grafico</a>
</div>
</div>
</div>
<div class="col" style="border-radius: 20px; margin-top:30px;">
<div class="card" style="border-radius: 20px; background-color: rgba(78, 77, 80, 0.5); box-shadow: 4px 6px 8px grey;">
<div class="card-body">
<h6 class="card-title">Estudia:</h6>
<a href="#" style="color: #ff0000b5;text-shadow: 7px 3px 6px #222;" class="card-link">Desarrollo Web</a>
</div>
</div>
</div>
<div class="col" style="border-radius: 20px; margin-top:30px;">
<div class="card" style="border-radius: 20px; background-color: rgba(78, 77, 80, 0.5); box-shadow: 4px 6px 8px grey;">
<div class="card-body">
<h6 class="card-title">Estudia:</h6>
<a href="#" style="color: #ff0000b5;text-shadow: 7px 3px 6px #222;" class="card-link">Ingles</a>
</div>
</div>
</div>
<div class="col" style="border-radius: 20px; margin-top:30px;">
<div class="card" style="border-radius: 20px; background-color: rgba(78, 77, 80, 0.5); box-shadow: 4px 6px 8px grey;">
<div class="card-body">
<h6 class="card-title">Estudia:</h6>
<a href="#" style="color: #ff0000b5;text-shadow: 7px 3px 6px #222;" class="card-link">Publicidad Digital</a>
</div>
</div>
</div>
<div class="col" style="border-radius: 20px; margin-top:30px;">
<div class="card" style="border-radius: 20px; background-color: rgba(78, 77, 80, 0.5); box-shadow: 4px 6px 8px grey;">
<div class="card-body">
<h6 class="card-title">Estudia:</h6>
<a href="#" style="color: #ff0000b5;text-shadow: 7px 3px 6px #222;" class="card-link">Videojuegos</a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</main>
<?php include('includes/footer.php'); ?>
</body>
</html><file_sep>/delete_curso.php
<?php
include("db.php");
if(isset($_GET['id'])) {
$id = $_GET['id'];
$query = "DELETE FROM cursos WHERE id = $id"; //DELETE FROM `cursos` WHERE `cursos`.`id` = 15
$result = mysqli_query($conn, $query);
if(!$result) {
die("Query Failed.");
}
header('Location: cursos_mostrar.php');
}
?><file_sep>/edit_estudiante.php
<?php
include("db.php");
$nombre = '';
$correo= '';
$contraseña= '';
if (isset($_GET['id'])) {
$id = $_GET['id'];
$query = "SELECT * FROM estudiantes WHERE id=$id";
$result = mysqli_query($conn, $query);
if (mysqli_num_rows($result) == 1) {
$row = mysqli_fetch_array($result);
$nombre = $row['nombre'];
$correo = $row['correo'];
$contraseña = $row['contraseña'];
}
}
if (isset($_POST['update'])) {
$id = $_GET['id'];
$nombre= $_POST['nombre'];
$correo = $_POST['correo'];
$contraseña = $_POST['contraseña'];
$query = "UPDATE estudiantes set nombre = '$nombre', correo = '$correo', contraseña = <PASSWORD>' WHERE id=$id";
mysqli_query($conn, $query);
$_SESSION['message'] = 'Task Updated Successfully';
$_SESSION['message_type'] = 'warning';
header('Location: estudiante.php');
}
?>
<?php include('includes/header.php'); ?>
<div class="container p-4">
<div class="row">
<div class="col-md-4 mx-auto">
<div class="card card-body">
<form action="edit_estudiante.php?id=<?php echo $_GET['id']; ?>" method="POST">
<div class="form-group">
<input name="nombre" type="text" class="form-control" value="<?php echo $nombre; ?>" placeholder="Update Title">
</div>
<div class="form-group">
<input name="correo" type="text" class="form-control" value="<?php echo $correo; ?>" placeholder="Update Title">
</div>
<div class="form-group">
<input name="contraseña" type="text" class="form-control" value="<?php echo $contraseña; ?>" placeholder="Update Title">
</div>
<button class="btn-success" name="update">
Actualizar
</button>
</form>
</div>
</div>
</div>
</div>
<?php include('includes/footer.php'); ?>
<file_sep>/registrarse.php
<?php include('includes/headerdos.php'); ?>
<main class="container p-4">
<div class="row">
<div class="col-md-4">
<?php if (isset($_SESSION['message2'])) { ?>
<div class="alert alert-<?= $_SESSION['message_type2']?> alert-dismissible fade show" role="alert">
<?= $_SESSION['message2']?>
<button type="button" class="close" data-dismiss="alert" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<?php session_unset(); } ?>
<!-- Agregar estudiante -->
<div class="card card-body" style="position: relative; left: 110%;top: 50%; border-radius: 20px; box-shadow: 4px 6px 8px grey;">
<form action="save_registro.php" method="POST">
<div class="form-group">
<label for="exampleInputEmail1">Nombre</label>
<input type="text" style="box-shadow: 4px 6px 8px grey;" name="nombre" class="form-control rounded-pill" placeholder="Nombre" autofocus>
</div>
<div class="form-group">
<label for="exampleInputEmail1">Email</label>
<input type="text" style="box-shadow: 4px 6px 8px grey;" name="Email" class="form-control rounded-pill" placeholder="Correo" autofocus>
</div>
<div class="form-group">
<label for="exampleInputEmail1">Contraseña</label>
<input type="password" style="box-shadow: 4px 6px 8px grey;" name="contraseña" class="form-control rounded-pill" placeholder="Contraseña"
autofocus>
</div>
<input type="submit" name="guardar" style="box-shadow: 4px 6px 8px grey;" class="btn btn btn-outline-dark btn-block rounded-pill" value="Aceptar">
<a class="" style="display: flex; justify-content: center; align-items: center; color:black; margin-top: 20px;" href="index.php">INICIA SESION</a>
</form>
</div>
</div>
</div>
</div>
</main>
<?php include('includes/footer.php'); ?><file_sep>/cursos_mostrar.php
<?php include("db.php"); ?>
<?php include('includes/header.php'); ?>
<!-- Mostrar Mis Cursos Registrados -->
<main class="container p-4">
<div class="col" style="margin-top:20px;">
<div class="col-md-12">
<div class="card" style="width: 100%; border-radius:20px;">
<div class="card-body">
<form action="save_curso.php" method="POST">
<!-- nombre -->
<div class="form-group">
<?php
$query = "SELECT cursos.nombre FROM usuarios INNER JOIN cursos ON usuarios.id = cursos.usuario";
$result_student = mysqli_query($conn, $query);
while($row = mysqli_fetch_assoc($result_student)) { ?>
<ol style="text-align: center;">
<a class="card-title" style="color: black;"><?php echo $row['nombre']; ?></a>
<br>
<!-- <input type="submit" name="delete_curso" class="btn btn-outline-dark rounded-pill"
value="Eliminar"> -->
<!-- <a href="delete_curso.php?id=<?php echo $row['id']?>" class="btn btn-danger">
<i class="far fa-trash-alt"></i>
</a> -->
<br>
</ol>
<?php } ?>
</div>
</form>
</div>
</div>
</div>
</div>
</main>
<?php include('includes/footer.php'); ?><file_sep>/cursos_obtener.php
<?php include("db.php"); ?>
<?php include('includes/header.php'); ?>
<main class="container p-4">
<div class="row row-cols-3">
<div class="col" style="margin-top:20px;">
<div class="col-md-4">
<div class="card" style="width: 18rem; border-radius:20px; box-shadow: 10px 10px 5px grey;">
<img src="https://img-b.udemycdn.com/course/240x135/1075334_8b5f_4.jpg?secure=Hrs-YOFwX2SAaY81iIW-2Q%3D%3D%2C1606563455"
class="card-img-top" alt="..." style="border-radius:20px;">
<div class="card-body">
<form action="save_curso.php" method="POST">
<!-- nombre -->
<div class="form-group">
<input type="hidden" name="nombre" class="form-control" value="Angular: Desde cero a experto + 15 proyectos jajajaja"
autofocus>
<input type="hidden" name="usuario" value="1">
</div>
<h5 class="card-title">Angular: Desde cero a experto + 15 proyectos jajajaja</h5>
<p class="card-text">$ 50.000</p>
<input type="submit" style="box-shadow: 4px 6px 8px grey;" name="save_curso" class="btn btn-outline-dark rounded-pill" value="Obtener">
</form>
</div>
</div>
</div>
</div>
<div class="col" style="margin-top:20px;">
<div class="col-md-4">
<div class="card" style="width: 18rem; border-radius:20px; box-shadow: 10px 10px 5px grey;">
<img src="https://img-a.udemycdn.com/course/240x135/782428_b5cf_4.jpg?_UZmRyG2CSoNyNmgUBa1lkncytFkXWOuaK3z6EpDubl8_7gdSwmDLtg6UkKfxCw0LHa3-PUxEqaxvMk4Lzar27cFNec2uLLAAqpA-PHVhIG3G_9H1IHImvhgHxRA5H0"
class="card-img-top" alt="..." style="border-radius:20px;">
<div class="card-body">
<form action="save_curso.php" method="POST">
<!-- nombre -->
<div class="form-group">
<input type="hidden" name="nombre" class="form-control" value="Diseño Web Profesional El Curso Completo, Práctico y desde 0"
autofocus>
<input type="hidden" name="usuario" value="1">
</div>
<h5 class="card-title">Diseño Web Profesional El Curso Completo, Práctico y desde 0</h5>
<p class="card-text">$ 60.000</p>
<input type="submit" style="box-shadow: 4px 6px 8px grey;" name="save_curso" class="btn btn-outline-dark rounded-pill" value="Obtener">
</form>
</div>
</div>
</div>
</div>
<div class="col" style="margin-top:20px;">
<div class="col-md-4">
<div class="card" style="width: 18rem; border-radius:20px; box-shadow: 10px 10px 5px grey;">
<img src="https://img-b.udemycdn.com/course/240x135/672600_1def_7.jpg?secure=Bvr-7xuIGIXFGuW0F_q1oQ%3D%3D%2C1606554269"
class="card-img-top" alt="..." style="border-radius:20px;">
<div class="card-body">
<form action="save_curso.php" method="POST">
<!-- nombre -->
<div class="form-group">
<input type="hidden" name="nombre" class="form-control" value="PHP 7 y MYSQL: El Curso Completo, Práctico y Desde Cero !"
autofocus>
<input type="hidden" name="usuario" value="1">
</div>
<h5 class="card-title">PHP 7 y MYSQL: El Curso Completo, Práctico y Desde Cero !</h5>
<p class="card-text">$ 100.000</p>
<input type="submit" style="box-shadow: 4px 6px 8px grey;" name="save_curso" class="btn btn-outline-dark rounded-pill" value="Obtener">
</form>
</div>
</div>
</div>
</div>
<div class="col" style="margin-top:20px;">
<div class="col-md-4">
<div class="card" style="width: 18rem; border-radius:20px; box-shadow: 10px 10px 5px grey;">
<img src="https://img-b.udemycdn.com/course/240x135/1509816_dff8.jpg?secure=Nb3DfVpx1AFQdolS_1M1oA%3D%3D%2C1606568258"
class="card-img-top" alt="..." style="border-radius:20px;">
<div class="card-body">
<form action="save_curso.php" method="POST">
<!-- nombre -->
<div class="form-group">
<input type="hidden" name="nombre" class="form-control" value="JavaScript Moderno Guía Definitiva Construye +15 Proyectos"
autofocus>
<input type="hidden" name="usuario" value="1">
</div>
<h5 class="card-title">JavaScript Moderno Guía Definitiva Construye +15 Proyectos</h5>
<p class="card-text">$ 100.000</p>
<input type="submit" style="box-shadow: 4px 6px 8px grey;" name="save_curso" class="btn btn-outline-dark rounded-pill" value="Obtener">
</form>
</div>
</div>
</div>
</div>
<div class="col" style="margin-top:20px;">
<div class="col-md-4">
<div class="card" style="width: 18rem; border-radius:20px; box-shadow: 10px 10px 5px grey;">
<img src="https://img-a.udemycdn.com/course/240x135/1756340_0543_4.jpg?Fty2AMou-pxDnsuvoODvS4njNb6l31HWLao7DrpI7isJUaaN0DDORrQTfWxmPQ3S4f-Zag0KzD27enZiRpPWcywBVK_8G8F87PBALTuLpilNYUcV3_U7Vwxk2B9fZaxf"
class="card-img-top" alt="..." style="border-radius:20px;">
<div class="card-body">
<form action="save_curso.php" method="POST">
<!-- nombre -->
<div class="form-group">
<input type="hidden" name="nombre" class="form-control" value="React - La Guía Completa: Hooks Context Redux MERN +15 Apps"
autofocus>
<input type="hidden" name="usuario" value="1">
</div>
<h5 class="card-title">React - La Guía Completa: Hooks Context Redux MERN +15 Apps</h5>
<p class="card-text">$ 100.000</p>
<input type="submit" style="box-shadow: 4px 6px 8px grey;" name="save_curso" class="btn btn-outline-dark rounded-pill" value="Obtener">
</form>
</div>
</div>
</div>
</div>
<div class="col" style="margin-top:20px;">
<div class="col-md-4">
<div class="card" style="width: 18rem; border-radius:20px; box-shadow: 10px 10px 5px grey;">
<img src="https://img-a.udemycdn.com/course/240x135/1467412_94b5_11.jpg?eiHQ5t362GOdfglRrwjuPo-DruynFMTmSUrmHJUJ36kitzyKT_M9DJQWKmyfvMZnXN1vGeDuo5sS_8werq2QfcIoRG4r586CnSlZn3DGJNzOd5KR7kdioiyqtgYX2RT6Cw"
class="card-img-top" alt="..." style="border-radius:20px;">
<div class="card-body">
<form action="save_curso.php" method="POST">
<!-- nombre -->
<div class="form-group">
<input type="hidden" name="nombre" class="form-control" value="Crea sistemas POS Inventarios y ventas con PHP 7 y AdminLTE"
autofocus>
<input type="hidden" name="usuario" value="1">
</div>
<h5 class="card-title">Crea sistemas POS Inventarios y ventas con PHP 7 y AdminLTE</h5>
<p class="card-text">$ 100.000</p>
<input type="submit" style="box-shadow: 4px 6px 8px grey;" name="save_curso" class="btn btn-outline-dark rounded-pill" value="Obtener">
</form>
</div>
</div>
</div>
</div>
</div>
</main>
<?php include('includes/footer.php'); ?><file_sep>/save_curso.php
<?php
include('db.php');
if (isset($_POST['save_curso'])) {
$nombre = $_POST['nombre'];
$usuario = $_POST['usuario'];
$query = "INSERT INTO cursos(nombre, usuario) VALUES ('$nombre', '$usuario')";
$result = mysqli_query($conn, $query);
if(!$result) {
die("Query Failed.");
}
$_SESSION['message_is'] = 'Tarea Creada';
$_SESSION['message_type_is'] = 'success';
header('Location: cursos_obtener.php');
}
?><file_sep>/validar.php
<?php
include('db.php');
// if(isset($_POST['email']) && isset($_POST['contraseña'])){
// $email = $_POST['email'];
// $contraseña= $_POST['contraseña'];
// $query = "SELECT * FROM estudiantes where correo = '$email' and contraseña = '$contraseña'";
// $result = mysqli_query($conn, $query);
// $filas = mysqli_num_rows($result);
// if(!$filas){
// header('Location: index_2.php');
// }else{
// // $_SESSION['login'] = $filas;
// // header('Location: index_2.php');
// echo('Usuario o contraseña son incorrectos');
// }
// }
// if(isset($_POST['email']) && isset($_POST['contraseña'])){
// $correo = $_POST['email'];
// $contraseña = $_POST['contraseña'];
// $db = new Database();
// // $query = "SELECT * FROM estudiantes where nombre = '$nombre' and contraseña = '$<PASSWORD>'";
// $query = $db->connect()->prepare("SELECT * FROM estudiantes where correo = '$correo' and contraseña = '$contr<PASSWORD>'");
// $query->execute(['correo'=>$correo, 'contraseña'=>$contraseña]);
// $row = $query->fetch(PDO::FETCH_NUM);
// if($row == true){
// //validar rol
// }else{
// echo('Usuario o contraseña son incorrectos');
// }
// }
// mysqli_free_result($result);
// mysqli_close($conn);
//-------------------------------------------------------------------
if (isset($_GET['cerrar_sesion'])){
session_unset();
session_destroy();
}
if (isset($_SESSION['rol'])){
switch($_SESSION['rol']){
case 1:
header('location: index_estudiante.php');
break;
case 2:
header('location: index_admin.php');
break;
default:
}
}
// if(isset($_POST['correo']) && isset($_POST['contraseña'])){
// $correo = $_POST['correo'];
// $contraseña = $_POST['contraseña'];
// $db = new Database();
// $query = $db->connect()->prepare("SELECT * FROM usuarios where correo = '$correo' and contraseña = '$contraseña'");
// $query->execute(['correo'=>$correo, 'contraseña'=>$contraseña]);
// $row = $query->fetch(PDO::FETCH_NUM);
// if($row == true){
// //validar
// $rol = $row[3];
// $_SESSION['rol'] = $rol;
// switch($_SESSION['rol']){
// case 1:
// header('location: index_estudiante.php');
// break;
// case 2:
// header('location: index_admin.php');
// break;
// default:
// }
// }else{
// echo('Usuario o contraseña son incorrectos');
// }
// }
if(isset($_POST['correo']) && isset($_POST['contraseña'])){
$email = $_POST['correo'];
$contraseña= $_POST['<PASSWORD>'];
$query = "SELECT * FROM usuarios where correo = '$email' and contraseña = '$contr<PASSWORD>'";
$result = mysqli_query($conn, $query);
$filas = mysqli_num_rows($result);
// $row = $query->fetch(PDO::FETCH_NUM);
if($filas == true){
// header('Location: index_2.php');
$rol = $filas;
$_SESSION['rol'] = $rol;
switch($_SESSION['rol']){
case 1:
header('location: pagina_inicio.php');
break;
case 2:
header('location: index_admin.php');
break;
default:
}
}else{
echo('Usuario o contraseña son incorrectos');
}
}
// <!-- ENVIO DE COOREO ELECTRONICO -->
require 'src/Exception.php';
require 'src/PHPMailer.php';
require 'src/SMTP.php';
$correo = $_POST["correo"];
$mensaje = $_POST["mensaje"];
$mail = new PHPMailer\PHPMailer\PHPMailer(true);
try {
//Server settings
$mail->SMTPDebug = 0; // Enable verbose debug output
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'smtp.gmail.com'; // Specify main and backup SMTP servers
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 587; // TCP port to connect to
//https://support.google.com/mail/answer/185833?hl=es-419 POR ACA INGRESAN PARA CREAR LA CLAVE DE LA APP
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
//DESDEEE -->
$mail->setFrom('<EMAIL>', '<NAME>');
//La siguiente linea, se repite N cantidad de veces como destinarios tenga
$mail->addAddress($correo, $correo); // Add a recipient
// Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Mensaje automatico';
$mail->Body = $mensaje;
$mail->AltBody = 'This is the body in plain text for non-HTML mail clients';
$mail->send();
$data["res"] = 'Message has been sent';
} catch (Exception $e) {
$data["res"] = 'Error';
echo "Message could not be sent. Mailer Error: {$mail->ErrorInfo}";
}
echo json_encode($data);
?><file_sep>/index.php
<?php include("db.php");
session_start();
?>
<?php include('includes/headerdos.php'); ?>
<main class="container p-4">
<div class="row">
<div class="col-md-4">
<div class="card card-body" style="position: relative; left: 110%;top: 50%; border-radius: 20px; box-shadow: 4px 6px 8px grey;">
<div class="sesion">
<form action="validar.php" method="POST">
<div class="form-group">
<label for="exampleInputEmail1">Email</label>
<input type='text' style="box-shadow: 4px 6px 8px grey;" class="form-control rounded-pill" placeholder="Email" name="correo">
<input type='hidden' class="form-control" name="mensaje" value="Has iniciado sesión en HugoLearn">
</div>
<div class="form-group">
<label for="exampleInputPassword1">Contraseña</label>
<input type="<PASSWORD>" style="box-shadow: 4px 6px 8px grey;" class="form-control rounded-pill" placeholder="<PASSWORD>" name="contraseña">
</div>
<input type="submit" name="ini_sesion" style="box-shadow: 4px 6px 8px grey;" class="btn btn btn-outline-dark btn-block rounded-pill" value="Aceptar">
<!-- <button type="submit" name="ini_sesion" class="btn btn-primary">Aceptar</button> -->
<a class="" style="display: flex; justify-content: center; align-items: center; color:black; margin-top: 20px;" href="registrarse.php">REGISTRARSE</a>
</form>
</div>
</div>
</div>
</div>
</main>
<?php include('includes/footer.php'); ?><file_sep>/imprimir.php
<?php
header("Content-Type: application/vnd.ms-excel; charset=utf-8");
header("Content-Disposition: attachment; filename=abc.doc"); //File name extension was wrong
header("Expires: 0");
header("Cache-Control: must-revalidate, post-check=0, pre-check=0");
header("Cache-Control: private",false);
$mysqli = new mysqli("localhost", "root", "", "php_mysql_crud");
if ($mysqli->connect_errno) {
echo "Falló la conexión con MySQL: (" . $mysqli->connect_errno . ") " . $mysqli->connect_error;
}
$resultado = $mysqli->query("SELECT * FROM `task`");
if($resultado){
echo "<table border='1'>";
echo "<tr>
<td>Id</td>
<td>Titulo</td>
<td>Descripcion</td>
<td>Fecha</td>
</tr>
";
while ($itemTemp = $resultado->fetch_assoc()) {
echo "<tr>
<td>".$itemTemp['id']."</td>
<td>".$itemTemp['title']."</td>
<td>".$itemTemp['description']."</td>
<td>".$itemTemp['created_at']."</td>
</tr>
";
}
echo "</table>";
}else{
echo "Falló sql: (" . $mysqli->errno . ") " . $mysqli->error;
}
?><file_sep>/index_2.php
<?php include("db.php"); ?>
<?php include('includes/header.php'); ?>
<main class="container p-4">
<div class="row rounded-pill" style="display: flex; justify-content: center; align-items: center;">
<div class="col-md-4">
<!-- MESSAGES -->
<?php if (isset($_SESSION['message'])) { ?>
<div class="alert alert-<?= $_SESSION['message_type']?> alert-dismissible fade show" role="alert">
<?= $_SESSION['message']?>
<button type="button" class="close" data-dismiss="alert" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<?php session_unset(); } ?>
<!-- ADD TASK FORM -->
<div class="card card-body" style="border-radius:20px; box-shadow: 4px 6px 8px grey">
<form action="save_task.php" method="POST">
<div class="form-group">
<label for="exampleInputEmail1">Titulo</label>
<input type="text" style="box-shadow: 4px 6px 8px grey;" name="title" class="form-control rounded-pill" placeholder="Titulo" autofocus>
</div>
<div class="form-group">
<label for="exampleInputEmail1">Descripcion</label>
<textarea name="description" rows="1" style="box-shadow: 4px 6px 8px grey;" class="form-control rounded-pill" placeholder="Descripcion"></textarea>
</div>
<input type="submit" name="save_task" style="box-shadow: 4px 6px 8px grey;" class="btn btn btn-outline-dark btn-block rounded-pill" value="Guardar Tarea">
</form>
</div>
</div>
<!-- Mostrar task en tabla -->
<div class="col-md-12 ver-tarea rounded-pill" style="margin-top: 20px">
<table class="table table-bordered" style="border-radius:20px;">
<thead class="thead-dark" style="border-radius: 20px">
<tr>
<th>Titulo</th>
<th>Descripcion</th>
<th>Fecha</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<?php
$query = "SELECT * FROM task";
$result_tasks = mysqli_query($conn, $query);
while($row = mysqli_fetch_assoc($result_tasks)) { ?>
<tr>
<td><?php echo $row['title']; ?></td>
<td><?php echo $row['description']; ?></td>
<td><?php echo $row['created_at']; ?></td>
<td>
<a href="edit.php?id=<?php echo $row['id']?>" class="btn btn-secondary">
<i class="fas fa-marker"></i>
</a>
<a href="delete_task.php?id=<?php echo $row['id']?>" class="btn btn-danger">
<i class="far fa-trash-alt"></i>
</a>
</td>
</tr>
<?php } ?>
</tbody>
</table>
<a type="submit" href="imprimir.php" style="box-shadow: 4px 6px 8px grey;" class="btn btn-outline-dark rounded-pill">Imprimir Tareas</a>
</div>
</div>
</main>
<?php include('includes/footer.php'); ?> | 21ce3b4e3a8a1f98eba3f4bab79e44fb3d97f706 | [
"PHP"
] | 11 | PHP | Valmos22/CRUD---PHP-MYSQl | 6908c2f1a43249f8f168976ee95b8a062978f00b | fd04c4ec39b9098ca814b096ec76487274e7529d | |
refs/heads/master | <repo_name>cats0414/GPSMaps<file_sep>/app/src/main/java/com/example/hmrtgps/Enviar.java
package com.example.hmrtgps;
import android.os.AsyncTask;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.Socket;
import static com.example.hmrtgps.MainActivity.message;
import static com.example.hmrtgps.MainActivity.ip;
import static com.example.hmrtgps.MainActivity.puertotcp;
public class Enviar extends AsyncTask<String, Void, Void> {
Socket s;
DataOutputStream dt;
PrintWriter pw;
@Override
protected Void doInBackground(String... strings) {
try {
s= new Socket(ip, Integer.parseInt(puertotcp));
pw= new PrintWriter(s.getOutputStream());
pw.write(message);
pw.flush();
pw.close();
s.close();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
}
<file_sep>/settings.gradle
rootProject.name='HMRT GPS'
include ':app'
| ee502ae91e81823e2ef06d36058d16acec94f98b | [
"Java",
"Gradle"
] | 2 | Java | cats0414/GPSMaps | 478af9b541a2be13f99d119f61c6d7d84ad49c80 | 9fb86325861e0b6a93baab99506d5ed1ed95c59a | |
refs/heads/master | <repo_name>chcko/chcko-r<file_sep>/chcko/r/cr/de.rst
.. raw:: html
%path = "Mathe/Funktionen"
%kind = chindnum["Texte"]
%level = 9
<!-- html -->
Eine **Funktion (= Abbildung)** kann als Menge von Wertepaaren `(x,y)` mit
`x\in X` (**Definitionsmenge**) und `y\in Y` (**Wertemenge**) identifiziert
werden. Wichtig ist die **Eindeutigkeit**: für ein `x` gibt es genau ein `y`.
`x` kann man aus einer Menge frei wählen.
Man nennt `x` **unabhängiger Wert, Urbild, Veränderliche, Argument oder Stelle**.
`y` ist durch `x` bestimmt. Es ist keine zusätzliche Information notwendig.
Das macht letztendlich den Begriff Funktion wichtig.
Man nennt `y` **abhängige Variable, Bild oder Funktionswert**.
Ist diese Eindeutigkeit nicht gegeben, dann spricht man von einer **Relation**.
Eine Funktion `f` hat eine Richtung von der Menge aller `x` (`X`) auf die Menge
aller `y` (`Y`). Man schreibt `f:X\rightarrow Y`.
Die Wertepaare können normalerweise nicht alle angegeben werden,
deshalb beschreibt man eine Funktion mit einer Rechenvorschrift,
d.h. einem **analytischen Ausdruck**, z.B. `y=x^2+1`.
Das ist im Grunde ein Algorithmus, ein kleines Programm.
.. admonition:: Grundkonzepte:
- Definitionsmenge
- Wertemenge
- Abbildung
Wenn man keinen eigenen Buchstabe `f` für die Funktionen haben will,
kann man auch schreiben: `y(x)`, d.h. die Klammern sagen aus,
dass `y` sich aus `x` eindeutig ergibt, d.h. Funktion von `x` ist.
Manchmal kann `f` die Funktion meinen oder den Funktionswert.
Konzentriert man sich nur auf die Abbildung, so schreibt man statt `g(f(x))`
auch `g\circ f` und das heißt: bilde zuerst mittels `f`, dann mittels `g` ab,
d.h. von rechts nach links in beiden Schreibweisen.
Es kann mehrere `x` mit gleichem `y` geben und es ist immer noch eine Funktion.
Gibt es nur ein Urbild `x` für ein Bild `y`, dann ist die Funktion
linkseindeutig (**injektiv**), d.h. sie bewahrt Unterscheidbarkeit oder
verliert nicht an Information.
Wird darüber hinaus noch jedes Element der
Bildmenge erreicht (**surjektiv**), dann ist die Funktion umkehrbar eindeutig,
eineindeutig oder **bijektiv**. Dann kann man auch `y` beliebig wählen und `x`
ist dadurch bestimmt (`x(y)`, **Umkehrfunktion**).
Wenn die Bilder von Elementen, die sich nahe sind, auch nahe beisammen sind,
dann ist die Funktion **stetig**. Nahe ist intuitiv, muss aber mathematisch
erst definiert werden. Das geschieht über eine **Metrik** `d` (`d(x,y)\ge 0`,
`d(x,y)=d(y,x)` und `d(x,z)\le d(x,y)+d(y,z)`, z.B. `d(x,y)=|y-x|`)
(oder auch abstrakter in der Topologie über eine Menge von (offenen) ineinander
verschachtelten Mengen.)
.. admonition:: Stetigkeit bei `x`
Für jedes `\varepsilon > 0` gibt es eine `\delta`, so dass
für alle y mit `d(x,y)<\delta` gilt: `d(f(x),f(y))<\varepsilon`.
Für jede `\varepsilon`-Umgebung gibt es eine `\delta`-Umgebung.
Eine Funktion setzt keine Ordnung voraus. Ist sie aber gegeben,
dann sagt man eine Funktion ist **(streng) monoton** steigend,
wenn aus `x\le y` (`x<y`) folgt: `f(x)\le f(y)` (`f(x)<f(y)`).
(Streng) monoton fallend wird analog definiert.
Verwandt mit Funktion ist Morphismus (:lnk:`r.cs`).
Zur graphischen Darstellung einer Funktion in einem **Koordinatensystem**:
- Zuerst werden die Werte der beteiligten Variablen `X` und `Y` mittels einer Einheit
auf Zahlen abgebildet.
- Es wird eine Längeneinheit für die Darstellung gewählt (z.B. cm).
Das Verhältnis dieser Längeneinheit zur realen Einheit (kg, km, m/s,...) ist
der **Maßstab**.
- Für einen Wert der unabhängigen Variablen `X` geht man den Zahlenwert
in dieser Längeneinheit nach rechts (`x`-Koordinate, Abszisse).
- Für einen Wert der abhängigen Variablen `Y` geht man den Zahlenwert
in dieser Längeneinheit nach oben (`y`-Koordinate, Ordinate).
- Das wiederholt man für einige Werte und man erhält Punkte
Die `(x,y)`-Paare dieser Punkte kann man als Zwischenschritt auch
in eine Tabelle eintragen (**Wertetabelle**).
- Weil es sich meistens um stetige Funktionen handelt,
kann man eine Kurve durch diese Punkte legen.
Ist die Kurve eine Gerade, dann spricht man von einer **linearen Funktion**.
Beispiele für Graphen von bestimmten grundlegenden Funktionstypen mit
einer unabhängigen Variablen gibt es hier: :lnk:`r.cf`.
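Zur Veranschaulichung eine kleine Skizze in reinem Python (nur als Beispiel
gedacht, die konkreten Werte sind frei gewählt): die Rechenvorschrift
`y=x^2+1` als kleines Programm und eine damit erzeugte Wertetabelle::

    def f(x):
        return x**2 + 1                  # Rechenvorschrift: zu jedem x genau ein y

    for x in [-3, -2, -1, 0, 1, 2, 3]:   # frei gewählte x-Werte
        print(x, f(x))                   # Wertepaare (x, y) für die Wertetabelle
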
<file_sep>/chcko/r/a3/__init__.py
from chcko.chcko.hlp import Struct
import numpy as np
import pint
u = pint.UnitRegistry()
import random
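# Sketch of the intent, inferred from the formulas in chalc below (an
# interpretation, not taken from elsewhere): given the supply voltage Vcc, the
# input voltage Vi driving the base through Rb, the voltage Vo across Re when
# the transistor conducts fully, the current gain hfe, the maximum collector
# current Icmax and the base-emitter drop Vbe, find the resistors Rc, Re and Rb
# in kΩ.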
def chiven():
g = Struct()
g.Vcc = random.choice(range(7,15))*u.V
g.Vi = random.choice(range(4,g.Vcc.magnitude-2))*u.V
g.Vo = random.choice(range(1,g.Vi.magnitude))*u.V
g.hfe = random.choice(range(5,15))*10
g.Icmax = random.choice(range(5,15))*u.mA
g.Vbe = random.choice([0.3,0.7])*u.V
return g
def chalc(g):
    #g = chiven()
    # when the transistor is off, the full supply voltage drops across it;
    # when it conducts fully, Vce is (almost) zero and Icmax flows through Rc+Re
    VceCutoff = g.Vcc
    VceSaturation = 0*u.V
    ReRc = (g.Vcc-VceSaturation)/g.Icmax   # total resistance Rc + Re
    #Rc = (g.Vcc/g.Vo-1)*Re
    Re = ReRc*g.Vo/g.Vcc                   # split fixed by Vo/Vcc = Re/(Rc+Re)
    Rc = ReRc - Re
    Ibmax = g.Icmax/g.hfe                  # base current needed for Icmax
    Rb = (g.Vi-g.Vo-g.Vbe)/Ibmax           # Rb drops the remaining input voltage
    res = [x.to('kΩ').magnitude for x in [Rc,Re,Rb]]
    return res
chames = [r'\(R_c/kΩ=\)', r'\(R_e/kΩ=\)', r'\(R_b/kΩ=\)']
P = lambda x: "{:~P}".format(x)
<file_sep>/chcko/r/cr/en.rst
.. raw:: html
%path = "maths/functions"
%kind = chindnum["texts"]
%level = 9
<!-- html -->
A **function (= mapping)** can be seen as a set of value pairs `(x,y)` with
`x\in X` (**domain**, **preimage**) and `y\in Y` (**codomain**, **image**, **range**).
Important is the **uniqueness** of the image: for any `x` there is exactly one `y`.
`x` can be freely chosen. `x` is called **independent value, preimage, argument or position**.
`y` is determined by `x`. No new information is needed to select `y`.
This is why functions are important.
`y` is called **(dependent or function) value or image**.
If the uniqueness is not satisfied, then it is called a **relation**.
A function `f` has a direction from the set of all `x` (`X`) to the set
of all `y` (`Y`). This is notated as `f:X\rightarrow Y`.
The value pairs normally cannot all be written down because there are too many or infinitely many.
Therefore one describes the function via an **analytic expression**, e.g.
`y=x^2+1`. This basically is an algorithm, a program.
.. admonition:: Basic Concepts:
- domain
- codomain
- mapping
If we do not want to have a separate letter `f` for the mapping,
we can write: `y(x)`, i.e. the parentheses say that
`y` follows from `x`, i.e. is a function of `x`.
Sometimes `f` can mean either the mapping or the function value.
If we concentrate only on the mapping, instead of `g(f(x))`
we can write `g\circ f` meaning: first we map via `f`, then via `g`,
i.e. read from right to left in both notations.
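As a small illustration, a minimal sketch in plain Python (the names `f` and
`g` and the second formula are made up for this example)::

    def f(x):
        return x**2 + 1       # the analytic expression as a small program

    def g(x):
        return 2*x - 3        # a second mapping, only for the composition

    y = f(5)                  # for the chosen x=5 there is exactly one y, 26
    gof = g(f(5))             # g∘f: f is applied first, then g
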
There can be several `x` with the same `y` and it is still a function.
If there is only one preimage `x` for a `y`, then the function
is **injective**, i.e. it keeps the distinction or does not lose information.
If in addition every element of `Y` is reached (**surjective**),
then the function is **bijective**.
In this case by choosing `y` we also choose `x` (`x(y)`, **inverse function**).
If the images of elements that are close together are also close together,
then the function is **continuous**. Close is intuitive, but still needs
a formal definition. This is done via a **metric** `d` (`d(x,y)\ge 0`,
`d(x,y)=d(y,x)` and `d(x,z)\le d(x,y)+d(y,z)`, e.g. `d(x,y)=|y-x|`)
(or in a more abstract way in topology via sets of nested open sets).
.. admonition:: Continuity at `x`
For every `\varepsilon > 0` there is a `\delta`, such that
for all `y` with `d(x,y)<\delta` we have `d(f(x),f(y))<\varepsilon`.
For every `\varepsilon`-neighbourhood there is a `\delta`-neighbourhood.
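A numeric illustration of this definition (a sketch in plain Python; it only
samples points, so it is not a proof): for `f(x)=x^2` at `x=1` and
`\varepsilon=0.1`, a `\delta` of `0.04` works::

    def f(x):
        return x**2

    x, eps, delta = 1.0, 0.1, 0.04
    ys = [x - delta + 2*delta*i/1000 for i in range(1001)]    # points with d(x,y) <= delta
    print(all(abs(f(y) - f(x)) < eps for y in ys))            # True for these samples
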
A function does not presuppose order of domain and codomain. But if it is given,
then a function is said to be **(strictly) monotonically increasing**,
if `x\le y` (`x<y`) makes `f(x)\le f(y)` (`f(x)<f(y)`).
Analogously one defines **(strictly) monotonically decreasing**.
Morphisms are related to functions (see :lnk:`r.cs`).
Regarding the graphical representation of a function in a **coordinate system**:
- First, the values of the variables `X` and `Y` are mapped via units to numbers
- A unit for the graph is chosen (e.g. cm).
The ratio of unit in graph to unit in reality (kg, km, m/s,...) is the **scale**.
- For a value of the independent variable `X` one goes the number
of graph units to the right (`x`-coordinate, abscissa).
- For a value of the dependent variable `Y` one goes the number of
graph units upward (`y`-coordinate, ordinate).
- This is repeated for a few values.
These `(x,y)`-pairs can also be written into a table as an intermediate step
(**value table**).
- Since usually it will be a continuous function,
one can connect the points with a continuous line.
If the curve is a straight line, then the function is called a **linear function**.
Examples of graphs for fundamental types of functions of one variable
can be found here: :lnk:`r.cf`.
<file_sep>/chcko/r/d/__init__.py
# -*- coding: utf-8 -*-
import numpy as np
import random
import itertools
from math import log
from sympy.abc import C, U, R, T
from sympy import E
from sympy import simplify
from chcko.chcko.hlp import Struct, norm_rounded
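# Sketch of the intent, read off the formulas below (an interpretation, not
# taken from elsewhere): a capacitor C discharges through a resistor R, so the
# voltage follows U(t) = U*e^(-t/(R*C)), with C in μF, U in V, R in kΩ and t in s.
# chalc returns the decay factor as a formula, the capacitance C in μF computed
# from the two measured voltages, the initial and the remaining charge Q = C*U
# in coulomb, and the half-life R*C*ln(2) in s.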
gc = np.array(range(500, 600, 10)) # μF
gu = np.array(range(900, 1000, 10)) # V
gr = np.array(range(150, 250, 10)) # kΩ
gt = np.array(range(20, 30)) # s
curt = list(itertools.product(gc, gu, gr, gt))
num = len(gc) * len(gu) * len(gr) * len(gt)
ee = E ** (-T / (R * C))
def chiven():
i = random.randrange(num)
g = Struct()
g.c, g.u, g.r, g.t = curt[i]
g.u2 = g.u * E ** (-1e3 * g.t / (g.c * g.r))
return g
def chalc(g):
# r,u,u2,t=230,940,773.54,26
r, u, u2, t = g.r, g.u, g.u2, g.t
c = 1e3 * t / (r * log(u / u2))
return ['e^(-t/(R*C))',
c,
1e-6 * c * u,
1e-6 * c * u2,
1e-3 * r * c * log(2)]
def chorm(answers):
# answers=chalc(chiven())
try:
e = simplify(answers[0].upper())
a0 = str(e)
except:
a0 = answers[0]
return [a0] + [norm_rounded(a, 2) for i, a in enumerate(answers) if i > 0]
<file_sep>/chcko/r/ca/__init__.py
# -*- coding: utf-8 -*-
from chcko.chcko.hlp import Struct
import random
import numpy as np
from math import acos, pi
__all__ = ['chiven', 'chalc']
def chiven():
g = Struct()
g.x = random.sample(range(-9, 9), 2)
g.y = random.sample(range(-9, 9), 2)
g.dx = random.sample(list(range(-4, -1)) + list(range(1, 4)), 1)[0]
return g
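# angle: angle between the vectors a and b in degrees; the rounding keeps the
# cosine within [-1, 1] despite floating point noise before acos is applied
# taria: area of the triangle spanned by a and b (half of the 2D cross product)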
angle = lambda a, b: 180 * \
acos(np.around(np.dot(a, b) / np.linalg.norm(a) / np.linalg.norm(b),6)) / pi
taria = lambda a, b: abs(np.cross(a, b) / 2)
def chalc(g):
a = angle(g.x, g.y)
A = taria(g.x, g.y)
nx = np.linalg.norm(g.x)
ny = np.linalg.norm(g.y)
x0 = g.x / np.linalg.norm(g.x)
y0 = g.y / np.linalg.norm(g.y)
A1 = taria(g.y, g.y + g.dx * x0)
res = [a, A, nx, ny, x0[0], x0[1], y0[0], y0[1], A1]
return res
<file_sep>/chcko/r/co/de.rst
.. raw:: html
%path = "Mathe/abstrakt-konkret"
%kind = chindnum["Texte"]
%level = 9
<!-- html -->
In der Mathematik so wie im täglichen Leben wird versucht, das **Konkrete**,
das sich wiederholt, einmal zu beschreiben und dann darauf zu verweisen.
Dieses Gemeinsame vieler Beobachtungen ist **abstrakt**. Das macht viele
Beschreibungen kürzer und vermindert vor allem den Gesamtumfang.
Für Lernende ist es aber oft schwierig, die Verbindung mit dem Konkreten wieder
aufzubauen. Abstrakte Definitionen und Sätze werden deshalb am besten mit
Kommentaren begleitet über ihre Motivation, ihre Reichweite, mit Beispielen oder
Anwendungen.
Normalerweise geht man vom Konkreten aus, findet Gemeinsamkeiten durch Vergleichen,
und das nennt man **Analyse**. Das Resultat der Analyse ist eine **Abstraktion**.
Informationstechnisch ist das **Reduktion von Redundanz** oder **Kompression**.
Es ist ein grundlegender, im Grunde alltäglicher, kognitiver Vorgang.
.. admonition:: Hinweis
Als Werkzeug zur Abstraktion wird oft die **Äquivalenzrelation**
herangezogen. (= **reflexive, symmetrische und transitive Relation**,
`\sim`) Dabei konzentriert man sich auf eine (einige) Eigenschaft(en) und
lässt anderes weg. Alle Elemente mit einer Ausprägung der Eigenschaft (=
Wert der Variable) sind äquivalent und bilden zusammen eine
Äquivalenzklasse. Alle Äquivalenzklassen bilden die **Quotientenmenge**
`M/\sim`, eine Menge von disjunkten Teilmengen von `M`.
Abstrakte Beschreibungen können neu kombiniert werden (**Synthese**). Das ist
auch das Grundprinzip der **Kreativität**.
<file_sep>/chcko/r/bp/__init__.py
# -*- coding: utf-8 -*-
from random import randrange
from chcko.chcko.hlp import Struct
import sympy
from sympy import S, Rational as R
from sympy.abc import x
def chiven():
b = randrange(3, 6)
c = b + randrange(4, 9)
d = c + randrange(3, 6)
u = randrange(1, 4)
v = u + randrange(1, 4)
w = v + randrange(1, 4)
l = w / b ** R(1, 2)
ff = l * x ** R(1, 2)
gg = (u - w) * (x - b) / (c - b) + w
hh = (v - u) * (x - c) / (d - c) + u
t = randrange(3, 9)
g = Struct(
f=sympy.sstr(ff),
g=sympy.sstr(gg),
h=sympy.sstr(hh),
b=b,
c=c,
d=d,
t=t)
return g
def chalc(g):
r1 = sympy.integrate(sympy.pi * S(g.f) ** 2, (x, 0, S(g.b)))
r2 = sympy.integrate(sympy.pi * S(g.g) ** 2, (x, S(g.b), S(g.c)))
r3 = sympy.integrate(sympy.pi * S(g.h) ** 2, (x, S(g.c), S(g.d)))
vr = float(S(r1 + r2 + r3).n())
s1 = sympy.integrate(2 * S(g.f), (x, 0, S(g.b)))
s2 = sympy.integrate(2 * S(g.g), (x, S(g.b), S(g.c)))
s3 = sympy.integrate(2 * S(g.h), (x, S(g.c), S(g.d)))
vs = float(S((s1 + s2 + s3) * g.t).n())
return [vs, vr]
<file_sep>/chcko/r/i/__init__.py
# -*- coding: utf-8 -*-
import random
from sympy.abc import x
from sympy import log, latex
from chcko.chcko.hlp import Struct, norm_int as chorm
jsFuncs = {'exp': 'return Math.pow(({0}),x-({1}))+({2})',
'log': 'if (x-({0})>0) return Math.log(x-({0}))+({1})',
'pow': 'return ({0})*Math.pow(x-({1}),({2}))+({3})'}
def chiven():
# r,i,n,m=143,3,5,50
N = 4
rs = lambda r: random.sample(r, 1)[0]
def gete():
e = e0, e1, e2 = rs([0.2, 0.5, 2, 3]), rs(
[-2, -1, 0, 1, 2]), rs([-2, -1, 0, 1, 2])
ee = e0 ** (x - e1) + e2
jse = jsFuncs['exp'].format(*e)
return (latex(ee), jse)
def getl():
l = l0, l1 = rs([-2, -1, 0, 1, 2]), rs([-2, -1, 0, 1, 2])
el = log(x - l0) + l1
jsl = jsFuncs['log'].format(*l)
return (latex(el), jsl)
def getp():
p = (p0, p1, p2, p3) = (
rs([-2, -1, -1.0 / 2, 1.0 / 2, 1, 2]),
rs([-2, -1, 0, 1, 2]),
rs([-0.2, -0.5, -2, -3, 0.2, 0.5, 2, 3]),
rs([-2, -1, 0, 1, 2]))
ep = p0 * (x - p1) ** p2 + p3
jsp = jsFuncs['pow'].format(*p)
return (latex(ep), jsp)
funcs = []
while len(funcs) < N:
f = rs([gete] * 100 + [getl] * 25 + [getp] * 1200)
while True:
nf = f()
if nf not in funcs:
funcs.append(nf)
break
order = list(range(len(funcs)))
random.shuffle(order)
g = Struct(funcs=funcs, order=order)
return g
def chalc(g):
return [o + 1 for o in g.order]
<file_sep>/chcko/r/m/__init__.py
# -*- coding: utf-8 -*-
import random
import numpy as np
from chcko.chcko.hlp import Struct, norm_int as chorm
def chiven():
g = Struct()
g.i = random.sample(range(2, 12), 1)[0]
g.K1 = random.sample(range(30, 50), 1)[0] * 1000
g.n1, g.n2 = random.sample(range(1, 20), 2)
dK = random.sample(list(range(-50, -1)) + list(range(1, 50)), 1)[0] * 10
g.K2 = int(g.K1 * (1.0 + g.i / 100.0) ** (g.n2 - g.n1) + dK)
return g
def chalc(g):
# g=Struct(i=8,K1=47000,n1=18,K2=36940,n2=15)
res = 2 if g.K1 * (1.0 + g.i / 100.0) ** (g.n2 - g.n1) < g.K2 else 1
return [res]
<file_sep>/chcko/r/cx/de.rst
.. raw:: html
%path = "Mathe/Externe Links"
%kind = chindnum["Texte"]
%level = 0
<!-- html -->
**Mengenlehre**
http://www.mathe-online.at/mathint/mengen/i.html
**Zahlen**
http://www.mathe-online.at/mathint/zahlen/i.html
http://members.chello.at/gut.jutta.gerhard/kurs/zahlen.htm
http://de.wikibooks.org/wiki/Mathematik_für_Schüler/_Zahlmengen
http://www.mathe-online.at/materialien/Andreas.Pester/files/ComNum/inhalte/komZahlen.html
**Algebra**
http://de.bettermarks.com/mathe-portal/fachgebiete/algebra.html
Körper:
http://www.mathe-online.at/lernpfade/lin_alg_glatz/?kapitel=1
Gruppe:
http://de.bettermarks.com/mathe-portal/fachgebiete/gruppentheorie.html
Vektoren:
http://www.mathe-online.at/mathint/vect1/i.html
Koordinatensystem:
http://www.mathe-online.at/mathint/zeich/i.html
**Analysis**
Funktion:
http://www.mathe-online.at/mathint/fun1/i.html
Koordinatensystem:
http://www.frustfrei-lernen.de/mathematik/koordinatensystem-2d-3d-abszisse-ordinate-mathematik.html
Ableitung:
http://www.mathe-online.at/mathint/diff1/i.html
<file_sep>/chcko/r/cl/de.rst
.. raw:: html
%path = "Mathe/Stukturen/Gruppe"
%kind = chindnum["Texte"]
%level = 10
<!-- html -->
*Gruppenartige algebraische Strukturen* bestehen aus einer Menge `M`
mit einer binären Verknüpfung `\circ`, kurz `(M,\circ)`.
Für alle Elemente (`\forall_{a,b\in M}`):
- `a\circ b \in M` **Abgeschlossenheit** `\rightarrow` **Magma**
- `a\circ(b\circ c) = (a\circ b)\circ c` **Assoziativgesetz** `\rightarrow` **Halbgruppe**
- `a^n = a` **Idempotente** Halbgruppe `\rightarrow` **Verband**
- `\exists_e|e\circ a = a\circ e = a` **Neutrales Element** `\rightarrow` **Monoid**
- `\exists_\bar{a}|\bar{a}\circ a = a\circ\bar{a} = e` **Inverses Element** `\rightarrow` **Gruppe**
- `a\circ b = b\circ a` **Kommutativgesetz** `\rightarrow` kommutative oder **abelsche Gruppe**
<file_sep>/chcko/r/cc/__init__.py
# -*- coding: utf-8 -*-
import numpy as np
import random
from chcko.chcko.hlp import Struct, norm_frac as chorm
import chcko.r.u as ru
def chiven():
g = ru.chiven()
g.v = random.sample(range(-9, 9), 2)
return g
#g.m=np.array([[3./13, 1./13],[4./13, -3./13]])
# g.v=np.array([-6,4])
def chalc(g):
A = np.linalg.inv(np.array(g.m)).round()
res = np.dot(A, np.transpose(g.v))
return res.tolist()
<file_sep>/chcko/r/dk/en.rst
.. raw:: html
%path = "maths/functions/exponential"
%kind = chindnum["texts"]
%level = 11
<!-- html -->
.. role:: asis(raw)
:format: html latex
Basics
------
In the **exponential function**
.. math::
y = a^x
- `x` is the **exponent**
- `a` is the **base**
- `y` is the **exponential function** of `x` to the base `a`
The **exponent** tells how often *multiplication* with `a` is repeated.
`a` must be a positive real number: `a\in\mathbb{R},\ a>0`.
.. admonition:: Multiplication
Multiplication is an operation happening in the real world and we
encode it with a number. In the number set `\mathbb{Q}` the operation is
part of the number: `2` means `\cdot 2`, and `1/2` means `/2`. `\cdot`
stands for the multiplication operation and `/` stands for the
*inverse operation*, the division. But the inverse operation is made part
of the number by the inclusion of the fractions in `\mathbb{Q}`.
So we only speak of *multiplication* and mean the application
of the operation of `\mathbb{Q}\subset\mathbb{R}`.
If `a` is bigger than `1`, then `y` will increase (grow) with `x` *strictly monotonically*: `x_1<x_2 \Rightarrow y_1<y_2`.
.. tikz:: \begin{axis}[grid=both,axis lines=middle,xmin=-3,xmax=3,ymin=0,ymax=8, samples=50]
\addplot[green] {pow(2,x)} node[above]{$y=2^x$};
\end{axis}
If `a` is smaller than `1`, then `y` will decrease (diminish) with `x` *strictly monotonically*: `x_1<x_2 \Rightarrow y_1>y_2`.
.. tikz:: \begin{axis}[grid=both,axis lines=middle,xmin=-3,xmax=3,ymin=0,ymax=8, samples=50]
\addplot[green] {pow(1/2,x)} node[above]{$y=(\frac{1}{2})^x$};
\end{axis}
Discussion
----------
Let's compare the number of combinations of n bits:
.. math::
2^n
with growth processes, like the accrual of capital with annual compounding
.. math::
(1+\frac{i}{100})^n
or the especially interesting natural growth
.. math::
e^x = \lim_{n\to\infty}(1+\frac{1}{n})^{nx}
= \lim_{m\to\infty}(1+\frac{x}{m})^m = (1+\frac{x}{\infty})^\infty
`e` is `Euler's Number <https://en.wikipedia.org/wiki/E_(mathematical_constant)>`_
whose importance is founded on the given relation.
The key to compare them is to understand **information** in the shape of bits as a growing process.
Every bit increases the size by `1` times what is there already.
Let's denote this aspect of the bit by `(1+1)` to emphasize that an additional `1`
is added to the one there already. The parentheses make this an operator, an element of the number set `\mathbb Q`.
`n` repeated applications of `(1+1)` produces a multitude of size
.. math::
(1+1)^n = 2^n
Every new bit is *compounded* to the existing combinations.
The information measure for a real variable of size `C` is the
number of bits `n=\log_2 C` needed to grow `C` combinations.
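
As a small numeric illustration (a sketch, not part of the derivation):

.. code-block:: python

    from math import log2

    C = 1024          # number of value combinations
    n = log2(C)       # bits needed to grow (address) them
    print(n)          # 10.0, since 2**10 == 1024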
.. admonition:: Which other variable to compare to?
Instead of bits we could as well use the considered variable itself because
that is there physically. But combinations are also physically there and the
selection of values, which ultimately gives birth to variables, is physical,
and the number of involved variables plays a role. First this means that
information is physical and secondly, considering quantum mechanics, the
physical number of involved variables is huge and their individual
contributions are tiny.
If we start from a *number of variables*, the *exponential function*
gives the *number of value combinations*. If we start from a *number of
values*, the *logarithm* gives the *number of variables* needed to represent
it.
For **interest calculation** we look at an amount of money (the `1`), which is
deposited in the bank with interest `i`. After `n` years the `1` has grown to
.. math::
(1+i/100)^n = q^n
The *growth factor* `q` is not `2`, normally just a little above `1`. The corresponding
"information" measure in this financial context would be the
number of years.
The essential difference with respect to bit information is that what is added
is a *fraction* of what is there. But then, a fraction is actually just a matter
of units.
The units of living organisms are cells and the ultimate units in the real
world are the quantum particles. Both of them are small compared to the things
around us. And with such small units one can also *compound* arbitrarily
(infinitely) often:
.. math::
\lim_{m\to\infty}(1+\frac{x}{m})^m = \lim_{n\to\infty}(1+\frac{1}{n})^{nx} = e^x
In the first equality we see that, given a certain growth, varying the
*compounding steps* amounts to varying the *growth factor*. Due to the
importance of `e^x` one often moves the *growth factor* `q` in `y=q^x`
to the exponent of `e` (`y=e^{kx}`). `k=\ln q` is called the *growth constant*.
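
A short sketch of these relations (the numbers are arbitrary):

.. code-block:: python

    from math import e, log

    q = 1.05                      # growth factor, e.g. 5% yearly interest
    k = log(q)                    # growth constant: q**x == e**(k*x)
    print(q**10, e**(k*10))       # both ≈ 1.6289

    # compounding in ever smaller steps approaches e**x (here x = 1)
    for m in (1, 12, 365, 10**6):
        print(m, (1 + 1/m)**m)    # tends to e ≈ 2.71828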
.. admonition:: Natural compounding in the financial world
Actually in the financial world the real compounding takes place in very
small steps, just that the bank forwards them to the customer in larger units
of time.
`x` is the information in the **natural information** unit
`nat <https://en.wikipedia.org/wiki/Nat_(unit)>`_. Basically we split up the size
of the variable to infinitely many infinitely small variables, such that
the growth factor per step is almost `1`.
<file_sep>/chcko/r/cx/en.rst
.. raw:: html
%path = "maths/external links"
%kind = chindnum["texts"]
%level = 0
<!-- html -->
**wikis**
http://en.wikipedia.org/wiki/Portal:Mathematics
http://en.wikibooks.org/wiki/Wikibooks:Mathematics_bookshelf
http://www.proofwiki.org/wiki/Main_Page
http://www.encyclopediaofmath.org/index.php/Main_Page
http://math.wikia.com/wiki/Math_Wiki
**forums and material**
http://math.stackexchange.com/
http://www.mymathforum.com/
http://mathforum.org/dr.math/
http://mathhelpboards.com/
http://www.purplemath.com/modules/index.htm
http://www.mathplanet.com/
http://www.math.com/
http://hyperphysics.phy-astr.gsu.edu/hbase/hframe.html
http://www.onlinemathlearning.com/
http://www.ask.com/question/science/mathematics
http://www.coolmath4kids.com/
http://www.coolmath.com/
http://www.mathplayground.com/
**online academies**
https://www.khanacademy.org/
http://www.cosmolearning.com/mathematics/
http://academicearth.org/online-college-courses/mathematics/
**books**
http://mathbooks.library.cornell.edu/
http://www.freebookcentre.net/SpecialCat/Free-Mathematics-Books-Download.html
http://people.math.gatech.edu/~cain/textbooks/onlinebooks.html
http://www.e-booksdirectory.com/mathematics.php
http://bib.tiera.ru/
**online math tools**
http://www.geogebra.org/cms/en/
http://www.geogebratube.org/
http://live.sympy.org/
http://octave-online.net/
http://www.wolframalpha.com/
https://www.mathway.com/
<file_sep>/chcko/r/cm/en.rst
.. raw:: html
%path = "maths/stuctures/ring"
%kind = chindnum["texts"]
%level = 10
<!-- html -->
*Ring-like algebraic structures* build on top of
group-like structures (:lnk:`r.cl`)
and consist of a set `M` with two binary
operations `+` and `\cdot` , in short `(M,+,\cdot)`.
- `(M,+)` and `(M,\cdot)` are monoids and `0\cdot a = 0` holds
`\rightarrow` **Semiring**.
- `(M,+)` is a commutative group
`\rightarrow` **Ring**.
- In `(M,\cdot)` there are no two nonzero elements whose product is 0.
  Free of zero divisors `\rightarrow` **Integral domain**.
- `(M\setminus\{0\},\cdot)` is a commutative group
  `\rightarrow` **Field**.
- `(M,\cdot)` satisfies the Jacobi identity `a\cdot (b \cdot c) + c\cdot (a \cdot b) + b\cdot (c \cdot a) = 0`
  `\rightarrow` **Lie ring**.
- `(M,\cdot)` is idempotent
`\rightarrow` **Boolean Algebra**.
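
A throwaway check of the zero-divisor condition for `\mathbb{Z}_n` (a sketch,
not part of the definitions above):

.. code-block:: python

    def zero_divisors(n):
        return [(a, b) for a in range(1, n) for b in range(1, n) if a*b % n == 0]

    print(zero_divisors(5))   # []: no zero divisors, Z_5 is in fact a field
    print(zero_divisors(6))   # [(2, 3), (3, 2), (3, 4), (4, 3)]: only a ring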
<file_sep>/chcko/r/a4/__init__.py
from chcko.chcko.hlp import Struct
import numpy as np
import pint
u = pint.UnitRegistry()
import random
def chiven():
g = Struct()
g.Vcc = random.choice(range(7,15))*u.V
g.Vi = random.choice(range(4,g.Vcc.magnitude-2))*u.V
g.hfe = random.choice(range(5,15))*10
g.Vbe = random.choice([0.3,0.7])*u.V
g.Rc = (random.choice(range(100))+100)*10*u.ohm
Vo1 = random.choice(range(2,g.Vi.magnitude-1))*u.V
ReRL = g.Rc/(g.Vcc/Vo1-1)
g.RL = g.Re = 2*round(ReRL.magnitude)*u.ohm
return g
def chalc(g):
#g = chiven()
Vo0 = g.RL/(g.RL+g.Rc)*g.Vcc
Vo1 = g.Vcc*(1-g.Rc/(g.Rc+1/(1/g.Re+1/g.RL)))
g.Icmax = Vo1/g.Re
Ibmax = g.Icmax/g.hfe
Rb = (g.Vi-Vo1-g.Vbe)/Ibmax
res = [x.to('V').magnitude for x in [Vo0,Vo1]] + [Rb.to('kΩ').magnitude]
return res
chames = [r'\(V_0/V=\)', r'\(V_1/V=\)', r'\(R_b/kΩ=\)']
P = lambda x: "{:~P}".format(x)
<file_sep>/chcko/r/bw/__init__.py
# -*- coding: utf-8 -*-
from random import randrange
from chcko.chcko.hlp import Struct
from datetime import datetime, timedelta
def chiven():
g = Struct()
g.K0 = randrange(20, 100) * 1000
g.d1 = datetime.now() - timedelta(days=randrange(300)) - \
timedelta(days=365)
g.d2 = datetime.now() + timedelta(days=randrange(300)) + \
timedelta(days=365)
g.i = 1.0 + 1.0 * randrange(20) / 10
return g
def chalc(g):
K0 = g.K0
i = g.i
d1 = g.d1
d2 = g.d2
d1d = 30 * (12 - d1.month) + ((30 - d1.day) if d1.day <= 30 else 0)
d2d = 30 * (d2.month - 1) + d2.day
n = d2.year - d1.year - 1
q1 = 1 + i * d1d / 360.0 / 100
q2 = 1 + i * d2d / 360.0 / 100
q = 1 + i / 100.0
Kn1 = K0 * q1 * q ** n * q2
nf = n + d1d / 360.0 + d2d / 360.0
Kn2 = K0 * q ** nf
dKn = Kn1 - Kn2
return [Kn1, dKn]
<file_sep>/chcko/r/ba/de.html
%path = "Mathe/Folgen und Reihen/arithmetische u. geometrische"
%kind = chindnum["Übungen"]
%level = 10
Gegeben ist die Folge {{g_fn[chiven.rr](chiven.a1,chiven.q,1)}},
{{g_fn[chiven.rr](chiven.a1,chiven.q,2)}}, {{g_fn[chiven.rr](chiven.a1,chiven.q,3)}},...<br>
Gesucht ist das {{chiven.n}}te Glied
\(a_{ {{chiven.n}} }\) =
%chq(0)
<br>
sowie die Summe der ersten {{chiven.N}} Glieder
\(\sum_{k=1}^{ {{chiven.N}} } a_k\) =
%chq(1)
<file_sep>/chcko/r/ch/en.rst
.. raw:: html
%path = "maths/sequences and series"
%kind = chindnum["texts"]
%level = 10
<!-- html -->
Sequences and Series
--------------------
A **Sequence** is a function of natural numbers (positive integers).
The natural number is used to refer to the position of an element (term) of the sequence.
It is called index.
- `a_1` is the first element of the sequence
- `a_2` is the second element
- ...
- `a_n` is the n-th element
If we sum up the first n elements of a sequence,
then we get the n-th element of the sum sequence or **series**.
The use of *function* says:
If I know which position, then I know the number at that position.
Many sequences have a regularity that allows describing them in a much shorter way
(short description = low complexity). Here are the most important ones.
Arithmetic Sequence
...................
In the arithmetic sequence one element follows from the previous one by adding a constant
`a_{n+1} = a_n + d`.
This is called **recursive** description of the arithmetic sequence.
To get to the n-th element from the first one, we repeatedly add d, n-1 times:
`a_n = a_1 + (n-1) d`
This is the **term description**.
.. admonition:: note
In many programming languages one starts with 0, because then you can do
`nd` instead of `(n-1)d`.
To recognize a sequence as arithmetic you check whether the differences
between successive numbers stay the same.
Arithmetic Series
.................
If you look at the sum of the first n elements, you can see a regularity,
which is always used to make calculations simpler.
In the above term description you can notice that, starting from the beginning,
each element is larger by d, and starting from the end (nth) backwards each element
is smaller by d. These increments cancel pairwise, and therefore the sum is
n/2 times (first plus nth element):
`\sum_{k=1}^{n} a_k = \frac{n(a_1+a_n)}{2}`
Specifically we have `1+2+\dots+n=\frac{(n+1)n}{2}`.
Geometric Sequence
..................
In the geometric sequence one element follows from the previous one by
multiplying by a constant
`a_{n+1} = a_n \cdot q`.
This is the **recursive** description of the geometric sequence.
To get to the nth element starting from the first one, we repeatedly multiply by q, n-1 times:
`a_n = a_1 q^{n-1}`
This is the **term description** of the geometric sequence.
To recognize a given sequence as geometric, you check whether the quotient of successive elements
stays the same.
Geometric Series
................
In
.. math::
\begin{matrix}
1+&q+q^2+...+q^{n-1}&=&S_n\\
&q+q^2+...+q^n&=&q S_n\\
\end{matrix}
you see many equal terms. By subtraction you get
`\sum_{k=1}^{n} q^{k-1} = 1 + q + ... + q^{n-1} = \frac{q^n-1}{q-1}=\frac{1-q^n}{1-q}`
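
The closed forms are easy to sanity-check with a few lines of Python
(a throwaway sketch; the numbers are arbitrary):

.. code-block:: python

    a1, d, q, n = 5, 3, 2, 10

    arith = [a1 + (k - 1)*d for k in range(1, n + 1)]
    print(arith[-1], a1 + (n - 1)*d)            # nth term
    print(sum(arith), n*(a1 + arith[-1])//2)    # arithmetic series

    geom = [a1*q**(k - 1) for k in range(1, n + 1)]
    print(geom[-1], a1*q**(n - 1))              # nth term
    print(sum(geom), a1*(q**n - 1)//(q - 1))    # geometric series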
<file_sep>/chcko/r/k/__init__.py
# -*- coding: utf-8 -*-
import random
import numpy as np
from chcko.chcko.hlp import Struct
def chiven():
r = sorted(random.sample(range(-9, -1), 2) + random.sample(range(1, 9), 2))
c = [-1, +r[0] + r[1] + r[2] + r[3], # x**4,x**3
-r[0] * r[1] - r[0] * r[2] - r[0] * r[3] - r[1] *
r[2] - r[1] * r[3] - r[2] * r[3], # x**2
+r[0] * r[1] * r[2] + r[0] * r[1] * r[3] +
r[0] * r[2] * r[3] + r[1] * r[2] * r[3], # x
-r[0] * r[1] * r[2] * r[3]]
g = Struct(r=r, c=c)
return g
def chalc(g):
p = np.poly1d(g.c)
p_i = np.polyint(p)
I = +p_i(g.r[1]) - p_i(g.r[0]) - p_i(g.r[2]) + \
p_i(g.r[1]) + p_i(g.r[3]) - p_i(g.r[2])
return [I]
<file_sep>/Makefile
.PHONY: check html dist up
check:
restview --long-description --strict
html:
	cd chcko/r && doit -kd. html && doit -kd. initdb
dist: html
sudo python setup.py bdist_wheel
up:
twine upload dist/`ls dist -rt | tail -1`
<file_sep>/chcko/r/bv/__init__.py
# -*- coding: utf-8 -*-
from random import sample
from chcko.chcko.hlp import Struct, norm_list, norm_frac
import numpy as np
from numpy.linalg import det
def chiven():
z = zip("ABC", zip(sample(range(-9, 10), 3), sample(range(-9, 10), 3)))
g = Struct(z)
return g
def chalc(g):
D = (np.array(g.C) + np.array(g.A) - np.array(g.B)).tolist()
A = int(
abs(det(np.vstack([np.array(g.C) - np.array(g.B), np.array(g.A) - np.array(g.B)]))))
return [','.join([str(dd) for dd in D]), str(A)]
chorm = lambda v: norm_list(v, norm_frac)
<file_sep>/chcko/r/cg/de.rst
.. raw:: html
%path = "Mathe/Vektoren/Transformation und Inverse"
%kind = chindnum["Texte"]
%level = 11
<!-- html -->
Koordinatentransformation und Inverse Matrix
============================================
Nicht immer sind die Basisvektoren unabhängig, d.h. orthogonal zueinander.
Wenn man etwa die Zutaten von einer Auswahl von Kuchen als Vektorraum auffasst
(*Zutatenvektorraum*), dann ist jeder Kuchen ein Vektor, d.h. eine unabhängige
Auswahl aus mehreren Variablen (Quantitäten der Zutaten, 0 falls nicht
verwendet).
Die Zutaten kann man als orthogonal zueinander ansehen. Der Kontext macht
einen genaueren Vergleich nicht notwendig.
*Das skalare Produkt ist 0.*
Die Kuchen wollen wir aber genauer vergleichen und zwar über deren Zutaten.
Dann werden etwa Kuchen A und Kuchen B sicher gleiche Zutaten haben.
Die Einheitsvektoren im *Kuchenvektorraum* sind nicht orthogonal zueinander,
wenn man genauer hinschaut, was man aber nicht tun muss.
*Das skalare Produkt ist nicht 0.*
Ein Vektor im Kuchenvektorraum (Wieviel von jeder Sorte Kuchen?) kann auf den Vektorraum der Zutaten
transformiert werden, indem man ihn mit einer Matrix multipliziert.
Jede Spalte in dieser Matrix stellt einen Kuchen dar.
Was man bei Matrizen und Vektoren macht, ist eine Positionskodierung. Die Position einer Zahl
bestimmt, was sie bedeutet. Das macht man auch im Zahlensystem so (Einer, Zehner, Hunderter,...).
In diesem Beispiel mit den Kuchen und den Zutaten sind die Anzahl der Variablen (=Dimension)
in den zwei Vektorräumen nicht notwendigerweise gleich. Die Dimensionen können anders sein,
etwa 10 Zutaten und 3 Kuchensorten. Die Transformationsmatrix ist dann 10x3 (10 Zeilen, 3 Spalten).
Eine solche `m\times n` Matrix mit `m\not = n` kann man nicht invertieren,
d.h. man kann nicht aus einem Zutatenvektor auf die Kuchensorten (Kuchenvektor) schließen.
Oder anders ausgedrückt: Es gibt nicht für jede Kombination von Zutaten eine Kombination
(*Linearkombination*) von Kuchen, die genau diese Zutatenmengen brauchen.
Fixiert man die Anzahl der Kuchen in einer kleinen Sortenauswahl
wird weniger Information festgelegt, d.h. es werden weniger Auswahlentscheidungen getroffen,
als im Zutatenraum, der im Beispiel als größer angenommen wird.
.. admonition:: Pseudoinverse
Man kann sie aber pseudo-invertieren (Moore-Penrose Pseudoinverse). Im
Beispiel erzeugt letztere aus den Zutaten einen Kuchensortenvektor der
minimal Zutatenreste zurück lässt (Methode der kleinsten Quadrate) bzw. die
Zutaten bestmöglichst ausnützt (maximale Entropie).
Wenn man von einem Vektorraum in einen mit gleicher Dimension transformiert,
dann kann man wieder auf die ursprünglichen Vektoren kommen,
indem man mit der *inversen Matrix* multipliziert.
Damit die Inverse existiert, muss zusätzlich zur quadratischen Form jede
Spalte/Zeile *linear unabhängig* von den anderen sein, sonst befindet man sich
effektiv in einer kleineren Matrix (*Rang einer Matrix*). Im Kuchenbeispiel
bedeutet das, dass jede Kuchensorte eine andere Zutatenkombination haben muss,
damit man sie von den anderen unterscheiden kann und damit mit ihr zusätzliche
Information kodiert werden kann.
.. admonition:: Lineare Unabhängigkeit
Quadratische Matrizen können invertiert werden,
wenn eine Spalte (oder Zeile) sich nicht aus den anderen durch Linearkombination
ergibt. Der Rang der Matrix ist gleich seiner Dimension.
Die Inverse einer quadratischen Matrix kann man allgemein berechnen indem man:
- das `ij` Kreuz weglässt und Determinante berechnet = Minor `M_{ij}`
- das Vorzeichen ändert, falls `i+j` ungerade ist
- dann transponiert, d.h. an der Diagonale spiegelt
(unten:`ij` bei `A` und `ji` bei `M`)
- alles durch die Determinante teilt
Kurz
.. math::
(A^{-1})_{ij} = \frac{1}{det(A)}(-1)^{i+j} M_{ji}
`\frac{1}{det(A)}` schreibt man oft vor der Matrix. Man kann diesen Wert aber
auch mit jeder Zahl in der Matrix multiplizieren.
Für eine *2x2 Matrix* ist `M_{ij}` die diagonal gegenüberliegende Zahl.
Wegen des Transponierens bleibt die Zahl links unten und rechts oben (Nebendiagonale),
aber das Vorzeichen ändert sich.
Auf der Hauptdiagonalen werden die Zahlen vertauscht und da `i+j` gerade ist,
bleibt das Vorzeichen.
- Hauptdiagonale `\rightarrow` Vorzeichen bleibt
- Nebendiagonale `\rightarrow` Position bleibt
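
Eine kleine Skizze (Annahme: ``numpy`` ist verfügbar), die die 2x2-Regel und
die Pseudoinverse nachrechnet:

.. code-block:: python

    import numpy as np

    A = np.array([[3., 1.],
                  [4., 2.]])
    det = A[0, 0]*A[1, 1] - A[0, 1]*A[1, 0]
    # Hauptdiagonale vertauschen, Nebendiagonale mit geändertem Vorzeichen
    Ainv = np.array([[ A[1, 1], -A[0, 1]],
                     [-A[1, 0],  A[0, 0]]]) / det
    print(Ainv)
    print(np.linalg.inv(A))        # stimmt überein

    # nicht quadratisch: Moore-Penrose-Pseudoinverse
    B = np.array([[1., 0.], [0., 1.], [1., 1.]])
    print(np.linalg.pinv(B))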
<file_sep>/chcko/r/a/__init__.py
import random
import math as m
from chcko.chcko.hlp import Struct
def angle_deg(i, g):
d = dict(zip('abc', ([g.a, g.b, g.c]*2)[i:]))
return eval('180*acos((a*a+b*b-c*c)/2/a/b)/pi', {**d,'acos':m.acos,'pi':m.pi})
def chiven():
random.seed()
a, b = random.sample(range(1, 10), 2)
c = random.randrange(max(a - b + 1, b - a + 1), a + b)
return Struct(a=a, b=b, c=c)
def chalc(g):
return [angle_deg(i, g) for i in range(3)]
chames = [r'\(\alpha=\)', r'\(\beta=\)', r'\(\gamma=\)']
<file_sep>/chcko/r/ct/de.rst
.. raw:: html
%path = "Mathe/Richtung"
%kind = chindnum["Texte"]
%level = 11
<!-- html -->
Die Variablen, mit denen man es normalerweise zu tun hat,
sind Ausdehnungen (Größen, Teilmengen, Differenzen) und nicht Punkte.
3m meint alle Punkte von 0 bis 3m.
Zwei unterschiedliche Mengen, für die alle Kombinationen vorkommen,
kann man als orthogonal zueinander ansehen.
Der Winkel zwischen ihnen ist `\frac{\pi}{2}`.
Sie spannen die größtmögliche Kombinationsmenge (Fläche) auf.
Vektorprodukt maximal. Skalares Produkt 0.
Ein zweidimensionaler Vektor `\vec{v}` und eben auch `z\in\mathbb{C}` bezeichnet
eine solche Ausdehnung. Die eingeschlossene Fläche ist
`\vec{v_1}\times\vec{v_2}` oder `Im(z_1\bar{z_2})`.
`z_1\bar{z_2}` hat das Skalarprodukt im Realteil und das Vektorprodukt im Imaginärteil.
Größen, die in die gleiche Richtungen zeigen, kann man addieren. Ungleiche
Richtungen kann man komponentenweise addieren.
`\frac{\vec{v_1}\vec{v_2}}{|\vec{v_1}|}=\vec{v_1}_0\vec{v_2}` ist die
Komponente von `\vec{v_2}`, die in Richtung `\vec{v_1}` addiert werden kann.
`\frac{z_1\bar{z_2}}{|z_1|}` ist die komplexe Zahl mit Realteil addierbar in
Richtung `z_1` und Imaginärteil orthogonal zu `z_1`, multiplizierbar um die
aufgespannte Fläche zu erhalten. Besser man rechnet jedoch normal `z_1+z_2`,
d.h. mit den durch das Koordinatensystem gegebenen Komponenten.
Der Winkel ergibt sich aus dem Verhältnis der aufgespannten Fläche zur maximalen Fläche
`\angle(\vec{v_1},\vec{v_2})=\arcsin\frac{|\vec{v_1}\times \vec{v_2}|}{|\vec{v_1}||\vec{v_2}|}`
oder aus dem Verhältnis der addierbaren Komponente zur gesamter Länge
`\angle(\vec{v_1},\vec{v_2})=\arccos\frac{\vec{v_1}\vec{v_2}}{|\vec{v_1}||\vec{v_2}|}`
und im Komplexen zusammen
`\angle(z_1,z_2)=\arg(\frac{z_1\bar{z_2}}{|z_2||z_2|})=\arg{z_1\bar{z_2}}`.
Ein anderes Wort für Richtung ist Phase, das wohl von dem umgangssprachlich
vorbelegten Wort Richtung etwas ins Abstraktere ablenken soll. Essentiell ist
der Vergleich zweier Größen bezüglich addierbarer Komponenten. Dazu werden
Variablen die keine Richtung darstellen, aber eben Einfluss auf Addierbarkeit
haben, auf einen Winkel umgerechnet, der dann Phase heißt. Beispiel:
Der Zeitpunkt `t` bei Schwingungen wird `\varphi=\frac{2\pi}{T}t` oder die
kombinierte Zeit-, Raum-Position bei Wellen wird
`\varphi=\frac{2\pi}{\lambda}x-\frac{2\pi}{T}t`. `Ae^{i\varphi}` gibt dann die
momentan addierbare Amplitude wieder.
<file_sep>/chcko/r/bn/__init__.py
# -*- coding: utf-8 -*-
from random import randrange
from math import log
from chcko.chcko.hlp import Struct
def chiven():
g = Struct()
g.Kn = randrange(1000, 2000)
g.K0 = randrange(20, 999)
g.i = randrange(2, 9)
return g
def chalc(g):
res = log(g.Kn / g.K0) / log(1.0 + g.i / 100.0)
return [res]
<file_sep>/chcko/r/a0/__init__.py
# # for a problem uncomment (non-problem texts need no code in __init__.py)
# from chcko.chcko.hlp import Struct
# # randomize numbers using e.g. sample and randrange
# import random
# def chiven():
# g = Struct()
# # fill g
# return g
# def chalc(g):
# res = []
# # fill res
# return res
# # #remove if default norm_rounded works fine
# # def chorm(answers):
# # return norm_rounded(answers)
# # #remove if default equal_eq works fine
# # def chequal(a, r):
# # return equal_eq(a, r)
<file_sep>/chcko/r/a2/en.html
%path = "maths/numbers/NZQR"
%kind = 0
%level = 9
<!-- html -->
Given two integers \(a\) and \(b\),
with
\(a {{'<>'[chiven.a.is_positive]}} 0\) and \(b={{chiven.rep[1].replace('*','')}}\),
which expression is always a natural number?
<br>
%include('r/a2/x_')
<file_sep>/chcko/r/bz/__init__.py
# -*- coding: utf-8 -*-
from random import randrange
from math import pi, atan
from chcko.chcko.hlp import Struct
def chiven():
g = Struct()
g.l = randrange(21, 100)
g.h = randrange(5, int(g.l / 2))
return g
def chalc(g):
percentage = 100 * g.h / (g.l * g.l - g.h * g.h) ** 0.5
angle = 180 * atan(g.h / (g.l * g.l - g.h * g.h) ** 0.5) / pi
return [percentage, angle]
<file_sep>/chcko/r/bb/__init__.py
# -*- coding: utf-8 -*-
import numpy as np
import random
from itertools import permutations
from chcko.chcko.hlp import Struct, norm_frac
pyt = [[3, 4], [5, 12], [6, 8], [4, 3], [12, 5],
[8, 6], [-3, 4], [-5, 12], [-6, 8],
[-4, 3], [-12, 5], [-8, 6], [3, -4],
[5, -12], [6, -8], [4, -3], [12, -5], [8, -6]]
ipyt = 0
per = list(permutations([0, 1, 2]))
def chiven():
global pyt, ipyt
A = np.array(random.sample(range(-9, 10), 2))
ipyt = (ipyt + 1) % len(pyt)
d1 = np.array(pyt[ipyt])
d2 = np.array([d1[1], -d1[0]])
if random.random() > 0.5:
d2 = -d2
B = A + d1
C = A + d2
p = [A.tolist(), B.tolist(), C.tolist()]
random.shuffle(p)
g = Struct(p=p)
return g
def chalc(g):
for i in per:
a = np.array(g.p[i[1]]) - np.array(g.p[i[0]])
b = np.array(g.p[i[2]]) - np.array(g.p[i[0]])
if np.dot(a, b) == 0:
break
F = abs(np.linalg.det([a, b])) / 2
return ["ABC"[i[0]], F]
def chorm(answers):
return [answers[0].upper(), norm_frac(answers[1])]
<file_sep>/chcko/r/bm/__init__.py
# -*- coding: utf-8 -*-
from random import randrange
from chcko.chcko.hlp import Struct, norm_expr, norm_rounded, equal_0 as equal
import sympy
from sympy import S, Rational as R, latex
from sympy.abc import x
def chiven():
xp0 = randrange(7, 10)
pmax = randrange(7, 10)
Ep = R(-pmax, xp0) * x + pmax
E = sympy.integrate(Ep, x)
b = randrange(2, xp0 - 2)
cmax = int(Ep.subs(x, b).n())
c = randrange(1, cmax)
a = R(randrange(1, pmax - c), b * b)
Kp = a * (x - b) ** 2 + c
Gp = Ep - Kp
rG = [r.n() for r in sympy.roots(Gp) if r > 0][0]
G = sympy.integrate(Gp, x)
mG = G.subs(x, rG)
Ko = int(mG - 1) / 2
#K = sympy.integrate(Kp,x)+Ko
#G = E.n()-K.n()
#rts = sorted([r.n() for r in sympy.roots(G) if r > 0])
g = Struct(pmax=pmax, xp0=xp0, Ko=Ko, Kp=sympy.sstr(Kp))
return g
def chorm(a):
res = [norm_expr(aa) for aa in a[:5]] + norm_rounded(a[5:], 2)
return res
def chalc(g):
Kp = S(g.Kp)
Ep = R(-g.pmax, g.xp0) * x + g.pmax
E = sympy.integrate(Ep, x)
K = sympy.integrate(Kp, x) + S(g.Ko)
Gp = Ep - Kp
rG = [r.n() for r in sympy.roots(Gp) if r > 0][0]
G = E - K
mG = G.subs(x, rG)
kk = sympy.Wild('kk')
dd = sympy.Wild('dd')
kd = Ep.match(kk * x + dd)
p = kd[kk] * x / 2 + kd[dd]
mp = p.subs(x, rG)
el = S(1 + kd[dd] / (kd[kk] * x / 2))
return [
sympy.sstr(Ep),
sympy.sstr(E),
sympy.sstr(K),
sympy.sstr(p),
sympy.sstr(el),
rG,
mp]
<file_sep>/chcko/r/cn/de.rst
.. raw:: html
%path = "Mathe/Zahlen/Darstellung"
%kind = chindnum["Texte"]
%level = 9
<!-- html -->
Eine Anzahl ist etwas reales und unabhängig von deren Darstellung.
Hier werden nur `Stellenwertsysteme <http://de.wikipedia.org/wiki/Stellenwertsystem>`_
diskutiert, aber es gibt andere Systeme. Siehe
`Wikipedia Artikel <http://de.wikipedia.org/wiki/Zahlensystem>`_.
Darstellung von Zahlen
======================
Man kann nicht für jede Anzahl ein individuelles Zeichen machen. Stattdessen
verwenden wir Zeichen bis zu einer bestimmten Anzahl und, sobald diese erreicht
ist, zählen wir Häufchen von dieser Anzahl.
.. admonition:: Hinweis
Ein Zahlensystem kann man mit einem Buchstaben oder Lautsystem vergleichen.
In einer Sprache werden Laute kombiniert und so eine Vielheit erzeugt, eben Wörter.
Diese werden dann zu realen Dingen abgebildet.
Mit Zahlen ist es gleich. Ziffern werden kombiniert und dann zur Anzahl abgebildet.
Das Dezimalsystem (Zehnsersystem)
---------------------------------
- Für eine Anzahl unter zehn gibt es ein eigenes Zeichen: 1, 2, 3, 4, 5, 6, 7, 8, 9.
- Für "kein" gibt es die **0**. Zusammen mit der 0 gibt es im Zehnersystem 10 Zeichen.
- Für eine Anzahl Zehn und darüber machen wir Zehnerhäufchen und zählen diese separat.
Positionscodierung:
Statt etwa 3 Zehner und 4 Einer zu schreiben, schreiben wir die 3 an eine Stelle für die Zehner
und 4 an die Stelle für die Einer: 34.
Das kann man Positionscodierung nennen: Über die Position identifiziert man, was man meint.
302 heißt 3 Zehnerhäufchen von Zehnerhäufchen (Hundert), 0 (keine) Zehner und 2 Einer.
Wertigkeit:
Die Wertigkeit der Stellen (=Positionen) ist von rechts nach links aufwärts:
... 10³=1000 10²=100 10¹=10 10⁰=1
Das sind alles Potenzen von 10.
10 ist die **Basis** des Dezimalsystems.
Brüche:
So wie ein zehnfaches Häufchen eine Stelle hat, so wird auch dem zehnten Teil eine
Stelle nach dem Komma gegeben.
,1/10¹=1/10 1/10²=1/100 1/10³=1/1000 ...
Das Dualsystem (Binärsystem, Zweiersystem)
------------------------------------------
Im Dualsystem wird eine Anzahl von 2 zu einem eigenen Häufchen.
Zusammen mit der 0 gibt es im Binärsystem 2 Zeichen, die dann bedeuten: **Da oder Nicht Da**
Die Wertigkeit der Stellen ist von rechts nach links aufwärts:
... 2⁴=16 2³=8 2²=4 2¹=2 2⁰=1 , `2^{-1}` `2^{-2}` ...
Beispiel:
1011₂ = 11₁₀
Das Dualsystem ist sehr wichtig, da es in Computern verwendet wird und weil 2 die kleinste Anzahl ist,
bei der man noch auswählen kann, d.h. Information codieren kann.
Das Hexadezimalsystem (Sechszehnersystem)
-----------------------------------------
Hier macht man jeweils 16 zu einem neuen Häufchen.
Zusammen mit der 0 gibt es im Hexadezimalsystem 16 unterschiedliche Zeichen:
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, A, B, C, D, E, F.
Dabei bedeuten A=10, B=11, C=12, D=13, E=14, F=15.
Die Wertigkeit der Stellen sind von rechts aufwärts:
... 16⁴=65536 16³=4096 16²=256 16¹=16 16⁰=1 , `16^{-1}` `16^{-2}` ...
Da 2⁴=16 braucht man für eine Stelle im Hexadezimalsystem immer 4 Stellen im Dualsystem.
Da das 2er-System wichtig ist, macht diese Eigenschaft auch das Hexadezimalsystem
und alle Zahlensysteme, deren Basis eine Potenz von 2 ist, wichtig,
z.B. Basis 8 (oktal), 64 (base64), 128 (ASCII) und 256 (ANSI).
Duodezimalsystem
-----------------
Zwölf hat viele Teiler: 2, 3, 4, 6
Das erlaubt eine einfache Darstellung von Brüchen mit diesem Nenner.
Aber wie beim Dezimalsystem (1/3 = 0.333...)
gibt es auch im Duodezimalsystem Brüche, die periodisch sind (1/5 = 0.2497 2497...).
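
Eine kleine Skizze (nur zur Illustration), wie man zwischen Stellenwertsystemen
in Python umrechnet:

.. code-block:: python

    print(int('1011', 2))     # 11: Dualsystem -> Dezimalsystem
    print(format(11, 'b'))    # '1011': Dezimal -> Dual
    print(int('FF', 16))      # 255: Hexadezimal -> Dezimal
    print(format(255, 'X'))   # 'FF': Dezimal -> Hexadezimal
    print(format(255, 'o'))   # '377': Dezimal -> Oktal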
<file_sep>/chcko/r/cu/en.rst
.. raw:: html
%path = "maths/functions/integral of 1÷z"
%kind = chindnum["texts"]
%level = 12
<!-- html -->
From real calculus we know that
`\frac{dy}{dx}=\frac{d\,e^x}{dx}=e^x=y`, and therefore for the inverse `\ln`
`\frac{dx}{dy}=\frac{d\,\ln y}{dy}=\frac{1}{y}` for `y>0`. For the antiderivative
of `\frac{1}{y}` we can include negative `y`, if we take the absolute value:
`\int\frac{1}{y}dy=\ln|y|+C`. This follows from the symmetry of `\frac{1}{y}`.
At 0 there is a singularity, i.e. one cannot integrate over it.
In `\mathbb{C}` we have `e^z=e^{x+iy}=e^xe^{iy}`,
i.e. the real part becomes the absolute value `e^x` and the imaginary part becomes the argument.
That is the reason for the period `2\pi i` along the imaginary axis.
The antiderivative of `\frac{1}{z}` is the inverse of `e^z`,
which means that the absolute value becomes the real part `\ln|z|` and the argument
becomes the imaginary part:
`\int \frac{1}{z}dz=\ln|z|+i\arg(z)+C`
In `\mathbb{C}` one can integrate around the singularity:
.. math::
\oint_{|z|=1}\frac{1}{z}dz =
(\ln|z| + i\arg z)\bigr|_{\arg z=0,\,|z|=1}^{\arg z=2\pi,\,|z|=1} = 2\pi i
This is the precursor of the residue theorem.
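
A quick numerical check of the contour integral (a sketch, assuming ``numpy``):

.. code-block:: python

    import numpy as np

    t = np.linspace(0, 2*np.pi, 100001)
    z = np.exp(1j*t)             # the unit circle |z| = 1
    dz = 1j*np.exp(1j*t)         # dz/dt
    print(np.trapz(dz/z, t))     # ≈ 2πi ≈ 6.2832j
    print(2j*np.pi)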
<file_sep>/chcko/r/ba/en.html
%path = "maths/sequences and series/arithmetic and geometric"
%kind = chindnum["problems"]
%level = 10
In the sequence {{g_fn[chiven.rr](chiven.a1,chiven.q,1)}},
{{g_fn[chiven.rr](chiven.a1,chiven.q,2)}}, {{g_fn[chiven.rr](chiven.a1,chiven.q,3)}},...<br>
what is the {{chiven.n}}th term
\(a_{ {{chiven.n}} }\) =
%chq(0)
<br>
and what is the sum of the first {{chiven.N}} terms
\(\sum_{k=1}^{ {{chiven.N}} } a_k\) =
%chq(1)
<file_sep>/chcko/r/dh/en.rst
.. raw:: html
%path = "physics/mechanics/forces"
%kind = chindnum["examples"]
%level = 10 #in school years
<!-- html -->
.. role:: asis(raw)
:format: html latex
Structure Analysis Problem solved with Python
=============================================
This pin jointed truss is the initial example taken from `akabaila`_
.. _akabaila: http://akabaila.pcug.org.au/StructuralAnalysis.pdf
.. tikz:: \coordinate (A) at (0,0) (A) node [below]{0};
\coordinate (B) at (0,3) (B) node [above]{1};
\coordinate (C) at (4,0) (C) node [below]{2};
\coordinate (D) at (4,3) (D) node [above]{3};
\coordinate (E) at (8,0) (E) node [below]{4};
\coordinate (F) at (8,3) (F) node [above]{5};
\coordinate (G) at (12,0) (G) node [below]{6};
\coordinate (H) at (12,3) (H) node [above]{7};
\tikzset{-};
\draw[black,very thick] (A) -- (B) node [midway,left]{0};
\draw[black,very thick] (A) -- (C) node [midway,below]{3};
\draw[black,very thick] (B) -- (C) node [midway,left,below]{2};
\draw[black,very thick] (B) -- (D) node [midway,below]{1};
\draw[black,very thick] (C) -- (D) node [midway,left]{4};
\draw[black,very thick] (C) -- (E) node [midway,below]{7};
\draw[black,very thick] (D) -- (E) node [midway,left,below]{6};
\draw[black,very thick] (D) -- (F) node [midway,below]{5};
\draw[black,very thick] (E) -- (F) node [midway,left]{8};
\draw[black,very thick] (E) -- (G) node [midway,below]{10};
\draw[black,very thick] (E) -- (H) node [midway,right,below]{11};
\draw[black,very thick] (F) -- (H) node [midway,below]{9};
\draw[black,very thick] (G) -- (H) node [midway,right]{12};
\draw[black,thin] (0,0) -- (1,-1) -- (-1,-1) -- (0,0);
\draw[black,thin] (12,0) -- (13,-1) -- (11,-1) -- (12,0);
\tikzset{->};
\draw[black,thin] (4,-1) -- (4,-2) node [midway,right]{100kN};
\draw[black,thin] (8,-1) -- (8,-2) node [midway,right]{150kN};
\tikzset{<->};
\draw[black,very thin] (0,4) -- (12,4) node [midway,above]{3x8m};
\draw[black,very thin] (13,0) -- (13,3) node [midway,right]{6m};
It can be analysed using force and moment vectors,
because the resulting linear equations are neither underdetermined
nor overdetermined, but determined.
The truss is said to be *statically determinate*.
**We want to find the forces along the members.**
To find all the forces along the members we use:
- no revolution: all moments must be matched by reacting moments
- no translation: all forces in all nodes add to zero.
We will solve this here using ``Python`` with ``numpy`` and ``scipy``
and more precisely we will do with these functions:
.. code-block:: python
:linenos:
from numpy import dot, cross, array, allclose, transpose, finfo, double;
from scipy.linalg import norm, solve;
unit = lambda v: v/norm(v)
R90 = array([[0, 1], [-1, 0]]) # rotate 90 degrees
V = array
eps = finfo(double).eps
First we need to describe the problem world, i.e. we need to find the variables.
By variable I mean the real thing here. A variable consists of values.
When using a value of a variable, then we do this via reference,
more precisely via index into the list of values representing the variable.
The following describes the system. I use capital letters for variables and
small letters for references to values of the variables. The names are short,
one letter if possible. I take the first letter of the English word. N is Nodes,
E is Edges, F is Forces, S is Support nodes. n, e, f and s reference values of these
variables. A value of Nodes consists of x and y, which reference the external variables
X and Y (the values of). By external I mean that they are not specified in the code.
.. code-block:: python
:linenos:
#nodes = x, y
N = [V((a*8,b*6)) for a,b in [(0,0),(0,1),(1,0),(1,1),(2,0),(2,1),(3,0),(3,1)]];
#edge = (n1,n2), n_i = index into N
E = [(0, 1), (1, 3), (1, 2), (0, 2), (2, 3), (3, 5), (3, 4),
(2, 4), (4, 5), (5,7), (4, 6), (4, 7), (6, 7)]
#external forces = index into N for node of application, vector
F = [(2,V((0,-100))), (4,V((0,-150)))]
#support points = indices into N
S = [0,6];
Now let's find the forces along the edges.
1. No revolution.
We need to make the moment created around one support point zero by constructing a force
at the other support point. If there were more than one other support point for an axis,
the system would be overdetermined, which we don't handle here.
.. code-block:: python
:linenos:
def react_to_mp_at_q(mp,q):
"""p != q are any nodes.
m stands for moment. mp is the moment around node p.
"""
dp=N[q]-N[mp[0]]
norm_dp = norm(dp)
if norm_dp == 0:
return V((0,0))
ndp = dp/norm_dp
fq = mp[1]*dot(R90,ndp)/norm_dp
return -fq
2. No translation
We distribute the forces to a node to those edges not having a
force associated yet. In our 2D case we need two such edges. One is OK, if the
force is exactly in that direction.
.. admonition:: shortcoming
If there are more such edges, I take one edge, if it is in the direction of the
force and ignore the others. This is physically not correct, but the
method applied here is not for overdetermined systems.
The force placed on an edge via this distribution will be forwarded to the
other node, but there the direction must be changed: An edge under tension will
pull from both nodes and a contracted edge will push into both nodes.
.. code-block:: python
:linenos:
def distribute(f,es,q):#f = sum of forces on edges es to node q
ies = [i for i in range(len(E)) if q in E[i]]
mat = []
eo = []#edge, other node
for e in ies:
if e not in es:
#E[e]
t = [tt for tt in E[e] if tt==q][0]#this
o = [tt for tt in E[e] if tt!=q][0]#other
d0 = unit(N[o]-N[t])
mat.append(d0)
eo.append((e,o))
A = transpose(array(mat))
dim = len(f)
if len(eo)==dim:
r = solve(A,f)
for i in range(len(r)):
ff = r[i]*mat[i]
yield ff, eo[i]#even if ff==0
elif len(eo) > dim:
for i,v in enumerate(mat):
angle = dot(v,f)/norm(v)/norm(f)
if abs(angle) < eps or abs(angle+1) < 4 * eps: #same direction
yield f,eo[i]
return
raise ValueError('node %i overdetermined'%q)
else:
if allclose(unit(f),mat[0]):
yield f, eo[0]
else:
raise ValueError('node %i underdetermined'%q)
The above ``distribute`` needs the edges along which forces come into the node.
We keep track of the edges with forces in a ``{node, [(edge,force)..]}`` dictionary.
Initially this is empty. We add the external forces and the forces due to the moments.
Then we distribute forces in unbalanced nodes.
.. code-block:: python
:linenos:
from functools import reduce  # reduce is not a builtin in Python 3
def no_revolution():
EF = dict([(p,[]) for p in range(len(N))])
for p,ff in F:
EF[p].append(([],ff))
for i in range(len(S)):
for j in range(len(S)):
if j != i:
p = S[i]
q = S[j]
mp = (p,sum([cross(ff[1],(N[ff[0]]-N[p])) for ff in F]))
fq = react_to_mp_at_q(mp,q)
EF[q].append(([],fq))
return EF
def no_translation(EF):
_sum = lambda tt: [reduce(lambda x,y:x+y,t) for t in zip(*tt)]
unbalanced = lambda:[(i,v) for i,v in [(i,_sum(EF[i])) for i in EF]
if v and not allclose(norm(v[1]),0)]
u = unbalanced()
while len(u)>0:
q,(es,f) = u[0]
dist=list(distribute(f,es,q))
for ff,eo in dist:
EF[q].append(([eo[0]],-ff)) #q is this node of edge eo[0]
EF[eo[1]].append(([eo[0]],ff)) #eo[1] is the other node of edge eo[0]
u = unbalanced()
return EF
def format_ef(EF):
from itertools import chain
from pprint import pformat
e_f = list(chain.from_iterable([[(tuple(e),norm(f)) for e,f in EF[i]] for i in EF]))
e_f = list([(e[0],round(f,2)) for e,f in set(e_f) if e])
e_f.sort()
return pformat(e_f)
EF = no_revolution()
EF = no_translation(EF)
format_ef(EF)
Here are the resulting forces along the edges:
+--------+--------+--------+-----+-------+--------+-------+--------+-----+--------+-----+--------+--------+
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 |
+========+========+========+=====+=======+========+=======+========+=====+========+=====+========+========+
| 116.67 | 155.56 | 194.44 | 0.0 | 16.67 | 177.78 | 27.78 | 155.56 | 0.0 | 177.78 | 0.0 | 222.22 | 133.33 |
+--------+--------+--------+-----+-------+--------+-------+--------+-----+--------+-----+--------+--------+
<file_sep>/chcko/r/cv/en.rst
.. raw:: html
%path = "maths/entropy"
%kind = chindnum["texts"]
%level = 12
<!-- html -->
To exclusively select an element v (value, state, point) of a variable V
(cardinality `|V|=n`, n-set) we need `\log_bn` b-sets (normally `b=2`, bit).
.. math::
I(V)=\log n
`I(V)` is the code width of the variable, i.e. the information (in bits) needed
to select a value, and it is the same for all values. `I` is a property of the variable.
.. admonition:: random variable, variate, variable
A variate is a random variable.
Often the distinction between random variable and variable is not
necessary. Both mean a real quantity, whose values can occur repeatedly.
If we consider every occurrence `c \in C` of values of `V`,
then there is another way to refer to values of `V`.
We first choose an occurrence of any value with `I(c)=\log |C|`
and subtract the information to select occurrences of `v\in V` (`I(c_v)=\log |C_v|`).
`\frac{|C|}{|C_v|}` is the number of `|C_v|` sized subsets of `C`.
So to select such a `v` occurrences subset we need
.. math::
I(v)=\log\frac{|C|}{|C_v|}=-\log\;p(v)
This is different for all `v\in V` and represents the optimal code length for every value
(entropy code, Huffman code).
The average code width is
.. math::
H(V)=-\sum_vp(v)\log\;p(v)
`H(V)` is the **entropy** of the variable `V` (note: not of a value of `V`).
The information in a variable depends on the probability distribution
of value occurrences (= random event).
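
A small sketch with hypothetical occurrence counts, computing the optimal code
lengths and the entropy:

.. code-block:: python

    from math import log2

    C_v = {'a': 8, 'b': 4, 'c': 2, 'd': 2}       # occurrences per value (made up)
    C = sum(C_v.values())

    p = {v: c/C for v, c in C_v.items()}
    I_v = {v: -log2(pv) for v, pv in p.items()}  # optimal code length per value
    H = sum(pv*I_v[v] for v, pv in p.items())    # entropy = average code width
    print(I_v)   # {'a': 1.0, 'b': 2.0, 'c': 3.0, 'd': 3.0}
    print(H)     # 1.75 bits, less than log2(4) = 2 for the uniform case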
<file_sep>/chcko/r/bq/__init__.py
# -*- coding: utf-8 -*-
from random import randrange
from chcko.chcko.hlp import Struct
import random
import sympy
from sympy.abc import x
def chiven():
g = Struct()
g.w = randrange(400, 500)
g.h = randrange(150, 250)
g.dw = randrange(30, 40)
g.dh = randrange(40, 50)
g.d = randrange(300, 500)
return g
def chalc(g):
w = g.w
h = g.h
dw = g.dw
dh = g.dh
d = g.d
pi = x * (x - w)
pimaxx = w / 2.0
pimaxy = pi.subs(x, w / 2.0)
pi = pi * h / abs(pimaxy)
po = (x + dw) * (x - w - dw)
pomaxx = w / 2.0
pomaxy = po.subs(x, w / 2.0)
po = po * (h + dh) / abs(pomaxy)
ipo = sympy.integrate(-po, x)
f1 = ipo.subs(x, 0) - ipo.subs(x, -dw)
f2 = ipo.subs(x, w + dw) - ipo.subs(x, w)
ipio = sympy.integrate(pi - po, x)
f3 = ipio.subs(x, w) - ipio.subs(x, 0)
f = f1 + f2 + f3
v = f * d
vm3 = v / 100.0 / 100.0 / 100.0
return [vm3]
<file_sep>/chcko/r/h/__init__.py
# -*- coding: utf-8 -*-
import random
import numpy as np
from chcko.chcko.hlp import Struct, norm_frac as chorm
def chiven():
k = np.array(random.sample(range(30, 60), 3))
A = np.array([1, 0, 0] + random.sample(range(1, 19), 6))
A.shape = (3, 3)
Z = np.array(random.sample(range(1, 19), 3))
m = np.array(random.sample(range(1, 19), 1))[0]
g = Struct(k=k.tolist(), A=A.tolist(), Z=Z.tolist(), m=m)
return g
def chalc(g):
axy = np.dot(g.A, g.k)
z = np.dot(g.Z, axy)
K = g.m * z
return [K]
<file_sep>/chcko/r/a2/__init__.py
from chcko.chcko.hlp import Struct
import random
from sympy import Integer
from itertools import product
from numpy import where
is_natural = lambda x: x.is_integer and x>=0
rch = random.choice
def chiven():
g = Struct()
random.seed()
g.a = Integer(rch([-1,1]))
op = '+-*/'
opnd = product(op,['ab','ba'])
ops = [o[0].join(o[1]) for o in opnd]
random.shuffle(ops)
takeop= lambda s: next(x for x in ops if s in x)
g.ops = [takeop(s) for s in op]
random.shuffle(g.ops)
g.rep = ['b',f"{rch('+-')}{rch('23')}*a"]
return g
def chalc(g):
#g=chiven()
res1 = [is_natural(eval(x.replace(*g.rep),{**g})) for x in g.ops]
res = [''.join([chr(65+x) for x in where(res1)[0]])]
return res
<file_sep>/chcko/r/bw/de.html
%path = "Mathe/Finanz/Zinsen/gemischt u. theoretisch"
%kind = chindnum["Übungen"]
%level = 11
Der Betrag von €{{chiven.K0}} wurde am {{chiven.d1.strftime('%d.%m.%Y')}}
auf die Bank gebracht und soll am {{chiven.d2.strftime('%d.%m.%Y')}}
mit Zinsen abgehoben werden. Die Bank rechnet volle Kalenderjahre (1.1. - 30.12, 12x30M, 360T)
mit Zinseszinsen und nicht volle Jahre linear (gemischte oder praktische Verzinsung).
Die Jahreszinsen der Bank sind {{chiven.i}}%.<br>
Berechne das Kapital zum Abhebezeitpunkt.
%chq(0)
<br>
Würde die Bank den ganzen Zeitraum mit Zinseszinsen rechnen,
wobei unvollständige Jahre als Brüche einfließten (theoretische Verzinsung),
um wieviel Euro unterschiede sich das Kapital zum Abhebezeitpunkt
bei dieser Berechnungsmethode vom vorigen Ergebnis?
%chq(1)
<file_sep>/chcko/r/bo/__init__.py
# -*- coding: utf-8 -*-
from random import randrange, sample
from chcko.chcko.hlp import Struct
import numpy as np
def chiven():
n = 5
x = sorted(sample(range(20), n))
p = randrange(3, 6)
ymax = p * x[n - 1]
kf = randrange(1, ymax // 3)
yf = lambda v: int((1.0 * (ymax - kf) / x[n - 1] / x[n - 1]) * v * v + kf)
y = [yf(ax) for ax in x]
g = Struct(x=x, y=y, p=p)
return g
def chalc(g):
pf = np.polyfit(g.x, g.y, 2)
pf = [-pf[0], g.p - pf[1], -pf[2]]
xmax = np.roots(np.polyder(pf))
ymax = np.polyval(pf, xmax)
return [xmax[0], ymax[0]]
<file_sep>/chcko/r/cu/de.rst
.. raw:: html
%path = "Mathe/Funktionen/Integral von 1÷z"
%kind = chindnum["Texte"]
%level = 12
<!-- html -->
Aus der reellen Analysis wissen wir, dass
`\frac{dy}{dx}=\frac{d\,e^x}{dx}=e^x=y`, weswegen die Umkehrung
`\frac{dx}{dy}=\frac{d\,\ln y}{dy}=\frac{1}{y}` für `y>0`. Für die
Stammfunktion von `\frac{1}{y}` kann man auch negative `y` mit einschließen,
wenn man den Betrag nimmt: `\int\frac{1}{y}dy=\ln|y|+C`. Das ergibt sich aus der
Symmetrie von `\frac{1}{y}`. Bei 0 besteht eine Singularität, d.h. man kann dort nicht
darüber integrieren.
In `\mathbb{C}` ist `e^z=e^{x+iy}=e^xe^{iy}`,
d.h. der Realteil wird Betrag `e^x` und der Imaginärteil wird Argument des Wertes.
Daher kommt auch die Periode `2\pi i` entlang der imaginären Achse.
Die Stammfunktion von `\frac{1}{z}` ist die Umkehrfunktion von `e^z`,
d.h. der Betrag wird Realteil `\ln|z|` und das Argument wird Imaginärteil:
`\int \frac{1}{z}dz=\ln|z|+i\arg(z)+C`.
In `\mathbb{C}` kann man um die Singularität herum integrieren:
.. math::
\oint_{|z|=1}\frac{1}{z}dz =
(\ln|z| + i\arg z)\bigr|_{\arg z=0,\,|z|=1}^{\arg z=2\pi,\,|z|=1} = 2\pi i
Das ist ein Vorläufer des Residuensatzes.
<file_sep>/chcko/r/bf/__init__.py
# -*- coding: utf-8 -*-
from random import randrange
from chcko.chcko.hlp import Struct, listable
def chiven():
g = Struct()
g.a = randrange(20, 128)
g.b = randrange(20, 128)
return g
@listable
def chorm(a):
return a.lstrip('0')
def chalc(g):
return ['{0:b}'.format(g.a + g.b)]
<file_sep>/chcko/r/n/__init__.py
# -*- coding: utf-8 -*-
import random
import numpy as np
from chcko.chcko.hlp import Struct
def chiven():
a = random.sample(range(30, 50), 1)[0] * 1000
b = random.sample(range(50, 100), 1)[0] * 1000
n = random.sample(range(3, 6), 1)[0]
k = random.sample(range(6, 12), 1)[0]
y = np.array(range(1, n + 1))
T = np.sum((y * a + b) / (1 + 1.0 * k / 100) ** y)
c = round(T, -3)
dd = (T - c)
d = dd * random.sample(range(-4, 5), 1)[0]
c = round(T + d, -3)
g = Struct(a=a, b=b, c=c, n=n, k=k)
return g
def chalc(g):
# g={'a':31000,'b':98000,'c':721000,'n':5,'k':9}
y = np.array(range(1, g.n + 1))
T = np.sum((y * g.a + g.b) / (1 + 1.0 * g.k / 100) ** y)
dd = (T - g.c)
return [T, 1 if dd > 0 else 2]
<file_sep>/README.rst
chcko-r
=======
`chcko-r`_ is an example content package and test data for `chcko`_.
`chcko-r`_ is a random mix of problems and texts.
A live version is available at:
https://chcko.eu
This python package depends on the `chcko`_ python package,
which is not automatically installed,
because on gcloud `chcko`_ is *uploaded*, not installed.
Therefore locally install with::
pip install --user chcko
pip install --user chcko-r
There is no need to install,
if `chcko`_ is parallel to `chcko-r`_.
A content package can add additional dependencies.
This ``chcko-r`` depends on
`schemdraw <https://pypi.org/project/SchemDraw/>`__
`pint <https://pypi.org/project/Pint/>`__.
To install the dependencies without installing ``chcko-r``,
use ``pip install -r requirements.txt``.
Prepare content::
make html
To run content locally with `chcko`_ installed::
cd chcko-r
runchcko
Install content::
cd chcko-r
pip install --user .
Image files are compiled to ``/chcko-r/chcko/_images``,
and copied to ``chcko/chcko/_images`` common to all `chcko`_ content when installing.
Image files need to have globally unique names across all ``chcko-<author_id>`` packages.
Use the ``<author_id>_<problem_id>_xxx`` scheme,
e.g. ``.. texfigure:: r_dg_c1.tex``.
Images can also be generated on the fly.
Examples:
`r.a3 <https://github.com/chcko/chcko-r/blob/master/chcko/r/a3/circuit.html>`__
`r.a4 <https://github.com/chcko/chcko-r/blob/master/chcko/r/a4/circuit.html>`__.
.. _`chcko`: https://github.com/chcko/chcko
.. _`chcko-r`: https://github.com/chcko/chcko-r
<file_sep>/chcko/conf.py
try:
from chcko.chcko import conf
except ModuleNotFoundError:
import sys
import os.path as p
chckoparallel = p.normpath(p.join(p.dirname(__file__),'..','..','chcko'))
sys.path.insert(0,chckoparallel)
from chcko.chcko import conf
globals().update({k:v for k,v in conf.__dict__.items() if not k.startswith('__')})
<file_sep>/chcko/r/dg/en.rst
.. raw:: html
%path = "physics/circuits/thevenin"
%kind = chindnum["examples"]
%level = 13
<!-- html -->
.. role:: asis(raw)
:format: html latex
We will derive the gain of a band-stop filter using Thevenin's method.
Our starting circuit is the following from
`Op Amps for EveryOne (5-10) <http://www.ti.com/lit/an/slod006b/slod006b.pdf>`_.
.. texfigure:: r_dg_c1.tex
:align: center
The input voltage is against the ground. We redraw the circuit to reflect this:
.. texfigure:: r_dg_c2.tex
:align: center
We want to find `G=\frac{V_o}{V_i}`.
`V_o` is the voltage at the rightmost R. We will find the Thevenin equivalent there.
.. texfigure:: r_dg_c3.tex
:align: center
Next we find the currents using Kirchhoff's current law (KCL) and voltage law (KVL).
There are two loops where current flows.
There is no current at the R at which we have made an open circuit.
.. texfigure:: r_dg_c4.tex
:align: center
The resulting equations are
.. math:: \begin{array}{l l}
    V_i - I_2 R - \frac{I_2}{i\omega C} - I_1 R & = 0\\
    V_i - \frac{I_1 - I_2}{i\omega C} - I_1 R & = 0
    \end{array}
We solve for `I_1` and `I_2`:
.. math:: \begin{array}{l l}
I_1 &= \frac{\omega C V_i (-2 i+C R \omega)}{-1-3 C R i \omega+C^2 R^2 \omega^2}\\
I_2 &= -\frac{i \omega C V_i}{-1-3 C R i \omega+C^2 R^2 \omega^2}
\end{array}
The small loop at `V_{th}` with the now known currents can be used to calculate
.. math:: V_{th}=\frac{I_2}{i\omega C} + I_1 R
Next we will need the Thevenin impedance equivalent. For this we remove the source `V_i`
and calculate the impedance as seen from `V_{th}`.
.. texfigure:: r_dg_c5.tex
:align: center
We redraw to see a little better, what is parallel and what is serial
.. texfigure:: r_dg_c6.tex
:align: center
With this we get
.. math:: Z_{th}=\left(\frac{1}{i \omega C}+\frac{R \frac{1}{i \omega C}}{R+\frac{1}{i \omega C}}\right) || R =
\frac{R (1+2 i \omega C R)}{1+3 i \omega C R - C^2 R^2 \omega^2}
and
.. math:: V_o = \frac{R}{R + Z_{th}} V_{th}
Then we finally have the **gain**:
.. math:: G = \frac{V_o}{V_i} = \frac{(-i+C R \omega)^2}{-2-5 i \omega C R+C^2 R^2 \omega^2}
= \frac{(1+i \omega C R)^2}{2+5 i \omega C R-C^2 R^2 \omega^2}
= \frac{(1+ s\tau)^2}{2+5 s\tau+(s\tau)^2}
We have replaced `\tau=R C` and `s=i \omega`, as is custom for filters.
The denominator can be solved for `s\tau` (-4.56,-0.44) and the product of the solutions is 2.
Therefore
.. math:: G = \frac{(1+ s\tau)^2}{2(1+\frac{s\tau}{0.44})(1+\frac{s\tau}{4.56})}
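
The algebra above can be reproduced with ``sympy`` (a sketch; the symbol names
are mine, not part of the original derivation):

.. code-block:: python

    from sympy import symbols, I, solve, simplify, cancel

    Vi, I1, I2, R, C, w = symbols('V_i I_1 I_2 R C omega')
    Zc = 1/(I*w*C)                     # impedance of one capacitor

    # KVL for the two loops
    eq1 = Vi - I2*R - I2*Zc - I1*R
    eq2 = Vi - (I1 - I2)*Zc - I1*R
    cur = solve([eq1, eq2], [I1, I2])

    Vth = cur[I2]*Zc + cur[I1]*R       # open-circuit (Thevenin) voltage
    Zs = Zc + R*Zc/(R + Zc)            # C in series with (R parallel C)
    Zth = Zs*R/(Zs + R)                # ... in parallel with the remaining R
    Vo = Vth*R/(R + Zth)               # voltage divider at the output R
    print(simplify(cancel(Vo/Vi)))     # compare with the gain G above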
<file_sep>/chcko/r/cv/de.rst
.. raw:: html
%path = "Mathe/Entropie"
%kind = chindnum["Texte"]
%level = 12
<!-- html -->
Um exklusiv ein Element v (Wert, Zustand, Punkt) von einer Variablen V
(Kardinalität `|V|=n`, n-Menge) auszuwählen, braucht es `\log_bn` b-Mengen
(normalerweise `b=2`, bit).
.. math::
I(V)=log\;n
`I(V)` ist die Breite der Variable, d.h. die Informationsmenge (Anzahl von bits),
die es braucht, um Werte von V auszuwählen. `I` ist gleich für alle Werte von V.
`I` bezieht sich auf die Variable.
.. admonition:: Zufallsvariable, Variate, Variable
Im Englischen gibt es Variate für Zufallsvariable (random variable).
Oft ist diese Unterscheidung zwischen Zufallsvariable und Variable nicht
notwendig. Beides meinen hier die reale Größe mit Werten, die wiederholt
auftreten können.
Wenn man jeden bekannten Auftritt (Verweis, Ereignis) `c \in C` der
Werte von `V` betrachtet, dann ist das ein zusätzlicher Weg, um auf die
Werte von `V` zu verweisen. Man wählt zuerst einen beliebigen Auftritt aus
mit `I(c)=\log |C|` und subtrahiert davon die Auswahl eines Auftritts eines
bestimmten Wertes `v` (`I(c_v)=\log |C_v|`). `\frac{|C|}{|C_v|}` ist die
Anzahl von `|C_v|` großen Untermengen von `C`. Um eine solche Untermenge einmal
auszuwählen, braucht es
.. math::
I(v)=\log\frac{|C|}{|C_v|}=-\log\;p(v)
`I(v)` ist für jedes `v\in V` anders und stellt den optimalen Code dar
(Entropie Code, Huffman code).
Die durchschnittliche Codebreite ist
.. math::
I(V)=-\sum_vp(v)\log\;p(v)
`I(V)` ist die **Entropie** der Variablen `V` (und nicht eines Wertes von `V`).
Wenn für alle `v` `p=\frac{1}{n}` ist, dann erhält man wieder `I(V)=\log n`.
Die Information in einer Variablen hängt von der Wahrscheinlichkeitsverteilung
der Wertauftritte (= Wahrscheinlichkeitsereignis) ab.
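Eine kleine Python-Skizze (die Verteilungen sind frei angenommen) veranschaulicht
die Formel für die durchschnittliche Codebreite::

    # Skizze: Entropie I(V) = -sum p(v) log2 p(v) in bit
    from math import log2
    def entropie(p):
        return -sum(pv * log2(pv) for pv in p if pv > 0)
    print(entropie([0.25, 0.25, 0.25, 0.25]))   # Gleichverteilung: log2(4) = 2 bit
    print(entropie([0.5, 0.25, 0.125, 0.125]))  # ungleiche Verteilung: 1.75 bit < 2 bit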
<file_sep>/chcko/r/by/__init__.py
# -*- coding: utf-8 -*-
from random import randrange, sample
from math import pi, sin
from chcko.chcko.hlp import Struct
def chiven():
al, be = sample(range(10, 85), 2)
c = randrange(2, 20)
g = Struct(al=al, be=be, c=c)
return g
##g = Struct(al=39,be=69,c=13)
#g = chiven()
# chalc(g)
def chalc(g):
ga = 180 - g.be - g.al
csga = g.c / sin(pi * ga / 180)
a = sin(pi * g.al / 180) * csga
b = sin(pi * g.be / 180) * csga
return [a, b]
chames=[r'\(a=\)',r'\(b=\)']
<file_sep>/chcko/r/bt/__init__.py
# -*- coding: utf-8 -*-
from random import randrange
from chcko.chcko.hlp import Struct
import sympy
from sympy import S, Rational as R
from sympy.abc import x
def chiven():
a = 1
b = R(randrange(3, 6))
c = R('0.6') + randrange(5) * R(1, 10)
e = c + randrange(4) * R(1, 10)
d = a + randrange(5, 9)
f = e + randrange(4, 7)
j = f - R(1, 2)
g = d + R(1, 2)
h = d + randrange(4, 9)
k = R(b - c, a * a)
u = -k * x ** 2 + b
v = (e - c) / (d - a) * (x - a) + c
l = (f - e) / (h - d) ** R(1, 2)
w = l * (x - d) ** R(1, 2) + e
z = l * (x - d) ** R(1, 2)
dc = Struct(
u=sympy.sstr(u),
v=sympy.sstr(v),
w=sympy.sstr(w),
z=sympy.sstr(z),
d=d,
g=g,
h=h)
return dc
def chalc(dc):
p1 = sympy.integrate(sympy.pi * S(dc.u) ** 2, (x, 0, 1))
p2 = sympy.integrate(sympy.pi * S(dc.v) ** 2, (x, 1, S(dc.d)))
p3 = sympy.integrate(sympy.pi * S(dc.w) ** 2, (x, S(dc.d), S(dc.h)))
m4 = sympy.integrate(sympy.pi * S(dc.z) ** 2, (x, S(dc.g), S(dc.h)))
res = S(p1 + p2 + p3 - m4).n()
return [res]
<file_sep>/chcko/r/g/__init__.py
# -*- coding: utf-8 -*-
import random
from sympy.abc import x
from sympy.solvers import solve
from sympy import sstr, apart, S
from chcko.chcko.hlp import Struct, norm_frac as chorm
def chiven():
a, b, c, d, e, f = random.sample(list(range(-9, -1)) + list(range(1, 9)), 6)
#x = a/b
rs = a - c
ls = b * x - c
rs = d * rs / ls
ls = 1 - e * x / (d + e * x)
rs = rs / (d + e * x)
rs = apart(rs)
ls = ls / f
rs = rs / f
ls1, ls2 = ls.as_two_terms()
rs1, rs2 = rs.as_two_terms()
g = Struct()
g.sls = sstr(S(ls1) + S(ls2))
g.srs = sstr(S(rs1) + S(rs2))
return g
def chalc(g):
try:
sol = solve(S(g.sls) - S(g.srs), x)[0]
return [sol]
except:
return ['-']
<file_sep>/chcko/r/di/de.rst
.. raw:: html
%path = "Mathe/Zahlen/C"
%kind = chindnum["Texte"]
%level = 9
<!-- html -->
.. contents::
Die komplexen Zahlen `\mathbb{C}`
---------------------------------
Bei den reellen Zahlen (:lnk:`r.ci`) haben wir immer wieder neue Mengen zu den
"Zahlen" hinzu genommen.
Das machen wir bei folgendem Problem auch.
`x^2` nimmt nur positive Werte an.
- Es geht Information verloren (die Wertemenge ist kleiner) und
- Gleichungen wie `x^2+1=0` können nicht gelöst werden
Wenn wir ein "Zahl" `i` erfinden die `i^2=-1` ergibt, dann deckt `x^2` mit den Vielfachen
von `i` die ganzen negativen Zahlen ab. `i` nennen man **imaginäre Einheit**.
`i` und deren Vielfache haben mit den reellen Zahlen vorerst nichts zu tun.
Sie sind orthogonal zu `\mathbb{R}`. Orthogonal bedeutet, dass alle Kombinationen
zulässig sind und das entspricht einer Ebene, der **komplexen Zahlenebene** oder
**Gaussschen Zahlenebene**.
`z = a + ib \in \mathbb{C}`
Das ist wie ein zweidimensionaler Vektor: 2 orthogonale Richtungen unabhängig addiert.
Es gibt drei Darstellungen (die dritte, die Normaldarstellung, folgt weiter unten)
- `z = a+ib`, also über Komponenten oder
- `z = r(cos\varphi + i sin\varphi)` über Länge `r` und Richtung `\varphi` (Argument, Phase) in Bogenmaß.
Aber nun folgendes:
- `i\cdot 1 = i`, also Multiplikation mit `i` macht aus 1 ein `i` das orthogonal zu
1, der **reellen Einheit**, ist. Das ist eine Drehung um den rechten Winkel nach links.
- `i\cdot i = -1`. Wieder eine Drehung um den rechten Winkel.
Allgemein: Multiplikation mit `i` macht eine Drehung um den rechten Winkel, per Konvention nach links.
Weiters: Die Multiplikation zweier komplexer Zahlen addiert deren Winkel.
Das lässt einen vermuten, dass es eine Darstellung geben
könnte, die den Winkel im Exponenten hat. Reihenentwicklung von `\sin` und `\cos`
und `e^x` in der Analysis und Vergleich ergibt die **Eulersche Formel**:
- :inl:`r.cy`
`z=re^{i\varphi}` ist die **Normaldarstellung** komplexer Zahlen.
Von `\sin` und `\cos` weiß man, dass sie die Periode `2\pi` haben, so auch `e^{i\varphi}`.
Die n-te Wurzel bildet die Perioden bis `2n\pi` auf Winkel unter `2\pi` ab und führt somit zu n
unterschiedlichen Werten:
.. math::
z^{1/n}=r^{1/n}e^{i(\varphi/n+2k\pi/n)}
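Eine kurze numerische Skizze dazu (Python mit ``cmath`` ist hier nur eine Annahme zur Veranschaulichung)::

    # Skizze: die n n-ten Wurzeln von z, hier n=5 und z=1
    import cmath
    n, z = 5, 1
    r, phi = abs(z), cmath.phase(z)
    wurzeln = [r**(1/n) * cmath.exp(1j*(phi/n + 2*cmath.pi*k/n)) for k in range(n)]
    for w in wurzeln:
        print(w, w**n)  # w**n ist bis auf Rundungsfehler wieder z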
Allgemeiner:
In `\mathbb{C}` hat jedes Polynom n-ten Grades genau n Nullstellen
(**Hauptsatz der Algebra**). Davon können manche aber zusammenfallen.
`\mathbb{C}` heißt deshalb **algebraisch abgeschlossen**.
Das heißt, dass nicht nur `x^2`, sondern alle Polynome ganz `\mathbb{C}` auf ganz `\mathbb{C}` abbilden.
Es geht keine Information verloren.
.. admonition:: Hinweis
In der Funktionentheorie erfährt man, dass sich das auf alle Funktionen
ausdehnen lässt, die in ganz `\mathbb{C}` unendlich oft differenzierbar
(analytisch, holomorph) sind (ganze Funktionen), da sie sich in
Taylorreihen entwickeln lassen.
Weiteres:
- a = Re(z) ist der Realteil
- b = Im(z) ist der Imaginärteil
- `\bar{z}=re^{-i\varphi}=a-ib` heißt (komplex) Konjugierte zu z. `\bar{z^n}=\bar{z}^n`.
`z_1\bar{z_2}` vereint in sich Skalarprodukt (`Re(z_1\bar{z_2})=r_1r_2\cos\Delta\varphi`) und Vektorprodukt
(`Im(z_1\bar{z_2})=r_1r_2\sin\Delta\varphi`).
- `|z| = \sqrt{z\bar{z}} = \sqrt{a^2+b^2} = r` ist der Betrag (oder Modul) von z.
Das Quadrat über die Länge einer komplexen Zahl unabhängig von ihrer Richtung
ergibt sich durch `z\bar{z}` und nicht durch `z^2`.
- `φ = arg(z)` ist Argument (oder Phase) von z.
- `arg(z_1z_2)=arg(z_1)+arg(z_2)`
- `arg(\frac{z_1}{z_2})=arg(z_1)-arg(z_2)`
Anwendung von `\mathbb{C}`
--------------------------
Da `\mathbb{C}` eine Erweiterung von `\mathbb{R}` darstellt,
kann man alles mit `\mathbb{C}` machen, was man mit `\mathbb{R}` macht.
Das essentiell Neue an `\mathbb{C}` sind aber alle Richtungen, statt nur `+` und `-`.
Was heißt Richtung?
:inline:`r.ct`
Die komplexen Zahlen werden in der Physik und Technik im Umfeld von Schwingungen und Wellen
verwendet, und davon gibt es viele:
- Mechanik/Festkörper: Wasserwellen, Schallwellen, elastische Wellen,...
- Elektrotechnik: Wechselstrom, Wechselstromkreis (Widerstand, Kapazität und Induktivität),...
- Elektrodynamik: Elektromagnetische Wellen (Lichtwellen, Radiowellen), ...
- Optik: Lichtwelle, ...
- Quantenmechanik: Teilchenwellen, ....
Letztendlich basieren diese Anwendungen auf dem uneingeschränkteren Rechnen in `\mathbb{C}`
und auf den mathematisch auf `\mathbb{C}` aufbauenden Ergebnissen etwa der Funktionentheorie.
Viele physikalische Systeme werden mit Differentialgleichungen beschrieben.
Diese reduzieren sich auf Polynome mit komplexen Lösungen (Fundamentalsatz der Algebra)
und führen zu komplexen Funktionen.
<file_sep>/chcko/r/cj/de.rst
.. raw:: html
%path = "Mathe/Zahlen/Kombination mit Operationen"
%kind = chindnum["Texte"]
%level = 9
<!-- html -->
Kombiniert man mit der Anzahl, also der natürlichen Zahl,
den Vorgang (Operation) des Hinzufügens und dessen Umkehrung,
dann kommt man zu den **ganzen Zahlen**.
.. math::
\mathbb{Z} = \mathbb{N}\times\{+,-\}
Wiederholung des Hinzufügen ist Vervielfachung.
Kombiniert man mit der ganzen Zahl
den Vorgang (Operation) des Vervielfachens und dessen Umkehrung,
dann kommt man zu den **rationalen Zahlen**.
.. math::
\mathbb{Q} = \mathbb{Z}\times\{+,-\}\\
\mathbb{Q} = \mathbb{N}\times\{+,-\}\times\{\cdot,\div\}
Wiederholen des Vervielfachens ist Potenzieren.
Nicht mehr so analog, aber prinzipiell erhält man
durch Kombination der rationalen Zahlen mit dem Potenzieren eine
Erweiterung, die **algebraischen Zahlen**.
Bis jetzt wurden Zahlen mit Vorgängen/Operation kombiniert. Das ist ein
Algorithmus. Erlaubt man auch nicht endende Algorithmen, dann schließt man die
**irrationalen Zahlen** (algebraische und transzendente Zahlen) mit ein und
erweitert zu den **reellen Zahlen** `\mathbb{R}`.
.. admonition:: Hinweis
Endlicher oder unendlicher Algorithmus: `\sqrt{2}` ist mit den
Grundrechnungsarten ausgedrückt ein unendlicher Algorithmus. Wertet man
`\sqrt{2}` nicht aus, sondern hantiert symbolisch mit ihm, dann ist das
wieder ein endlicher Algorithmus.
<file_sep>/chcko/r/ck/__init__.py
# -*- coding: utf-8 -*-
from __future__ import division
from random import randrange
from chcko.chcko.hlp import Struct, norm_int as chorm
def chiven():
return Struct(
A=randrange(150, 200), B=randrange(100, 150), C=randrange(50, 100)
)
def chalc(g):
# g=chiven()
# g=Struct(A=200,B=150,C=100)
# total unknown but assumed known
# p1=g.A/total
# p2=g.B/total
# p = p1*p2 #p1 and p2 independent
# total*p = g.A*g.B/total = g.C
total = g.A * g.B // g.C # converts to int
detected = g.A + g.B - g.C
undetected = total - detected
return [undetected]
<file_sep>/chcko/r/b/de.rst
.. raw:: html
%path = "Mathe/Vektoren"
%kind = chindnum["Texte"]
%level = 11
<!-- html -->
Vektoren
--------
Was ist ein Vektor?
...................
Einen **mehrdimensionalen Vektor** kann man als unabhängige Auswahl (Wert) von
mehreren Variablen verstehen.
Die Werte (Zahl+Einheit) müssen unabhängig **addierbar** sein.
Die jeweiligen Einheiten sind die **Einheitsvektoren**. Sie bilden
zusammen die **Basis** und werden deshalb auch **Basisvektoren** genannt.
Auch eine Auswahl aus einer Variable ist ein Vektor, ein **eindimensionaler** Vektor.
Der gesamte Vektor kann mit einer Zahl, dem **Skalar**, multipliziert
werden und ergibt wieder einen Vektor.
Beispiel
- Wenn ich in einen Laden gehe, dann sind die Produkte darin mein
Vektorraum (Koordinatensystem, KS) und mein Einkaufskorb ist ein Vektor,
d.h. eine Fixierung der Werte (wieviel?) von jeder Variable (hier Produkt).
- Wenn meine Frau auch einkaufen gegangen ist, addieren sich unsere Einkäufe zu hause unabhängig,
d.h. Milch + Milch, Butter + Butter, ...
Koordinatentransformation
.........................
Eine Matrix transformiert einen Vektor in einem Vektorraum zu einem Vektor in einem anderen Vektorraum.
Deshalb lernen wir zuerst den Vektor kennen. Eine Matrix ergibt sich, wenn wir
von einem KS zu einem anderen wechseln wollen.
Beispiel :inline:`r.a0`
Wie schreibt man Vektoren?
..........................
- Als Spalte von Zahlen `\vec{x}=\begin{pmatrix}x_1\\x_2\end{pmatrix}`
Die Einheitsvektoren, d.h. was die Zeilen bedeuten, gibt man separat an.
- Explizit mit Einheiten ausgeschrieben. `\vec{x}=x_1\vec{e_1}+x_2\vec{e_2}` (3
Milchpackungen + 5 Butter) Wenn ohne Pfeil ist mit Index oben die Zahl
gemeint und mit Index unten die Einheit (Dimension,Richtung):
`x=x^1e_1+x^2e_2`.
Die Notation ist nicht der Vektor selbst.
Vektorverknüpfungen
-------------------
.. .. texfigure:: vector_dot_cross.tex
.. :align: center
.. tikz:: \coordinate (0) at (0,0);
\coordinate (A) at (1,3);
\coordinate (B) at (4,2);
\coordinate (C) at (2,1);
\tikzset{->}
\draw[black,very thick] (0) -- (A) node [midway,left]{$\vec{x}$};
\draw[black,very thick] (0) -- (B) node [near end,right,below]{$\vec{y}$};
\draw[black,very thin] (0) -- (C) node [midway,right,below]{$x_y$};
\draw[-,thin] (A) -- (C) node [midway,right]{$x_{\perp y}$};
Es gibt neben der Addition zwei weitere wichtige Vektorverknüpfungen.
- **Skalarprodukt (dot-product)**. Es ergibt eine Zahl (Skalar), welche die
Abhängigkeit darstellt, d.h. wie wenig man die Werte unabhängig auswählen kann.
.. math:: \vec{x}\vec{y}=x_yy=y_xx=x_1y_1+x_2y_2
- Orthogonale Vektoren ergeben 0 (keine Abhängigkeit).
- Bei parallelen Vektoren ist es das Produkt der Längen.
Die Länge eines Vektors `\vec{x}` ist damit `\sqrt{\vec{x}\vec{x}}`.
Länge wird als `|\vec{x}|` oder einfach nur `x` notiert.
- `\vec{x_o}=\frac{\vec{x}}{x}` ist der Einheitsvektor (Länge 1, Richtung von `\vec{x}`)
- Das skalare Produkt definiert den Winkel zwischen 2 Vektoren: `\cos\alpha = \frac{\vec{x}\vec{y}}{xy}`
- **Vektorprodukt oder Kreuzprodukt (cross-product)**. Für Dimension `= 3`.
Es ergibt einen Vektor der orthogonal zu `\vec{x}` und `\vec{y}` ist
und die Länge ist die Fläche des von `\vec{x}` und `\vec{y}` aufgespannten Parallelogramms.
.. math::
\vec{x}\times\vec{y}=x_{\perp y}y=y_{\perp x}x=
\begin{vmatrix}
\vec{e_1} & \vec{e_2} & \vec{e_3} \\
x_1 & x_2 & x_3 \\
y_1 & y_2 & y_3
\end{vmatrix}
Wenn `\vec{x}` und `\vec{y}` zweidimensional sind, ist nur die `\vec{e_3}` Komponente von
`\vec{x}\times\vec{y}` ungleich 0, die dann gleich
`\begin{vmatrix}
x_1 & x_2 \\
y_1 & y_2
\end{vmatrix}=
\begin{vmatrix}
x_1 & y_1 \\
x_2 & y_2
\end{vmatrix}`
ist. Zum Vergleich: Die Determinante von 3 Vektoren im 3D Raum ist das
Volumen des aufgespannten Parallelepipeds.
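Eine kurze numerische Skizze zu Skalar- und Kreuzprodukt (numpy und die Beispielvektoren sind hier nur Annahmen)::

    # Skizze: Skalarprodukt, Winkel und Kreuzprodukt
    import numpy as np
    x = np.array([1.0, 3.0, 0.0])
    y = np.array([4.0, 2.0, 0.0])
    skalar = np.dot(x, y)                    # x1*y1 + x2*y2 + x3*y3 = 10
    alpha = np.arccos(skalar / (np.linalg.norm(x) * np.linalg.norm(y)))
    kreuz = np.cross(x, y)                   # orthogonal zu x und y
    print(skalar, np.degrees(alpha), kreuz)  # |kreuz| = Parallelogrammfläche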
<file_sep>/chcko/r/bl/de.html
%path = "Mathe/Finanz/Zinsen/Vergleich Zinssatz"
%kind = chindnum["Übungen"]
%level = 11
Welcher Zinssatz ist nach konformer (äquivalenter) Berechnung besser? <br>
%include('r/bl/x_')
<file_sep>/chcko/r/cn/en.rst
.. raw:: html
%path = "maths/numbers/representation"
%kind = chindnum["texts"]
%level = 9
<!-- html -->
The numbers are their own concepts independent from their representations.
The `wikipedia article <http://en.wikipedia.org/wiki/Numeral_system>`_
gives a great overview beyond the
`positional system <http://en.wikipedia.org/wiki/Positional_notation>`_
described here.
Positional System
=================
It is not possible to give every number its own sign. Instead we use signs
up to a certain count and then one makes heaps (groups) of that count and
starts counting these heaps.
.. admonition:: Note
One can compare the numeral system with letters or phonetic systems.
In a language one combines phonemes to produce a multitude, i.e. the words.
These are associated/mapped to concepts.
With numbers, digit signs are combined and then mapped to a count and beyond.
The Decimal System
------------------
- For a count below ten there are signs: 1, 2, 3, 4, 5, 6, 7, 8, 9.
- For "none" there is **0**. Together these are 10 signs.
- For a count of ten and above one makes heaps of ten and counts these separately.
Position coding:
Instead of writing 3 tens and 4 ones we write 3 at a position for the tens
and 4 for the position for the ones: 34.
This can be called position coding: Via the position we identify what we mean.
302 means 3 heaps of tens of tens (hundred), 0 (no) tens and 2 ones.
Position value:
The value of the position increases from right to left
... 10³=1000 10²=100 10¹=10 10⁰=1
These are powers of 10.
10 is the **base** of the decimal system.
Fractions:
Just as heaps of 10 get a position, fractions in tenths get a position to the right
of the dot (.)
1/10¹=1/10 1/10²=1/100 1/10³=1/1000 ...
The Binary System
-----------------
In the binary (dual) system two things already make their own heap.
Together with the 0 the binary system has 2 signs, which mean: **there or not there**
The position values of the positions are these
... 2⁴=16 2³=8 2²=4 2¹=2 2⁰=1 . `2^{-1}` `2^{-2}` ...
Example
1011₂ = 11₁₀
The binary system is important, because computers use it and because 2 is the
smallest quantity one can still choose from.
The Hexadecimal System
----------------------
Here we make heaps of 16.
Together with the 0 we have 16 signs:
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, A, B, C, D, E, F.
The new ones are A=10, B=11, C=12, D=13, E=14, F=15.
The position values are:
... 16⁴=65536 16³=4096 16²=256 16¹=16 16⁰=1 . `16^{-1}` `16^{-2}` ...
Because of 2⁴=16, one needs 4 binary digits for one hexadecimal digit.
Since the binary system is important, the hexadecimal is important, too,
and so are other systems with base power of 2,
like base 8 (octal), 64 (base64), 128 (ASCII) and 256 (ANSI).
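A short Python sketch of these representations (the concrete numbers are just examples)::

    # sketch: positional representations in binary and hexadecimal
    print(int('1011', 2))     # 11
    print(bin(11), hex(255))  # 0b1011 0xff
    print(int('ff', 16))      # 255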
Duodecimal System
-----------------
Twelve has many divisors: 2, 3, 4, 6
This allows an easy representation of fractions with these denominators.
But just as the decimal system has simple fractions that are periodic (1/3 = 0.333...),
the duodecimal system does too (1/5 = 0.2497 2497...).
<file_sep>/chcko/r/bp/de.html
%path = "Mathe/Integral/bestimmtes/Volumen/Zylinder oder Rotation"
%kind = chindnum["Übungen"]
%level = 12
Es soll ein Bonbon in Fischform erzeugt werden.
Das Fischprofil wird nach oben durch folgende Funktionen begrenzt.<br>
\(y={{ chutil.TX(chiven.f) }}\), 0<=x<{{ chiven.b }}<br>
\(y={{ chutil.TX(chiven.g) }}\), {{ chiven.b }}<=x<{{ chiven.c }}<br>
\(y={{ chutil.TX(chiven.h) }}\), {{ chiven.c }}<=x<{{ chiven.d }}<br>
Nach unten entsteht die Begrenzung durch Spiegeln an der x-Achse.
<ol>
<li>Skizziere dieses Profil.</li>
<li>Berechne das Volumen einer Zylindervariante mit Höhe {{chiven.t}}.
<br>\(V_p=\)
%chq(0)
</li>
<li>Berechne das Volumen einer Rotationsvariante (um die x-Achse).
<br>\(V_r=\)
%chq(1)
</li>
</ol>
<file_sep>/chcko/r/cd/en.rst
.. raw:: html
%path = "maths/finance/interest"
%kind = chindnum["texts"]
%level = 10
<!-- html -->
`K` Capital
An amount of money.
`i` Interest rate
The increase or decrease of the capital `K` is given in percent, % = 1/100.
- interest value: `3\% K = 0.03 K`.
- increase : `K + 3\% K = (1+3\%) K = 1.03 K`.
- decrease : `K - 3\% K = (1-3\%) K = 0.97 K`.
`n` Period (Year/Quarter/Month/Day)
The interest rate `i` always refers to a time period, in which the increase or decrease
takes place (is compounded)
- `i` normally refers to a full year (annual interest rate, effective annual interest rate)
- `i_{12}` is a monthly interest rate
- `i_4` is a quarterly interest rate
After this time period `K` has grown by `iK`, i.e. `K_{n=1} = K_0 (1+i) = K_0 q` (q = 1+i).
Compound interest
After one period the capital becomes `K_{n=1} = K_0 (1+i) = K_0 q`,
after n=2 periods `K_0 q^2`, after n=3 periods `K_0 q^3`...
After n periods:
- `K_n = K_0 q^n`
- compound interests: multiply the starting capital (principal) with `q^n`
to get the value `n` periods later.
- discount interests: multiply the capital with `q^{-n}` to get the value `n` periods earlier.
.. http://en.wikipedia.org/wiki/Time_value_of_money
Annuity
An annuity is a payment `r` in regular time periods.
The number of periods for the annuity depends on the payment.
The accrued payments make up the lump-sum. This is the pension or **annuity formula**:
`R_n = \sum_{m=0}^{n-1} r_m = \sum_{m=0}^{n-1} r q^m = r \frac{q^n - 1}{q-1}`
The formula can be used to calculate the **future value** (FV) `R_n`
when interest is compounded at the end of the periods.
An annuity due is compounded at the beginning of each period: `R_n^v = q R_n`
The **present value** (PV) of an annuity is obtained by discounting from the FV:
`B_n = R_n q^{-n}`.
Compounding periods smaller than a year
To compare the effective annual rate of interest with the rate for the period one converts the rates.
A linear conversion is used when no compounding takes place within the year:
- `i_{12} = i/12`
- `i_4 = i/4`
With compounding, the effective annual interest rate `i_{eff}` (as distinguished
from the nominal rate `i`) is calculated with the **conformal** conversion:
- `i_{eff} = (i_{12} + 1)^{12} - 1`
- `i_{eff} = (i_4 + 1)^4 - 1`
Normally the annual interest rate is given.
For a monthly or quarterly compounding this first needs to be converted.
Annuity rest
To calculate the remaining value of the annuity at a certain time
one subtracts the future value of the annuity for that time from
the capital value for that time.
Convert one annuity to another
- First one finds the future value `R_n`.
- This `R_n` needs to be compounded to the end of the other annuity.
- Using the annuity formula one can calculate the requested quantity (`n`, `q`, `r`)
of the new annuity.
Comparison of capitals or offers
To compare values one must first compound their values to the same time
(time-value, e.g. present value) using the compounding or annuity formulas.
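As a compact illustration of the compound interest and annuity formulas above
(a sketch; the numbers are assumed examples)::

    # sketch: compound interest and future value of an ordinary annuity
    i = 0.03                       # effective annual interest rate
    q = 1 + i
    K0, n, r = 1000.0, 10, 100.0
    Kn = K0 * q**n                 # compounding n periods forward
    B = Kn * q**-n                 # discounting back gives K0 again
    Rn = r * (q**n - 1) / (q - 1)  # annuity formula
    print(round(Kn, 2), round(B, 2), round(Rn, 2))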
<file_sep>/chcko/r/cb/__init__.py
# -*- coding: utf-8 -*-
from chcko.chcko.hlp import Struct, norm_frac as chorm
import random
from sympy import Rational as R
__all__ = ['chiven', 'chalc']
def chiven():
g = Struct()
g.x = random.sample(list(range(-9, -1)) + list(range(1, 9)), 2)
g.b = random.sample(list(range(-9, -1)) + list(range(1, 9)), 1)[0]
return g
def chalc(g):
res = [R(g.b, g.x[1])]
return res
<file_sep>/chcko/r/p/__init__.py
# -*- coding: utf-8 -*-
import random
from math import log
from chcko.chcko.hlp import Struct, norm_frac as chorm
be = []
for i in range(2, 5):
for j in range(-4, 5):
be.append((i, j))
for i in range(2, 5):
for j in range(-4, 5):
be.append((1.0 / i, j))
random.shuffle(be)
def chiven():
b, e = random.sample(be, 1)[0]
n = 1.0 * b ** (1.0 * e)
g = Struct(b=b, n=n)
return g
def chalc(g):
res = 1.0 * log(g.n) / log(g.b)
return [res]
<file_sep>/chcko/r/o/__init__.py
# -*- coding: utf-8 -*-
import random
from sympy import latex, Poly, Rational
from sympy.abc import x
from chcko.chcko.hlp import Struct, norm_frac as chorm
__all__ = ['chiven', 'chalc', 'chorm', 'tex_lin']
def tex_lin(a, b):
p = Poly([a, b], x, domain='QQ')
return latex(p.as_expr())
def chiven():
# ax+b=cx
a, c = random.sample(list(range(2, 10)) + list(range(-9, -2)), 2)
da, dc = random.sample(list(range(2, 4)) + list(range(-3, -1)), 2)
xx = random.sample(list(range(1, 6)) + list(range(-5, 0)), 1)[0]
b = (Rational(c, dc) - Rational(a, da)) * xx
g = Struct(a=Rational(a, da), b=b, c=Rational(c, dc))
return g
def chalc(g):
return [1.0 * g.b / (g.c - g.a)]
<file_sep>/chcko/r/bz/en.html
%path = "maths/trigonometry/slope"
%kind = chindnum["problems"]
%level = 11
A piece of road covers a height difference of {{ chiven.h }}m
in a slope length of {{ chiven.l }}m.<br>
What is the slope in percent?
%chq(0)
<br>
What is the ascent angle in degrees?
%chq(1)
<file_sep>/chcko/r/w/__init__.py
# -*- coding: utf-8 -*-
import random
from chcko.chcko.hlp import Struct, norm_frac as chorm
be = []
for i in range(2, 5):
for j in range(-4, 5):
be.append((i, j))
for i in range(2, 5):
for j in range(-4, 5):
be.append((1.0 / i, j))
random.shuffle(be)
def chiven():
b, e = random.sample(be, 1)[0]
g = Struct(b=b, e=e)
return g
def chalc(g):
x = 1.0 * g.b ** (1.0 * g.e)
return [x]
<file_sep>/chcko/r/cw/de.rst
.. raw:: html
%path = "Mathe/Informatik/Komplexität"
%kind = chindnum["Texte"]
%level = 9
<!-- html -->
Komplexität
Um auf einen Wert einer Variable zu verweisen, braucht es Speicher.
Ergibt sich der Wert jedoch aus Werten anderer Variablen,
erspart man sich diesen Speicher. Das ist ein Grund,
warum Funktionen wichtig sind.
Andererseits muss man dann Berechnungen machen, um vom Wert einer
Variable auf den Wert der anderen Variable zu schließen. Das braucht
Zeit.
**Komplexität** gibt das Ausmaß der benötigten Ressourcen wieder. Wenn
viel Zeit gebraucht wird, dann ist die Zeitkomplexität hoch.
Wenn viel Speicher gebraucht wird, dann ist die Speicherkomplexität
oder Raumkomplexität hoch.
Da Computer unterschiedlich schnell sind, gibt man nur die Größenordnung
in Abhängigkeit von der Kardinalität `n` der Eingabemenge in der O-Notation an,
etwa `O(n^2)`.
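Eine kleine Skizze dazu (das Beispiel Duplikatsuche ist frei gewählt): dieselbe Frage
lässt sich mit hoher Zeitkomplexität und wenig Speicher oder mit mehr Speicher und
geringerer Zeitkomplexität beantworten::

    # Skizze: Duplikatsuche mit O(n^2) Vergleichen bzw. O(n) Zeit und O(n) Speicher
    def hat_duplikat_quadratisch(xs):
        return any(xs[i] == xs[j]
                   for i in range(len(xs))
                   for j in range(i + 1, len(xs)))
    def hat_duplikat_linear(xs):
        return len(set(xs)) < len(xs)  # Zeit gegen Speicher getauscht
    print(hat_duplikat_quadratisch([1, 2, 3, 2]), hat_duplikat_linear([1, 2, 3]))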
<file_sep>/chcko/r/initdb.py
# -*- coding: utf-8 -*-
# generate via ``doit -kd. initdb``
def populate_index(index_add):
index_add("r.a", "de", "0", "11",
"Mathe/Trigonometrie/SSS")
index_add("r.a", "en", "0", "11",
"maths/trigonometry/sss")
index_add("r.a0", "de", "1", "11",
"Mathe/Vektoren")
index_add("r.a0", "en", "1", "11",
"maths/vectors")
index_add("r.a1", "de", "1", "12",
"Physik/S=E*t")
index_add("r.a1", "en", "1", "12",
"physics/S=E*t")
index_add("r.a2", "de", "0", "9",
"Mathe/Zahlen/NZQR")
index_add("r.a2", "en", "0", "9",
"maths/numbers/NZQR")
index_add("r.a3", "de", "0", "10",
"Elektronik/Transistor/BJT/R")
index_add("r.a3", "en", "0", "10",
"electronics/transistor/BJT/R")
index_add("r.a4", "de", "0", "10",
"Elektronik/Transistor/BJT/V")
index_add("r.a4", "en", "0", "10",
"electronics/transistor/BJT/V")
index_add("r.a5", "en", "1", "12",
"maths/differential/introduction")
index_add("r.b", "de", "1", "11",
"Mathe/Vektoren")
index_add("r.b", "en", "1", "11",
"maths/vectors")
index_add("r.ba", "de", "0", "10",
"Mathe/Folgen und Reihen/arithmetische u. geometrische")
index_add("r.ba", "en", "0", "10",
"maths/sequences and series/arithmetic and geometric")
index_add("r.bb", "de", "0", "10",
"Mathe/Dreieck/rechtwinkliges")
index_add("r.bb", "en", "0", "10",
"maths/triangle/rectangular")
index_add("r.bc", "de", "0", "11",
"Mathe/Finanz/Rentenrechnung/ganzjährig->unterjährig")
index_add("r.bc", "en", "0", "11",
"maths/finance/annuity/annual->monthly")
index_add("r.bd", "de", "0", "11",
"Mathe/Log/Rentenlaufzeit")
index_add("r.bd", "en", "0", "11",
"maths/log/annuity runtime")
index_add("r.be", "de", "0", "9",
"Mathe/Zahlensysteme/Binärsystem/Umwandeln")
index_add("r.be", "en", "0", "9",
"maths/number system/binary/convert")
index_add("r.bf", "de", "0", "9",
"Mathe/Zahlensysteme/Binärsystem/Addieren")
index_add("r.bf", "en", "0", "9",
"maths/number system/binary/add")
index_add("r.bg", "de", "0", "9",
"Mathe/Zahlensysteme/Binärsystem/Dividieren")
index_add("r.bg", "en", "0", "9",
"maths/number system/binary/divide")
index_add("r.bh", "de", "0", "9",
"Mathe/Zahlensysteme/Binärsystem/Multiplizieren")
index_add("r.bh", "en", "0", "9",
"maths/number system/binary/multiply")
index_add("r.bi", "de", "0", "9",
"Mathe/Zahlensysteme/Binärsystem/Subtrahieren")
index_add("r.bi", "en", "0", "9",
"maths/number system/binary/subtract")
index_add("r.bj", "de", "0", "11",
"Mathe/Funktionen/exponentiell")
index_add("r.bj", "en", "0", "10",
"maths/functions/exponential")
index_add("r.bk", "de", "0", "11",
"Mathe/Vektoren/Matrix/Torten")
index_add("r.bk", "en", "0", "11",
"maths/vectors/matrix/cakes")
index_add("r.bl", "de", "0", "11",
"Mathe/Finanz/Zinsen/Vergleich Zinssatz")
index_add("r.bl", "en", "0", "11",
"maths/finance/interest/comparison")
index_add("r.bm", "de", "0", "12",
"Mathe/Finanz/Kosten- u. Preistheorie/Cournot")
index_add("r.bm", "en", "0", "12",
"maths/finance/cost and price theory/cournot")
index_add("r.bn", "de", "0", "10",
"Mathe/Log/Verzinsungszeit")
index_add("r.bn", "en", "0", "10",
"maths/log/interest time")
index_add("r.bo", "de", "0", "12",
"Mathe/Finanz/Kosten- u. Preistheorie/Gewinnmodell")
index_add("r.bo", "en", "0", "12",
"maths/finance/cost and price theory/profit model")
index_add("r.bp", "de", "0", "12",
"Mathe/Integral/bestimmtes/Volumen/Zylinder oder Rotation")
index_add("r.bp", "en", "0", "12",
"maths/integral/defined/volume/cylinder or rotation")
index_add("r.bq", "de", "0", "12",
"Mathe/Integral/bestimmtes/Fläche/2 Polynome")
index_add("r.bq", "en", "0", "12",
"maths/integral/defined/area/2 polynomials")
index_add("r.br", "de", "0", "12",
"Mathe/Integral/unbestimmtes/x^n+Substitutionsregel")
index_add("r.br", "en", "0", "12",
"maths/integral/undefined/x^n and substitution rule")
index_add("r.bs", "de", "0", "12",
"Mathe/Integral/unbestimmtes/Substitutionsregel")
index_add("r.bs", "en", "0", "12",
"maths/integral/undefined/substitution rule")
index_add("r.bt", "de", "0", "12",
"Mathe/Integral/bestimmtes/Volumen/Rotation")
index_add("r.bt", "en", "0", "12",
"maths/integral/defined/volume/rotation")
index_add("r.bu", "de", "0", "9",
"Mathe/<NAME>/mit <NAME>")
index_add("r.bu", "en", "0", "9",
"maths/linear equation/with integers")
index_add("r.bv", "de", "0", "11",
"Mathe/Vektoren/Parallelogramm")
index_add("r.bv", "en", "0", "11",
"maths/vectors/parallelogram")
index_add("r.bw", "de", "0", "11",
"Mathe/Finanz/Zinsen/gemischt u. theoretisch")
index_add("r.bw", "en", "0", "11",
"maths/finance/interest/combined and theoretical")
index_add("r.bx", "de", "0", "11",
"Mathe/Trigonometrie/Fläche")
index_add("r.bx", "en", "0", "11",
"maths/trigonometry/area")
index_add("r.by", "de", "0", "11",
"Mathe/Trigonometrie/WSW")
index_add("r.by", "en", "0", "11",
"maths/trigonometry/asa")
index_add("r.bz", "de", "0", "11",
"Mathe/Trigonometrie/Steigung")
index_add("r.bz", "en", "0", "11",
"maths/trigonometry/slope")
index_add("r.c", "de", "0", "11",
"Mathe/Finanz/Rentenrechnung/unterjährig")
index_add("r.c", "en", "0", "11",
"maths/finance/annuity/monthly")
index_add("r.ca", "de", "0", "11",
"Mathe/Vektoren/2D")
index_add("r.ca", "en", "0", "11",
"maths/vectors/2d")
index_add("r.cb", "de", "0", "11",
"Mathe/Vektoren/Gerade")
index_add("r.cb", "en", "0", "11",
"maths/vectors/straight line")
index_add("r.cc", "de", "0", "11",
"Mathe/Vektoren/Transformation")
index_add("r.cc", "en", "0", "11",
"maths/vectors/transformation")
index_add("r.cd", "de", "1", "10",
"Mathe/Finanz/Zinsen")
index_add("r.cd", "en", "1", "10",
"maths/finance/interest")
index_add("r.ce", "de", "1", "10",
"Mathe/Funktionen/Log")
index_add("r.ce", "en", "1", "10",
"maths/functions/log")
index_add("r.cf", "de", "1", "10",
"Mathe/Funktionen/Funktionsgraph")
index_add("r.cf", "en", "1", "10",
"maths/functions/graph")
index_add("r.cg", "de", "1", "11",
"Mathe/Vektoren/Transformation und Inverse")
index_add("r.cg", "en", "1", "11",
"maths/vectors/transformation and inverse")
index_add("r.ch", "de", "1", "10",
"Mathe/Folgen und Reihen")
index_add("r.ch", "en", "1", "10",
"maths/sequences and series")
index_add("r.ci", "de", "1", "9",
"Mathe/Zahlen/NZQR lang")
index_add("r.ci", "en", "1", "9",
"maths/numbers/NZQR long")
index_add("r.cj", "de", "1", "9",
"Mathe/Zahlen/Kombination mit Operationen")
index_add("r.cj", "en", "1", "9",
"maths/numbers/combination with operations")
index_add("r.ck", "de", "0", "13",
"Mathe/Wahrscheinlichkeit/Lektor")
index_add("r.ck", "en", "0", "13",
"maths/probability/reviser")
index_add("r.cl", "de", "1", "10",
"Mathe/Stukturen/Gruppe")
index_add("r.cl", "en", "1", "10",
"maths/stuctures/group")
index_add("r.cm", "de", "1", "10",
"Mathe/Stukturen/Ring")
index_add("r.cm", "en", "1", "10",
"maths/stuctures/ring")
index_add("r.cn", "de", "1", "9",
"Mathe/Zahlen/Darstellung")
index_add("r.cn", "en", "1", "9",
"maths/numbers/representation")
index_add("r.co", "de", "1", "9",
"Mathe/abstrakt-konkret")
index_add("r.co", "en", "1", "9",
"maths/abstract-concrete")
index_add("r.cp", "de", "1", "9",
"Mathe/Mathe+Information")
index_add("r.cp", "en", "1", "9",
"maths/maths+information")
index_add("r.cq", "de", "1", "9",
"Mathe/Operationen")
index_add("r.cq", "en", "1", "9",
"maths/operations")
index_add("r.cr", "de", "1", "9",
"Mathe/Funktionen")
index_add("r.cr", "en", "1", "9",
"maths/functions")
index_add("r.cs", "de", "1", "10",
"Mathe/Morphismen")
index_add("r.cs", "en", "1", "10",
"maths/morphisms")
index_add("r.ct", "de", "1", "11",
"Mathe/Richtung")
index_add("r.ct", "en", "1", "11",
"maths/direction")
index_add("r.cu", "de", "1", "12",
"Mathe/Funktionen/Integral von 1÷z")
index_add("r.cu", "en", "1", "12",
"maths/functions/integral of 1÷z")
index_add("r.cv", "de", "1", "12",
"Mathe/Entropie")
index_add("r.cv", "en", "1", "12",
"maths/entropy")
index_add("r.cw", "de", "1", "9",
"Mathe/Informatik/Komplexität")
index_add("r.cw", "en", "1", "9",
"maths/informatics/komplexity")
index_add("r.cx", "de", "1", "0",
"Mathe/Externe Links")
index_add("r.cx", "en", "1", "0",
"maths/external links")
index_add("r.cy", "de", "6", "10",
"Mathe/Euler's Identität")
index_add("r.cy", "en", "6", "10",
"maths/euler identity")
index_add("r.d", "de", "0", "12",
"Physik/Elektrizität/Kondensator")
index_add("r.d", "en", "0", "12",
"physics/electricity/capacity")
index_add("r.dg", "de", "3", "13",
"Physik/Schaltkreise/Thevenin")
index_add("r.dg", "en", "3", "13",
"physics/circuits/thevenin")
index_add("r.dh", "en", "3", "10",
"physics/mechanics/forces")
index_add("r.di", "de", "1", "9",
"Mathe/Zahlen/C")
index_add("r.di", "en", "1", "10",
"maths/numbers/C")
index_add("r.dj", "de", "1", "11",
"Mathe/Trogonometrie")
index_add("r.dj", "en", "1", "11",
"maths/trigonometry")
index_add("r.dk", "de", "1", "11",
"Mathe/Funktionen/exponentiell")
index_add("r.dk", "en", "1", "11",
"maths/functions/exponential")
index_add("r.e", "de", "6", "12",
"Physik/Elektrizität/Kondensator")
index_add("r.e", "en", "6", "12",
"physics/electricity/capacitor")
index_add("r.f", "de", "1", "12",
"Mathe/Differential/trennbar")
index_add("r.f", "en", "1", "12",
"maths/differential/separable")
index_add("r.g", "de", "0", "9",
"Mathe/Lineare Gleichung/x im Nenner")
index_add("r.g", "en", "0", "9",
"maths/linear equation/x in denominator")
index_add("r.h", "de", "0", "11",
"Mathe/Vektoren/Matrix/Stufenweise Produktion")
index_add("r.h", "en", "0", "11",
"maths/vectors/matrix/stepwise production")
index_add("r.i", "de", "0", "10",
"Mathe/Funktionen/Funktionsgraphen")
index_add("r.i", "en", "0", "10",
"maths/functions/function graphs")
index_add("r.j", "de", "0", "12",
"Mathe/Integral/bestimmtes/sin,cos,exp,x^(-1)")
index_add("r.j", "en", "0", "12",
"maths/integral/defined/sin,cos,exp,x^(-1)")
index_add("r.k", "de", "0", "12",
"Mathe/Integral/bestimmtes/Polynom")
index_add("r.k", "en", "0", "12",
"maths/integral/defined/polynomial")
index_add("r.l", "de", "0", "12",
"Mathe/Integral/bestimmtes/quadratische Funktion")
index_add("r.l", "en", "0", "12",
"maths/integral/defined/quadratic function")
index_add("r.m", "de", "0", "11",
"Mathe/Finanz/Zinsen/Auf-, Abzinsen")
index_add("r.m", "en", "0", "11",
"maths/finance/interest/compound and discount")
index_add("r.n", "de", "0", "11",
"Mathe/Finanz/Investition")
index_add("r.n", "en", "0", "11",
"maths/finance/investment")
index_add("r.o", "de", "0", "10",
"Mathe/Lineare Gleichung/mit Brüchen")
index_add("r.o", "en", "0", "10",
"maths/linear equation/with fractions")
index_add("r.p", "de", "0", "10",
"Mathe/Log/mit Zahlen")
index_add("r.p", "en", "0", "10",
"maths/log/with numbers")
index_add("r.q", "de", "0", "10",
"Mathe/LGS/2x2")
index_add("r.q", "en", "0", "10",
"maths/linear system/2x2")
index_add("r.r", "de", "0", "10",
"Mathe/LGS/Kosten")
index_add("r.r", "en", "0", "10",
"maths/linear system/costs")
index_add("r.s", "de", "0", "10",
"Mathe/LGS/Mischen")
index_add("r.s", "en", "0", "10",
"maths/linear system/blending")
index_add("r.t", "de", "0", "10",
"Mathe/LGS/quadratische Kostenfunktion")
index_add("r.t", "en", "0", "10",
"maths/linear system/quadratic cost function")
index_add("r.u", "de", "0", "10",
"Mathe/Vektoren/Matrix/2x2 invertieren")
index_add("r.u", "en", "0", "10",
"maths/vectors/matrix/2x2 inversion")
index_add("r.v", "de", "0", "9",
"Mathe/Finanz/Prozentrechnung")
index_add("r.v", "en", "0", "9",
"maths/finance/percent")
index_add("r.w", "de", "0", "9",
"Mathe/Potenzen/mit Zahlen")
index_add("r.w", "en", "0", "9",
"maths/power/with numbers")
index_add("r.x", "de", "0", "9",
"Mathe/Potenzen/kürzen")
index_add("r.x", "en", "0", "9",
"maths/power/cancel")
index_add("r.y", "de", "0", "9",
"Mathe/Potenzen/Doppelbrüche")
index_add("r.y", "en", "0", "9",
"maths/power/compound fraction")
index_add("r.z", "de", "0", "10",
"Mathe/Quadratisch/Gleichung")
index_add("r.z", "en", "0", "10",
"maths/quadratic/equation")
available_langs = {'de',
'en'}
<file_sep>/chcko/r/q/en.html
%path = "maths/linear system/2x2"
%kind = chindnum["problems"]
%level = 10
\[
\begin{matrix}
{{chiven.A[0][0]}} x & {{chutil.sgn(chiven.A[0][1])}} & {{abs(chiven.A[0][1])}} y & = & {{chiven.b[0]}} \\
{{chiven.A[1][0]}} x & {{chutil.sgn(chiven.A[1][1])}} & {{abs(chiven.A[1][1])}} y & = & {{chiven.b[1]}}
\end{matrix}
\]
%chq(chames=['x =','y ='])
<file_sep>/chcko/r/cq/de.rst
.. raw:: html
%path = "Mathe/Operationen"
%kind = chindnum["Texte"]
%level = 9
<!-- html -->
Eine grundlegende Operation ist die **Addition**.
Sie kann mengentheoretisch als Kardinalität der Summe zweier disjunkter Mengen gesehen werden:
`|M\cup N|=|M|+|N|` wenn `M\cap N = \emptyset`
Diese abstrakte Vorstellung kann auf alle konkreten Fälle angewandt werden, wenn
man eine Ressourceneinheit (Länge, Gewicht,...) mit einem Element identifiziert
und es keine Überlappung gibt, weil etwa die Ressource nicht doppelt vergeben
werden kann.
Man muss im Konkreten die reale physikalische Gegebenheit berücksichtigen. So
addieren sich die Längen von vertikal in ein Rohr eingeführten elastischen
Stäben wegen deren Elastizität und deren Gewicht nicht. Das Volumen zweier
Flüssigkeiten kann durchmischt aber kleiner als die Summe sein, weil die
Ressource Volumen in der Mischung effektiver zugeteilt werden kann.
Der **Multiplikation** begegnet man zuerst bei der Flächenberechnung eines Rechtecks:
Man kann entweder alle Kästchen zusammenzählen
(Addition) oder eine Wiederholung (Multiplikation) von Reihen oder eine
Wiederholung von Spalten darin sehen (Kommutativität von `\cdot`). Wenn man
jedes Kästchen mit zwei Koordinaten `(i,j)` identifiziert, so zählt man mit der
Multiplikation die Kombinationen von `1 <= i <= a` und `1 <= j <= b` und erhält `a\cdot b`.
In einem Quadrat der Seitenlänge `a` hat man `a\cdot a` Einheitsquadrate, in
einem Würfel der Seitenlänge `a` hat man `a^3` kleine Würfel ... Man zählt
hier Kombinationen einer Menge M `n` mal mit sich selbst. `|M\times \dots
\times M| = |M|^n`. So kommt man zur **Potenz**.
- Die Multiplikation ist eine Kurzschreibweise der Addition.
Man sagt auch sie ist 2. Stufe.
- Die Potenz ist eine Kurzschreibweise der Multiplikation.
Man sagt auch sie ist 3. Stufe.
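Die Stufen lassen sich direkt nachrechnen (eine kleine Skizze, die Zahlen sind beliebig gewählt)::

    # Skizze: wiederholte Addition = Multiplikation, wiederholte Multiplikation = Potenz
    a, n = 3, 4
    print(sum(a for _ in range(n)), a * n)  # 12 12
    produkt = 1
    for _ in range(n):
        produkt *= a
    print(produkt, a ** n)                  # 81 81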
**Operation** ist ein anderer Name für eine **Funktion** (:lnk:`r.cr`),
wenn bestimmte Eigenschaften wie **Assoziativgesetz** oder **Kommutativgesetz** erfüllt werden.
Eine **Operation** lässt sich gut mit einem Parameter zu einer neuen Menge {(Wert,Operation)}
vereinen (siehe Zahlen: :lnk:`r.ci` und `Lambda Kalkül <http://de.wikipedia.org/wiki/Lambda-Kalkül>`_)
Diese Mengen bestehen dann aus Anweisungen oder Vorgängen, oder besser **Operatoren**,
die dann auf etwas angewandt werden können. Z.B. 3m = (3,mal) Meter, d.h. 3 = (3,mal).
<file_sep>/chcko/r/a5/en.rst
.. raw:: html
%path = "maths/differential/introduction"
%kind = chindnum["texts"]
%level = 12
<!-- html -->
We will use the concepts:
- variable/value
- change
- velocity
Variable/Value
==============
English is a historical mix of two languages.
So there are two words for many things.
*Change*, for example, can also be called *vary*.
That's why something, that can change, is called a *variable*.
A variable assumes one *value* at a time, *exclusively*.
It does not need to be "at a time".
It could also be "at a place".
As a side-note:
The uniqueness is expressed by the word *function*:
We can say, the value is a *function* of this or that.
We can use a number to denote a *value*.
We could also use a word, but it is easier with a number.
Variable = {value1, value2, ...}
Position = {10m , 20m, ..., 120m, ...}
Change
======
We assume a gradual change.
Large changes:
The change is expressed with a difference: `-`.
Difference is abbreviated with a Greek D: Δ.
`Δy_1 = y_1 - y_0`.
The differences can be added to get the full extent of the variable.
We basically undo the difference (subtraction) by addition.
`y = Σ Δy_i = Δy_1 + Δy_2 + ...`.
Small changes:
We use ``d`` instead of ``Δ`` for very small changes.
`dy = (y + dy) - y`.
`d` is called *differential*.
With `dy` we use `∫` for sum,
and call it *integral*.
Velocity
========
How fast a value changes is again a variable.
It is called *speed* or *velocity* of change of that variable.
Velocity is relative.
To describe velocity of change of the value of one variable
we need *another variable* to compare it to.
Often this other variable is *time*, but it could be something else.
If there is no other variable specified, then it is implicitly *time*,
or better our time feeling, given by how fast our brain thinks.
Lets find the velocity by which you grow.
We have two variables:
1) Height `y`: The distance from the floor to the top of your head.
2) Age `x`: The number of years that have passed since your birth.
Differences:
The *average* velocity over `Δx` is found by *dividing* the differences:
`\tilde{v} = Δy/Δx`
Why *divide*?
*Because* then you can *multiply* to get back the difference:
`Δy = \tilde{v} Δx`
And you can sum the differences to get back the height:
`y = Σ Δy = Σ \tilde{v} Δx`
Differentials:
This velocity is at a specific `x` because `dx` is so small that we can neglect it.
`v = dy/dx`
and to get back y we sum the very many `v dx`
`y = ∫ dy = ∫ v dx`.
Velocity is used if `x` is time.
More generally, one calls it *derivative*:
- *derivative* of height `y` with respect to age `x`
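A small numeric sketch of these quotients (the height data are made up for illustration)::

    # sketch: average velocities from differences, and recovering y by summing
    xs = [0, 1, 2, 3, 4]        # age in years
    ys = [50, 75, 87, 96, 103]  # height in cm
    dys = [b - a for a, b in zip(ys, ys[1:])]
    dxs = [b - a for a, b in zip(xs, xs[1:])]
    v = [dy / dx for dy, dx in zip(dys, dxs)]  # average velocity per interval
    print(v, ys[0] + sum(dys))                 # summing the differences gives back 103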
Summary
=======
Change is expressed via differences,
- that `Δ` means *difference*
- that `d` means very small difference or *differential*
The symbol for sum is
- `Σ`, if summing differences
- `∫`, if summing differentials
Velocity is a quotient between two differences
- average over `Δx`: `\tilde{v}=\frac{Δy}{Δx}`
- at `x`: `v=\frac{dy}{dx}`
<file_sep>/chcko/r/q/__init__.py
# -*- coding: utf-8 -*-
import numpy as np
import random
from chcko.chcko.hlp import Struct
def chiven():
while True:
A = np.array(random.sample(list(range(1, 19)) + list(range(-19, -1)), 4))
A.shape = (2, 2)
try:
np.linalg.inv(A)
        except np.linalg.LinAlgError:
continue
break
x = np.array(random.sample(list(range(2, 9)) + list(range(-9, -2)), 2))
b = np.dot(A, x)
A = A.tolist()
b = b.tolist()
g = Struct(A=A, b=b)
return g
def chalc(g):
iA = np.linalg.inv(np.array(g.A))
x = np.dot(iA, np.array(g.b))
return [i for i in x.round().tolist()]
<file_sep>/chcko/r/l/__init__.py
# -*- coding: utf-8 -*-
import random
import numpy as np
from chcko.chcko.hlp import Struct, norm_frac as chorm
def chiven():
r = sorted(random.sample(list(range(-9, -1)) + list(range(1, 9)), 2))
c = [-1, r[0] + r[1], -r[0] * r[1]]
g = Struct(r=r, c=c)
return g
def chalc(g):
p = np.poly1d(g.c)
p_i = np.polyint(p)
I = p_i(g.r[1]) - p_i(g.r[0])
return [I]
<file_sep>/chcko/r/dj/de.rst
.. raw:: html
%path = "Mathe/Trogonometrie"
%kind = chindnum["Texte"]
%level = 11
<!-- html -->
.. role:: asis(raw)
:format: html latex
.. contents::
Im folgenden Diagramm hat der Kreis den Radius 1.
Die Länge des Bogens diese Einheitskreises ist ein Maß für den **Winkel**,
das **Bogenmaß** genannt wird und die Einheit ``rad`` (Radiant) besitzt.
Der rechte Winkel (90°) ist `\pi/2`.
Im Allgemeinen:
.. math::
\frac{\pi}{180}\alpha[°] = \alpha[rad]
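Zum Nachrechnen eine kleine Skizze (das Python-Modul ``math`` ist hier nur zur Veranschaulichung angenommen)::

    # Skizze: Umrechnung Grad <-> Bogenmaß
    from math import pi, radians, degrees
    print(radians(90), pi / 2)  # 1.5707... entspricht pi/2
    print(degrees(pi / 3))      # 60.0 (bis auf Rundung)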
Wenn man den Winkel ändert, dann ändern sich auch die Längen, die mit
`\sin\alpha`, `\cos\alpha` und `\tan\alpha` beschriftet sind.
Diese Längen sind durch den Winkel bestimmt.
Anders gesagt: Diese Längen sind **Funktionen des Winkels**.
.. tikz:: \coordinate (O) at (0,0);
\coordinate (C) at ({2*cos(60)},{2*sin(60)});
\coordinate (P) at ({2*cos(60)},0);
\coordinate (D) at (2,{2*tan(60)});
\draw[black, very thin] (O) circle [radius=2];
\draw[red,thick] (2,0) arc [radius=2, start angle=0, end angle=60] node[midway,above]{\tiny $\alpha$};
\draw[blue,thick] (O) -- (C) node[midway,above]{\tiny $1$};
\draw[blue,thick] (P) -- (C) node[midway,right]{\tiny $\sin\alpha$};
\draw[blue,thick] (O) -- (P) node[midway,below]{\tiny $\cos\alpha$};
\draw[green,thick] (P) -- (2,0);
\draw[green,thick] (2,0) -- (D) node[midway,right]{\tiny $\tan\alpha$};
\draw[green,thick] (C) -- (D);
\draw[xshift=-1.1cm,yshift=-1cm] node[right,text width=2.2cm]
{ \tiny $\tan\alpha=\frac{\sin\alpha}{\cos\alpha}$\\$\sin^2\alpha+\cos^2\alpha=1$ };
.. admonition:: Ähnlichkeit = Affine Abbildung
Man kann dieses Diagramm auf die Größe eines tatsächlichen rechtwinkligen
Dreiecks skalieren und dieses Dreieck vollständig bestimmen, wenn man eine
Seitenlänge und einen Winkel desselben weiß.
Die Graphen der obigen **trigonometrischen Funktionen** sind die folgenden
.. tikz:: \begin{axis}
[
ymin=-1,ymax=1,
xmin=0,xmax=2*pi,
xtick=\empty,
ytick={-1,0,1},
extra x ticks={1.5708,3.1416,4.712,6.2832},
extra x tick labels={$\frac{\pi}{2}$, $\pi$, $\frac{3\pi}{2}$, $2\pi$},
every extra x tick/.style={
xticklabel style={anchor=north west},
grid=major,
major grid style={thick,dashed,red}
},
axis lines = center,
xlabel=$x$,ylabel=$y$,
enlargelimits=0.2,
domain=0:2*pi,
samples=100,
axis equal,
]
\addplot [green,thick] {tan(deg(x))} node [midway,left]{tan};
\addplot [red,thick] {sin(deg(x))} node [above]{sin};
\addplot [blue,thick] {cos(deg(x))} node [above]{cos};
\end{axis}
Einige Werte der Funktionen ergeben sich durch Berechnungen am gleichseitigen Dreieck
(`\pi/3`, `\pi/6`) oder am Quadrat der Seitenlänge 1 (`\pi/4`).
`\cos` ist symmetrisch: `\cos(-\alpha)=\cos\alpha`
`\sin` ist antisymmetrisch: `\sin(-\alpha)=-\sin\alpha`.
Alle trigonometrischen Funktionen haben eine Periode von `2\pi`: `sin|cos|tan(\alpha+2\pi)=sin|cos|tan(\alpha)`.
Weil die spitzen Winkel eines rechtwinkligen Dreiecks sich zu `\pi/2` addieren, gilt
.. math::
\sin(\pi/2 - \alpha)=\cos\alpha\\
\cos(\pi/2 - \alpha)=\sin\alpha
<file_sep>/dodo.py
# -*- coding: utf-8 -*-
'''
doit utility::
Do this after having changed an RST file.
Start this from chcko-x/chcko/::
$ doit -kd. html
Do this after having changed the header (path, kind, level) in a html or rst file::
$ doit initdb
Do this after any changes, especially in the main code::
$ doit test
$ doit cov
Do this to add new content, html or rst::
$ doit -kd. new
$ doit -kd. newrst
task_included is internal.
'''
import sys
import os
chckouninstalled = os.path.normpath(os.path.join(os.path.dirname(__file__),'..','chcko'))
if os.path.exists(chckouninstalled):
sys.path.insert(0,chckouninstalled)
from chcko.chcko import doit_tasks
doit_tasks.set_base(__file__)
task_included = doit_tasks.task_included
task_html = doit_tasks.task_html
task_initdb = doit_tasks.task_initdb
task_new = doit_tasks.task_new
task_newrst = doit_tasks.task_newrst
<file_sep>/chcko/r/t/__init__.py
# -*- coding: utf-8 -*-
import numpy as np
import random
from chcko.chcko.hlp import Struct
def chiven():
cf = random.sample(range(2000, 2500), 1)[0]
m1 = random.sample(range(30, 45), 1)[0]
m2 = random.sample(range(50, 70), 1)[0]
A = np.array([[m1 * m1, m1], [m2 * m2, m2]])
x1 = random.sample(range(-5, -1), 1)[0]
x2 = random.sample(range(300, 999), 1)[0]
x = np.array([x1, x2])
b = np.dot(A, x)
c1 = b[0] + cf
c2 = b[1] + cf
g = Struct(A=A.tolist(), b=b.tolist(), cf=cf)
return g
def chalc(g):
iA = np.linalg.inv(np.array(g.A))
x = np.dot(iA, np.array(g.b))
x = x.tolist()
return [i for i in (x + [g.cf])]
<file_sep>/chcko/r/cm/de.rst
.. raw:: html
%path = "Mathe/Stukturen/Ring"
%kind = chindnum["Texte"]
%level = 10
<!-- html -->
*Ringartige algebraische Strukturen* bauen auf
gruppenartige Strukturen (:lnk:`r.cl`) auf
und bestehen aus einer Menge `M` mit
zwei binären Verknüpfung `+` und `\cdot` , kurz `(M,+,\cdot)`.
- `(M,+)` und `(M,\cdot)` sind Monoide und es gilt `0\cdot a = 0`
`\rightarrow` **Halbring**.
- `(M,+)` ist eine kommutative Gruppe
`\rightarrow` **Ring**.
- In `(M,\cdot)` gibt es keine zwei Zahlen, deren Produkt 0 ist.
Nullteilerfrei `\rightarrow` **Integritätsring**.
- `(M\setminus\{0\},\cdot)` ist eine kommutative Gruppe
`\rightarrow` **Körper**.
- `(M,\cdot)` erfüllt die Jacobi-Identität `a\cdot (b \cdot c) + c\cdot (a \cdot b) + b\cdot (c \cdot a) = 0`
`\rightarrow` **Lie Ring**.
- `(M,\cdot)` ist idempotent
`\rightarrow` **Boolesche Algebra**.
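Eine kleine Skizze zum Begriff nullteilerfrei (das Beispiel ist frei gewählt):
in `\mathbb{Z}` ist ein Produkt nur dann 0, wenn ein Faktor 0 ist; im Restklassenring
modulo 4 gilt das nicht::

    # Skizze: 2*2 = 0 modulo 4, also hat Z/4Z Nullteiler
    print((2 * 2) % 4)  # 0
    print(2 * 2)        # 4, in Z also kein Nullteiler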
<file_sep>/chcko/r/cb/en.html
%path = "maths/vectors/straight line"
%kind = chindnum["problems"]
%level = 11
The vector \(\begin{pmatrix}{{chiven.x[0]}}\\{{chiven.x[1]}}\end{pmatrix}\) is orthogonal
to the line \({{chiven.x[0]}}x_1{{chutil.sgn(chiven.x[1])}}{{abs(chiven.x[1])}}x_2={{chiven.b}}\).
The line can be described with the dot product
\(\begin{pmatrix}{{chiven.x[0]}}\\{{chiven.x[1]}}\end{pmatrix}\begin{pmatrix}x_1\\x_2\end{pmatrix} =
\begin{pmatrix}{{chiven.x[0]}}\\{{chiven.x[1]}}\end{pmatrix}\begin{pmatrix}v_1\\v_2\end{pmatrix}
={{chiven.b}}\).
<br>
\(v_1=0\)
<br>
\(v_2=\)
%chq()
<br>
Sketch
\(\begin{pmatrix}{{chiven.x[0]}}\\{{chiven.x[1]}}\end{pmatrix}\left(\begin{pmatrix}x_1\\x_2\end{pmatrix} -
\begin{pmatrix}v_1\\v_2\end{pmatrix}\right)
={{chiven.b}}\) for an \(\vec{x}\not=\vec{v}\).
<file_sep>/chcko/r/v/__init__.py
# -*- coding: utf-8 -*-
import random
from chcko.chcko.hlp import Struct
def chiven():
r, s = random.sample(range(1, 9), 2)
m = random.sample(range(18, 21), 1)[0]
u = random.sample(range(700, 900), 1)[0]
g = random.sample(range(30, 50), 1)[0]
e = 1.0 * u / (1 + g / 100.0)
v = 1.0 * u * (1 - r / 100.0) * (1 - s / 100.0) * (1 + m / 100.0)
g = Struct(r=r, s=s, m=m, v=v, e=e)
return g
def chalc(g):
u = g.v / (1 - g.r / 100.0) / (1 - g.s / 100.0) / (1 + g.m / 100.0)
n = 100.0 * (u - g.e) / g.e
return [u, n]
<file_sep>/chcko/r/r/__init__.py
# -*- coding: utf-8 -*-
import numpy as np
import random
from chcko.chcko.hlp import Struct
def chiven():
A = np.array([1, 1] + random.sample(range(200, 1000), 2))
A.shape = (2, 2)
A = np.transpose(A)
    x = np.array(random.sample(range(500, 1000), 1) + random.sample(range(2, 20), 1))
b = np.dot(A, x)
g = Struct(A=A.tolist(), b=b.tolist())
return g
def chalc(g):
iA = np.linalg.inv(np.array(g.A))
x = np.dot(iA, np.array(g.b))
return [i for i in x.round().tolist()]
<file_sep>/chcko/r/ba/__init__.py
# -*- coding: utf-8 -*-
from random import randrange
from chcko.chcko.hlp import Struct, norm_rounded as chorm
__all__ = ['chiven', 'chorm', 'chalc', 'g_fn', 'g_fs']
g_fn = [lambda a1, q, n:a1 + (n - 1) * q, lambda a1, q, n: a1 * q ** (n - 1)]
g_fs = [lambda a1, q, n: n * (2 * a1 + (n - 1) * q) / 2,
        lambda a1, q, n: a1 * (q ** n - 1) / (q - 1)]
def chiven():
rr = randrange(2) # 0 arithmetic, 1 geometric
if rr == 0:
a1 = randrange(-9, 9)
q = randrange(1, 9)
else:
a1 = randrange(-9, 9)
q = 1.0 + (1.0 * randrange(1, 9) / 100)
g = Struct(rr=rr, a1=a1, q=q, n=randrange(4, 9), N=randrange(20, 60))
return g
def chalc(g):
return [g_fn[g.rr](g.a1, g.q, g.n), g_fs[g.rr](g.a1, g.q, g.N)]
<file_sep>/chcko/r/x/__init__.py
# -*- coding: utf-8 -*-
import random
from sympy.abc import a, b, c, d, e, f, g, h, i, j, k, m, n, p, q, r, s, t, u, v, w, x, y, z
from sympy import sstr, simplify
from chcko.chcko.hlp import Struct, equal_0 as chequal
syms = [a, b, c, d, e, f, g, h, i, j, k, m, n, p, q, r, s, t, u, v, w, x, y, z]
syml = 'abcdefghijkmnpqrstuvwxyz'
def chiven():
bn = random.sample(syml, 3)
bd = bn[:]
random.shuffle(bd)
en = random.sample(range(-9, 9), 3)
ed = random.sample(range(-9, 9), 3)
g = Struct(bn=bn, bd=bd, en=en, ed=ed)
return g
def chalc(g):
nm = 1
for i, ae in enumerate(g.en):
nm = nm * simplify(g.bn[i]) ** ae
for i, ae in enumerate(g.ed):
nm = nm / simplify(g.bd[i]) ** ae
return [sstr(simplify(nm))]
chorm = lambda x: x
<file_sep>/chcko/r/d/de.html
%path = "Physik/Elektrizität/Kondensator"
%kind = chindnum["Übungen"]
%level = 12
%include('r.e')
<br>
a) Wie hängt die Ladung Q von der Zeit t ab?
\(Q(t)=\)
%chq(0)
<br>
Der Widerstand, über den die Ladung abfließt, ist R={{chiven.r}}\(k\Omega\).
Zum Zeitpunkt t=0s misst man am Kondensator eine Spannung von {{chiven.u}}V,
und bei t={{chiven.t}}s ist die Spannung {{round(chiven.u2,2)}}V.
<br>
b) Bestimme die Kapazität des Kondensators. \(\frac{C}{μF}=\)
%chq(1)
<br>
c) Wie groß ist die Ladung bei t=0s? \(\frac{Q_o}{As}=\)
%chq(2)
<br>
d) Wie groß ist die Ladung bei t={{chiven.t}}s? \(\frac{Q_t}{As}=\)
%chq(3)
<br>
e) Nach wieviel Sekunden ist der vollgeladene Kondensator nur mehr halbvoll?
%chq(4)
<file_sep>/chcko/r/bx/__init__.py
# -*- coding: utf-8 -*-
from random import randrange
from math import pi, sin, cos
from chcko.chcko.hlp import Struct
def chiven():
c = randrange(4, 20)
alpha = randrange(10, 80)
g = Struct(c=c, alpha=alpha)
return g
def chalc(g):
res = 0.5 * sin(pi * g.alpha / 180) * cos(pi * g.alpha / 180) * g.c ** 2
return [res]
<file_sep>/chcko/r/dg/de.rst
.. raw:: html
%path = "Physik/Schaltkreise/Thevenin"
%kind = chindnum["Beispiele"]
%level = 13
<!-- html -->
.. role:: asis(raw)
:format: html latex
Wir werden die Übertragungsfunktion eines Bandsperre-Filters mit der Methode von Thevenin ermitteln.
Wir ziehen dazu einen Schaltkreis aus
`Op Amps for EveryOne (5-10) <http://www.ti.com/lit/an/slod006b/slod006b.pdf>`_ heran.
.. texfigure:: r_dg_c1.tex
:align: center
Das Eingangssignal ist gegenüber Erde. Wir zeichnen die Schaltung neu, um das widerzuspiegeln.
.. texfigure:: r_dg_c2.tex
:align: center
Wir suchen `G=\frac{V_o}{V_i}`.
`V_o` ist die Spannung beim rechten Widerstand. Wir werden dort das Thevenin-Äquivalent berechnen.
.. texfigure:: r_dg_c3.tex
:align: center
Wir finden die Ströme mittels der Kirchhoff'schen Regeln.
Es gibt zwei Schleifen, wo Strom fließt.
Es gibt keinen Strom bei dem Widerstand, wo wir den Stromkreis geöffnet haben.
.. texfigure:: r_dg_c4.tex
:align: center
Die resultierenden Gleichungen sind
.. math:: \begin{array}{l l l}
V_i - I_2 R - \frac{I_2}{iwC} - I_1 R & = 0\\
V_i - \frac{I_1 - I_2}{iwC} - I_1 R & = 0
\end{array}
Diese lösen wir nach `I_1` und `I_2`:
.. math:: \begin{array}{l l}
I_1 &= \frac{\omega C V_i (-2 i+C R \omega)}{-1-3 C R i \omega+C^2 R^2 \omega^2}\\
I_2 &= -\frac{i \omega C V_i}{-1-3 C R i \omega+C^2 R^2 \omega^2}
\end{array}
Bei der kleinen Schleife bei `V_{th}` berechnen wir mit den bekannten Strömen
.. math:: V_{th}=\frac{I_2}{iwC} + I_1 R
Als nächstes brauchen wir die Thevenin-Impedanz. Dafür entfernen wir `V_i`
und berechnen die Impedanz wie sie von `V_{th}` aus gesehen wird.
.. texfigure:: r_dg_c5.tex
:align: center
Wir zeichnen die Schaltung neu, um besser zu sehen, was parallel und was seriell ist.
.. texfigure:: r_dg_c6.tex
:align: center
Dann erhalten wir
.. math:: Z_{th}=\left(\frac{1}{i \omega C}+\frac{R \frac{1}{i \omega C}}{R+\frac{1}{i \omega C}}\right) || R =
\frac{R (1+2 i \omega C R)}{1+3 i \omega C R - C^2 R^2 \omega^2}
und
.. math:: V_o = \frac{R}{R + Z_{th}}
Und endlich bekommen wir die **Übertragungsfunktion**:
.. math:: G = \frac{V_o}{V_i} = \frac{(-i+C R \omega)^2}{-2-5 i \omega C R+C^2 R^2 \omega^2}
= \frac{(1+i \omega C R)^2}{2+5 i \omega C R-C^2 R^2 \omega^2}
= \frac{(1+ s\tau)^2}{2+5 s\tau+(s\tau)^2}
Hier haben wir `\tau=R C` und `s=i \omega` gesetzt, wie es für Filter üblich ist.
Beim Nenner kann man für `s\tau` die Nullstellen berechnen (-4.56,-0.44).
Das Produkt der Nullstellen ist 2. `G` können wir damit so schreiben
.. math:: G = \frac{(1+ s\tau)^2}{2(1+\frac{s\tau}{0.44})(1+\frac{s\tau}{4.56})}
<file_sep>/chcko/r/bc/__init__.py
# -*- coding: utf-8 -*-
from random import randrange
from math import log
from chcko.chcko.hlp import Struct, norm_frac as chorm
def chiven():
g = Struct()
g.C0 = randrange(20, 100) * 1000
g.i = randrange(20, 40) / 10.0
g.n = randrange(5, 10)
g.r = randrange(300, 400)
return g
def chalc(g):
q = (g.i / 100.0 + 1) ** (1.0 / 12)
m = 12 * g.n
Q = lambda n: q * (q ** n - 1) / (q - 1)
monthlyrate = g.C0 * q ** m / Q(m)
mm = -log(1 - g.C0 * (q - 1) / q / g.r) / log(q)
fullmonths = int(mm)
Cfullrate = g.r * Q(fullmonths)
restrate = g.C0 * q ** fullmonths - Cfullrate
restrate = q * restrate
return [monthlyrate, fullmonths, restrate]
<file_sep>/setup.py
import os
import io
import setuptools
from pathlib import Path
package_root = os.path.abspath(os.path.dirname(__file__))
# # maybe not all installed in system
# # done in Makefile
# from doit.cmd_base import DodoTaskLoader
# from doit.doit_cmd import DoitMain
# def doit_run():
# loader = DodoTaskLoader()
# loader.setup(
# dict(
# dodoFile=os.path.join(package_root,'dodo.py')
# ,cwdPath=os.path.join(package_root,'chcko','r')
# ,seek_file=True
# ))
# DoitMain(loader).run(['-kd.', 'html'])
# DoitMain(loader).run(['-kd.', 'initdb'])
def main():
# doit_run()
os.chdir(package_root)
try:
import shutil
shutil.rmtree('build')
except:
pass
proot = Path(package_root)
readme_filename = os.path.join(package_root, "README.rst")
with io.open(readme_filename, encoding="utf-8") as readme_file:
readme = readme_file.read()
setuptools.setup(
name="chcko-r",
version = "1.3.2", # keep same a chcko
description="A random mix of exercises for chcko",
long_description=readme,
long_description_content_type="text/x-rst",
author="<NAME>",
author_email="<EMAIL>",
url="https://github.com/chcko/chcko-r",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Operating System :: OS Independent",
"Topic :: Internet",
"License :: OSI Approved :: GNU General Public License (GPL)",
"Topic :: Education",
"Topic :: Education :: Computer Aided Instruction (CAI)"
],
packages=setuptools.find_namespace_packages(),
include_package_data=True,
namespace_packages=["chcko"],
install_requires=['schemdraw','pint'],#don't ["chcko"], because on gcloud chcko is uploaded and not installed
extras_require={},
zip_safe=False,
)
if __name__ == "__main__":
main()
<file_sep>/chcko/r/u/__init__.py
# -*- coding: utf-8 -*-
import numpy as np
import random
from chcko.chcko.hlp import Struct, norm_frac as chorm
def chiven():
while True:
    A = np.array(random.sample(range(-5, 6), 4))
    A.shape = (2, 2)
    try:
        m = np.linalg.inv(A)              # fails for a singular matrix
        A = np.linalg.inv(np.array(m))    # invert back as a round-trip check
    except np.linalg.LinAlgError:
        continue
    break
g = Struct(m=m.tolist())
return g
def chalc(g):
A = np.linalg.inv(np.array(g.m))
A = A.round()
r = [A[0, 0], A[0, 1], A[1, 0], A[1, 1]]
return [i for i in r]
<file_sep>/chcko/r/a1/en.rst
.. raw:: html
%path = "physics/S=E*t"
%kind = 1
%level = 12
<!-- html -->
A system can be seen as one variable.
A system *state* is a *value* of the variable.
**Entropy** is the information of a variable.
Entropy is the *number of values* of the variable.
What value (or state) means is of no importance.
It has been abstracted away.
Only the number counts here.
The values must actually occur.
An elementary change is a value selection.
**Energy** is the *number of values per time*.
.. math::
S = Et
One can see it the other way around:
Energy brings time into existence by the selections of values.
If the system is composed of many independent variables,
this produces a system time resolution,
that does not exist in any of the independent variables.
A constant amount of energy can either result in
- a few states cycling quickly: `\Delta S` and `\Delta t` small
- many states cycling slowly: `\Delta S` and `\Delta t` large
A system often consists of layers.
Assuming an ideal gas,
the energy `Q=TS` is given by:
- the temperature `T`: the average kinetic energy of one particle
- the entropy `S`
This divides the system into two layers:
- `T` encodes information events (energy) of a layer below.
- `S` counts the events in the current layer.
The logarithm in entropy comes up,
when we distribute the information
to more variables of the same kind (e.g. the bit).
In the other direction this is the reason for the exponent `e^S`.
The particle's direction of motion partitions the number `N` of particles
- by direction: factor `1/2`, since exclusive
- by orientation: factor `3`,
since `T`, through averaging,
is acting on all three orientations simultaneously
Therefore:
.. math::
Q = ST = 3/2NkT = 3/2RT = 3/2pV
For an ideal gas the inner energy is equal to the work done on the surrounding `3/2 pV`.
The average energy per particle `E` is:
.. math::
E = 1/2 m v^2 = 3/2 kT
Boltzmann's constant `k` is a conversion factor of units of energy.
`v^2` can be related better to micro events per time than `T`,
but also `E` is only the energy in that layer and not the ultimate
unit of information event per time.
The ultimate information event is given by the Planck constant.
The sum of all such events creates space and time: E-t, x-v, ...
<file_sep>/chcko/r/di/en.rst
.. raw:: html
%path = "maths/numbers/C"
%kind = chindnum["texts"]
%level = 10
<!-- html -->
.. contents::
The Complex Numbers `\mathbb{C}`
--------------------------------
The real numbers (:lnk:`r.ci`) are more than just a count.
- There is also direction (`+` or `-`). Now we will extend this to all directions in a plane.
- In `\mathbb{R}` we invert the direction by multiplying with `-1`. We extend this to rotations into any direction.
In `\mathbb{R}`, `x^2` only assumes non-negative values. The equation `x^2+1=0` does not have a solution.
So we invent a "number" `i` that satisfies `i^2=-1`.
`i` is called **imaginary unit** and its multiples are called imaginary
numbers. `i` is like apple or orange. It has nothing to do with
the unit `1`. The imaginary numbers are orthogonal to `\mathbb{R}`,
which means you can choose from these two sets independently.
All combinations form a 2-dimensional space,
i.e. a plane, the **complex plane**.
`z = a + ib \in \mathbb{C}`
is also a two-dimensional vector: 2 orthogonal directions that can be added independently.
There are two representations
- `z = a+ib`, i.e. via the components or
- `z = r(\cos\varphi + i\sin\varphi)` via modulus `r` and argument `\varphi` (angle, phase) in radians.
Now consider the following:
- `i\cdot 1 = i`, i.e. multiplication by `i` turns 1 into `i`, which is orthogonal to 1,
the **real unit**. This is a rotation by a right angle.
- `i\cdot i = -1`. Again a rotation by a right angle.
Generally: multiplication by `i` produces a rotation by a right angle.
Since two multiplications (`x^2`) are supposed to invert (rotate by `\pi`)
one multiplication should rotate by half of it (`\pi/2`).
When multiplying exponentials, the exponent gets added.
This gives a hint that there could be a representation that has the angle in the exponent.
In the trigonometric addition formulas (e.g. `\cos(\alpha+\beta)=\cos\alpha\cos\beta-\sin\alpha\sin\beta`),
multiplication also adds the angles.
Finally developing `\sin` and `\cos` into a Taylor series and comparing with the `e^x` series
leads to the **Euler Formula**:
- :inl:`r.cy`
`z=re^{i\varphi}` is a usual way to represent complex numbers.
About `\sin` and `\cos` we know that the period is `2\pi`, therefore this is
true for `e^{i\varphi}`. Taking the nth root maps arguments up to `2n\pi` to
arguments below `2\pi`, and so we get `n` different roots.
.. math::
z^{1/n}=r^{1/n}e^{i(\varphi/n+2k\pi/n)}
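
As a quick illustration (a small Python sketch with the assumed example value `z = 8i`, not part of the derivation), the `n` distinct nth roots follow directly from modulus and argument::

    # the n distinct nth roots of z = r*e^(i*phi), here n = 3 and z = 8i
    import cmath

    z, n = 8j, 3
    r, phi = abs(z), cmath.phase(z)
    roots = [r**(1/n) * cmath.exp(1j*(phi/n + 2*k*cmath.pi/n)) for k in range(n)]
    for w in roots:
        print(w, w**n)   # each w**n is numerically equal to z again
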
More generally:
In `\mathbb{C}` every polynomial of degree n has exactly n roots
(**fundamental theorem of algebra**), if one counts the multiplicity
of roots. `\mathbb{C}` therefore is called **algebraically closed**.
This means that not only `x^2`, but every polynomial maps the whole
`\mathbb{C}` to the whole of `\mathbb{C}`.
.. admonition:: Note
In function theory one learns that this can be extended to all functions
that are infinitely often differentiable (analytic or holomorphic) in all of `\mathbb{C}`
(entire functions), because they can be developed into a Taylor series.
Further properties:
- a = Re(z) is the real part
- b = Im(z) is the imaginary part
- `\bar{z}=re^{-i\varphi}=a-ib` is the complex conjugate of z. `\bar{z^n}=\bar{z}^n`.
`z_1\bar{z_2}` combines in itself dot product (`Re(z_1\bar{z_2})=r_1r_2\cos\Delta\varphi`)
and vector product (`Im(z_1\bar{z_2})=r_1r_2\sin\Delta\varphi`).
- `|z| = \sqrt{z\bar{z}} = \sqrt{a^2+b^2} = r` is the absolute value (modulus) of z.
The square over the length of a complex number independent of direction
is given by `z\bar{z}` and not by `z^2`.
- `\varphi = arg(z)` is the argument (phase) of z.
- `arg(z_1z_2)=arg(z_1)+arg(z_2)`
- `arg(\frac{z_1}{z_2})=arg(z_1)-arg(z_2)`
Applications for `\mathbb{C}`
-----------------------------
Since `\mathbb{C}` is an extension of `\mathbb{R}`,
one can do everything with `\mathbb{C}` that one can do with `\mathbb{R}`.
What is essentially new is that `\mathbb{C}` includes all directions, not just `+` and `-`.
What is a direction?
:inline:`r.ct`
The complex numbers are used in physics and technology in connection with vibrations and waves
and there are many of them:
- mechanics/solid state physics: water waves, acoustic waves, elastic waves, ...
- Electricity: alternating current, AC circuits (resistance, capacitance and inductance), ...
- Electrodynamics: Electromagnetic waves (light, radio), ...
- Optics: Light, ...
- Quantum dynamics: particle waves, ....
Basically applications of complex numbers are due to
- the fact that unrestricted calculation is possible in `\mathbb{C}` and
- further results in function theory.
Many physical systems are described with differential equations.
These can be reduced to polynomials, and then one gets complex numbers as roots.
<file_sep>/chcko/r/bu/__init__.py
# -*- coding: utf-8 -*-
from random import sample
from chcko.chcko.hlp import Struct, norm_int as chorm
def chiven():
a, c = sample(list(range(2, 9)) + list(range(-9, -2)), 2)
x = sample(list(range(2, 9)) + list(range(-9, -2)), 1)[0]
b = (c - a) * x
g = Struct(a=a, b=b, c=c)
return g
def chalc(g):
    res = g.b / (g.c - g.a)   # solve for x, since b = (c - a)*x in chiven
    return [res]
<file_sep>/chcko/r/h/en.html
%path = "maths/vectors/matrix/stepwise production"
%kind = chindnum["problems"]
%level = 11
For the production of a product X it needs {{chiven.A[1][0]}} W(orking hours), {{chiven.A[1][1]}} P1 (Part 1)
and {{chiven.A[1][2]}} P2.
For the production of a product Y it needs {{chiven.A[2][0]}} W, {{chiven.A[2][1]}} P1
and {{chiven.A[2][2]}} P2.
A product Z needs {{chiven.Z[0]}} W and {{chiven.Z[1]}} X and {{chiven.Z[2]}} Y.<br>
Set up a matrix equation and determine how much the production of
{{chiven.m}} Z costs (K), given that the costs of the needed resources are
W={{chiven.k[0]}}, P1={{chiven.k[1]}}, P2={{chiven.k[2]}}.<br>
K =
%chq()
<file_sep>/chcko/r/cg/en.rst
.. raw:: html
%path = "maths/vectors/transformation and inverse"
%kind = chindnum["texts"]
%level = 11
<!-- html -->
Coordinate Transformation and Inverse Matrix
============================================
Though convenient, it is not a necessity that the basis vectors are orthogonal,
i.e. completely independent of each other.
As an example think of the ingredients of cakes as a vector space (*ingredient vector space*).
Then every cake is a vector, an independent choice from several variables,
which in this case are the quantities of each ingredient (0 for not used at all).
The ingredients can be regarded as orthogonal to each other. The context does not
ask for a detailed comparison. *The dot product is 0.*
Let's compare the cakes in detail though via their ingredients.
Cake A and cake B surely have ingredients in common.
So the unit vectors in the *cake vector space* are not orthogonal to each other
with this comparison. *The scalar product is not 0.*
A vector in the cake vector space (How many of each type of cake?) can be transformed
to the ingredient vector space by multiplying with a matrix.
Every column in the matrix is the recipe of one cake.
In a matrix and vector written as a set of numbers, every number means
something. What it means is coded by the position the number takes. This is
called position coding. We do the same with our number system, where the units,
the tens, the hundreds, ... have their own position.
In this example the cake space and ingredient space do not necessarily need to
have the same number of variables (number of variables = dimension).
We can have 10 ingredients and
3 types of cakes. Then the transformation matrix is 10x3 (10 rows, 3 columns).
Such a `m\times n` matrix with `m\not = n` cannot be inverted,
i.e. one cannot infer from the ingredients how many of each type of cake are baked.
Said differently: not for every combination of ingredients is there
a combination of cakes (a linear combination) that needs exactly those amounts of ingredients.
If we fix a number for each cake in a smaller assortment of cakes
we use less information, i.e. we make fewer decisions,
than in the bigger space of ingredients.
.. admonition:: Pseudoinverse
A non-square matrix can be pseudo-inverted, though: Moore-Penrose Pseudoinverse.
For this example multiplying an ingredient vector with the pseudo-inverse
would produce a cake vector, which minimizes unused quantities of ingredients
(Method of Least Squares) or makes best use of the ingredients (Maximum Entropy Method).
If we change from one vector space to another with the same dimension,
then we can get back to the starting one by multiplying with the *inverse matrix*.
In order for the inverse to exist, in addition to being square, the
columns/rows of the matrix must be *linearly independent*. If not, then that
is like effectively being in a smaller matrix (*rank of matrix*). For the
cake example this means that every type of cake must have a different combination of
ingredients, which is some extra information that distinguishes it from the
others and that can be used to code something.
.. admonition:: Linear Independence
A square matrix can be inverted, if columns (rows) cannot be expressed as
linear combination of the others, i.e. the rank of the matrix is equal to
its dimension.
One can calculate the inverse of a square Matrix by:
- leaving out the `ij` cross and calculate the determinant = Minor `M_{ij}`
- change the sign, if `i+j` is odd
- transpose, i.e. mirror at the main diagonal
(compare below: `ij` for `A` and `ji` for `M`)
- divide everything by the determinant
Short:
.. math::
(A^{-1})_{ij} = \frac{1}{det(A)}(-1)^{i+j} M_{ji}
Normally we write `\frac{1}{det(A)}` in front of the matrix,
but it can be multiplied with every number of the matrix.
For a *2x2 Matrix* `M_{ij}` is the diagonally opposite number. Because of the
transposing the numbers left bottom and right top (secondary diagonal) stay
where they are, but the sign changes. At the main diagonal the numbers get
swapped, but since `i+j` is even the sign does not change.
- Main diagonal `\rightarrow` sign stays
- Secondary diagonal `\rightarrow` position stays
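
A minimal Python sketch of the 2x2 case of this formula (the example matrix is an arbitrary assumption; numpy is only used for the cross-check)::

    # inverse of a 2x2 matrix via (A^-1)_ij = (-1)^(i+j) M_ji / det(A)
    import numpy as np

    A = np.array([[4., 7.],
                  [2., 6.]])
    det = A[0, 0]*A[1, 1] - A[0, 1]*A[1, 0]
    # main diagonal gets swapped, secondary diagonal keeps position but changes sign
    A_inv = np.array([[ A[1, 1], -A[0, 1]],
                      [-A[1, 0],  A[0, 0]]]) / det
    print(A_inv)
    print(np.linalg.inv(A))   # same result
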
<file_sep>/chcko/r/cp/en.rst
.. raw:: html
%path = "maths/maths+information"
%kind = chindnum["texts"]
%level = 9
<!-- html -->
Computer and Mathematics
------------------------
We humans process information and also have abstracted the rules
by which this is done. The result is mathematics.
The computer processes information by applying these rules.
It can even be used to find new rules, new paths, new proofs.
Computer science in this sense is a part of mathematics,
now with many extensions which are specifically applicable
to the computers of our times.
Since we have the computer now,
we will not only let it calculate,
but gradually use it for all kinds of formalized thought processes.
Autonomous learning, abstracting and synthesizing (creativity),
can also be taken over by computers,
although this is still in its infancy.
The computer not only makes calculations unbelievably fast,
but has the potential to take over most of the thought tasks,
we humans are capable of.
Mathematics deals with information processing.
But what is that?
Information
-----------
The communication between two humans, in which information is exchanged,
can be broken down to an elementary process.
This process consists of
- a set, one can choose elements from (choice)
- the choosing process (decision, ...)
The selection is a repeated process.
All selections make up a **mapping**.
An element can be chosen exclusively only, and every element gets its turn.
.. admonition:: Note
The smallest set one can still choose from has 2 elements.
That is why the **bit** is the smallest unit of information.
The sender chooses concepts in his head, maps them to words,
maps them to phonetic combinations or sign combination and sends
them over a medium (air, paper) to the receiver.
The receiver processes in the opposite direction.
Phonemes, letters and digits are there to create a multitude (words, numbers)
to which concepts can be mapped (coding).
.. admonition:: Note
With numbers one can choose everything
one can choose with words. One way is to put all concept into a
sequence and let the number choose the position.
Words in our brain are used associatively,
but that is possible with numbers, too.
Basically all dynamic systems function with the above elementary selection processes.
There are always sets and choices.
Examples:
- Biology: Variants via mutation and selection through the natural environment,
i.e. the other individuals and the habitat.
- Economy: supply and demand
- Society: organizations and their success
- Politics: politicians and voters
- Science: theories and their usefulness to describe phenomena
- Ideas and their supporters
- random thoughts in our brain and checking via experience stored in the brain
- ...
Since quantum mechanics we know that randomness is an inherent principle of nature,
that not everything is predetermined, but that selection processes do really
create new combinations, fleeting ones and staying ones.
Nature processes information by distributing states.
The universe can thus be compared with living systems like us.
Both function with the same abstract principle of information processing.
Energy, Entropy
...............
:inline:`r.cv`
:inline:`r.a1`
Mathematics and Information
---------------------------
We have recognized the elementary process of information (transport) to
be the selection, which consists of
- set
- choosing
How does this connect to mathematics?
Sets and Variables
..................
The set we find in the set theory, the foundation of mathematics.
To choose elements from a set one can reserve a bit for every element
and code a selection via 0 (not used) and 1 (used).
One can do with less bits, if one first finds out, which elements
exclude each other.
A set from which an element is chosen exclusively, is a variable.
The element is the value.
.. admonition:: Variable
Variables consist of values.
By a variable we mean the real thing, not a placeholder for a number.
Only via a mapping, e.g. by comparing to a unit, is the value linked to a
number (coordinate).
Mathematics describes reality by finding variables
and then dependencies between them.
Structure
.........
An important aspect of information processing is abstraction. By comparing one
finds common patterns and these are used to build a smaller encoding
(description) and to recognize later. In programming there is an important
guideline: Don't Repeat Yourself (DRY). That's how mathematics does it as
well. Patterns that repeat are described abstractly and concrete objects become
examples of these structures (group, ring, field, ...).
This makes information exchange (communication) more effective. Instead of
repeatedly describing, e.g., the rules for `+` in `\mathbb{Z}`, in `V` and so on,
one can say that `(\mathbb{Z},+)`, `(V,+)` and so on are groups.
Mathematics as science builds a structure as a whole by successively adding new
concepts and theorems. New works build on these concepts, choose what to work
on, and thus extend the structure.
.. admonition:: Structure
This successive extension and sophistication of structure
is a general development of dynamic systems (evolution):
biological evolution, economy, ... (see above).
In order for complex and lasting structure to develop,
energy must be supplied in the right dosage.
For the economy this is the money.
Algorithmics
............
To determine (choose) a value of a variable, mathematics uses the **function**.
This is also the name in informatics, though with a slightly different meaning.
Other names are subprogram, subroutine, procedure, ...
:inline:`r.cw`
The value of one variable can depend on the values of several other variables.
Functions therefore can have several parameters. They are called **formal parameters**
in the definition of the function. A first selection process already happens
when choosing the **actual parameters** that correspond to the formal ones when
calling the function.
Functions in programming languages do not necessarily return the value of a variable,
but it is a good design to name variables and the elementary
dependencies via functions in order to clearly separate them from others.
Functions consist of calls to other functions (see the short sketch below).
- Successive calls form a **sequence**.
- Decisions about which calls to make under which conditions are **branches**.
- Repeated execution of a block of calls is a **loop**.
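
A minimal Python sketch of these three building blocks (purely illustrative, not taken from any exercise code)::

    # sequence, branch and loop in one small function
    def sum_of_absolutes(numbers):
        total = 0                 # sequence: one statement after the other
        for n in numbers:         # loop: repeat a block of calls
            if n >= 0:            # branch: decide which call to make
                total += n
            else:
                total -= n
        return total

    print(sum_of_absolutes([3, -1, 4]))   # 8
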
This algorithmics is hidden everywhere in mathematics behind numbers,
expressions and symbols (polynomials, `\sqrt{}`, `\lim`, `\int`, ...) in
theorems and proofs. The whole of mathematics can be regarded as a huge
program, but unfortunately still most of it in a language that the computer
does not understand.
.. admonition:: Representation
A big challenge is to convert the representation of mathematics
into a language the computer can understand.
There are many computer languages, some especially made for mathematics.
Many different representations increase the effort
and reduce the applicability.
The same is true for human languages.
.. admonition:: Numbers
When introducing the number systems algorithmic aspects are combined
with the count (the natural number):
- Count with + and -: Integers
- Count with * and /: Rationals
Then we can regard
- Numbers as elementary instruction (add 2, subtract 2, ...)
- analytic expressions as programs/functions
- term simplification as program simplification, profiling
.. admonition:: Equations
The equation is a function that returns the result of a comparison.
Equivalent transformations of an equation are a kind of profiling, too.
Equations and inequalities are used to implicitly define sets.
<file_sep>/chcko/r/bj/__init__.py
# -*- coding: utf-8 -*-
import random
from math import log
from chcko.chcko.hlp import Struct
def chiven():
g = Struct()
g.pc = random.randrange(10, 90)
return g
def chalc(g):
    # C14 dating: solve pc/100 = exp(-ln(2)*t/5730) for the age t in years
    t = -log(g.pc / 100.0) * 5730 / log(2)
    return [t]
#import sympy
#from sympy.abc import x
#th = 5730
# lambda = sympy.ln(2)/th
# lambda.n()
# c14 = sympy.exp(-lambda*x)
#nc14 = lambda v:sympy.N(c14.subs(x,v))
# nc14(5730)
#t = -log(g['pc']/100.0)*5730/log(2)
#
<file_sep>/chcko/r/ch/de.rst
.. raw:: html
%path = "Mathe/Folgen und Reihen"
%kind = chindnum["Texte"]
%level = 10
<!-- html -->
Folgen und Reihen
-----------------
Eine **Folge** ist eine Funktion der natürlichen Zahlen.
Die natürliche Zahl ist eine Methode, um auf die Glieder der Folge zu verweisen.
Sie heißt auch Index.
- `a_1` ist das erste Glied der Folge
- `a_2` ist das zweite Glied der Folge
- ...
- `a_n` ist das n-te Glied der Folge
Werden die ersten n Glieder einer Folge aufsummiert,
dann ist das das n-te Glied der Summenfolge oder **Reihe**.
Der Begriff *Funktion* bedeutet:
Weiß ich das wievielte Glied, dann weiß ich die Zahl dort.
Viele Folgen haben eine Regelmäßigkeit, die es erlaubt sie viel kürzer zu beschreiben
(Kurze Beschreibung = geringe Komplexität).
arithmetische Folge
...................
Bei der arithmetischen Folge ergibt sich ein Glied der Folge aus dem
vorhergehenden durch Addieren einer gleichbleibenden Zahl.
`a_{n+1} = a_n + d`.
Das ist die **rekursive** Darstellung der arithmetischen Folge.
Um zum n-ten Glied zu kommen, wiederholt man das n-1 mal:
`a_n = a_1 + (n-1) d`
Das ist die **Termdarstellung** der arithmetischen Folge.
.. admonition:: Hinweis
In vielen Programmiersprachen wird bei 0 gestartet, da man dann `nd` hat, statt `(n-1)d`.
Um eine gegebene Folge als arithmetische Folge zu erkennen, schaut man, ob die
Differenz aufeinanderfolgender Glieder gleich bleibt.
arithmetische Reihe
...................
Betrachtet man die Summe die ersten n Glieder, dann kann man eine Regelmäßigkeit erkennen,
und solche sind immer Anlass für einfachere Berechnungsmethoden.
Betrachtet man obige Termdarstellung, kann man erkennen, dass wenn man vom
Anfang der Folge startet immer d dazu kommt, wenn man aber vom letzten (n-ten)
Glied der Folge rückwärts geht immer d wegkommt. Diese Operationen heben sich auf.
Man kann deshalb Anzahl/2 mal die Summe vom ersten und letzten Glied machen.
`\sum_{k=1}^{n} a_k = \frac{n(a_1+a_n)}{2}`
Insbesondere ist `1+2+...n=\frac{(n+1)n}{2}`.
geometrische Folge
...................
Bei der geometrischen Folge ergibt sich ein Glied der Folge aus dem
vorhergehenden durch Multiplikation mit einer gleichbleibenden Zahl.
`a_{n+1} = a_n \cdot q`.
Das ist die **rekursive** Darstellung der geometrischen Folge.
Um zum n-ten Glied zu kommen, wiederholt man das n-1 mal:
`a_n = a_1 q^{n-1}`
Das ist die **Termdarstellung** der geometrischen Folge.
Um eine gegebene Folge als geometrisch zu erkennen, schaut man, ob der
Quotient aufeinanderfolgender Glieder gleich bleibt.
geometrische Reihe
...................
Betrachtet man
.. math::
\begin{matrix}
1+&q+q^2+...+q^{n-1}&=&S_n\\
&q+q^2+...+q^n&=&q S_n\\
\end{matrix}
so sieht man, dass viele Summanden gleich sind. Durch Subtraktion erhält man
`\sum_{k=1}^{n} q^{k-1} = 1 + q + ... + q^{n-1} = \frac{q^n-1}{q-1}=\frac{1-q^n}{1-q}`
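
Eine kleine Python-Skizze (nur zur Veranschaulichung, mit angenommenen Werten `a_1=3`, `d=5`, `q=2`, `n=10`): die direkte Summe stimmt mit den obigen Formeln überein::

    # arithmetische und geometrische Reihe gegen die Summenformeln pruefen
    a1, d, q, n = 3, 5, 2, 10

    arith = [a1 + k*d for k in range(n)]            # Glieder a_1, a_1+d, ...
    print(sum(arith), n*(arith[0] + arith[-1])//2)  # beide 255

    geom = [q**k for k in range(n)]                 # 1 + q + ... + q^(n-1)
    print(sum(geom), (q**n - 1)//(q - 1))           # beide 1023
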
<file_sep>/chcko/r/a0/de.rst
.. raw:: html
%path = "Mathe/Vektoren"
%kind = chindnum["Texte"]
%level = 11
<!-- html -->
Wenn man die Zutaten von einer Auswahl von Kuchenrezepten als Vektorraum auffasst,
dann ist jeder Kuchen `z` ein Vektor im *Zutatenvektorraum*,
d.h. wir wählen unabhängig für jede Zutat (Variable `z_i`).
Wir verwenden den Wert 0, wenn die Zutat nicht verwendet wird.
Wenn man nur die Kuchen betrachtet,
dann ist eine Auswahl daraus ein Vektor `k` im *Kuchenvektorraum*.
Jedes `k_j` ist die Anzahl der Kuchensorte `j`.
Will man von einer Auswahl von Kuchen auf die Zutaten kommen,
dann ist das eine **Koordinatentransformation**.
Um die Gesamtmenge `z_i` zu erhalten muss man die Anzahl von jeder Kuchensorte `k_j`
mit der jeweiligen Zutatmenge multiplizieren.
Das läuft auf eine Matrixmultiplikation hinaus.
`z = ZK \cdot k = \sum_j ZK_{ij}k_j`
In `ZK` ist jede Spalte ein Rezept,
d.h. die Zutaten (**Komponenten**) für den Kuchen `j`.
Um auf den Preis `p` im *Preisvektorraum* zu kommen,
multiplizieren wir wieder
`p = PZ \cdot z = PZ_{1i} z_i`
`PZ` ist eine Matrix mit einer Zeile.
Die Anzahl von Zeilen ist die Dimension des Zielvektorraumes.
<file_sep>/chcko/r/cq/en.rst
.. raw:: html
%path = "maths/operations"
%kind = chindnum["texts"]
%level = 9
<!-- html -->
A very basic operation is the **addition**.
Addition can be seen as cardinality of the union of two disjoint sets.
`|M\cup N|=|M|+|N|` if `M\cap N = \emptyset`
This abstract view can be applied to concrete cases, if we identify the unit of
a resource (length, mass, ...) with an element and there is no overlapping,
because the resource can be dispatched only once.
In the application one must take the physical system into account though.
For example elastic rods inserted vertically into a tube might not add their
lengths due to their elasticity and weights.
Volumes of liquids when mixed might not add, because the molecules can
use the space more efficiently in the mixture.
**Multiplication** one first encounters when calculating the area of a rectangle.
One can either add up all unit squares (addition), or
repeat (multiplication) the rows or the columns (commutativity of `\cdot`).
If we address every unit square with coordinates `(i,j)`, then multiplication
counts all combinations of `1 \le i \le a` and `1 \le j \le b`, which yields `a\cdot b`.
In a square of side length `a` one has `a\cdot a` unit squares, in a cube of
side length `a` one has `a^3` unit cubes ... Here we count combinations of a
set `M` `n` times with itself. `|M\times \dots \times M| = |M|^n`. Thus we
have come to the **power** operation.
- Multiplication is a shorthand for addition.
So multiplication is a second level operation.
- Power is a shorthand for multiplication.
Power therefore is a third level operation.
**Operation** is another name for **function** (:lnk:`r.cr`), if certain rules
like the **associative law** or the **commutative law** apply. An **operation** can
easily be made into a new set by combining it with a value {(value,operation)}.
(see Zahlen: :lnk:`r.ci` and `lambda calculus <http://en.wikipedia.org/wiki/Lambda_calculus>`_).
These are then sets of instructions or **operators**, which can be applied to
other values of other variables, e.g. 3m = (3,times) m, i.e. 3 = (3,times).
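
A small Python sketch of these operation levels (illustrative only): multiplication as repeated addition and power as repeated multiplication::

    def times(a, b):             # multiplication: repeat addition b times
        result = 0
        for _ in range(b):
            result += a
        return result

    def power(a, b):             # power: repeat multiplication b times
        result = 1
        for _ in range(b):
            result = times(result, a)
        return result

    print(times(4, 3), power(4, 3))   # 12 64
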
<file_sep>/chcko/r/ce/de.rst
.. raw:: html
%path = "Mathe/Funktionen/Log"
%kind = chindnum["Texte"]
%level = 10
<!-- html -->
Logarithmus
-----------
Will man vom Ergebnis des Potenzierens wieder zurück zu den Ausgangszahlen,
dann gibt es zwei Wege, nämlich zur Basis und zur Hochzahl.
Zur Basis kommt man durch Potenzieren mit dem Kehrwert, also `(3^2)^{\frac{1}{2}} = 3`,
auch Wurzelziehen genannt.
Zur Hochzahl kommt man durch den **Logarithmus**, also `\log_{3}(3^2)=2`.
Aus dem Rechnen mit Potenzen zu gleicher Basis,
z.B. `2^32^2=2^{3+2}`
und `\frac{2^3}{2^2}=2^{3-2}` ergibt sich, dass der Logarithmus aus
*mal* *plus* und aus *teilen* *minus* macht.
Aus Wiederholen von Multiplikation (Potenzieren)
wird dadurch Wiederholen von Addition (Multiplikation):
.. math::
\begin{matrix}
\log ab &= \log a + \log b \\
\log \frac{a}{b} &= \log a - \log b \\
\log b^c &= c\log b
\end{matrix}
Aus der letzten Regel ergibt sich, dass man den Logarithmus zu einer beliebigen
Basis wie folgt berechnen kann:
.. math::
b^x &= d \\
x &= \frac{\log d}{\log b}
Exponentialgleichungen, also Gleichungen, wo man den Exponent (die Hochzahl) sucht,
löst man am besten, indem man zuerst so lange umformt, bis man `b^x = d` hat,
dann wendet man auf beiden Seiten den Logarithmus an.
Der Logarithmus bezieht sich immer auf eine bestimmte Basis.
Wenn diese nicht angegeben ist, dann wird mit `\log` meistens die Basis 10 angenommen,
oft, etwa in Programmiersprachen, aber auch die Eulersche Zahl e=2.71828182846...
Es gilt:
.. math::
\log_{10} 10 = \log 10 = \text{lg} 10 = 1\\
\log_e e = \ln e = 1\\
\log_2 2 = \text{lb} 2 = 1\\
<file_sep>/chcko/r/bd/__init__.py
# -*- coding: utf-8 -*-
from random import randrange
from math import log
from chcko.chcko.hlp import Struct
def chiven():
g = Struct()
g.Kn = randrange(1000, 2000)
g.R = randrange(100, 200)
g.i = randrange(2, 9)
return g
def chalc(g):
r = log(g.Kn * g.i / 100.0 / g.R + 1) / log(1.0 + g.i / 100.0)
return [r]
<file_sep>/chcko/r/cd/de.rst
.. raw:: html
%path = "Mathe/Finanz/Zinsen"
%kind = chindnum["Texte"]
%level = 10
<!-- html -->
`K` Kapital
Ein Geldbetrag.
`i` Zinssatz
Die Zu- oder Abnahme von `K` wird in %=1/100 angegeben.
- Zinswert: `3\% K = 0.03 K`.
- Zunahme: `K + 3\% K = (1+3\%) K = 1.03 K`.
- Abnahme: `K - 3\% K = (1-3\%) K = 0.97 K`.
`n` Perioden (Jahre/Quartale/Monate/Tage)
Zum Zinssatz `i` gehört immer auch ein Zeitraum, in dem die Zunahme oder Abnahme stattfindet.
- `i` meint ein Jahr (Jahreszinssatz, Effektivzinssatz)
- `i_{12}` meint ein Monat
- `i_4` meint ein Quartal
Nach diesem Zeitraum ist K um `iK` größer, also `K_{n=1} = K_0 (1+i) = K_0 q` (q = 1+i).
Zinseszinsen
Am Anfang eingelegtes Kapital ist nach einer Periode
(n=1) `K_{n=1} = K_0 (1+i) = K_0 q` wert, nach n=2 Perioden `K_0 q^2`, nach n=3 Perioden `K_0 q^3`...
Nach n Perioden:
- `K_n = K_0 q^n`
- Aufzinsen: Ein Kapital wird mit `q^n` multipliziert, um den Wert `n` Perioden später zu erhalten.
- Abzinsen: Ein Kapital wird mit `q^{-n}` multipliziert, um den Wert `n` Perioden früher zu erhalten.
Rente
Eine Rente ist eine Zahlung (Rate `r`) in regelmäßigen Zeitabständen (Perioden).
Die Periodenzahl für jede Rate ist anders.
Die Endwerte aller Raten aufsummiert ergibt die Rentenformel:
`R_n = \sum_{m=0}^{n-1} r_m = \sum_{m=0}^{n-1} r q^m = r \frac{q^n - 1}{q-1}`
Diese Formel kann man anwenden, um den Endwert `R_n` der Rente zu berechnen,
wenn die Rente am Ende jeder Periode gezahlt wurde (nachschüssig).
- Nachschüssige Rente: `R_n = r \frac{q^n - 1}{q-1}`
- Vorschüssige Rente: `R_n^v = q R_n`
**Barwert** der Rente ergibt sich durch Abzinsen des **Endwertes** `B_n = R_n q^{-n}`.
Unterjährige Rente
Um den unterjährigen Zinssatz mit dem Jahreszinssatz vergleichen zu können, muss man umrechnen.
Lineare Umrechnung, wenn in den Monaten oder Quartalen keine Verzinsung stattfindet:
- `i_{12} = i/12`
- `i_4 = i/4`
Konforme (äquivalente) Umrechnung bei unterjähriger Verzinsung:
- `i = (i_{12} + 1)^{12} - 1`
- `i = (i_4 + 1)^4 - 1`
Wenn ein Jahreszinssatz gegeben ist und eine monatliche Rente zu berechnen ist,
dann muss zuerst **auf den monatlichen Zinssatz umgerechnet** werden.
Rentenrest
Um zu beantworten, wieviel Kapital zu einem Zeitpunkt
während der Rentenlaufzeit noch übrig ist, zählt man den Rentenwert
bis zu diesem Zeitpunkt vom Kapitalwert an diesem Zeitpunkt ab.
Umrechnung einer Rente in eine andere
- Man ermittelt zuerst den Endwert der einen Rente `R_n`.
- Dieses `R_n` muss man dann zum Endwert der anderen Rente auf- oder abzinsen.
- Durch Verwendung der Rentenformel kann man dann die gesuchte Größe (`n`, `q`, `r`) der neuen Rente ausrechnen.
Kapitalvergleich, Angebotsvergleich
Um Gelder vergleichen zu können muss man sie auf den gleichen Zeitpunkt (etwa jetzt, also Barwert)
umrechnen. Umrechnen geht über Auf- oder Abzinsen oder unter Verwendung der Rentenformel.
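
Eine kleine Python-Skizze (nur zur Veranschaulichung, mit angenommenen Werten für Rate, Zinssatz und Laufzeit): die Rentenformel gegen das direkte Aufzinsen der einzelnen Raten::

    # Endwert einer nachschuessigen Rente: Formel gegen direkte Summe
    r, i, n = 100.0, 0.03, 10                 # Rate, Jahreszinssatz, Perioden (Annahmen)
    q = 1 + i

    formel = r * (q**n - 1) / (q - 1)
    summe = sum(r * q**m for m in range(n))   # jede Rate einzeln aufgezinst
    print(round(formel, 2), round(summe, 2))  # gleiche Werte
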
<file_sep>/chcko/r/z/__init__.py
# -*- coding: utf-8 -*-
import random
from sympy import Poly, solve
from sympy.abc import x
from chcko.chcko.hlp import Struct, norm_int, norm_frac
def chiven():
p = 0
while p == 0:  # resample if x1 == -x2, which would make p zero
    x1, x2 = random.sample(list(range(2, 9)) + list(range(-9, -2)), 2)
    p = -(x1 + x2)
q = x1 * x2
a = random.sample(list(range(2, 6)) + list(range(-5, -1)), 1)[0]
b = p * a
c = q * a
g = Struct(coef=[a, b, c])
return g
def chalc(g):
# g=chiven()
p = Poly(g['coef'], x, domain='ZZ')
x1, x2 = solve(p)
xs = 1.0 * (x1 + x2) / 2
ys = p(xs)
answers = [
norm_int(x1) +
', ' +
norm_int(x2),
norm_frac(xs) +
',' +
norm_frac(ys)]
return answers
def chorm(answers):
#answers = chalc(chiven())
a = answers[:]
res = a[0]
try:
sa = sorted([norm_int(aa) for aa in a[0].split(',')])
if len(sa) == 1:
res = sa * 2
else:
res = sa
except:
pass
a[0] = ','.join(res)
a[1] = ','.join([norm_frac(aa) for aa in a[1].split(',')])
return a
<file_sep>/chcko/r/cp/de.rst
.. raw:: html
%path = "Mathe/Mathe+Information"
%kind = chindnum["Texte"]
%level = 9
<!-- html -->
Computer und Mathematik
-----------------------
Wir Menschen verarbeiten Information und haben die Regeln dazu
abstrahiert und Gesetzmäßigkeiten entdeckt. Das Resultat ist
die Mathematik.
Der Computer verarbeitet auch Informationen und kann die
Ergebnisse der Mathematik anwenden. Er kann auch schon
genutzt werden, um neue Regeln zu entdecken,
oder um Wege, Beweise, zu bekannten Zusammenhängen zu finden.
Die Computerwissenschaft Informatik kann in der Hinsicht als Teilgebiet
der Mathematik angesehen werden, jetzt mit vielen Erweiterungen
welche speziell auf die Eigenheiten der Informationsverarbeitung
von Computern eingeht.
Weil der Mensch jetzt den Computer hat, werden nicht nur
Rechnungen sondern zunehmend formalisierte Denkprozesse dem Computer überlassen.
Auch Lernen, Abstrahieren und Synthetisieren (kreative Aufgaben)
kann der Computer übernehmen, wenn auch derzeit noch wenig.
Der Computer macht also nicht nur Rechnen unglaublich schneller,
sondern er hat das Potential alle Denkprozesse des Menschen zu übernehmen.
Die Mathematik beschäftigt sich mit Informationsverabeitung.
Aber was ist Information?
Information
-----------
Die Kommunikation zwischen zwei Menschen, bei der Information
ausgetauscht wird, kann auf einen elementaren Prozess reduziert werden:
den Auswahlprozess (Selektion, Entscheidung)
Es braucht eine
- Menge, aus der ausgewählt werden kann (Angebot, Auswahl, Selektion).
- Das Auswahlverfahren (Selektion, Entscheidung)
Die Auswahl (Selektion) ist ein wiederholter Vorgang.
Ein Element wird exklusiv ausgewählt (nur eins) und jedes Element kommt dran.
Die Summe aller Auswahlvorgänge ist eine **Abbildung**.
.. admonition:: Hinweis
Die kleinste Anzahl, aus der man noch auswählen kann, ist 2.
Deshalb ist das **Bit** auch die kleinste Informationseinheit.
Der Sender wählt aus Konzepten in seinem Kopf aus, bildet diese auf Worte ab,
welche dann mittels Lauten oder Schriftzeichen kodiert und gesendet werden.
Der Empfänger macht den umgekehrten Weg.
Laute, Schriftzeichen und Ziffern sind dazu da,
um eine Vielheit zu erzeugen (Wörter, Zahlen),
auf die Konzepte abgebildet werden können.
.. admonition:: Hinweis
Mit Zahlen kann man alles auswählen, was man mit Worten
auswählen kann, etwa indem man die Elemente in eine Reihe bringt
und dann mit der Zahl eine Position auswählt.
Worte werden assoziativ verwendet, aber auch das geht mit Zahlen.
Eigentlich arbeiten alle dynamischen System auf diese Art.
Es gibt Mengen von möglichen Zuständen und Selektion. Beispiele:
- In der Biologie: Varianten durch Mutation und Selektion durch die natürliche
Umgebung, die Artgenossen und das Biotop
- In der Wirtschaft: Angebot und Nachfrage
- In der Gesellschaft: Formen der Organisation und deren Zulauf
- In der Politik: Politiker und Wähler
- In der Wissenschaft: Theorien und deren Nützlichkeit zur Erklärung von Phänomenen
- Ideen und deren Befürworter
- Gedanken im eigenen Kopf und deren Bestätigung durch Erfahrungen
- ...
Seit der Quantenmechanik weiß man, dass der Zufall ein Prinzip der Natur ist,
dass nicht alles vorbestimmt ist, sondern dass die Auswahlprozesse wirklich
neue Kombinationen entstehen lassen, flüchtige und auch bleibende.
Die Natur verarbeitet Information, indem sie Zustände verteilt. In diesem
Sinne ist das Universum vergleichbar mit einem Computer oder mit Tieren
einschließlich dem Menschen, oder besser, letztere beherbergen das gleiche
Prinzip.
Energie, Entropie
.................
:inline:`r.cv`
:inline:`r.a1`
Mathematik und Information
--------------------------
Als elementare Einheit der Information(sübertragung) haben wir
- Menge
- Auswahl
erkannt.
In welchem Bezug steht das zur Mathematik?
Mengen und Variablen
....................
Die Menge finden wir in der Mengenlehre als Fundament der Mathematik.
Um Elemente aus der Menge auszuwählen, kann man jedem Element ein bit zuordnen
und dann eine Auswahl mit 0 (nicht verwendet) und 1 (verwendet) codieren.
Mit weniger bits kommt man aus, wenn man herausfindet, welche Elemente
sich gegenseitig ausschließen.
Eine Menge, aus der nur ein Element ausgewählt werden kann,
ist eine Variable.
Das Element das ausgewählt wird heißt Wert.
.. admonition:: Variable
Variablen bestehen aus Werten.
Gemeint sind aber nicht Zahlen sondern die realen Dinge. Erst über eine
Abbildung wird den realen Werten einer realen Variablen durch Vergleich mit
Einheiten eine Zahl zugeordnet (Koordinate).
Die Mathematik beschreibt die Realität indem Variablen ausfindig gemacht werden
und Abhängigkeit zwischen ihnen aufgestellt werden.
Struktur
........
Ein wichtiger Aspekt der Informationsverarbeitung ist die Abstraktion.
Durch Vergleich werden Gemeinsamkeiten gefunden und diese dienen
dann zum Wiedererkennen. In der Programmierung gibt es eine wichtige Regel:
Don't Repeat Yourself (DRY). So macht es auch die Mathematik.
Muster die sich wiederholen, werden abstrakt beschrieben und konkrete
Objekte werden zu Beispielen dieser Strukturen (z.B. Gruppe, Ring, Körper,...).
Das macht den Informationsaustausch (Kommunikation) viel effektiver. Statt
wiederholt zu beschreiben, wie man in `\mathbb{Z}` mit `+` umgeht und in einem
Vektorraum und in ..., sagt man `(\mathbb{Z},+)` und `(V,+)` sind Gruppen.
Die Mathematik als Wissenschaft baut eine Struktur auf,
jetzt in dem Sinne, dass sukzessive neue Konzepte dazukommen,
eine Menge aufgebaut wird, aus der man auswählen kann.
Neue Arbeiten verwenden diese Konzepte (diese Sprache)
und erweitern sie.
.. admonition:: Struktur
Auch die sukzessive Erweiterung einer Struktur
ist ein Entwicklungsvorgang allgemeiner dynamischer System
(biologische Evolution, Wirtschaft, ... siehe Beispiele oben).
Damit sich komplexere und länger währende Strukturen entwickeln können,
müssen diese die Energieverteilung (in der Wirtschaft die Geldverteilung)
mit aufbauen, so dass Subsysteme richtig dosiert versorgt werden.
Algorithmik
...........
Um einen Wert aus einer Variablen zu bestimmen (auszuwählen)
hat die Mathematik die **Funktion**. Das ist auch der Name in der
Informatik, auch wenn in einer etwas anderen Bedeutung. Andere Namen sind
Unterprogramm, Subroutine, Prozedur, ...
:inline:`r.cw`
Die Auswahl eines Wertes einer Variablen kann von mehreren anderen Variablen abhängen.
Entsprechend haben Funktionen oft mehrere Parameter. Bei der Definition einer
Funktion heißen sie **formale Parameter**. Ein erster Auswahlprozess passiert dann,
wenn diese formalen Parameter beim Aufruf (Anweisung) zu aktuellen Werten
anderer Variablen (**aktuelle Parameter**) abgebildet werden.
Funktionen in Programmiersprachen liefern nicht unbedingt den Wert einer
Variablen zurück, sondern oft mehrere, und diese Werte können auch irgendwo
abgespeichert werden, statt sie zurückzuliefern, was theoretisch aber das
Gleiche ist. Es ist jedoch ein gutes Design, Variablen und deren Abhängigkeiten
zu benennen und damit von anderen zu trennen.
Funktionen bestehen aus Aufrufen anderer Funktionen.
- hintereinander ausgeführte Aufrufe heißen **Sequenz** (engl. sequence).
- Entscheidungen, welche Aufrufe unter welchen Bedingungen gemacht werden, heißen
**Verzweigung** (engl. branch)
- wiederholte Ausführung heißt **Schleife** (engl. loop)
Diese Algorithmik ist in der Mathematik überall versteckt, in Zahlen, in Ausdrücken
und Symbolen (Polynome, `\sqrt{}`, `\lim`, `\int`, ...), in Sätzen und Beweisen.
Die ganze Mathematik kann man sich als großes Programm vorstellen,
aber leider das Meiste noch in einer Sprache, die der Computer nicht versteht.
.. admonition:: Darstellung
Eine große Herausforderung ist die Darstellungen der Mathematik in eine
Computersprache zu übertragen. Aber von letzteren gibt es auch viele,
unter anderem solche die speziell für die Mathematik gemacht wurden.
Viele unterschiedliche Darstellungen erhöhen den Aufwand
und vermindern die Anwendbarkeit in einem unvorstellbarem Ausmaß.
Das gleiche gilt auch für die vielen menschlichen Sprachen.
.. admonition:: Zahlen
Bei der Einführung der Zahlen ist es naheliegend algorithmische Aspekte
direkt mit der Anzahl zu verbinden:
- Anzahl mit + und -: ganze Zahlen
- Anzahl mit * und /: rationale Zahlen
Dann ergeben sich folgende Betrachtungen:
- Zahlen sind elementare Anweisungen (2 hinzu, mal 2, ...)
- analytische Ausdrücke sind Programme/Funktionen
- Termvereinfachung ist Programmvereinfachung, auch Profiling genannt
.. admonition:: Gleichung
Die Gleichung ist eine Funktion, die das Ergebnis eines Vergleiches liefert.
Äquivalenzumformungen von Gleichungen sind auch eine Art Profiling.
Gleichungen und Ungleichungen dienen auch dazu, implizit Mengen zu beschreiben.
<file_sep>/chcko/r/co/en.rst
.. raw:: html
%path = "maths/abstract-concrete"
%kind = chindnum["texts"]
%level = 9
<!-- html -->
In mathematics as in daily life, **concrete** observations
that repeat are described once and then referred to later.
The common pattern in many observations is called **abstract**.
Such patterns make the descriptions shorter in their totality.
For learners it is often difficult to link the abstract with the concrete
again. Therefore it is good to comment definitions and theorems with motivation,
examples and applications.
First one normally starts with the concrete, recognizes common patterns by comparing,
which is called **analysis**, and the result is the **abstraction**.
This is **reduction of redundancy** and **compression**, a fundamental cognitive process.
.. admonition:: Note
Mathematics often uses the **equivalence relation**
(= a **reflexive, symmetric and transitive relation**, `\sim`)
as a tool to describe abstraction.
One focuses on one or a few properties and leaves others out.
All elements with a certain value of the property variable
are equivalent and form an equivalence class.
All equivalence classes make up the **quotient space**
`M/\sim`, which is a set of disjoint subsets of `M`.
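
As a tiny illustration (a Python sketch, not from the text): with "same remainder mod 3" as the chosen property, the numbers 0..8 fall into three equivalence classes, which together form the quotient space::

    # equivalence classes of 0..8 under "same remainder mod 3"
    from collections import defaultdict

    classes = defaultdict(list)
    for n in range(9):
        classes[n % 3].append(n)

    print(dict(classes))   # {0: [0, 3, 6], 1: [1, 4, 7], 2: [2, 5, 8]}
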
Abstract concepts can be combined again (**Synthesis**).
This is the principle of **creativity**.
<file_sep>/chcko/r/b/en.rst
.. raw:: html
%path = "maths/vectors"
%kind = chindnum["texts"]
%level = 11
<!-- html -->
Vectors
-------
What is a Vector?
.................
A **multidimensional vector** can be seen as independently choosing a value
from each of several variables.
The values (number+unit) must be **addable** independently.
The units are the **unit vectors**. Together they form the **basis**
and are therefore also called **basis vectors**.
The choice from one variable is a vector, too, a **one-dimensional** vector.
The whole vector can be multiplied by a number, the **scalar**, and yields a vector again.
Example:
- If I go into a shop, then the products there are my vector space
(coordinate system, CS) and my shopping basket is a vector, i.e. a fixing
of the value (how much?) of each variable (here product).
- If my wife went shopping, too, then the baskets add up independently at home,
i.e. milk + milk, butter + butter, ...
Coordinate Transformation
.........................
A matrix transforms a vector from one coordinate system to a vector of another
coordinate system. Therefore we learn first about vectors. The matrix comes
into play, when we want to change from one coordinate system to another.
Example :inline:`r.a0`
How do we notate vectors?
..........................
- As column of numbers `\vec{x}=\begin{pmatrix}x_1\\x_2\end{pmatrix}`.
The unit vectors, i.e. what the rows mean, one specifies separately.
- Written explicitly with units: `\vec{x}=x_1\vec{e_1}+x_2\vec{e_2}`
(3 milk + 5 butter). If without arrow, then the superscript index
normally mean the scalar (number) and the subscript index the unit
(dimension, direction): `x=x^1e_1+x^2e_2`.
Notation is not the vector itself.
Vector Operations
-----------------
.. .. texfigure:: vector_dot_cross.tex
.. :align: center
.. tikz:: \coordinate (0) at (0,0);
\coordinate (A) at (1,3);
\coordinate (B) at (4,2);
\coordinate (C) at (2,1);
\tikzset{->}
\draw[black,very thick] (0) -- (A) node [midway,left]{$\vec{x}$};
\draw[black,very thick] (0) -- (B) node [near end,right,below]{$\vec{y}$};
\draw[black,very thin] (0) -- (C) node [midway,right,below]{$x_y$};
\draw[-,thin] (A) -- (C) node [midway,right]{$x_{\perp y}$};
Apart from addition there are two other important vector operations.
- **dot-product (scalar product)**. It yields a number (scalar) that represents the dependence
or with how little independence one can choose values.
.. math:: \vec{x}\vec{y}=x_yy=y_xx=x_1y_1+x_2y_2
- Orthogonal vectors result in 0 (no dependence).
- For parallel vectors it is the product of the lengths.
The length of a vector `\vec{x}` is thus `\sqrt{\vec{x}\vec{x}}`
The length is denoted as `|\vec{x}|` or simply `x`.
- `\vec{x_o}=\frac{\vec{x}}{x}` is the unit vector (length 1 in the direction of `\vec{x}`)
- The dot-product defines an angle between two vectors: `\cos\alpha = \frac{\vec{x}\vec{y}}{xy}`
- **Vector product or cross product**. For a dimension `= 3` it produces
a vector orthogonal to `\vec{x}` and `\vec{y}` and of length equal to the area
of the parallelogram created by the two vectors.
.. math::
\vec{x}\times\vec{y}=x_{\perp y}y=y_{\perp x}x=
\begin{vmatrix}
\vec{e_1} & \vec{e_2} & \vec{e_3} \\
x_1 & x_2 & x_3 \\
y_1 & y_2 & y_3
\end{vmatrix}
If `\vec{x}` and `\vec{y}` are two-dimensional, then only the `\vec{e_3}` component of
`\vec{x}\times\vec{y}` is different from 0. It is
`\begin{vmatrix}
x_1 & x_2 \\
y_1 & y_2
\end{vmatrix}=
\begin{vmatrix}
x_1 & y_1 \\
x_2 & y_2
\end{vmatrix}`.
Compare this to: the determinant of 3 vectors in 3D space is the volume of the parallelepiped
created by the three vectors.
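
A short numpy sketch of these operations (the two vectors are arbitrary example values)::

    # dot product, angle and cross product with numpy
    import numpy as np

    x = np.array([1., 3., 0.])
    y = np.array([4., 2., 0.])

    dot = x @ y                            # x1*y1 + x2*y2 + x3*y3
    angle = np.arccos(dot / (np.linalg.norm(x) * np.linalg.norm(y)))
    cross = np.cross(x, y)                 # here only the e3 component is nonzero
    print(dot, angle, cross)               # 10.0  ~pi/4  [ 0.  0. -10.]
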
<file_sep>/chcko/r/cs/de.rst
.. raw:: html
%path = "Mathe/Morphismen"
%kind = chindnum["Texte"]
%level = 10
<!-- html -->
Der Begriff der Funktion aus der Mengenlehre, Elemente einer Menge
(Definitionsbereich) in eine andere Menge (Wertebereich) eindeutig abzubilden,
wird mit dem Begriff des Morphismus in der Kategorientheorie in der Hinsicht
abgeändert/verallgemeinert, dass man die ganze Abbildung ins Zentrum rückt und
Objekte unabhängig ob Quelle (domain) oder Ziel (codomain) zu einer Menge von Objekten O
zusammenfasst. Quelle und Ziel in der Menge der Objekte sind durch den
Morphismus bestimmt, bzw. Teil davon
(`D_f` ist Quelle von f, `C_f` ist Ziel von f, beide müssen nicht Mengen sein).
Mehrere Morphismen in der Menge der Morphismen M können sich ein Paar
(Quelle,Ziel) teilen. (O,M,id) ist eine Kategorie. id ist der identische
Morphismus.
Ein wichtiger Aspekt bezüglich Morphismen ist, dass eine Struktur erhalten bleibt
(Ordnungsstruktur, Algebraische Struktur, topologische Struktur) und
je nach betrachteter Struktur gibt es Unterbegriffe (`f\circ g (D_g) = f(g(D_g))`):
- Monomorphismus: `f\circ g=f\circ h \implies g=h` (linkskürzbares `f`)
oder `f` injektiv für Mengen als Objekte
(`Beweis <http://www.proofwiki.org/wiki/Injection_iff_Monomorphism_in_Category_of_Sets>`_)
- Epimorphismus: `g\circ f=h \circ f \implies g=h` (rechtskürzbares `f`)
oder `f` surjektiv für Mengen als Objekte
(`Beweis <http://www.proofwiki.org/wiki/Surjection_iff_Epimorphism_in_Category_of_Sets>`_)
- Isomorphismus: `f` hat ein `g` für das `f\circ g=id_{D_g}` und `g \circ f = id_{D_f}`
(Linksinverse = Rechtsinverse) oder `f` bijektiv für Mengen als Objekte
- Endomorphismus: `X\rightarrow X`
- Automorphismus: `X\rightarrow X` + Isomorphismus
- Homomorphismus (Algebra): `f(a+b)=f(a)+f(b)` (`+` möglicherweise unterschiedlich)
- Homöomorphismus (Topologie): `f` und `f^{-1}` stetig
- Diffeomorphismus (Differentialgeometrie): bijektiv, `f` und `f^{-1}` stetig differenzierbar
<file_sep>/chcko/r/ct/en.rst
.. raw:: html
%path = "maths/direction"
%kind = chindnum["texts"]
%level = 11
<!-- html -->
Variables one normally deals with are extensive, i.e. quantities and not points.
3m means all points from 0 to 3m.
Two different variables, for which all combinations are possible can be called
orthogonal. They generate the maximal set of combinations (area).
The *cross* product is maximal.
The addability is represented by the *dot* product. It is 0 for orthogonal quantities.
Quantities that show to the same direction can be added.
Different directions can be added component-wise.
- The **angle** results from the ratio of generated area and maximal area
`\angle(\mathbf{v_1},\mathbf{v_2})=\arcsin\frac{|\mathbf{v_1}\times \mathbf{v_2}|}{|\mathbf{v_1}||\mathbf{v_2}|}`
or from the ratio of the addable components to the whole length
`\angle(\mathbf{v_1},\mathbf{v_2})=\arccos\frac{\mathbf{v_1}\mathbf{v_2}}{|\mathbf{v_1}||\mathbf{v_2}|}`
The angle between orthogonal variables is `\frac{\pi}{2}`.
- and with complex numbers
`\angle(z_1,z_2)=\arg(\frac{z_1\bar{z_2}}{|z_2||z_2|})=\arg{z_1\bar{z_2}}`.
Another word for angle is phase, which by common usage frees the concept from purely spatial direction.
Essential is the comparison of two quantities regarding their addable components.
To this end variables that do not represent a direction, but do influence addability,
can be mapped to the angle range `[0,2\pi]`, which is then called phase.
Examples:
- The time `t` of a vibration becomes `\varphi=\frac{2\pi}{T}t` or
- the combined time and space position of a wave becomes `\varphi=\frac{2\pi}{\lambda}x-\frac{2\pi}{T}t`.
`Re(Ae^{i\varphi})` then represents the currently addable amplitude.
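
A small Python sketch of this last remark (amplitude and period are assumed example values)::

    # currently addable amplitude Re(A*e^(i*phi)) of a vibration, phi = 2*pi*t/T
    import cmath

    A, T = 2.0, 1.0
    for t in (0.0, 0.25, 0.5):
        phi = 2 * cmath.pi * t / T
        print(t, (A * cmath.exp(1j * phi)).real)   # 2.0, ~0.0, -2.0
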
<file_sep>/chcko/r/bc/en.html
%path = "maths/finance/annuity/annual->monthly"
%kind = chindnum["problems"]
%level = 11
<NAME> has \(K_0=\)€{{chiven.C0}} that she can cash in as a monthly annuity due.
She can choose either the runtime or the rate.
The annual interest is {{chiven.i}}% and shall be converted conformally
to a monthly interest.
<ol type="a">
<li>
How big is the monthly rate (payment) for a runtime of {{chiven.n}} years?
<br>Rate €
%chq(0)
</li><li>
For how many months can <NAME> receive the full monthly amount of €{{chiven.r}}
<br>Months
%chq(1)
</li><li>
and what is the remaining amount she gets the month after?
<br>Annuity rest €
%chq(2)
</li>
</ol>
<file_sep>/chcko/r/cw/en.rst
.. raw:: html
%path = "maths/informatics/komplexity"
%kind = chindnum["texts"]
%level = 9
<!-- html -->
Complexity
To refer to a value of a variable one needs memory space.
If the value (reference) can be calculated from (the values of) other variables,
then one can do without that memory.
This is why functions are important.
On the other hand one now has to do calculations
to reference the value of the variable. This needs time.
**Complexity** is a measure for the needed resources.
If a lot of time is needed, then the time complexity is high.
If a lot of memory is needed, then the space complexity is high.
Since computing power varies widely, one only states the order of
magnitude as a function of the size of the data set to process, via
the O-notation, e.g. `O(n^2)`.
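
A rough Python sketch of how `O(n)` and `O(n^2)` differ (illustrative only; it counts elementary steps instead of measuring real time)::

    def linear_steps(n):          # O(n): one pass over the data
        steps = 0
        for _ in range(n):
            steps += 1
        return steps

    def quadratic_steps(n):       # O(n^2): a pass over the data for every element
        steps = 0
        for _ in range(n):
            for _ in range(n):
                steps += 1
        return steps

    for n in (10, 100):
        print(n, linear_steps(n), quadratic_steps(n))   # 10 10 100 / 100 100 10000
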
<file_sep>/chcko/r/ce/en.rst
.. raw:: html
%path = "maths/functions/log"
%kind = chindnum["texts"]
%level = 10
<!-- html -->
Logarithm
---------
The power operation generates a result from the base and the exponent.
So from the result there are two ways back: either to the base or to the exponent.
- To get the base one raises the result to the reciprocal of the exponent,
e.g. `(3^2)^{\frac{1}{2}} = 3`. This is also called taking the root.
- To get the exponent there is the **logarithm**, e.g. `\log_{3}(3^2)=2`.
From the calculation rules for powers with the same base, e.g. `2^3\cdot 2^2=2^{3+2}`
and `\frac{2^3}{2^2}=2^{3-2}`, follow the logarithm rules that make
*plus* out of *times* and *minus* out of *divide*.
The repetition of multiplication (power) becomes repetition of addition (multiplication).
.. math::
\begin{matrix}
\log ab &= \log a + \log b \\
\log \frac{a}{b} &= \log a - \log b \\
\log b^c &= c\log b
\end{matrix}
From the last rule it follows how to calculate any logarithm with just one logarithm.
.. math::
b^x &= d \\
x &= \frac{\log d}{\log b}
An exponential equation, i.e. an equation that has the unknown in the exponent,
is solved best by first trying to bring it into the form `b^x = d` and then
applying the logarithm on both sides.
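
For example (a small Python sketch using the standard math module), solving `2^x = 10`::

    from math import log

    b, d = 2, 10
    x = log(d) / log(b)
    print(x, b**x)    # x is about 3.3219 and b**x gives back about 10
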
The logarithm always refers to a base. If the base is not specified,
then `\log` usually means base 10, or, e.g. in programming languages, base e=2.71828182846... (Euler's number).
It is
.. math::
\log_{10} 10 = \log 10 = \text{lg} 10 = 1\\
\log_e e = \ln e = 1\\
\log_2 2 = \text{lb} 2 = 1\\
<file_sep>/chcko/r/cj/en.rst
.. raw:: html
%path = "maths/numbers/combination with operations"
%kind = chindnum["texts"]
%level = 9
<!-- html -->
If we combine with the counting number or natural numbers
a reversible process like adding and its counter process subtracting,
then we have introduced the **integers**.
.. math::
\mathbb{Z} = \mathbb{N}\times\{+,-\}
Repeat the adding and we have a new operation: the multiplication.
Now let's combine this new operation with the integers.
The counter process is division.
Now we have introduced the **rational numbers**.
.. math::
\mathbb{Q} = \mathbb{Z}\times\{+,-\}\\
\mathbb{Q} = \mathbb{N}\times\{+,-\}\times\{\cdot,\div\}
Repeat the multiplication and we have the new operation "to the power".
Not quite as directly as before, but basically the rationals combined with
the power operation extend to a new set of numbers,
the **algebraic numbers**.
Numbers so far contain processes, so they make up an **algorithm**. If we allow
infinite algorithms, we extend further by including the **irrational numbers**
(algebraic and transcendental) and thus extend to the **real numbers**.
.. admonition:: Note
Finite and infinite algorithms: `\sqrt{2}` is infinite if expressed with the
basic operations. But if we do not evaluate `\sqrt{2}` and only use it
symbolically, then this is a finite algorithm.
<file_sep>/chcko/r/dk/de.rst
.. raw:: html
%path = "Mathe/Funktionen/exponentiell"
%kind = chindnum["Texte"]
%level = 11
<!-- html -->
.. role:: asis(raw)
:format: html latex
Grundlegendes
-------------
In der **exponentiellen Funktion**
.. math::
y = a^x
nennen wir
- `x` den **Exponenten**
- `a` die **Basis**
- `y` die **exponentielle Funktion** von `x` zur Basis `a`
Der **Exponent** sagt, wie oft die *Multiplikation* mit `a` wiederholt wird.
`a` muss eine positive reelle Zahl sein : `a\in\mathbb{R}`.
.. admonition:: Multiplikation
Multiplikation ist eine Operation der realen Welt, die als
Zahl codiert wird. In der Zahlenmenge `\mathbb{Q}`
ist die Operation Teil der Zahl: `2` meint `\cdot 2` und `1/2` meint `/2`.
Das Zeichen `\cdot` steht für die Multiplikation und `/` steht für die umgekehrte (inverse)
Operation, die Division, welche mit der Einbindung der Brüche in `\mathbb{Q}` Teil der Zahl wurde.
Also sprechen wir nur mehr von Multiplikation und meinen die Anwendung
der Operation aus `\mathbb{Q}\subset\mathbb{R}`.
Wenn `a` größer als `1` ist, dann wächst `y` mit `x` *strikt monoton*: `x_1<x_2 \Rightarrow y_1<y_2`.
.. tikz:: \begin{axis}[grid=both,axis lines=middle,xmin=-3,xmax=3,ymin=0,ymax=8, samples=50]
\addplot[green] {pow(2,x)} node[above]{$y=2^x$};
\end{axis}
Wenn `a` kleiner als `1` ist, dann fällt `y` mit `x` *strikt monoton*: `x_1<x_2 \Rightarrow y_1>y_2`.
.. tikz:: \begin{axis}[grid=both,axis lines=middle,xmin=-3,xmax=3,ymin=0,ymax=8, samples=50]
\addplot[green] {pow(1/2,x)} node[above]{$y=(\frac{1}{2})^x$};
\end{axis}
Diskussion
----------
Vergleichen wir die Anzahl der Wertekombinationen von `n` bits:
.. math::
2^n
mit dem Wachstumsprozess, wie etwa das Anwachsen des Kapitals mit der jährlichen Verzinsung
.. math::
(1+\frac{i}{100})^n
oder das besonders interessante natürliche Wachstum
.. math::
e^x = \lim_{n->\infty}(1+\frac{1}{n})^{nx} =
\lim_{m->\infty}(1+\frac{x}{m})^m = (1+\frac{x}{\infty})^\infty
`e` ist `Eulersche Zahl <https://de.wikipedia.org/wiki/Eulersche_Zahl>`_
deren Bedeutung auf dem gegebenen Zusammenhang beruht.
Der Schlüssel zum Verstehen der Gemeinsamkeiten steckt in der Interpretation
von **Information** als Wachstumsprozess.
Jedes Bit vergrößert die vorhandene Anzahl von Wertekombinationen um `1` Mal diese Anzahl.
Notieren wir diesen Aspekt des Bits mit `(1+1)`, um zu betonen, dass `1` dazu kommt.
Die Klammern machen den Ausdruck zu einem Operator, einem Element der Zahlenmenge `\mathbb Q`.
`n` wiederholte Anwendungen von `(1+1)` erzeugen eine Vielzahl der Größe
.. math::
(1+1)^n = 2^n
Jedes Bit wird zur bestehenden Menge von Wertekombinationen "dazuverzinst".
Das Informationsmaß einer realen Variablen der Größe `C` ist die Anzahl
`n=\log_2 C` Bits, die notwendig sind, damit wir auf `C` Kombinationen kommen.
.. admonition:: Vergleich mit welcher anderen Variable?
Statt Bits könnten wir ebensogut die betrachtete Variable selbst nehmen,
weil diese physikalisch präsent ist. Kombinationen sind aber auch
physikalisch und auch die Auswahl von Werten, welche letztendlich Variablen
erzeugt, ist ein physikalischer Prozess. Die Anzahl der beteiligten
Variablen spielt dabei eine Rolle. Das bedeutet erstens, dass Information
physikalisch ist und zweitens, in Hinsicht auf die Quantenmechanik, dass die
Anzahl der beteiligten Variablen immens groß ist und die individuellen
Beiträge minimal sind.
Wenn wir von der *Anzahl an Variablen* starten, dann gibt uns die
*Exponentialfunktion* die *Anzahl an Wertekombinationen*. Wenn wir von der
*Anzahl der Werte* starten, dann gibt uns der *Logarithmus* die *Anzahl der
Variablen*, die zur Wertegenerierung notwendig ist.
Bei der **Zinsrechnung** schauen wir auf die Geldmenge (die `1`),
welche auf der Bank `i` Prozent Zinsen abgibt.
In `n` Jahren wächst die `1` zu
.. math::
(1+i/100)^n = q^n
Der *Wachstumsfaktor* `q` ist nicht `2`, sondern normalerweise nur etwas über `1`. Das
"Informationsmaß" in diesem finanziellen Kontext würde die Anzahl
der Jahre sein.
Der essentielle Unterschied gegenüber den Bits
ist, dass das, was hinzugefügt wird, ein *Bruchteil* von dem ist, was da ist.
Aber ob Bruchteil oder nicht, ist nur eine Frage der Einheit.
Die Einheiten von Lebewesen sind Zellen und die ultimativen Einheiten der
realen Welt sind Quanten. Beide sind sehr klein im Vergleich zu den Dingen
unserer täglichen Wahrnehmung. Mit solchen kleinen Einheiten können wir auch
beliebig oft (= unendlich oft) "verzinsen":
.. math::
\lim_{m->\infty}(1+\frac{x}{m})^m = \lim_{n->\infty}(1+\frac{1}{n})^{nx} = e^x
In der ersteren Gleichung können wir sehen, dass wir mit dem Verändern der
*Verzinsungsschritte* auch den *Wachstumsfaktor* verändern. Wegen der
Bedeutung von `e^x` wird der Wachstumsfaktor `q` in `y=q^n` oft in den Exponenten von `e`
verlegt (`y=e^{kx}`). `k = \ln q` heißt dann *Wachstumskonstante*.
.. admonition:: Natürliche Verzinsung in der Finanzwelt
Auch in der finanziellen Welt sind die tatsächlichen
Verzinsungsschritte sehr klein. Aber die Bank gibt sie ihren Kunden in
größeren Zeiteinheiten weiter.
`x` ist die Information in der **natürlichen Informationseinheit**
`nat <https://de.wikipedia.org/wiki/Nit_(Informationseinheit)>`_.
Im Prinzip teilen wir dabei eine Variable in unendlich viele unendlich kleine Variablen auf,
so dass der Wachstumsfaktor pro Schritt beinahe bei `1` liegt.
<file_sep>/chcko/r/j/__init__.py
# -*- coding: utf-8 -*-
import random
from sympy.abc import x
from sympy import exp, sin, cos, latex, Rational, S, N, integrate, sstr
from chcko.chcko.hlp import Struct
__all__ = ['chiven', 'chalc', 'low', 'high']
rs = []
for i in [2, 3, 5]:
for j in [2, 3, 5]:
if i == j:
continue
rs.append(x ** Rational(i, j))
sp = [sin(x), cos(x), exp(x), 1 / x]
funs1 = rs[:] # 1
for i in [2, 3, 5]:
for j in [2, 3, 5]:
if i == j:
continue
funs1 = funs1 + [Rational(i, j) * fun for fun in rs]
funs2 = sp[:] # 1
for i in [2, 3, 5]:
for j in [2, 3, 5]:
if i == j:
continue
funs2 = funs2 + [Rational(i, j) * fun for fun in sp]
crange = range(3, 10)
low, high = 0.5, 1
""" #Any of funs1 - any of crange has no intersection with any of funs2 within low and high:
#f2 > f1
from scipy.optimize import brentq
E=lambda f: lambda v: f.subs(x,v).evalf()
cnt = 0
for f1 in funs1:
for f2 in funs2:
for c in crange:
try:
res=N(integrate(f2-(f1-c),(x,low,high)))
if res < 0.1:
print(res)
brentq(E(f2-(f1-c)),low,high)
cnt = cnt + 1 #found a root
except ValueError:
continue
print(cnt) #=> 0 => no root
"""
def chiven():
ff1 = random.sample(funs1, 1)[0]
f1 = ff1 - random.sample(crange, 1)[0]
f2 = random.sample(funs2, 1)[0]
g = Struct(f1=sstr(f1), f2=sstr(f2))
return g
def chalc(g):
res = N(integrate(S(g.f2) - S(g.f1), (x, low, high)))
return [res]
<file_sep>/chcko/r/y/__init__.py
# -*- coding: utf-8 -*-
import random
from sympy.abc import a, b, c, d, e, f, g, h, i, j, k, m, n, p, q, r, s, t, u, v, w, x, y, z
from sympy import sstr, simplify
from chcko.chcko.hlp import Struct, equal_0 as chequal
syms = [a, b, c, d, e, f, g, h, i, j, k, m, n, p, q, r, s, t, u, v, w, x, y, z]
syml = 'abcdefghijkmnpqrstuvwxyz'
def chiven():
bn = random.sample(syml, 3)
bd = bn[:]
random.shuffle(bd)
en = random.sample(range(-9, 9), 3)
ed = random.sample(range(-9, 9), 3)
nm = ''
for i, ae in enumerate(en):
nm = nm + '*{0}**{1}'.format(bn[i], ae)
nm = nm.strip('*')
dm = ''
for i, ae in enumerate(ed):
dm = dm + '*{0}**{1}'.format(bd[i], ae)
dm = dm.strip('*')
g = Struct(nm=nm, dm=dm)
return g
def chalc(g):
return [sstr(simplify(g.nm + '/(' + g.dm + ')'))]
chorm = lambda x: x
<file_sep>/chcko/r/c/__init__.py
# -*- coding: utf-8 -*-
import random
from math import log
from chcko.chcko.hlp import Struct
def chiven():
g = Struct()
g.r = random.sample(range(100, 200), 1)[0]
g.n = random.sample(range(5, 10), 1)[0]
g.i = random.sample(range(2, 14), 1)[0] / 10.0
g.m = random.sample(range(2, 7), 1)[0]
g.m = g.m * 10
return g
def chalc(g):
q = 1.0 + 1.0 * g.i / 100
kn = g.r * (1 - q ** g.n) / (1 - q)
kv = q * kn
nm = log(g.m * (q - 1) + 1) / log(q)
ieff = 100 * ((1 + g.i / 100.0) ** 12.0 - 1)
return [kn, kv, nm, ieff]
<file_sep>/chcko/r/a1/de.rst
.. raw:: html
%path = "Physik/S=E*t"
%kind = 1
%level = 12
<!-- html -->
Ein System kann als Variable gesehen werden.
Der Zustand eines Systems ist ein Wert der Variable.
**Entropie** ist die Information einer Variable.
Entropie ist die *Anzahl der Werte einer Variable*.
Was mit Zustand gemeint ist, spielt keine Rolle,
d.h. wurde weg abstrahiert.
Nur die Anzahl spielt hier eine Rolle.
Die Zustände müssen vorkommen.
Das heißt, das System muss sich ändern.
**Energie** ist die **Anzahl der Zustände pro Zeit**.
.. math::
S = Et
Man kann das auch anders herum betrachten:
Energie erzeugt die Zeit durch das Ändern der Werte.
Wenn das System aus vielen unabhängigen Variablen besteht,
wird so eine System-Zeitauflösung erzeugt,
welche in keiner unabhängigen Variablen vorkommt.
Eine konstante Menge an Energie kann
- wenige Zustände schnell wiederholen: `\Delta S` und `\Delta t` klein
- viele Zustände langsam wiederholen: `\Delta S` und `\Delta t` groß
Ein System besteht oft aus Schichten.
In einem idealen Gas,
ist die Energie `Q=TS` gegeben durch
- die Temperatur `T`: durchschnittliche kinetische Energie eines Teilchens
- die Entropie `S`
Dies teilt das System in zwei Schichten:
- `T` meint die Informationsereignisse (Energie) einer Schicht unterhalb.
- `S` ist die Anzahl in der betrachteten Schicht.
Der Logarithmus in der Entropie kommt davon,
dass Information auf mehrere gleichartige Variablen verteilt wird (etwa Bit).
In der anderen Richtung folgt daraus der Exponent `e^S`.
Die Richtung der Teilchenbewegung unterteilt die Anzahl `N` der Teilchen
- nach Richtung: Faktor `1/2`, da exklusiv
- nach Ausrichtung: Faktor `3`,
da `T` durchschnittlich gleichzeitig in allen 3 Ausrichtungen wirkt
Deshalb:
.. math::
Q = ST = 3/2NkT = 3/2RT = 3/2pV
Für ein ideales Gas ist die Innere Energie gleich der Arbeit,
die an der Umgebung verrichtet wurde: `3/2 pV`.
Die mittlere Energie eines Teilchens `E` ist:
.. math::
E = 1/2 m v^2 = 3/2 kT
Die Boltzmannkonstante `k` konvertiert zwischen Einheiten der Energie.
`v^2` kann besser mit Ereignissen in Beziehung gebracht werden als `T`,
aber auch `E` ist nur die Energie in der Schicht und nicht die ultimative
Einheit des elementaren Informationsereignisses pro Zeit.
Das elementare Ereignis ist durch das Plancksche Wirkungsquantum gegeben.
Die Summe dieser Ereignisse erzeugt Raum und Zeit: E-t, x-v, ...
<file_sep>/chcko/r/s/__init__.py
# -*- coding: utf-8 -*-
import numpy as np
import random
from chcko.chcko.hlp import Struct
def chiven():
p1, p2 = random.sample(range(20, 30), 2)
p0 = random.sample(range(140, 150), 1)[0]
pp0 = random.sample(range(51, 60), 1)[0]
A = np.array([np.array([p0, p1, p2]) / 10.0,
np.array([-pp0 / 100.0, 1, 1]),
np.array([1, 1, 1])])
x0 = 100. / (100. + pp0)
pp12 = pp0 / (100. + pp0)
x1 = random.sample(range(1, int(100. * pp12)), 1)[0] / 100.0
x2 = pp12 - x1
x = np.array([x0, x1, x2])
b = np.dot(A, x)
g = Struct(A=A.tolist(), b=b.tolist())
return g
def chalc(g):
iA = np.linalg.inv(np.array(g.A))
x = np.dot(iA, np.array(g.b))
return [i for i in (100 * x).round().tolist()]
<file_sep>/chcko/r/bl/__init__.py
from random import randrange, sample
from chcko.chcko.hlp import Struct
def chiven():
i1 = randrange(15, 90) / 10.0
di = (randrange(1, 10) - 5) / 50.0
if di == 0:
di = 0.05
i2 = i1 + di
i_c = [4, 12, 1]
i = [ii for ii in sample(i_c, 2)]
i_n = ['\(i_{%s}\)'%str(ii) for ii in i]
clc = lambda ai, ii: '{:.2f}'.format(
100 * ((ai / 100.0 + 1.0) ** (1.0 / ii) - 1))
i_v = [clc(i1, i[0]), clc(i2, i[1])]
g = Struct(
i1=i[0],
i2=i[1],
i1n=i_n[0],
i2n=i_n[1],
i1v=i_v[0],
i2v=i_v[1])
return g
def chalc(g):
    res = 2 if ((1 + float(g.i1v) / 100.0) ** g.i1 - 1
                < (1 + float(g.i2v) / 100.0) ** g.i2 - 1) else 1
    return [res]
<file_sep>/chcko/r/ci/en.rst
.. raw:: html
%path = "maths/numbers/NZQR long"
%kind = chindnum["texts"]
%level = 9
<!-- html -->
.. contents::
This is not a first introduction to numbers,
but a discussion and interpretation with links to further resources
and an emphasis on algorithmic aspects (:lnk:`r.cp`).
Natural Numbers (`\mathbb{N}`)
------------------------------
The natural numbers are the actual numbers in the sense that they represent a
count. All other sets of numbers carry additional information or are quite
different altogether.
The **count** is a real variable, which specifies the **cardinality** of a set.
A value of this variable, like three, means three things.
Further properties are not considered, they are abstracted away.
That is why every natural number can be seen as an **equivalence class** (:lnk:`r.co`).
.. admonition:: Note
In mathematics one makes a further distinction: **cardinal numbers** as above
and **ordinal numbers** to specify the order.
The intuitive idea of `\mathbb{N}` is formalized with the
`Peano Axioms <http://de.wikipedia.org/wiki/Peano-Axiome>`_.
.. admonition:: Note
Essentially `\mathbb{N}` is a construction
of an ordered multitude (0 with successors),
to address values of other variables (like count),
just like words address concepts.
Zero
....
Zero was a great invention for the number representation, which the Roman
system did not yet have. In general one can now include variables, even if they
are not there every time. This often helps to generalize the description. As an
example, in 103 the position-coded tens variable is there, even if there is no
tens grouping in the number (:lnk:`r.cn`). Another example is vectors
(:lnk:`r.cg`).
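A tiny Python sketch of this positional coding (the number 103 is just the example from above):

.. code-block:: python

    n = 103
    hundreds, rest = divmod(n, 100)
    tens, ones = divmod(rest, 10)
    print(hundreds, tens, ones)   # 1 0 3 -- the tens variable is present, its value is 0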
The Integers `\mathbb{Z}` and the Addition
------------------------------------------
`\mathbb{Z}` is more than the count.
As motivation for the integers one can add to every element of `\mathbb{N}`
a process or a direction. 2 is then not only the count 2,
but has the additional information to add the 2 things (`2 = {2,+} = +2`).
If you think of `\mathbb{N}` as already associated with a process
or direction, then it is a good idea to extend it with
the same values paired with the undoing process or counter direction,
to get back to the original situation. This way one intuitively arrives
at the integers `\mathbb{Z}`.
`+` means to add and `-` means to subtract, but that can change.
The `+` is often dropped, but it must be implicitly assumed,
because an integer is not the same thing as a natural number.
It has additionally the reversible process or direction.
If you understand only count with `\mathbb{N}`,
then `\mathbb{Z}=\mathbb{N}\times\{+,-\}`.
Then `\mathbb{N}` is not a subset of `\mathbb{Z}`, but an
`isomorphic <http://en.wikipedia.org/wiki/Homomorphism>`_
`embedding <http://en.wikipedia.org/wiki/Embedding>`_.
.. admonition:: Note
A formal introduction of `\mathbb{Z}`, starting from `\mathbb{N}` with as few new concepts
as possible (no `+` and `-`), is via **equivalence classes** (:lnk:`r.co`)
of number pairs `(n,m)` from `\mathbb{N}` with the equivalence relation
`(n_1,m_1)\sim(n_2,m_2)\equiv n_1+m_2=n_2+m_1`.
In the canonical representation one number is 0. `+2 = (2,0)` and `-2 = (0,2)`.
With `+` and `-` as opposite processes one has encoded this process in the number.
The addition itself is then algorithmically a sequential execution or **sequence**:
`+2+(-2)` means: add 2, then (=`+`) subtract 2.
If you swap the numbers `((-2)+(+2))` you get the same result (**commutative law**).
With more numbers you can choose freely which to calculate first
`(-2)+((+2)+(+3))=((-2)+(+2))+(+3)` (**associative law**).
.. admonition:: Note
The subtraction 2-2 is an abbreviation for +2+(-2).
The result of +2+(-2) is 0, the **neutral element** of addition.
+2 is the **opposite number (additive inverse)** of -2 and vice versa.
`(\mathbb{Z},+)` is an **abelian group** (:lnk:`r.cl`).
.. admonition:: Note
`+` as part of the number and `+` as binary operation are not the same.
Similarly for `-`; moreover, `-` can be a unary operation that returns the
opposite number (additive inverse).
The Integers `\mathbb{Z}` and Multiplication
--------------------------------------------
A process can be repeated and multiplication says how often
addition (+2) or subtraction (-2) is repeated. Algorithmically multiplication
is a loop:
`3\cdot(-2) = (-2)+(-2)+(-2)`
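A minimal Python sketch of this loop view of multiplication (the function name is chosen here only for illustration):

.. code-block:: python

    def times(n, step):
        """Repeat the process `step` n times by successive addition."""
        total = 0
        for _ in range(n):
            total = total + step
        return total

    print(times(3, -2))   # (-2)+(-2)+(-2) = -6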
Multiplication with 1 means the thing itself.
1 is the **neutral element** of multiplication.
Multiplication with -1 means: revert the process,
i.e. turn plus (+) into minus (-).
`(-1)\cdot(-2) = +2`
`(-1)\cdot(-1)\cdot(-2) = -2`
With this one can multiply every integer with every other integer and one gets
an integer again. `(\mathbb{Z},\cdot)` is **closed** and the **associative law**
holds. This makes `(\mathbb{Z},+,\cdot)` an **integral domain**
(:lnk:`r.cm`). `(\mathbb{N},+,\cdot)` alone is only a **semiring**
(:lnk:`r.cm`).
Multiplication and the Rational Numbers (`\mathbb{Q}`)
--------------------------------------------------------
Analogous to `\mathbb{Z}=\mathbb{N}\times\{+,-\}` one can
think of the repeating process as united with the number, and it is a good
idea to include the inverse process (dividing).
Which subtraction do I need to repeat 3 times in order to get a (-6) subtraction?
(-6)/3 = -2
Analogous to `\mathbb{Z}=\mathbb{N}\times\{+,-\}`, via
`\mathbb{N}\times\{\cdot,\div\}` one can unite the count with multiplication and division.
The binary operations `\cdot` and `+` must be handled separately,
only the **distributive law** ties them together.
`a\cdot(b+c) = a\cdot b + a\cdot c`
e.g. `2\cdot(3+4)=2\cdot 3+2\cdot 4=14`
If you look for the part that, repeated (multiplied),
yields no change, i.e. 1, then you get to the **reciprocal**,
which is the **inverse element of multiplication**.
While with (-6)/3 we still get a whole number, i.e. a multiple of 1,
this is not the case for the reciprocal in general.
Therefore the set is extended by these reciprocals to make it closed.
This is analogous to the extension from `\mathbb{N}` to `\mathbb{Z}`.
- There the process "add" was united with the count to form a tuple (count, add).
"add" has a reverse process, "take away".
One extends by (count, take away).
- With `\mathbb{Q}` one extends (count,repeat) with the *reciprocal* (count,divide).
.. admonition:: Note
In analogy to `\mathbb{N}\times\{+,-\}`
one could write `\mathbb{Q}`-elements as:
- `\cdot 2` corresponds to +2
- `\div 2` corresponds to -2
The binary operation `\cdot` is then only successive processing and can be dropped.
`(\cdot 2)\cdot(\div 2) = \cdot 2\div 2 = 1`
But actually we write
- `2\cdot 2^{-1} = 1` or
- `2\cdot \frac{1}{2} = 1`
the first because one can add the exponents for the same base,
and so we have `2\cdot 2^{-1}=2^1\cdot 2^{-1}=2^{1-1}=2^0=1`.
`(\mathbb{Q},\cdot)` is an **abelian group** with neutral element 1.
Because the multiplication in `(\mathbb{Q},\cdot)` shall yield an element of
`(\mathbb{Q},\cdot)` again (closure), one takes all fractions
`p/q=pq^{-1}` into `(\mathbb{Q},\cdot)`.
3/2 means to first do `\cdot 3` and then `\div 2` (reciprocal of 2).
`\frac{3}{2}=3\cdot 2^{-1}=3\frac{1}{2}=\frac{1}{2}\cdot 3=2^{-1}\cdot 3`
`pq^{-1}` means to copy/repeat p times, then divide q times.
Additionally multiplying r times and undoing that by dividing r times
does not change a thing.
`pq^{-1}=rr^{-1}pq^{-1}=rp(rq)^{-1}=\frac{rp}{rq}`
All such pairs of numbers are equivalent and the canonical representation is
the one with p and q having no common divisor.
.. admonition:: Note
`\mathbb{Q}` is formally introduced as the set of equivalence classes
of such equivalent number pairs:
`(n_1,m_1)\sim(n_2,m_2)\equiv n_1m_2=n_2m_1`.
`\mathbb{R}` as extension with the irrational numbers `\mathbb{I}`
------------------------------------------------------------------
Count (`\mathbb{N}`) with addition (+) and subtraction (-) is `\mathbb{Z}`.
`\mathbb{Z}` with repetition (`\cdot`) and division (`\div`) is `\mathbb{Q}`.
If we stay with `+,-,\cdot,\div`, then we can make do with `\mathbb{Q}`.
But if we want the power operation to be reversible, then we must extend again.
There is for example no `p/q` in `\mathbb{Q}` for which `p^2/q^2=2`.
(Proof: assume p and q have no common divisor. If `p^2` is even, so is p (p=2n).
`p^2=4n^2=2q^2` means that q is even, but that is a contradiction.)
There are, though, **algorithms** that produce rational numbers (**sequences**)
whose squares get arbitrarily close to 2. All such algorithms are
combined into an equivalence class and this is then the new number `\sqrt{2}`.
The irrational numbers `\mathbb{I}` are equivalence classes of number
sequences. By naming the algorithm, and `\sqrt{}` refers to such an algorithm,
the irrational number is determined. One cannot write an irrational number as a
decimal number. One can also not run the algorithm to the end, because it
does not terminate. So the irrational number is really the algorithm itself.
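As an illustration, a minimal Python sketch of one such algorithm (nested intervals by bisection; the step count is arbitrary): it produces rationals whose squares approach 2 and would never terminate on its own.

.. code-block:: python

    from fractions import Fraction

    def sqrt2_bisection(steps):
        """Nested rational intervals enclosing sqrt(2)."""
        lo, hi = Fraction(1), Fraction(2)      # 1**2 < 2 < 2**2
        for _ in range(steps):
            mid = (lo + hi) / 2
            if mid * mid < 2:
                lo = mid
            else:
                hi = mid
            yield lo, hi

    for lo, hi in sqrt2_bisection(10):
        print(lo, hi, float(lo * lo), float(hi * hi))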
The irrational numbers get further classified into the **algebraic** irrationals,
which are those that are roots of polynomials, and the **transcendental**
irrationals. The latter exist because there are functions beyond finite
polynomials, like sin, cos, ..., most of which can, though, be expressed with infinite
polynomials (series). `\pi` and `e` are transcendental.
New operations/functions lead to new numbers. But the definition
**equivalence classes of sequences** is so general that it
includes algebraic and transcendental numbers and `\mathbb{Q}` itself.
This is `\mathbb{R}`:
`\mathbb{R} = \mathbb{Q} \cup \mathbb{I}`
Another very useful and exciting extension are the complex numbers `\mathbb{C}`(:lnk:`r.di`).
.. admonition:: Note
Since `\mathbb{R}` includes all never-ending number sequences, one could
include `\infty` and `-\infty`, which are also never-ending sequences of
numbers, if it weren't for `\infty+1=\infty` and the like.
Still, in complex analysis (function theory) the complex number set is
fruitfully extended with `\infty`.
<file_sep>/chcko/r/cl/en.rst
.. raw:: html
%path = "maths/stuctures/group"
%kind = chindnum["texts"]
%level = 10
<!-- html -->
*Group-like algebraic structures* consist of a set `M`
with a binary operation `\circ`, i.e. `(M,\circ)`.
For all elements (`\forall_{a,b\in M}`):
- `a\circ b \in M` **Closure** `\rightarrow` **Magma**
- `a\circ(b\circ c) = (a\circ b)\circ c` **Associative Law** `\rightarrow` **Semigroup**
- `a^n = a` **Idempotent** Semigroup `\rightarrow` **Lattice**
- `\exists_e|e\circ a = a\circ e = a` **Neutral Element** `\rightarrow` **Monoid**
- `\exists_\bar{a}|\bar{a}\circ a = a\circ\bar{a} = e` **Inverse Element** `\rightarrow` **Group**
- `a\circ b = b\circ a` **Commutative Law** `\rightarrow` commutative or **abelian Group**
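A small computational check of these axioms for a concrete example, the residues mod 5 with addition mod 5 (a Python sketch; the set and operation are chosen here only for illustration):

.. code-block:: python

    from itertools import product

    M = range(5)                       # residues mod 5
    op = lambda a, b: (a + b) % 5      # the binary operation

    closed      = all(op(a, b) in M for a, b in product(M, M))
    associative = all(op(a, op(b, c)) == op(op(a, b), c) for a, b, c in product(M, M, M))
    neutral     = [e for e in M if all(op(e, a) == a == op(a, e) for a in M)]
    inverses    = all(any(op(a, b) == neutral[0] for b in M) for a in M)
    commutative = all(op(a, b) == op(b, a) for a, b in product(M, M))

    print(closed, associative, neutral, inverses, commutative)
    # True True [0] True True  ->  an abelian group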
<file_sep>/chcko/r/ci/de.rst
.. raw:: html
%path = "Mathe/Zahlen/NZQR lang"
%kind = chindnum["Texte"]
%level = 9
<!-- html -->
.. contents::
Das folgende ist keine erste Einführung, sondern
eine Art Diskussion zur Interpretation von Zahlen mit weiterführenden Links
und mit Augenmerk auf Algorithmik (:lnk:`r.cp`).
Natürliche Zahlen (`\mathbb{N}`)
--------------------------------
Die natürlichen Zahlen sind die eigentlichen Zahlen im Sinne,
dass sie eine Anzahl darstellen.
Alle anderen Zahlenmengen meinen mehr als nur die Anzahl.
Eine **Anzahl** ist eine reale Variable, welche die Mächtigkeit (**Kardinalität**)
einer anderen Menge angibt. Ein Wert dieser Variable, etwa drei, meint drei
Dinge. Weitere Eigenschaften werden nicht betrachtet, werden weg-abstrahiert.
Deshalb kann jede natürliche Zahl als **Äquivalenzklasse** (:lnk:`r.co`)
angesehen werden.
.. admonition:: Hinweis
Die Mathematik unterscheidet noch genauer: **Kardinalzahlen** wie eben
und **Ordinalzahlen** zum Bezeichnen einer Ordnung.
Die intuitive Vorstellung von `\mathbb{N}` wird mit den
`Peano Axiomen <http://de.wikipedia.org/wiki/Peano-Axiome>`_
formalisiert.
.. admonition:: Hinweis
Im Essentiellen ist `\mathbb{N}` eine Konstruktion
einer geordneten Vielheit (0 mit Nachfolgern),
um damit Werte anderer Variablen (etwa eben Anzahl)
zu addressieren oder darzustellen, nach Art wie Wörter Konzepte darstellen.
Null
....
Die Null war eine großartige Erfindung zur Darstellung der Zahlen, die im
römischen Zahlensystem noch nicht da war. Allgemeiner kann man dadurch
Variablen einbeziehen, auch wenn sie nicht immer vorkommen. Das ermöglicht oft
eine allgemeinere Beschreibung. Zum Beispiel ist in 103 der positionscodierte
Zehner da, obwohl keine Zehnergruppierung da ist (:lnk:`r.cn`).
Ein anderes Beispiel sind Vektoren. (:lnk:`r.cg`).
Die ganzen Zahlen `\mathbb{Z}` und die Addition
-----------------------------------------------
`\mathbb{Z}` ist mehr als nur Anzahl.
Als Motivation für die ganzen Zahlen kann man bei `\mathbb{N}`
mit der Anzahl einen Vorgang oder eine Richtung verbunden denken.
2 ist dann nicht nur die Anzahl 2 sondern Anzahl `+` Hinzufügen
(`2 = {2,+} = +2`).
Wenn man sich `\mathbb{N}` als Anzahl kombiniert mit Vorgang
oder Richtung vorstellt, dann braucht man den Umkehrvorgang oder die Gegenrichtung,
um wieder zur Ausgangssituation zurückzukommen.
Man ist damit intuitiv bei den ganzen Zahlen `\mathbb{Z}` gelandet.
`+` meint dazugeben, `-` meint wegnehmen. Die Richtung ist aber Definitionssache.
Das `+` wird oft weggelassen, aber man sollte es hinzudenken,
da eine ganze Zahl eben nicht nur eine Anzahl ist,
sondern Anzahl und Vorgang/Richtung zusammen.
Wenn man sich unter `\mathbb{N}` nur Anzahl vorstellt, dann ist
`\mathbb{Z}=\mathbb{N}\times\{+,-\}`. Dann ist `\mathbb{N}` keine Teilmenge
von `\mathbb{Z}`, aber eine
`isomorphe <http://de.wikipedia.org/wiki/Homomorphismus#Universelle_Algebra>`_
`Einbettung <http://de.wikipedia.org/wiki/Einbettung_(Mathematik)>`_.
.. admonition:: Hinweis
Eine Einführung von `\mathbb{Z}`, die versucht ausgehend von `\mathbb{N}`
möglichst wenig neue Objekte zu verwenden, eben auch kein `+` und `-`, ist die
über **Äquivalenzklassen** (:lnk:`r.co`) von Zahlenpaaren `(n,m)` aus
`\mathbb{N}` mit der Äquivalenzrelation `(n_1,m_1)\sim(n_2,m_2)\equiv n_1+m_2=n_2+m_1`.
In der kanonischen Darstellung ist eine Zahl des Paares 0.
`+2 = (2,0)` und `-2 = (0,2)`.
Mit `+` und `-` als Vorgänge kann man sich die *Addition* algorithmisch als eine Sequenz denken:
das hintereinander Ausführen von Hinzugeben oder Wegnehmen.
`+2+(-2)` heißt 2 hinzugeben und dann 2 wegnehmen.
Vertauscht `((-2)+(+2))` erhält man das gleiche Ergebnis (**Kommutativgesetz**).
Bei mehreren Zahlen kann frei entscheiden, was man zuerst berechnet
`(-2)+((+2)+(+3))=((-2)+(+2))+(+3)` (**Assoziativgesetz**).
.. admonition:: Hinweis
Die Subtraktion 2-2 ist eine Kurzschreibweise für +2+(-2).
Das Ergebnis von +2+(-2)= 0, das **neutrale Element** der Addition.
+2 ist die **Gegenzahl** (inverses Element der Addition) von -2 und umgekehrt.
`(\mathbb{Z},+)` ist eine **abelsche Gruppe** (:lnk:`r.cl`).
.. admonition:: Hinweis
`+` als Teil der Zahl und `+` als binäre Operation meinen nicht dasselbe,
ebenso für `-`. `-` kann zusätzlich als unäre Operation (Funktion) angesehen werden,
welche die Gegenzahl liefert.
Die ganzen Zahlen `\mathbb{Z}` und die Multiplikation
-----------------------------------------------------
Einen Vorgang kann man wiederholen. Die Multiplikation gibt an, wie oft das
Hinzugeben (+2) oder Wegnehmen (-2) wiederholt wird. Multiplikation stellt
also algorithmisch eine Schleife dar:
`3\cdot(-2) = (-2)+(-2)+(-2)`
Die Multiplikation mit 1 heißt einmal wiederholt, also das Ding selbst und unverändert.
1 ist das **neutrale Element** der Multiplikation.
Der Multiplikation mit -1 gibt man die Bedeutung: Umkehrung des wiederholten Vorgangs,
d.h. aus hinzu (+) mach weg (-).
`(-1)\cdot(-2) = +2`
`(-1)\cdot(-1)\cdot(-2) = -2`
Damit kann man jede ganze Zahl mit jeder anderen ganzen Zahl multiplizieren und es kommt
wieder eine ganze Zahl heraus, d.h. eine Anzahl die man hinzugibt oder wegnimmt.
`(\mathbb{Z},\cdot)` ist bezüglich der Multiplikation **abgeschlossen** und es gilt das
**Assoziativgesetz**.
`(\mathbb{Z},+,\cdot)` ist ein **Integritätsring** (:lnk:`r.cm`).
`(\mathbb{N},+,\cdot)` alleine ist nur ein **Halbring** (:lnk:`r.cm`) .
Die Multiplikation und die Rationalen Zahlen (`\mathbb{Q}`)
-----------------------------------------------------------
Analog zu `\mathbb{Z}=\mathbb{N}\times\{+,-\}` kann man sich
das Wiederholen/Vervielfachen vereint mit der Anzahl als neues Element denken.
Dann ist es naheliegend, dass man diesen Vorgang umkehren möchte.
Welches Wegnehmen muss ich 3 mal wiederholen, damit (-6) herauskommt?:
(-6)/3 = -2
Analog zu `\mathbb{Z}=\mathbb{N}\times\{+,-\}` kann man über
`\mathbb{N}\times\{\cdot,\div\}` mit Anzahl Multiplikation und Division vereinen.
Beides sind unterschiedliche Mengen. Man muss also grundsätzlich die binären
Verknüpfungen `\cdot` und `+` getrennt behandeln. Nur das
**Distributivgesetz** vereint die beiden:
`a\cdot(b+c) = a\cdot b + a\cdot c`
z.B. `2\cdot(3+4)=2\cdot 3+2\cdot 4=14`
Wenn man im speziellen nach dem Teil sucht, der wiederholt (multipliziert)
nichts verändert, also 1 ergibt, so kommt man auf den **Kehrwert** (**Reziprok**) und
der meint die UmKEHRung des Wiederholens, das **inverse Element der Multiplikation**.
Während bei (-6)/3 noch eine ganze Zahl herauskommt, d.h. ein Vielfaches von 1,
ist das beim Kehrwert nicht mehr der Fall.
Es ist naheliegend die Wiederholungen mit deren Umkehrungen zu erweitern.
Dieser Schritt kann in Analogie zur Erweiterung von `\mathbb{N}` auf `\mathbb{Z}` gesehen werden.
- Dort wurde der Vorgang "hinzu" mit der Anzahl vereint zum Paar (Anzahl,hinzu).
"hinzu" hat eine Umkehrung, dem "weg".
Man hat (Anzahl,weg) erweitert.
- In `\mathbb{Q}` erweitert man (Anzahl,vervielfachen) mit *Kehrzahlen* (Anzahl,teilen).
.. admonition:: Hinweis
Man könnte `\mathbb{Q}`-Elemente analog zu `\mathbb{N}\times\{+,-\}`
schreiben:
- `\cdot 2` entspräche +2 und
- `\div 2` entspräche -2
Die binäre Verknüpfung `\cdot` ist wieder nur Hintereinander-Ausführen und kann weggelassen werden
`(\cdot 2)\cdot(\div 2) = \cdot 2\div 2 = 1`
Stattdessen wird
- `2\cdot 2^{-1} = 1` oder
- `2\cdot \frac{1}{2} = 1`
geschrieben, ersteres, weil man die Hochzahlen bei gleicher
Basis addieren kann und somit `2\cdot 2^{-1}=2^1\cdot 2^{-1}=2^{1-1}=2^0=1` ist.
`(\mathbb{Q},\cdot)` ist eine **abelsche Gruppe** mit dem neutralen Element 1.
Weil jede Multiplikation in `(\mathbb{Q},\cdot)` ein Ergebnis in
`(\mathbb{Q},\cdot)` liefern soll (Abgeschlossenheit), nimmt man alle Brüche
`p/q=pq^{-1}` in `(\mathbb{Q},\cdot)` auf.
3/2 heißt, dass man zuerst `\cdot 3` und dann `\div 2` (Kehrwert von 2) macht.
`\frac{3}{2}=3\cdot 2^{-1}=3\frac{1}{2}=\frac{1}{2}\cdot 3=2^{-1}\cdot 3`
`pq^{-1}` bedeutet, dass man p mal vervielfacht und dann q mal teilt.
Wenn man zusätzlich eine gleiche Anzahl r mal wiederholt und dann wieder r mal teilt,
ändert sich nichts
`pq^{-1}=rr^{-1}pq^{-1}=rp(rq)^{-1}=\frac{rp}{rq}`
Alle solche Zahlen sind äquivalent und die kanonische Darstellung ist die mit p und q teilerfremd.
.. admonition:: Hinweis
`\mathbb{Q}` wird formal über Äquivalenzklassen
solcher gleichwertiger Zahlenpaare eingeführt:
`(n_1,m_1)\sim(n_2,m_2)\equiv n_1m_2=n_2m_1`.
`\mathbb{R}` als Erweiterung mit den irrationalen Zahlen `\mathbb{I}`
---------------------------------------------------------------------
Anzahl (`\mathbb{N}`) mit Hinzugeben (+) und Wegnehmen (-) ist `\mathbb{Z}`.
`\mathbb{Z}` mit Wiederholen (`\cdot`) und Teilen (`\div`) führt zu `\mathbb{Q}`.
Wenn wir bei `+,-,\cdot,\div` bleiben, kommen wir gut mit `\mathbb{Q}` aus.
Soll die Operation des Potenzierens umkehrbar sein, muss man wieder erweitern,
da es sich herausstellt, dass es
z.B. kein `p/q` in `\mathbb{Q}` gibt, für das `p^2/q^2=2` ist.
(Beweis: p/q teilerfremd. Wenn `p^2` gerade, dann auch p. Also `p=2n`, womit
`p^2=4n^2=2q^2` und damit q gerade, was ein Widerspruch ist).
Es gibt aber **Algorithmen**, die rationale Zahlen erzeugen (**Zahlenfolge**),
deren Quadrate immer näher an 2 heranreichen.
Es gibt mehrere solcher Algorithmen, also mehrere Zahlenfolgen, deren **Grenzwert** 2 ist.
Alle zusammen werden als Äquivalenzklasse angesehen.
Die irrationalen Zahlen `\mathbb{I}` bestehen aus Äquivalenzklassen von Zahlenfolgen.
Durch Angabe des Algorithmus, und mit `\sqrt{}` meint man so einen Algorithmus,
ist die irrationale Zahl bestimmt.
Man kann eine irrationale Zahl nicht als Dezimalzahl schreiben.
Man kann den Algorithmus auch nie vollständig ausführen, denn der endet nie.
Damit ist die irrationale Zahl wirklich dieser Algorithmus.
Die irrationalen Zahlen werden noch unterteilt in **algebraische** irrationale Zahlen,
eben solche die mit potenzieren zu tun haben,
und den **transzendente** irrationale Zahlen.
Letztere gibt es, weil es nicht nur Potenzieren gibt, sondern viele andere Abhängigkeiten,
etwa Sin, Cos, ...
Neue Operationen/Funktionen führen zu neuen Zahlen. Mit den irrationalen
Zahlen als **Äquivalenzklasse von Zahlenfolgen** hat man aber eine Definition,
die so allgemein ist, dass alle algebraischen und auch alle transzendenten
Zahlen und auch `\mathbb{Q}` mit eingeschlossen sind.
`\mathbb{R} = \mathbb{Q} \cup \mathbb{I}`
Eine andere sehr brauchbare und faszinierende Erweiterung sind die Komplexen Zahlen `\mathbb{C}` (:lnk:`r.di`).
.. admonition:: Hinweis
Da `\mathbb{R}` unendliche Zahlenfolgen sind, könnte man auch `\infty` und
`-\infty` als Zahlen mit aufnehmen, wären da nicht `\infty+1=\infty` und dergleichen.
Nichtsdestotrotz wird in der komplexen Analysis (Funktionentheorie)
`\mathbb{C}` mit `\infty` fruchtbar erweitert.
<file_sep>/chcko/r/cs/en.rst
.. raw:: html
%path = "maths/morphisms"
%kind = chindnum["texts"]
%level = 10
<!-- html -->
The concept of a function from set theory, which maps elements of one set
(domain) uniquely to elements of another set (codomain),
is tweaked/generalized by the concept of a morphism in category theory,
in the sense that it puts the whole mapping in the center and combines
all objects, whether domain or codomain, into a set of objects O.
Domain and codomain are determined by or part of a
morphism (`D_f` is the domain of f, `C_f` is the codomain of f; both do not need to be sets).
Several morphisms in the set of morphisms M can share the
same pair (domain, codomain). (O,M,id) is a category; id is the identity morphism.
An important aspect of a morphism is that it maintains the structure in the objects
(order structure, algebraic structure, topological structure) and
depending on the structure the morphisms have special names (`f\circ g (D_g) = f(g(D_g))`):
- Monomorphism: `f\circ g=f\circ h \implies g=h` (left cancellation of `f`)
or `f` injective for set objects
(`proof <http://www.proofwiki.org/wiki/Injection_iff_Monomorphism_in_Category_of_Sets>`_)
- Epimorphism: `g\circ f=h \circ f \implies g=h` (right cancellation)
or `f` surjective for set objects
(`proof <http://www.proofwiki.org/wiki/Surjection_iff_Epimorphism_in_Category_of_Sets>`_)
- Isomorphism: `f` has `g` such that `f\circ g=id_{D_g}` and `g \circ f = id_{D_f}`
(left inverse = right inverse) or `f` bijective for set objects
- Endomorphism: `X\rightarrow X`
- Automorphism: `X\rightarrow X` + isomorphism
- Homomorphism (Algebra): `f(a+b)=f(a)+f(b)` (different `+` possible)
- Homeomorphism (Topology): `f` and `f^{-1}` continuous
- Diffeomorphism (Differential geometry): bijective, `f` and `f^{-1}` continuously differentiable
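A quick numerical illustration of the homomorphism condition `f(a+b)=f(a)+f(b)` (a Python sketch; the sample range and the two maps are chosen only for illustration):

.. code-block:: python

    samples = range(-5, 6)

    f = lambda a: 3 * a     # additive homomorphism on the integers
    g = lambda a: a + 1     # not a homomorphism

    print(all(f(a + b) == f(a) + f(b) for a in samples for b in samples))  # True
    print(all(g(a + b) == g(a) + g(b) for a in samples for b in samples))  # False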
<file_sep>/chcko/r/a0/en.rst
.. raw:: html
%path = "maths/vectors"
%kind = chindnum["texts"]
%level = 11
<!-- html -->
If we see the ingredients of a set of cake recipes as a vector space,
then every cake is a vector `z` in the *ingredient vector space*.
We independently choose an amount from each ingredient (variable `z_i`).
We use the value 0, if the ingredient is not used at all.
If we only look at the cakes,
then a choice from them is a vector `k` in the *cake vector space*.
Every `k_j` is the number of cakes of kind `j`.
When going from the cakes to the ingredients,
one does a coordinate transformation.
To get the total amount of ingredient `z_i`
one needs to multiply the number of each cake `k_j`
with the amount of ingredient `i` for that cake.
This is a matrix multiplication.
`z = ZK \cdot k = \sum_j ZK_{ij}k_j`
In `ZK` every column is a recipe,
i.e. the ingredients (**components**) for cake `j`.
To obtain the price `p` in the *price vector space*,
i.e. what is the cost of all ingredients for a set of cakes,
we multiply again
`p = PZ \cdot z = \sum_i PZ_{1i} z_i`
`PZ` is a matrix with one row.
The number of rows is the dimension of the target vector space.
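A small numpy sketch of these two coordinate transformations (all recipe amounts and prices below are made up for illustration):

.. code-block:: python

    import numpy as np

    # columns of ZK are recipes: ingredients per cake kind
    ZK = np.array([[300, 200],     # flour in g for cake kinds 1 and 2
                   [150, 100],     # sugar in g
                   [  2,   3]])    # eggs
    PZ = np.array([[0.001, 0.002, 0.25]])   # price per unit of each ingredient

    k = np.array([2, 1])    # order 2 cakes of kind 1 and 1 cake of kind 2
    z = ZK @ k              # total ingredients: [800 400   7]
    p = PZ @ z              # total price: [3.35]
    print(z, p)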
<file_sep>/chcko/r/bs/__init__.py
# -*- coding: utf-8 -*-
from random import sample
from chcko.chcko.hlp import Struct, equal_0 as equal, norm_expr as chorm
import sympy
from sympy import S, sin, cos, E
from sympy.abc import x
def chiven():
f = sample([E ** x, sin(x), cos(x), 1 / x], 1)[0]
a, b = sample(range(2, 9), 2)
ee = f.subs(x, a * x + b)
g = Struct(ee=sympy.sstr(ee))
return g
def chalc(g):
res = sympy.sstr(S(sympy.integrate(S(g.ee), x)))
return [res]
<file_sep>/chcko/r/dj/en.rst
.. raw:: html
%path = "maths/trigonometry"
%kind = chindnum["texts"]
%level = 11
<!-- html -->
.. role:: asis(raw)
:format: html latex
.. contents::
In the following drawing we have a circle with radius 1.
The length of the arc on such a circle is a measure for the **angle**.
It is called **radian** and the unit is ``rad``. The right angle (90°) is `\pi/2`.
In general
.. math::
\frac{\pi}{180}\alpha[°] = \alpha[rad]
By changing this angle one also changes the lengths labeled with `\sin\alpha`,
`\cos\alpha` and `\tan\alpha`. These lengths are determined by the angle,
which is equivalent to saying that the lengths are **functions of the angle**.
.. tikz:: \coordinate (O) at (0,0);
\coordinate (C) at ({2*cos(60)},{2*sin(60)});
\coordinate (P) at ({2*cos(60)},0);
\coordinate (D) at (2,{2*tan(60)});
\draw[black, very thin] (O) circle [radius=2];
\draw[red,thick] (2,0) arc [radius=2, start angle=0, end angle=60] node[midway,above]{\tiny $\alpha$};
\draw[blue,thick] (O) -- (C) node[midway,above]{\tiny $1$};
\draw[blue,thick] (P) -- (C) node[midway,right]{\tiny $\sin\alpha$};
\draw[blue,thick] (O) -- (P) node[midway,below]{\tiny $\cos\alpha$};
\draw[green,thick] (P) -- (2,0);
\draw[green,thick] (2,0) -- (D) node[midway,right]{\tiny $\tan\alpha$};
\draw[green,thick] (C) -- (D);
\draw[xshift=-1.1cm,yshift=-1cm] node[right,text width=2.2cm]
{ \tiny $\tan\alpha=\frac{\sin\alpha}{\cos\alpha}$\\$\sin^2\alpha+\cos^2\alpha=1$ };
.. admonition:: Similarity = affine mapping
One can scale this diagram to an actual right triangle and completely
determine it by knowing one acute angle and one side.
The graphs of the above **trigonometric functions** are as follows
.. tikz:: \begin{axis}
[
ymin=-1,ymax=1,
xmin=0,xmax=2*pi,
xtick=\empty,
ytick={-1,0,1},
extra x ticks={1.5708,3.1416,4.712,6.2832},
extra x tick labels={$\frac{\pi}{2}$, $\pi$, $\frac{3\pi}{2}$, $2\pi$},
every extra x tick/.style={
xticklabel style={anchor=north west},
grid=major,
major grid style={thick,dashed,red}
},
axis lines = center,
xlabel=$x$,ylabel=$y$,
enlargelimits=0.2,
domain=0:2*pi,
samples=100,
axis equal,
]
\addplot [green,thick] {tan(deg(x))} node [midway,left]{tan};
\addplot [red,thick] {sin(deg(x))} node [above]{sin};
\addplot [blue,thick] {cos(deg(x))} node [above]{cos};
\end{axis}
Some values of the functions can be found via calculations on the equilateral triangle
(`\pi/3`, `\pi/6`) or on a square with side length 1 (`\pi/4`).
`\cos` is symmetric: `\cos(-\alpha)=\cos\alpha`
`\sin` is antisymmetric: `\sin(-\alpha)=-\sin\alpha`.
All trigonometric functions have `2\pi` as a period (for `\tan` already `\pi` is one): `\sin|\cos|\tan(\alpha+2\pi)=\sin|\cos|\tan(\alpha)`.
Because the acute angles of the right triangle add up to `\pi/2`, we have
.. math::
\sin(\pi/2 - \alpha)=\cos\alpha\\
\cos(\pi/2 - \alpha)=\sin\alpha
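A quick numerical sanity check of these relations (a Python sketch; the sample angles are arbitrary):

.. code-block:: python

    import math

    for alpha in [0.3, 1.0, 2.5]:
        assert math.isclose(math.tan(alpha), math.sin(alpha) / math.cos(alpha))
        assert math.isclose(math.sin(alpha) ** 2 + math.cos(alpha) ** 2, 1.0)
        assert math.isclose(math.sin(math.pi / 2 - alpha), math.cos(alpha))
        assert math.isclose(math.sin(alpha + 2 * math.pi), math.sin(alpha))

    # degree <-> radian conversion from the formula above
    print(math.radians(90), math.pi / 2)   # both pi/2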
<file_sep>/chcko/r/bk/__init__.py
# -*- coding: utf-8 -*-
from random import randrange
from chcko.chcko.hlp import Struct, norm_int, norm_rounded
import numpy as np
__all__ = ['chiven', 'chorm', 'chalc', 'T']
def chiven():
g = Struct()
r2 = lambda v: round(
randrange(int(v * 100 - v * 10), int(v * 100 + v * 10)) / 100.0, 2)
r15 = lambda: randrange(1, 5)
rr = lambda a, b: randrange(a, b)
g.CC = [r15(), r15(), r15()]
g.PP = [r2(1.8), r2(1), r2(0.3), r2(2.6)]
g.RR = [[r2(80 / 250.0), r2(220 / 1000.0), rr(7, 10),
r2(220 / 500.0)], [r2(150 / 250.0),
r2(250 / 1000.0), rr(1, 3), r2(300 / 500.0)],
[r2(70 / 250.0), r2(100 / 1000.0), rr(7, 9), 0]]
return g
T = lambda x: np.transpose(np.array(x))
def chalc(g):
npR = T(g.RR)
npM = np.array(g.PP)
npC = np.array(g.CC)
ingr = np.dot(npR, npC)
money = np.dot(npM, ingr)
return [int(ingr[2]), money]
def chorm(answers):
return [norm_int(answers[0]), norm_rounded(answers[1])]
| 594f8a0aaac0b45df6b66e62f8a4e6e4a1b11826 | [
"Python",
"Makefile",
"HTML",
"reStructuredText"
] | 124 | reStructuredText | chcko/chcko-r | a7b898efd3e35f783917d0b9d89ab96187371794 | 5e6916cbdb8a4fd2683ffaa24a94553ffc035038 | |
refs/heads/master | <file_sep>package com.txtled.gp_a209.model.net;
import android.annotation.SuppressLint;
import android.content.Context;
import android.os.Handler;
import android.webkit.WebResourceRequest;
import android.webkit.WebResourceResponse;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import java.io.File;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import io.reactivex.BackpressureStrategy;
import io.reactivex.Flowable;
import io.reactivex.FlowableEmitter;
import io.reactivex.FlowableOnSubscribe;
/**
* Created by Mr.Quan on 2018/11/12.
*/
public class NetHelperImpl implements NetHelper {
@Inject
public NetHelperImpl() {
}
}
<file_sep>package com.txtled.gp_a209.control.mvp;
import android.app.Activity;
import com.txtled.gp_a209.base.BasePresenter;
import com.txtled.gp_a209.base.BaseView;
import com.txtled.gp_a209.bean.IotCoreData;
/**
* Created by Mr.Quan on 2020/3/19.
*/
public interface ControlContract {
interface View extends BaseView{
void mqttSuccess(int id);
void mqttFail(int id);
void hidLoadingView();
void setData(IotCoreData iotCoreData);
void powerChanged(boolean power);
void volumeFail(int progress);
void lightFail();
void initFail();
void resetView(IotCoreData iotCoreData);
}
interface Presenter extends BasePresenter<View>{
void init(String endpoint, Activity activity);
void sendMqtt(int id);
void sendLight(int state);
void onClick(int id,boolean power);
void initData();
void sendVolume(int progress);
void destroy();
void enableView();
}
}
<file_sep>package com.txtled.gp_a209.login.mvp;
import android.app.Activity;
import android.net.wifi.WifiInfo;
import com.txtled.gp_a209.base.BasePresenter;
import com.txtled.gp_a209.base.BaseView;
/**
* Created by Mr.Quan on 2020/3/12.
*/
public interface LoginContract {
interface View extends BaseView {
void hidLoadingView();
void showLoadingView();
void checkLocation();
void setNoWifiView();
void hidSnackBar();
void setInfo(String ssid, WifiInfo info);
void showLoginFail();
void toMainView(boolean back);
void signOut();
void signOutFail();
}
interface Presenter extends BasePresenter<View>{
void init(Activity activity);
void viewClick(int id, int type);
void onResume(boolean isShowing);
void onDestroy(Activity activity);
}
}
<file_sep>package com.txtled.gp_a209.model.prefs;
/**
* Created by Mr.Quan on 2018/4/17.
*/
public interface PreferencesHelper {
boolean isFirstIn();
void setFirstIn(boolean first);
String getUserId();
void setUserId(String id);
String getDeviceAddress();
void setDeviceAddress(String address);
void setUid(String uid);
String getUid();
String getEmail();
void setEmail(String email);
}
<file_sep>package com.txtled.gp_a209.base;
import android.os.Bundle;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import com.google.android.material.snackbar.Snackbar;
import com.txtled.gp_a209.R;
import com.txtled.gp_a209.application.MyApplication;
import com.txtled.gp_a209.widget.ArialRoundTextView;
import butterknife.ButterKnife;
public abstract class BaseActivity extends AppCompatActivity {
public static final String TAG = BaseActivity.class.getSimpleName();
public ArialRoundTextView tvTitle;
public boolean isBack = true;
public boolean changeColor = true;
private long mExitTime;
private MyApplication mApplication;
Toolbar toolbar;
public Snackbar snackbar;
private ImageView ivRight;
public abstract void init();
public abstract int getLayout();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
beforeContentView();
setContentView(getLayout());
ButterKnife.bind(this);
// if (!EventBus.getDefault().isRegistered(this)) {
// EventBus.getDefault().register(this);
// }
mApplication = MyApplication.getInstance();
addActivity();
onCreateView();
init();
}
protected abstract void beforeContentView();
public void onCreateView() {
}
public void initToolbar() {
toolbar = (Toolbar) findViewById(R.id.toolbar);
if (toolbar != null) {
tvTitle = (ArialRoundTextView) findViewById(R.id.tv_title);
ivRight = (ImageView) findViewById(R.id.iv_right);
setSupportActionBar(toolbar);
setTitle("");
toolbar.setOnMenuItemClickListener(onMenuItemClick);
toolbar.setNavigationOnClickListener(v -> {
if (isBack){
onBackPressed();
}else {
onLeftClick();
}
});
}
}
public void onLeftClick() {
}
public void setNavigationIcon(boolean isBack) {
this.isBack = isBack;
if (isBack) {
toolbar.setNavigationIcon(R.mipmap.logwifi_back_xhdpi);
} else {
toolbar.setNavigationIcon(R.mipmap.devicelist_left_xhdpi);
}
}
public void setRightImg(boolean isShow, @Nullable int drawable, View.OnClickListener listener) {
if (isShow) {
ivRight.setVisibility(View.VISIBLE);
ivRight.setImageResource(drawable);
ivRight.setOnClickListener(listener);
} else {
ivRight.setVisibility(View.GONE);
}
}
public Toolbar.OnMenuItemClickListener onMenuItemClick = new Toolbar.OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(MenuItem menuItem) {
OnMenuItemClick(menuItem.getItemId());
return true;
}
};
public void OnMenuItemClick(int itemId) {
}
public boolean onExitActivity(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK && event.getAction() == KeyEvent.ACTION_DOWN) {
if ((System.currentTimeMillis() - mExitTime) > 2000) {
Toast.makeText(this, R.string.exit_program_hint,
Toast.LENGTH_SHORT).show();
mExitTime = System.currentTimeMillis();
} else {
removeAllActivity();
}
return true;
} else {
return super.onKeyDown(keyCode, event);
}
}
@Override
public void onBackPressed() {
if (getFragmentManager().getBackStackEntryCount() > 0) {
getFragmentManager().popBackStack();
} else {
super.onBackPressed();
}
}
@Override
public void onDestroy() {
super.onDestroy();
// if (EventBus.getDefault().isRegistered(this)) {
// EventBus.getDefault().unregister(this);
// }
}
public void addActivity() {
mApplication.addActivity(this);
}
public void removeAllActivity() {
mApplication.removeAllActivity();
}
public void showSnackBar(View view, int str) {
if (snackbar == null) {
snackbar = Snackbar.make(view, str, Snackbar.LENGTH_INDEFINITE);
snackbar.getView().setBackgroundColor(getResources().getColor(R.color.bg_snack));
}
snackbar.show();
}
public void showSnackBar(View view, int str, int btnStr, View.OnClickListener listener) {
if (snackbar == null) {
snackbar = Snackbar.make(view, str, Snackbar.LENGTH_INDEFINITE).setAction(btnStr,listener);
snackbar.getView().setBackgroundColor(getResources().getColor(R.color.bg_snack));
snackbar.setActionTextColor(getResources().getColor(R.color.white));
}
snackbar.show();
}
// public void showSnackBar(View view, int layoutId, int str) {
// if (snackbar == null) {
// snackbar = Snackbar.make(view, str, Snackbar.LENGTH_INDEFINITE);
// snackbar.getView().setBackgroundColor(getResources().getColor(R.color.bg_snack));
// View snackbarview = snackbar.getView();//获取snackbar的View(其实就是SnackbarLayout)
//
// Snackbar.SnackbarLayout snackbarLayout=(Snackbar.SnackbarLayout)snackbarview;//将获取的View转换成SnackbarLayout
//
// View add_view = LayoutInflater.from(snackbarview.getContext()).inflate(layoutId,null);//加载布局文件新建View
//
// LinearLayout.LayoutParams p = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.WRAP_CONTENT,LinearLayout.LayoutParams.WRAP_CONTENT);//设置新建布局参数
//
// p.gravity= Gravity.CENTER_VERTICAL;//设置新建布局在Snackbar内垂直居中显示
//
// snackbarLayout.addView(add_view,1,p);//将新建布局添加进snackbarLayout相应位置
// }
// snackbar.show();
// }
// public void changeSnackBar(int str,int btnStr, boolean isShow, View.OnClickListener listener){
// snackbar.setText(str);
// View snackbarview = snackbar.getView();
// ProgressBar bar = (ProgressBar) snackbarview.findViewById(R.id.clp_loading);
// if (isShow){
// bar.setVisibility(View.VISIBLE);
// snackbar.setAction(null,null);
// }else {
// bar.setVisibility(View.GONE);
// snackbar.setAction(btnStr,listener);
// }
//
// }
public void hideSnackBar() {
if (snackbar != null && snackbar.isShown()) {
snackbar.dismiss();
snackbar = null;
}
}
// @Subscribe
// public void onEventMainThread(String str) {
//
// }
}
<file_sep>package com.txtled.gp_a209.add.listener;
/**
* Created by Mr.Quan on 2020/1/3.
*/
public interface OnUdpSendRequest {
void OnRequestListener(String result);
}
<file_sep>package com.txtled.gp_a209.widget;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Typeface;
import android.util.AttributeSet;
import androidx.appcompat.widget.AppCompatEditText;
import com.txtled.gp_a209.R;
import static com.txtled.gp_a209.utils.Constants.BOLD;
import static com.txtled.gp_a209.utils.Constants.THIN;
/**
* Created by Mr.Quan on 2020/3/17.
*/
public class ArialRoundEditText extends AppCompatEditText {
public ArialRoundEditText(Context context) {
this(context,null,0);
}
public ArialRoundEditText(Context context, AttributeSet attrs) {
this(context, attrs,0);
}
public ArialRoundEditText(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
if (isInEditMode()) return;
String fontName = "ArialRoundedMTStd.otf";
if (attrs != null) {
TypedArray typedArray = context.getTheme().
obtainStyledAttributes(attrs, R.styleable.ArialRound, defStyleAttr, 0);
int fontType = typedArray.getInteger(R.styleable.ArialRound_text_type, 0);
switch (fontType) {
case THIN:
fontName = "ArialRoundedMTStd.otf";
break;
case BOLD:
fontName = "ArialRoundedMTStd-ExtraBold.otf";
break;
}
}
super.setTypeface(Typeface.createFromAsset(getContext().getAssets(),
"fonts/" + fontName), defStyleAttr);
}
}
<file_sep>apply plugin: 'com.android.application'
android {
compileSdkVersion 29
buildToolsVersion "29.0.1"
defaultConfig {
applicationId "com.txtled.gp_a209"
minSdkVersion 14
targetSdkVersion 26
versionCode 1
versionName "1.1.1"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
multiDexEnabled true
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility = 1.8
targetCompatibility = 1.8
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation 'androidx.appcompat:appcompat:1.1.0'
implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
implementation 'androidx.legacy:legacy-support-v4:1.0.0'
implementation 'androidx.lifecycle:lifecycle-extensions:2.0.0'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test.ext:junit:1.1.1'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
implementation 'androidx.multidex:multidex:2.0.1'
//Butter Knife
implementation 'com.google.dagger:dagger:2.0.2'
annotationProcessor 'com.google.dagger:dagger-compiler:2.0.2'
implementation 'com.jakewharton:butterknife:8.2.1'
annotationProcessor 'com.jakewharton:butterknife-compiler:8.2.1'
//rx
implementation 'io.reactivex.rxjava2:rxjava:2.1.2'
implementation 'io.reactivex.rxjava2:rxandroid:2.0.1'
//event bus
//implementation 'org.greenrobot:eventbus:3.0.0'
//implementation files('libs/universal-image-loader-1.9.5.jar')
implementation 'org.glassfish:javax.annotation:10.0-b28'
//permissions
implementation 'com.tbruyelle.rxpermissions2:rxpermissions:0.9.4@aar'
//sql
//implementation 'org.greenrobot:greendao:3.2.2'
//design
implementation 'com.google.android.material:material:1.1.0'
implementation files('libs/login-with-amazon-sdk.jar')
//DNS
implementation 'org.apache.commons:commons-lang3:3.4'
implementation 'com.yalantis:phoenix:1.2.3'
implementation 'commons-io:commons-io:2.4'
//ble
implementation 'com.inuker.bluetooth:library:1.4.0'
//aws iot
implementation 'org.eclipse.paho:org.eclipse.paho.client.mqttv3:1.2.2'
implementation 'com.amazonaws:aws-android-sdk-iot:2.16.6'
// https://mvnrepository.com/artifact/com.amazonaws/aws-android-sdk-core
implementation 'com.amazonaws:aws-android-sdk-core:2.16.6'
implementation 'com.amazonaws:aws-iot-device-sdk-java:1.3.4'
//ddb
implementation "com.amazonaws:aws-android-sdk-ddb:2.16.6"
implementation 'com.amazonaws:aws-android-sdk-ddb-mapper:2.16.6'
//cognito
implementation 'com.amazonaws:aws-android-sdk-cognito:2.16.6'
implementation 'com.amazonaws:aws-android-sdk-cognitoidentityprovider:2.16.6'
implementation project(':esptouch')
}
<file_sep>package com.txtled.gp_a209.model.operate;
import android.app.Activity;
import com.tbruyelle.rxpermissions2.Permission;
import com.tbruyelle.rxpermissions2.RxPermissions;
import javax.inject.Inject;
import io.reactivex.functions.Consumer;
/**
* Created by Mr.Quan on 2019/3/6.
*/
public class OperateHelperImpl implements OperateHelper {
@Inject
public OperateHelperImpl() {
}
@Override
public void requestPermissions(Activity activity, String[] permissions,
final OnPermissionsListener permissionsListener) {
RxPermissions rxPermission = new RxPermissions(activity);
rxPermission.requestEach(permissions)
.subscribe(new Consumer<Permission>() {
@Override
public void accept(Permission permission) throws Exception {
if (permission.granted) {
// The user has granted this permission
permissionsListener.onSuccess(permission.name);
//Log.d(TAG, permission.name + " is granted.");
} else if (permission.shouldShowRequestPermissionRationale) {
// The user denied this permission without checking "Never ask again", so the permission dialog will be shown again on the next request
permissionsListener.onAskAgain();
//Log.d(TAG, permission.name + " is denied. More info should be provided.");
} else {
// The user denied this permission and checked "Never ask again"
permissionsListener.onFailure();
//Log.d(TAG, permission.name + " is denied.");
}
}
});
}
}
<file_sep>package com.txtled.gp_a209.di.module;
import com.txtled.gp_a209.application.MyApplication;
import com.txtled.gp_a209.model.DataManagerModel;
import com.txtled.gp_a209.model.db.DBHelper;
import com.txtled.gp_a209.model.db.DBHelperImpl;
import com.txtled.gp_a209.model.net.NetHelper;
import com.txtled.gp_a209.model.net.NetHelperImpl;
import com.txtled.gp_a209.model.operate.OperateHelper;
import com.txtled.gp_a209.model.operate.OperateHelperImpl;
import com.txtled.gp_a209.model.prefs.PreferencesHelper;
import com.txtled.gp_a209.model.prefs.PreferencesHelperImpl;
import javax.inject.Singleton;
import dagger.Module;
import dagger.Provides;
/**
* Created by KomoriWu
* on 2017/9/15.
*/
@Module
public class AppModule {
private MyApplication myApplication;
public AppModule(MyApplication myApplication) {
this.myApplication = myApplication;
}
@Provides
@Singleton
MyApplication provideMyApplication() {
return myApplication;
}
@Provides
@Singleton
DBHelper provideDBHelper(DBHelperImpl dbHelper) {
return dbHelper;
}
@Provides
@Singleton
PreferencesHelper providePreferencesHelper(PreferencesHelperImpl preferencesHelper) {
return preferencesHelper;
}
@Provides
@Singleton
NetHelper provideNetHelper(NetHelperImpl netHelper) {
return netHelper;
}
@Provides
@Singleton
OperateHelper provideOperateHelper(OperateHelperImpl operateHelper) {
return operateHelper;
}
@Provides
@Singleton
DataManagerModel provideDataManagerModel(DBHelperImpl dbHelper,
PreferencesHelperImpl preferencesHelper,
NetHelperImpl netHelper, OperateHelperImpl operateHelper) {
return new DataManagerModel(dbHelper, preferencesHelper,netHelper,operateHelper);
}
}
<file_sep>include ':app', ':esptouch'
rootProject.name='GP-A209'
<file_sep>package com.txtled.gp_a209;
import androidx.lifecycle.ViewModel;
public class FragmentLightViewModel extends ViewModel {
// TODO: Implement the ViewModel
}
<file_sep>package com.txtled.gp_a209.main;
import android.content.Intent;
import android.view.KeyEvent;
import android.view.View;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AlertDialog;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import androidx.swiperefreshlayout.widget.SwipeRefreshLayout;
import com.txtled.gp_a209.R;
import com.txtled.gp_a209.add.AddDeviceActivity;
import com.txtled.gp_a209.appinfo.AppInfoActivity;
import com.txtled.gp_a209.base.MvpBaseActivity;
import com.txtled.gp_a209.bean.DeviceInfo;
import com.txtled.gp_a209.bean.WWADeviceInfo;
import com.txtled.gp_a209.control.ControlActivity;
import com.txtled.gp_a209.information.InfoActivity;
import com.txtled.gp_a209.main.mvp.MainContract;
import com.txtled.gp_a209.main.mvp.MainPresenter;
import com.txtled.gp_a209.utils.AlertUtils;
import com.txtled.gp_a209.widget.ArialRoundButton;
import java.util.List;
import butterknife.BindView;
import static com.txtled.gp_a209.utils.Constants.APP;
import static com.txtled.gp_a209.utils.Constants.ENDPOINT;
import static com.txtled.gp_a209.utils.Constants.INFO;
import static com.txtled.gp_a209.utils.Constants.NAME;
import static com.txtled.gp_a209.utils.Constants.OK;
import static com.txtled.gp_a209.utils.Constants.RESULT;
import static com.txtled.gp_a209.utils.Constants.THING_DIR;
import static com.txtled.gp_a209.utils.Constants.TYPE;
import static com.txtled.gp_a209.utils.Constants.VERSION;
import static com.txtled.gp_a209.utils.Constants.WIFI;
public class MainActivity extends MvpBaseActivity<MainPresenter> implements MainContract.View,
SwipeRefreshLayout.OnRefreshListener, DeviceListAdapter.OnDeviceClickListener,
View.OnClickListener {
@BindView(R.id.rlv_device_list)
RecyclerView rlvDeviceList;
@BindView(R.id.srl_refresh)
SwipeRefreshLayout srlRefresh;
@BindView(R.id.abt_off_all)
ArialRoundButton abtOffAll;
private DeviceListAdapter listAdapter;
private String userId;
private String wifiName;
private AlertDialog dialog;
private String name;
@Override
protected void beforeContentView() {
}
@Override
public void init() {
initToolbar();
setNavigationIcon(false);
tvTitle.setText(R.string.device_list);
userId = presenter.init(this);
abtOffAll.setOnClickListener(this);
setRightImg(true, R.mipmap.devicelist_add_xhdpi, v ->
startActivityForResult(new Intent(MainActivity.this,
AddDeviceActivity.class).putExtra(TYPE,0), RESULT));
srlRefresh.setOnRefreshListener(this);
rlvDeviceList.setHasFixedSize(true);
rlvDeviceList.setLayoutManager(new LinearLayoutManager(this));
listAdapter = new DeviceListAdapter(this,this,userId);
rlvDeviceList.setAdapter(listAdapter);
srlRefresh.setRefreshing(true);
onRefresh();
}
@Override
public int getLayout() {
return R.layout.activity_main;
}
@Override
public void setInject() {
getActivityComponent().inject(this);
}
@Override
public void onRefresh() {
presenter.onRefresh();
}
@Override
public void getDeviceData(List<WWADeviceInfo> data) {
srlRefresh.setRefreshing(false);
if (!data.isEmpty())
listAdapter.setData(data);
}
@Override
public void hidSnackBar() {
hideSnackBar();
}
@Override
public void checkLocation() {
showSnackBar(abtOffAll, R.string.open_location);
}
@Override
public void setNoWifiView() {
showSnackBar(abtOffAll, R.string.no_wifi);
}
@Override
public void closeRefresh() {
srlRefresh.setRefreshing(false);
}
@Override
public void setData(List<WWADeviceInfo> refreshData) {
//srlRefresh.setRefreshing(false);
if (!refreshData.isEmpty()){
hidSnackBar();
listAdapter.setDiscoveryData(refreshData);
}
}
@Override
public void noDevice() {
//srlRefresh.setRefreshing(false);
hidSnackBar();
//showSnackBar(abtOffAll,R.string.no_device);
showSnackBar(abtOffAll,R.string.no_device, R.string.retry, v -> {
presenter.discovery();
hidSnackBar();
showSnackBar(abtOffAll,R.string.searching_device);
});
}
@Override
public void getWifiName(String ssid) {
wifiName = ssid;
}
@Override
public void deleteError() {
dialog.dismiss();
hidSnackBar();
showSnackBar(abtOffAll,R.string.already_deleted);
}
@Override
public void deleteSuccess() {
dialog.dismiss();
listAdapter.deleteItem(name);
}
@Override
public void mqttInitFail() {
dialog.dismiss();
hidSnackBar();
showSnackBar(abtOffAll,R.string.try_again);
}
@Override
public void success(boolean allOff) {
dialog.dismiss();
if (allOff){
//on
abtOffAll.setBackgroundColor(getResources().getColor(R.color.black));
abtOffAll.setTextColor(getResources().getColor(R.color.yellow));
abtOffAll.setText(R.string.on_all);
}else {
//off
abtOffAll.setBackgroundColor(getResources().getColor(R.color.gray));
abtOffAll.setTextColor(getResources().getColor(R.color.bg_snack));
abtOffAll.setText(R.string.off_all);
}
}
@Override
public void fail() {
dialog.dismiss();
hidSnackBar();
showSnackBar(abtOffAll,R.string.some_no_responding);
}
@Override
public void showLoading() {
dialog = AlertUtils.showLoadingDialog(this,R.layout.alert_progress);
dialog.show();
}
@Override
public void showSearching() {
runOnUiThread(() -> {
hidSnackBar();
showSnackBar(abtOffAll, R.string.searching_device);
});
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (snackbar != null && snackbar.isShown()){
hidSnackBar();
return false;
}else {
return onExitActivity(keyCode,event);
}
}
/**
* List item click: open the control screen for the selected device.
* @param endpoint the device endpoint id
* @param name the device friendly name
*/
@Override
public void onDeviceClick(String endpoint,String name) {
startActivity(new Intent(this, ControlActivity.class)
.putExtra(ENDPOINT,endpoint).putExtra(NAME,name));
}
@Override
public void onSettingClick(WWADeviceInfo data,String name) {
startActivityForResult(new Intent(this, InfoActivity.class)
.putExtra(NAME, data.getFriendlyNames())
.putExtra(ENDPOINT,data.getIp()).putExtra(VERSION,data.getVer())
.putExtra(WIFI,wifiName).putExtra(THING_DIR,data.getThing()),INFO);
}
/**
* Swipe-to-delete action: show a loading dialog and ask the presenter to delete the device.
* @param data the device to delete
* @param name the device name
*/
@Override
public void onDeleteClick(WWADeviceInfo data,String name) {
this.name = name;
dialog = AlertUtils.showLoadingDialog(this,R.layout.alert_progress);
dialog.show();
presenter.deleteDevice(data,name);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
if (requestCode == RESULT || requestCode == INFO || requestCode == APP){
if (resultCode == OK){
srlRefresh.setRefreshing(true);
onRefresh();
}
}
super.onActivityResult(requestCode, resultCode, data);
}
@Override
public void onLeftClick() {
startActivityForResult(new Intent(this, AppInfoActivity.class),APP);
}
@Override
public void onClick(View v) {
presenter.onClick(v);
}
@Override
public void onDestroy() {
presenter.onDestroy();
super.onDestroy();
}
}
<file_sep>package com.txtled.gp_a209.main.mvp;
import android.app.Activity;
import android.content.Context;
import android.content.IntentFilter;
import android.location.LocationManager;
import android.net.DhcpInfo;
import android.net.wifi.WifiInfo;
import android.net.wifi.WifiManager;
import android.os.Build;
import android.view.View;
import androidx.core.location.LocationManagerCompat;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.Protocol;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.auth.CognitoCachingCredentialsProvider;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClient;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.dynamodbv2.model.AttributeValueUpdate;
import com.amazonaws.services.dynamodbv2.model.GetItemRequest;
import com.amazonaws.services.dynamodbv2.model.GetItemResult;
import com.amazonaws.services.dynamodbv2.model.UpdateItemRequest;
import com.amazonaws.services.iot.AWSIotClient;
import com.amazonaws.services.iot.client.AWSIotDevice;
import com.amazonaws.services.iot.client.AWSIotException;
import com.amazonaws.services.iot.client.AWSIotQos;
import com.amazonaws.services.iot.model.CertificateStatus;
import com.amazonaws.services.iot.model.DeleteThingRequest;
import com.amazonaws.services.iot.model.DeleteThingResult;
import com.amazonaws.services.iot.model.DetachPolicyRequest;
import com.amazonaws.services.iot.model.DetachSecurityProfileRequest;
import com.amazonaws.services.iot.model.DetachThingPrincipalRequest;
import com.amazonaws.services.iot.model.UpdateCertificateRequest;
import com.txtled.gp_a209.R;
import com.txtled.gp_a209.add.listener.OnUdpSendRequest;
import com.txtled.gp_a209.add.udp.UDPBuild;
import com.txtled.gp_a209.application.MyApplication;
import com.txtled.gp_a209.base.CommonSubscriber;
import com.txtled.gp_a209.base.RxPresenter;
import com.txtled.gp_a209.bean.DeviceInfo;
import com.txtled.gp_a209.bean.WWADeviceInfo;
import com.txtled.gp_a209.broadcast.MyBroadcastReceiver;
import com.txtled.gp_a209.control.mqtt.MqttClient;
import com.txtled.gp_a209.control.mqtt.MyShadowMessage;
import com.txtled.gp_a209.control.mqtt.listener.OnConnectListener;
import com.txtled.gp_a209.control.mqtt.listener.OnMessageListener;
import com.txtled.gp_a209.model.DataManagerModel;
import com.txtled.gp_a209.utils.RxUtil;
import com.txtled.gp_a209.utils.Utils;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import io.reactivex.BackpressureStrategy;
import io.reactivex.Flowable;
import io.reactivex.FlowableOnSubscribe;
import io.reactivex.Observable;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.Disposable;
import io.reactivex.schedulers.Schedulers;
import static com.txtled.gp_a209.base.BaseActivity.TAG;
import static com.txtled.gp_a209.utils.Constants.DATA_DEVICE;
import static com.txtled.gp_a209.utils.Constants.DB_NAME;
import static com.txtled.gp_a209.utils.Constants.DISCOVERY;
import static com.txtled.gp_a209.utils.Constants.FRIENDLY_NAME;
import static com.txtled.gp_a209.utils.Constants.MY_OIT_CE;
import static com.txtled.gp_a209.utils.Constants.PUBLISH;
import static com.txtled.gp_a209.utils.Constants.REST_API;
import static com.txtled.gp_a209.utils.Constants.SEND_THING_NAME;
import static com.txtled.gp_a209.utils.Constants.THING_DIR;
import static com.txtled.gp_a209.utils.Constants.USER_ID;
import static com.txtled.gp_a209.utils.ForUse.ACCESS_KEY;
import static com.txtled.gp_a209.utils.ForUse.SECRET_ACCESS_KEY;
/**
* Created by Mr.Quan on 2019/12/9.
*/
public class MainPresenter extends RxPresenter<MainContract.View> implements MainContract.Presenter {
private DataManagerModel mDataManagerModel;
private CognitoCachingCredentialsProvider provider;
private AmazonDynamoDB client;
private String userId;
private List<WWADeviceInfo> data;
private boolean isNoWifi;
private MyBroadcastReceiver mReceiver;
private WifiInfo wifiInfo;
private WifiManager myWifiManager;
private DhcpInfo dhcpInfo;
private Activity activity;
private String broadCast;
private List<WWADeviceInfo> refreshData;
private String strReceive;
private UDPBuild udpBuild;
private Disposable timeCount;
private int count;
private AWSIotClient awsIot;
private boolean allOff;
@Inject
public MainPresenter(DataManagerModel mDataManagerModel) {
this.mDataManagerModel = mDataManagerModel;
}
@Override
public String init(Activity activity) {
this.activity = activity;
provider = MyApplication.getCredentialsProvider();
client = new AmazonDynamoDBClient(provider);
userId = mDataManagerModel.getUserId();
createIotService();
IntentFilter filter = new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION);
if (isSDKAtLeastP()) {
filter.addAction(LocationManager.PROVIDERS_CHANGED_ACTION);
}
mReceiver = new MyBroadcastReceiver((context, info) -> onChanged(context, info));
activity.registerReceiver(mReceiver, filter);
return userId;
}
/**
* Create the AWS IoT client used for thing management.
*/
private void createIotService() {
try {
AWSCredentials credentials = new BasicAWSCredentials(ACCESS_KEY, SECRET_ACCESS_KEY);
ClientConfiguration clientConfig = new ClientConfiguration();
clientConfig.setProtocol(Protocol.HTTPS);
awsIot = new AWSIotClient(credentials, clientConfig);
} catch (Exception e) {
Utils.Logger(TAG, "IotServiceUtil.createIotService failed to create the aws-iot connection", e.getMessage());
//LOGGER.error("IotServiceUtil.createIotService failed to create the aws-iot connection",e);
awsIot = null;
}
}
private void onChanged(Context context, WifiInfo info) {
view.hidSnackBar();
boolean disconnected = info == null
|| info.getNetworkId() == -1
|| "<unknown ssid>".equals(info.getSSID());
if (disconnected) {
isNoWifi = true;
if (isSDKAtLeastP()) {
LocationManager locationManager = (LocationManager) context
.getSystemService(Context.LOCATION_SERVICE);
if (!(locationManager != null && LocationManagerCompat
.isLocationEnabled(locationManager))) {
//view.checkLocation();
} else {
view.setNoWifiView();
}
} else {
view.setNoWifiView();
}
} else {
isNoWifi = false;
wifiInfo = info;
String ssid = info.getSSID();
if (ssid.startsWith("\"") && ssid.endsWith("\"")) {
ssid = ssid.substring(1, ssid.length() - 1);
}
view.getWifiName(ssid);
}
}
public boolean isSDKAtLeastP() {
return Build.VERSION.SDK_INT >= 28;
}
/**
* Derive the broadcast address from the local IP and netmask.
*/
private void getBroadCastIp() {
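//e.g. ip 192.168.1.23 with netmask 255.255.255.0 gives broadcast 192.168.1.255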
myWifiManager = ((WifiManager) activity.getApplicationContext()
.getSystemService(Context.WIFI_SERVICE));
dhcpInfo = myWifiManager.getDhcpInfo();
String ip = Utils.getWifiIp(dhcpInfo.ipAddress);
String netMask = Utils.getWifiIp(dhcpInfo.netmask);
String[] ipTemp = ip.split("\\.");
String[] maskTemp = netMask.split("\\.");
broadCast = "";
for (int i = 0; i < maskTemp.length; i++) {
if (maskTemp[i].equals("255")) {
broadCast += (ipTemp[i] + ".");
} else {
//broadcast octet = ip octet OR inverted mask octet
broadCast += (Integer.parseInt(ipTemp[i]) | (255 - Integer.parseInt(maskTemp[i]))) + (i == maskTemp.length - 1 ? "" : ".");
}
}
}
/**
* Refresh the device list: run UDP discovery and load the saved devices from DynamoDB.
*/
@Override
public void onRefresh() {
addSubscribe(Flowable.create((FlowableOnSubscribe<List<WWADeviceInfo>>) e -> {
//query data
try {
discovery();
e.onNext(getDeviceData());
}catch (Exception e1){
refreshData = new ArrayList<>();
e.onNext(refreshData);
}
}, BackpressureStrategy.BUFFER).compose(RxUtil.rxSchedulerHelper())
.subscribeWith(new CommonSubscriber<List<WWADeviceInfo>>(view) {
@Override
public void onNext(List<WWADeviceInfo> data) {
//return data to the view
view.getDeviceData(data);
}
}));
}
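//Reads this user's record from the "FOX" table: the item is keyed by UserId and
//holds a ThingDir map of friendlyName -> thing name, one entry per saved device.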
private List<WWADeviceInfo> getDeviceData() {
refreshData = new ArrayList<>();
HashMap<String, AttributeValue> key = new HashMap<>();
key.put(USER_ID, new AttributeValue().withS(userId));
//fetch the user's record
GetItemResult itemResult = client.getItem(new GetItemRequest()
.withTableName(DB_NAME).withKey(key));
if (itemResult.getItem() != null) {
Map<String, AttributeValue> resultItem = itemResult.getItem();
AttributeValue cert_data = resultItem.get(THING_DIR);
//device names
String[] names = cert_data.getM().keySet().toArray(new String[cert_data.getM().size()]);
//endpointId
for (int i = 0; i < names.length; i++) {
WWADeviceInfo info = new WWADeviceInfo();
info.setFriendlyNames(userId+"_"+names[i]);
info.setThing(cert_data.getM().get(names[i]).getS());
refreshData.add(info);
}
}
return refreshData;
}
/**
* Delete a device: remove this user's entry from the DynamoDB record and push the
* updated friendly-name list to the device over UDP.
* @param data the device to delete
* @param name the device name
*/
@Override
public void deleteDevice(WWADeviceInfo data, String name) {
broadCast = data.getIp();
addSubscribe(Flowable.just(name).subscribeOn(Schedulers.io()).observeOn(Schedulers.io())
.subscribeWith(new CommonSubscriber<String>(view) {
@Override
public void onNext(String name) {
//query data
try {
//delete DB
String[] names = data.getFriendlyNames().split(",");
if (names.length > 1){
//only strip this user's part of the friendlyName
if (deleteDB(name)){
StringBuffer buffer = new StringBuffer();
for (int i = 0; i < names.length; i++) {
if (!names[i].contains(userId)){
buffer.append(names[i] + (i == names.length - 1 ? "" : ","));
}
}
udpSend(String.format(FRIENDLY_NAME, buffer.toString()), result -> {
if (result.contains("\"friendlyname\":1")){
view.deleteSuccess();
}else {
udpBuild.sendMessage(String.format(FRIENDLY_NAME,
buffer.toString()),broadCast);
}
});
}else {
view.deleteError();
hidSnackBarDelay();
}
}else {
//delete the thing - not implemented yet
view.deleteSuccess();
// awsIot.detachThingPrincipal(new DetachThingPrincipalRequest()
// .withThingName(data.getThing())
// .withPrincipal("<KEY>"));
// awsIot.updateCertificate(new UpdateCertificateRequest()
// .withCertificateId("1<KEY>")
// .withNewStatus("INACTIVE"));
// awsIot.detachPolicy(new DetachPolicyRequest()
// .withPolicyName(MY_OIT_CE)
// .withTarget("arn:aws:iot:us-east-1:612535970613:cert/1f29341c16b40760b8d2d6c8783682bcc6ecec6d828746fe459321f85e79b243"));
// //awsIot.detachSecurityProfile(new DetachSecurityProfileRequest().)
// awsIot.detachThingPrincipal(new DetachThingPrincipalRequest().withThingName(data.getThing()));
// DeleteThingResult request = awsIot.deleteThing(new DeleteThingRequest()
// .withThingName(data.getThing()));
// //delete all information for the device
// if (deleteDB(name)){
// //delete thing
//
// if (!request.toString().isEmpty()) {
// //写入设备
// writeToDevice();
// view.deleteSuccess();
// }else {
// view.deleteError();
// hidSnackBarDelay();
// }
// }else {
// view.deleteError();
// hidSnackBarDelay();
// }
}
} catch (Exception e1) {
view.deleteError();
hidSnackBarDelay();
}
}
}));
}
/**
* Button click event: turn every discovered device on or off through its shadow.
* @param v the clicked view
*/
@Override
public void onClick(View v) {
switch (v.getId()){
case R.id.abt_off_all:
view.showLoading();
if (!refreshData.isEmpty()){
for (WWADeviceInfo info : refreshData) {
MqttClient.getClient().initClient(info.getThing(), new OnConnectListener() {
@Override
public void onSuccess(AWSIotDevice device) {
AWSIotDevice iotDevice = device;
powerOff(iotDevice,info.getThing());
}
@Override
public void onFail() {
view.mqttInitFail();
hidSnackBarDelay();
}
});
}
}
break;
}
}
@Override
public void discovery() {
if (!isNoWifi) {
getBroadCastIp();
view.showSearching();
if (udpBuild != null){
System.out.println("discovery executed 111111111");
udpBuild.stopUDPSocket();
}
udpSend(DISCOVERY, result -> {
});
}
}
@Override
public void onDestroy() {
activity.unregisterReceiver(mReceiver);
}
private void hidSnackBarDelay(){
addSubscribe(Flowable.timer(3, TimeUnit.SECONDS)
.compose(RxUtil.rxSchedulerHelper())
.subscribeWith(new CommonSubscriber<Long>(view) {
@Override
public void onNext(Long aLong) {
view.hidSnackBar();
}
}));
}
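//Publishes a shadow update on $aws/things/<thing>/shadow/update with a payload of
//{"state":{"desired":{"device":"on"}}} (or "off"), so one tap toggles every device.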
private void powerOff(AWSIotDevice iotDevice, String endpoint) {
try {
MyShadowMessage myMessage = new MyShadowMessage(String.format(PUBLISH,endpoint),
AWSIotQos.QOS0,String.format(DATA_DEVICE,allOff == true ? "\"on\"" : "\"off\""));
myMessage.setListener(new OnMessageListener() {
@Override
public void onSuccess() {
allOff = !allOff;
view.success(allOff);
}
@Override
public void onFailure() {
view.fail();
hidSnackBarDelay();
}
@Override
public void onTimeout() {
view.fail();
hidSnackBarDelay();
}
});
iotDevice.update(myMessage,5000);
} catch (AWSIotException e) {
e.printStackTrace();
}
}
private void writeToDevice() {
udpSend(String.format(FRIENDLY_NAME, ""), result -> {
if (result.contains("\"friendlyname\"")){
udpBuild.sendMessage(result.contains("1") ?
String.format(SEND_THING_NAME, REST_API, "") :
String.format(FRIENDLY_NAME,"") ,broadCast);
}else {
if (result.contains("\"endpoint\":1")){
view.deleteSuccess();
}else {
udpBuild.sendMessage(String.format(SEND_THING_NAME, REST_API, ""),broadCast);
}
}
});
}
/**
* Send a UDP broadcast and collect replies.
* @param message the payload to broadcast
* @param listener callback invoked with the raw reply for non-discovery messages
*/
private void udpSend(String message, OnUdpSendRequest listener) {
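//Broadcasts `message` over UDP and parses each JSON reply. Discovery replies
//({"discovery":1} requests) are expected to contain ip, netmask, gw, host, port,
//cid, thing, friendlyname and ver; any other reply is passed to the listener raw.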
data = new ArrayList<>();
strReceive = "";
udpBuild = UDPBuild.getUdpBuild();
myWifiManager = ((WifiManager) activity.getApplicationContext()
.getSystemService(Context.WIFI_SERVICE));
dhcpInfo = myWifiManager.getDhcpInfo();
udpBuild.setIgnoreIp(Utils.getWifiIp(dhcpInfo.ipAddress));
udpBuild.setUdpReceiveCallback(result -> {
strReceive = new String(result.getData(), 0, result.getLength());
try {
JSONObject deviceInfo = new JSONObject(strReceive);
if (message.equals(DISCOVERY)) {
WWADeviceInfo info = new WWADeviceInfo(
deviceInfo.optString("ip"),
deviceInfo.optString("netmask"),
deviceInfo.optString("gw"),
deviceInfo.optString("host"),
deviceInfo.optString("port"),
deviceInfo.optString("cid"),
deviceInfo.optString("thing"),
deviceInfo.optString("friendlyname"),
deviceInfo.optString("ver")
);
data.add(info);
willStop();
} else {
listener.OnRequestListener(strReceive);
}
} catch (JSONException e) {
e.printStackTrace();
}
});
udpBuild.sendMessage(message, broadCast);
setTime(message, listener);
}
private void willStop() {
if (timeCount != null) {
timeCount.dispose();
}
timeCount = Observable.timer(5, TimeUnit.SECONDS).subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread()).subscribe(aLong -> {
if (!data.isEmpty()) {
//iterate backwards so removing an entry does not skip the element after it
for (int i = data.size() - 1; i >= 0; i--) {
if (data.get(i).getFriendlyNames() == null
|| !data.get(i).getFriendlyNames().contains(userId)) {
//drop replies that do not belong to the signed-in user
data.remove(i);
}
}
view.setData(data);
udpBuild.stopUDPSocket();
System.out.println("discovery executed");
}else {
view.noDevice();
}
}
);
}
private void setTime(String message, OnUdpSendRequest listener) {
addSubscribe(Flowable.timer(3, TimeUnit.SECONDS)
.compose(RxUtil.rxSchedulerHelper())
.subscribeWith(new CommonSubscriber<Long>(view) {
@Override
public void onNext(Long aLong) {
if (data.isEmpty()) {
count += 1;
if (count < 3) {
udpSend(message, listener);
} else {
count = 0;
view.noDevice();
//hidSnackBarDelay();
}
//udpBuild.sendMessage(message,broadCast);
} else {
count = 0;
}
}
}));
}
/**
* Remove the device entry from the DynamoDB record.
* @param name the device name key inside the ThingDir map
* @return true if the update succeeded
*/
private boolean deleteDB(String name) {
HashMap<String, AttributeValue> key = new HashMap<>();
key.put(USER_ID, new AttributeValue().withS(userId));
//fetch the user's record
GetItemResult itemResult = client.getItem(new GetItemRequest()
.withTableName(DB_NAME).withKey(key));
if (itemResult.getItem() != null) {
Map<String, AttributeValue> resultItem = itemResult.getItem();
AttributeValue cert_data = resultItem.get(THING_DIR);
//remove the device from the map
cert_data.getM().remove(name);
try{
client.updateItem(new UpdateItemRequest().withTableName(DB_NAME)
.withKey(key).addAttributeUpdatesEntry(THING_DIR,
new AttributeValueUpdate()
.withValue(cert_data)));
return true;
}catch (Exception e){
view.deleteError();
hidSnackBarDelay();
return false;
}
}else {
return false;
}
}
}
<file_sep>package com.txtled.gp_a209.light.mvp;
import android.app.Activity;
import com.txtled.gp_a209.base.RxPresenter;
import com.txtled.gp_a209.model.DataManagerModel;
import javax.inject.Inject;
/**
* Created by Mr.Quan on 2020/5/25.
*/
public class LightPresenter extends RxPresenter<LightContract.View> implements LightContract.Presenter {
private DataManagerModel dataManagerModel;
@Inject
public LightPresenter(DataManagerModel dataManagerModel) {
this.dataManagerModel = dataManagerModel;
}
}
<file_sep>package com.txtled.gp_a209.information.mvp;
import android.app.Activity;
import android.view.View;
import com.txtled.gp_a209.base.BasePresenter;
import com.txtled.gp_a209.base.BaseView;
/**
* Created by Mr.Quan on 2020/3/23.
*/
public interface InfoContract {
interface View extends BaseView{
}
interface Presenter extends BasePresenter<View>{
void init(Activity activity);
String getName(String friendlyName);
}
}
<file_sep>package com.txtled.gp_a209.light;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.drawable.GradientDrawable;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.Nullable;
import com.txtled.gp_a209.R;
import com.txtled.gp_a209.base.MvpBaseFragment;
import com.txtled.gp_a209.light.mvp.LightContract;
import com.txtled.gp_a209.light.mvp.LightPresenter;
import com.txtled.gp_a209.widget.ArialRoundButton;
import com.txtled.gp_a209.widget.ArialRoundRadioButton;
import butterknife.BindView;
/**
* Created by Mr.Quan on 2020/5/25.
*/
public class LightFragment extends MvpBaseFragment<LightPresenter> implements LightContract.View {
@BindView(R.id.button_light_red)
ArialRoundRadioButton cbRed;
@BindView(R.id.button_light_orange)
ArialRoundRadioButton cbOrange;
@BindView(R.id.button_light_yellow)
ArialRoundRadioButton cbYellow;
@BindView(R.id.button_light_green)
ArialRoundRadioButton cbGreen;
@BindView(R.id.button_light_cy_blue)
ArialRoundRadioButton cbCyblue;
@BindView(R.id.button_light_blue)
ArialRoundRadioButton cbBlue;
@BindView(R.id.button_light_purple)
ArialRoundRadioButton cbPurple;
@BindView(R.id.button_light_white)
ArialRoundRadioButton cbWhite;
@BindView(R.id.button_light_cycle)
ArialRoundRadioButton cbCycle;
@BindView(R.id.light_button_switch)
ArialRoundButton cbSwitch;
//callback invoked so the hosting activity can apply the chosen light state
public interface LightStateCall{
public void lightCallVotic(int state);
}
private LightStateCall lightCall;
public void setLightCall(LightStateCall lightCall) {
this.lightCall = lightCall;
}
private int currentid;
private int lastid;
private int lightState;
private View view;
//flag: true when the activity set a light state before this view finished initializing
boolean setState;
//called when applying the light state failed; revert to the previously selected button
public void setLightFail(){
setLightBtn(lastid,false);
}
//set the currently selected button; exposed for the hosting activity
public void setCurrentid(int currentid){
System.out.println("current light id " + currentid);
if (currentid<0||currentid>9){
return;
}
this.lightState = currentid;
setState = true;
if (view==null){
return;
}
switch (currentid){
case 0:
setLightBtn(R.id.light_button_switch,false);
break;
case 1:
setLightBtn(R.id.button_light_red,false);
break;
case 2:
setLightBtn(R.id.button_light_orange,false);
break;
case 3:
setLightBtn(R.id.button_light_yellow,false);
break;
case 4:
setLightBtn(R.id.button_light_green,false);
break;
case 5:
setLightBtn(R.id.button_light_cy_blue,false);
break;
case 6:
setLightBtn(R.id.button_light_blue,false);
break;
case 7:
setLightBtn(R.id.button_light_purple,false);
break;
case 8:
setLightBtn(R.id.button_light_white,false);
break;
default:
setLightBtn(R.id.button_light_cycle,false);
break;
}
}
@Override
protected void initInject() {
getFragmentComponent().inject(this);
}
@Override
public View initView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
view = inflater.inflate(R.layout.fragment_light_fragment, container, false);
return view;
}
//set a button's background color and alpha
public void setBtnColor(ArialRoundRadioButton btn, int color, int alpha) {
GradientDrawable myGrad = (GradientDrawable) btn.getBackground();
myGrad.setColor(color);
myGrad.setAlpha(alpha);
}
@Override
public void onDetach() {
super.onDetach();
}
/*
@Override
public void onActivityCreated(@Nullable Bundle savedInstanceState) {
}
*/
public void onClicks(View v) {
setLightBtn(v.getId(),true);
}
//change the selected light button: clear the previous selection, then highlight the new one
public void setLightBtn(int btnid ,boolean isClick){
System.out.println("setLightBtn called " + btnid);
if (currentid != btnid) {
if (btnid!=R.id.light_button_switch){
lastid = currentid;
currentid = btnid;
System.out.println("currentid"+currentid);
if (lastid!=0){
changeBtnState(lastid, false);
}
changeBtnState(currentid, true);
}
else {
if (currentid!=0)changeBtnState(currentid, false);
currentid = btnid;
lastid = currentid;
lightState = 0;
cbSwitch.setText(R.string.turn_off);
}
if (lightCall!=null&&isClick){
btnIdToLightState(btnid);
lightCall.lightCallVotic(lightState);
}
}
}
//map a button id to its light state value
public void btnIdToLightState(int btnid){
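//mapping used across this fragment: 0 = off (switch), 1 = red, 2 = orange,
//3 = yellow, 4 = green, 5 = cyan-blue, 6 = blue, 7 = purple, 8 = white, 9 = cycle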
switch (btnid){
case R.id.light_button_switch:
lightState=0;
break;
case R.id.button_light_red:
lightState=1;
break;
case R.id.button_light_orange:
lightState=2;
break;
case R.id.button_light_yellow:
lightState=3;
break;
case R.id.button_light_green:
lightState=4;
break;
case R.id.button_light_cy_blue:
lightState=5;
break;
case R.id.button_light_blue:
lightState=6;
break;
case R.id.button_light_purple:
lightState=7;
break;
case R.id.button_light_white:
lightState=8;
break;
case R.id.button_light_cycle:
lightState=9;
break;
}
}
//change a button's visual state (alpha) when it is selected or deselected
public void changeBtnState(int btnid, boolean select) {
if (btnid==R.id.light_button_switch)return;
System.out.println("changeBtnState called " + btnid);
int colors = Color.BLACK;
switch (btnid){
case R.id.button_light_red:
System.out.println("red");
colors = Color.RED;
lightState = 1;
break;
case R.id.button_light_orange:
System.out.println("orange 2");
lightState = 2;
colors = Color.rgb(255, 165, 0);
break;
case R.id.button_light_yellow:
System.out.println("yellow 3");
lightState = 3;
colors = Color.YELLOW;
break;
case R.id.button_light_green:
colors = Color.GREEN;
lightState = 4;
System.out.println("green 4");
break;
case R.id.button_light_cy_blue:
colors = Color.rgb(75, 0, 130);
lightState = 5;
System.out.println("cyan-blue 5");
break;
case R.id.button_light_blue:
colors = Color.BLUE;
lightState = 6;
System.out.println("blue 6");
break;
case R.id.button_light_purple:
System.out.println("purple 7");
lightState = 7;
colors = Color.rgb(128, 0, 128);
break;
case R.id.button_light_white:
System.out.println("white 8");
lightState = 8;
colors = Color.WHITE;
break;
case R.id.button_light_cycle:
System.out.println("cycle 9");
lightState = 9;
colors = Color.GRAY;
break;
}
int alpha = select == true ? 255 : 50;
if (view == null) System.out.println("view is null");
ArialRoundRadioButton btn = (ArialRoundRadioButton)view.findViewById(btnid);
GradientDrawable myGrad = (GradientDrawable) btn.getBackground();
/*
myGrad.setColor(colors);
*/
myGrad.setAlpha(alpha);
}
@Override
public void onResume() {
super.onResume();
}
@Override
public void init() {
setBtnColor(cbRed, Color.RED, 50);
setBtnColor(cbOrange, Color.rgb(255, 165, 0), 50);
setBtnColor(cbYellow, Color.YELLOW, 50);
setBtnColor(cbGreen, Color.GREEN, 50);
setBtnColor(cbCyblue, Color.rgb(75, 0, 130), 50);
setBtnColor(cbBlue, Color.BLUE, 50);
setBtnColor(cbPurple, Color.rgb(128, 0, 128), 50);
setBtnColor(cbWhite, Color.WHITE, 50);
setBtnColor(cbCycle,Color.GRAY,50);
currentid = R.id.button_light_red;
lastid = R.id.button_light_red;
View.OnClickListener listener = new View.OnClickListener() {
@Override
public void onClick(View v) {
System.out.println("button clicked");
onClicks(v);
}
};
cbRed.setOnClickListener(listener);
cbOrange.setOnClickListener(listener);
cbYellow.setOnClickListener(listener);
cbGreen.setOnClickListener(listener);
cbCyblue.setOnClickListener(listener);
cbBlue.setOnClickListener(listener);
cbPurple.setOnClickListener(listener);
cbWhite.setOnClickListener(listener);
cbCycle.setOnClickListener(listener);
cbSwitch.setOnClickListener(listener);
if (setState==true){
setCurrentid(lightState);
}
/*
cbRed.setOnClickListener(this);
*/
// LinearLayoutManager layoutManager = new LinearLayoutManager(getContext()) {
// @Override
// public boolean canScrollVertically() {
// disable vertical scrolling
// return false;
// }
// };
}
}
<file_sep>package com.txtled.gp_a209.utils;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewTreeObserver;
import android.view.Window;
import android.view.animation.AlphaAnimation;
import android.view.animation.AnimationSet;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import androidx.appcompat.app.AlertDialog;
import androidx.core.content.ContextCompat;
import com.google.android.material.snackbar.Snackbar;
import com.google.android.material.textfield.TextInputEditText;
import com.txtled.gp_a209.R;
import com.txtled.gp_a209.widget.ArialRoundButton;
import com.txtled.gp_a209.widget.ArialRoundTextView;
public class AlertUtils {
//private static OnCreateThingListener thingListener;
private static boolean canClose = false;
public static OnConfirmClickListener clickListener;
public static void showErrorMessage(Context context, int titleRes,
String errorCode, DialogInterface.OnClickListener listener) {
// if (!((Activity) context).isFinishing()) {
// AlertDialog.Builder builder = new AlertDialog.Builder(context)
// .setMessage(context.getResources().getIdentifier("ERROR_CODE_" + errorCode,
// "string", context.getPackageName()));
// if (titleRes != 0) {
// builder.setTitle(titleRes);
// }
// if (listener == null) {
// builder.setNegativeButton(R.string.ok, new DialogInterface.OnClickListener() {
// @Override
// public void onClick(DialogInterface dialogInterface, int i) {
// dialogInterface.dismiss();
// }
// });
// } else {
// builder.setNegativeButton(R.string.ok, listener);
// }
// Dialog dialog = builder.create();
// dialog.setCancelable(true);
// dialog.show();
// }
}
public static void showErrorMessage(Context context, String errorCode) {
showErrorMessage(context, 0, errorCode, null);
}
public static void showErrorMessage(Context context,
String errorCode, DialogInterface.OnClickListener listener) {
showErrorMessage(context, 0, errorCode, listener);
}
public static void showAlertDialog(Context context, String message,
DialogInterface.OnClickListener listener0,
DialogInterface.OnClickListener listener1) {
// if (!((Activity) context).isFinishing()) {
// AlertDialog dialog = new AlertDialog.Builder(context)
// .setMessage(message)
// .setNegativeButton(R.string.cancel, listener0)
// .setPositiveButton(R.string.confirm, listener1)
// .create();
// dialog.setCancelable(true);
// dialog.show();
// }
}
public interface OnConfirmClickListener{
void onConfirmClick(boolean b);
}
public static void setListener(OnConfirmClickListener listener){
clickListener = listener;
}
// public static OnCreateThingListener getThingListener(){
// return thingListener;
// }
public static int width;
public static void showHintDialog(Context context, int viewId,String title,
int msg, boolean isConfig){
if (!((Activity) context).isFinishing()){
LayoutInflater inflater = LayoutInflater.from(context);
View view = inflater.inflate(viewId,null);
ArialRoundButton abtOk = view.findViewById(R.id.abt_ok);
ArialRoundTextView atvHintTitle = view.findViewById(R.id.atv_hint_title);
ArialRoundTextView atvHintMsg = view.findViewById(R.id.atv_hint_msg);
atvHintTitle.setText(title);
atvHintMsg.setText(msg);
abtOk.setText(isConfig ? R.string.next : R.string.ok);
AlertDialog dialog = new AlertDialog.Builder(context)
.setView(view)
.create();
dialog.setCancelable(true);
dialog.show();
Window window = dialog.getWindow();
window.setWindowAnimations(R.style.dialogWindowAnimInToOut);
window.setBackgroundDrawable(context.getResources()
.getDrawable(R.drawable.background_white));
abtOk.setOnClickListener(v -> {
dialog.dismiss();
clickListener.onConfirmClick(isConfig);
});
}
}
public static AlertDialog showProgressDialog(Context context,String wifiName,String pass){
if (!((Activity) context).isFinishing()){
LayoutInflater inflater = LayoutInflater.from(context);
View config = inflater.inflate(R.layout.alert_configure,null);
ArialRoundTextView atvWifi = config.findViewById(R.id.atv_wifi);
ArialRoundTextView atvPass = config.findViewById(R.id.atv_pass);
atvWifi.setText(context.getString(R.string.config_wifi,wifiName));
atvPass.setText(pass);
AlertDialog configDialog = new AlertDialog.Builder(context,R.style.TransparentDialog)
.setView(config)
.create();
configDialog.setCancelable(false);
configDialog.show();
Window cWindow = configDialog.getWindow();
ViewTreeObserver vto = atvWifi.getViewTreeObserver();
vto.addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
public boolean onPreDraw() {
atvWifi.getViewTreeObserver().removeOnPreDrawListener(this);
width = atvWifi.getMeasuredWidth();
return true;
}
});
ViewTreeObserver passVto = atvPass.getViewTreeObserver();
passVto.addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
public boolean onPreDraw() {
atvPass.getViewTreeObserver().removeOnPreDrawListener(this);
width = atvPass.getMeasuredWidth() > width ?
atvPass.getMeasuredWidth() : width;
cWindow.setLayout(width+60, RelativeLayout.LayoutParams.WRAP_CONTENT);
return true;
}
});
cWindow.setWindowAnimations(R.style.dialogWindowAnimInToOut);
cWindow.setBackgroundDrawable(context.getResources()
.getDrawable(R.drawable.background_white));
return configDialog;
}else {
return null;
}
}
public static AlertDialog showProgressDialog(Context context){
if (!((Activity) context).isFinishing()){
LayoutInflater inflater = LayoutInflater.from(context);
View config = inflater.inflate(R.layout.alert_configure,null);
ArialRoundTextView atvWifi = config.findViewById(R.id.atv_wifi);
ArialRoundTextView atvPass = config.findViewById(R.id.atv_pass);
atvWifi.setText(R.string.uploading);
atvPass.setVisibility(View.INVISIBLE);
AlertDialog configDialog = new AlertDialog.Builder(context,R.style.TransparentDialog)
.setView(config)
.create();
configDialog.setCancelable(false);
configDialog.show();
Window cWindow = configDialog.getWindow();
ViewTreeObserver vto = atvWifi.getViewTreeObserver();
vto.addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
public boolean onPreDraw() {
atvWifi.getViewTreeObserver().removeOnPreDrawListener(this);
width = atvWifi.getMeasuredWidth();
cWindow.setLayout(width+60, RelativeLayout.LayoutParams.WRAP_CONTENT);
return true;
}
});
cWindow.setWindowAnimations(R.style.dialogWindowAnimInToOut);
cWindow.setBackgroundDrawable(context.getResources()
.getDrawable(R.drawable.background_yellow));
return configDialog;
}else {
return null;
}
}
private static void setAlphaAnimation(View view){
AlphaAnimation animation = new AlphaAnimation(0f, 1f);
animation.setDuration(500);
view.setAnimation(animation);
animation.start();
}
public static AlertDialog showLoadingDialog(Context context, int viewId){
if (!((Activity) context).isFinishing()) {
LayoutInflater layoutInflater = LayoutInflater.from(context);
View view = layoutInflater.inflate(viewId, null);
AlertDialog dialog = new AlertDialog.Builder(context, R.style.TransparentDialog)
.setView(view)
.create();
dialog.setCancelable(false);
return dialog;
}else {
return null;
}
}
public static void showAlertDialog(Context context, int title, int message) {
if (!((Activity) context).isFinishing()) {
AlertDialog dialog = new AlertDialog.Builder(context)
.setTitle(title)
.setMessage(message)
.setNegativeButton(R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
dialogInterface.dismiss();
}
})
.create();
dialog.setCancelable(true);
dialog.show();
}
}
public static void showAlertDialog(Context context, String message) {
if (!((Activity) context).isFinishing()) {
AlertDialog dialog = new AlertDialog.Builder(context)
.setMessage(message)
.setPositiveButton(R.string.ok, (dialog1, which) -> dialog1.dismiss())
.create();
dialog.setCancelable(true);
dialog.show();
}
}
public static void showAlertDialog(Context context, int messageRes) {
if (!((Activity) context).isFinishing()) {
AlertDialog dialog = new AlertDialog.Builder(context)
.setMessage(messageRes)
.setPositiveButton(R.string.ok, (dialog1, which) -> dialog1.dismiss())
.create();
dialog.setCancelable(true);
if (!dialog.isShowing()) {
dialog.show();
Window window = dialog.getWindow();
window.setWindowAnimations(R.style.dialogWindowAnimInToOut);
window.setBackgroundDrawable(context.getResources()
.getDrawable(R.drawable.background_white));
}
}
}
public static void showAlertDialog(Context context, int messageRes,
DialogInterface.OnClickListener listener) {
if (!((Activity) context).isFinishing()) {
AlertDialog dialog = new AlertDialog.Builder(context)
.setMessage(messageRes)
.setPositiveButton(R.string.ok, listener)
.create();
dialog.setCancelable(false);
dialog.show();
Window window = dialog.getWindow();
window.setWindowAnimations(R.style.dialogWindowAnimInToOut);
window.setBackgroundDrawable(context.getResources()
.getDrawable(R.drawable.background_white));
}
}
public static void showProgressDialog(Context context, int id) {
ProgressDialog progressDialog = new ProgressDialog(context);
progressDialog.setMessage(context.getString(id));
progressDialog.show();
}
}
<file_sep>package com.txtled.gp_a209.di.module;
import android.app.Activity;
import android.content.Intent;
import com.txtled.gp_a209.di.scope.ActivityScope;
import dagger.Module;
import dagger.Provides;
/**
* Created by KomoriWu
* on 2017/9/15.
*/
@Module
public class ActivityModule {
private Activity activity;
public ActivityModule(Activity activity) {
this.activity = activity;
}
@Provides
@ActivityScope
Activity provideActivity() {
return activity;
}
@Provides
Intent provideIntent() {
return new Intent();
}
}
<file_sep>package com.txtled.gp_a209.appinfo.mvp;
import com.txtled.gp_a209.base.RxPresenter;
import com.txtled.gp_a209.model.DataManagerModel;
import javax.inject.Inject;
/**
* Created by Mr.Quan on 2020/3/24.
*/
public class AppInfoPresenter extends RxPresenter<AppInfoConteact.View> implements AppInfoConteact.Presenter {
private DataManagerModel dataManagerModel;
@Inject
public AppInfoPresenter(DataManagerModel dataManagerModel) {
this.dataManagerModel = dataManagerModel;
}
@Override
public String geEmail() {
return dataManagerModel.getEmail();
}
}
<file_sep>package com.txtled.gp_a209.utils;
import android.app.Activity;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.net.wifi.WifiInfo;
import android.net.wifi.WifiManager;
import android.os.Build;
import android.util.Log;
import android.view.View;
import android.view.inputmethod.InputMethodManager;
import androidx.annotation.ColorRes;
import androidx.annotation.DrawableRes;
import androidx.vectordrawable.graphics.drawable.VectorDrawableCompat;
/**
* Created by Mr.Quan on 2019/12/9.
*/
public class Utils {
public static final boolean isLog = true;
static final char[] hexArray = "0123456789ABCDEF".toCharArray();
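//e.g. bytesToHex(new byte[]{0x0A, (byte) 0xFF}) returns "0AFF"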
public static String bytesToHex(byte[] bytes) {
char[] hexChars = new char[bytes.length * 2];
for (int j = 0; j < bytes.length; j++) {
int v = bytes[j] & 0xFF;
hexChars[j * 2] = hexArray[v >>> 4];
hexChars[j * 2 + 1] = hexArray[v & 0x0F];
}
return new String(hexChars);
}
public static String asciiToString(String value) {
StringBuffer sbu = new StringBuffer();
String[] chars = value.split(",");
for (int i = 0; i < chars.length; i++) {
sbu.append((char) Integer.parseInt(chars[i]));
}
return sbu.toString();
}
public static String formatHex(int data) {
String s = Integer.toHexString(data);
return data < 16 ? "0" + s : s;
}
public static String getWifiSSID(Activity activity) {
String ssid="unknown id";
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.O||Build.VERSION.SDK_INT==Build.VERSION_CODES.P) {
WifiManager mWifiManager = (WifiManager) activity.getApplicationContext().getSystemService(Context.WIFI_SERVICE);
assert mWifiManager != null;
WifiInfo info = mWifiManager.getConnectionInfo();
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
return info.getSSID();
} else {
return info.getSSID().replace("\"", "");
}
} else if (Build.VERSION.SDK_INT==Build.VERSION_CODES.O_MR1){
ConnectivityManager connManager = (ConnectivityManager) activity.getApplicationContext().getSystemService(Context.CONNECTIVITY_SERVICE);
assert connManager != null;
NetworkInfo networkInfo = connManager.getActiveNetworkInfo();
if (networkInfo.isConnected()) {
if (networkInfo.getExtraInfo()!=null){
return networkInfo.getExtraInfo().replace("\"","");
}
}
}
return ssid;
}
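//DhcpInfo packs the address least-significant byte first,
//e.g. getWifiIp(0x0101A8C0) returns "192.168.1.1"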
public static String getWifiIp(int i){
return (i & 0xFF) + "." +
((i >> 8) & 0xFF) + "." +
((i >> 16) & 0xFF) + "." +
(i >> 24 & 0xFF);
}
public static VectorDrawableCompat changeSVGColor(@DrawableRes int drawable, @ColorRes int color
, Context context) {
VectorDrawableCompat vectorDrawableCompat = VectorDrawableCompat.create(
context.getResources(), drawable, context.getTheme());
vectorDrawableCompat.setTint(context.getResources().getColor(color));
return vectorDrawableCompat;
}
public static void Logger(String TAG, String type, String value) {
if (!isLog)
return;
Log.i(TAG, type + ":------" + value);
}
/**
* Hide the soft keyboard (usable from an Activity or Fragment)
*/
public static void hideSoftKeyboard(Context context, View view) {
if (view == null) return;
InputMethodManager inputMethodManager = (InputMethodManager) context.getSystemService(Activity.INPUT_METHOD_SERVICE);
inputMethodManager.hideSoftInputFromWindow(view.getWindowToken(), InputMethodManager.HIDE_NOT_ALWAYS);
}
}
<file_sep>package com.txtled.gp_a209.control.mqtt.listener;
public interface OnMessageListener {
void onSuccess();
void onFailure();
void onTimeout();
}
<file_sep>package com.txtled.gp_a209.di.component;
import android.app.Activity;
import com.txtled.gp_a209.di.module.FragmentModule;
import com.txtled.gp_a209.di.scope.FragmentScope;
import com.txtled.gp_a209.light.LightFragment;
import dagger.Component;
/**
* Created by KomoriWu
* on 2017-09-01.
*/
@FragmentScope
@Component(dependencies = AppComponent.class, modules = FragmentModule.class)
public interface FragmentComponent {
Activity getActivity();
void inject(LightFragment lightFragment);
//void inject(IntroductionFragment introductionFragment);
}
<file_sep>package com.txtled.gp_a209.model.operate;
import android.app.Activity;
/**
* Created by Mr.Quan on 2019/3/6.
*/
public interface OperateHelper {
void requestPermissions(Activity activity, String[] permissions, OnPermissionsListener
permissionsListener);
interface OnPermissionsListener {
void onSuccess(String name);
void onFailure();
void onAskAgain();
}
}
<file_sep>package com.txtled.gp_a209.control.mqtt;
import com.amazonaws.services.iot.client.AWSIotMessage;
import com.amazonaws.services.iot.client.AWSIotQos;
import com.amazonaws.services.iot.client.AWSIotTopic;
import com.txtled.gp_a209.control.mqtt.listener.OnSuccessListener;
public class MyTopic extends AWSIotTopic {
private OnSuccessListener listener;
public MyTopic(String topic, AWSIotQos qos) {
super(topic, qos);
}
public void setListener(OnSuccessListener listener) {
this.listener = listener;
}
@Override
public void onSuccess() {
listener.onSuccess();
System.out.println("MyTopiconononSuccess");
super.onSuccess();
}
@Override
public void onMessage(AWSIotMessage message) {
System.out.println("TopicMessagePayload"+message.getStringPayload());
System.out.println("TopicMessageTopic"+message.getTopic());
listener.onMessage(message);
super.onMessage(message);
}
@Override
public void onFailure() {
listener.onFailure();
System.out.println("MyTopiconFailure");
super.onFailure();
}
@Override
public void onTimeout() {
listener.onTimeout();
System.out.println("MyTopicononTimeout");
super.onTimeout();
}
}
<file_sep>package com.txtled.gp_a209.utils;
import android.Manifest;
/**
* Created by Mr.Quan on 2020/3/11.
*/
public class Constants {
public static String[] permissions = new String[]{
Manifest.permission.ACCESS_FINE_LOCATION,
Manifest.permission.ACCESS_COARSE_LOCATION };
public static final int THIN = 0;
public static final int BOLD = 1;
public static final String TOPIC = "topic";
public static final String MESSAGE = "message";
public static final int WHAT = 0x11;
public static final int RESULT = 2000;
public static final int NAME_RESULT = 2001;
public static final int WIFI_RESULT = 2002;
public static final int INFO = 2003;
public static final int LOGIN = 2004;
public static final int APP = 2005;
public static final int OK = 200;
public static final String PUBLISH = "$aws/things/%s/shadow/update";
public static final String SUBSCRIBE = "$aws/things/%s/shadow/update/accepted";
public static final String REJECTED = "$aws/things/%s/shadow/update/rejected";
public static final String GET_DATA = "$aws/things/%s/shadow/get";
public static final String DATA_SOUND = "{\"state\":{\"desired\":{\"sound\":%d}}}";
public static final String DATA_VOLUME = "{\"state\":{\"desired\":{\"volume\":%d}}}";
public static final String DATA_LIGHT = "{\"state\":{\"desired\":{\"light\":%d}}}";
public static final String DATA_DEVICE = "{\"state\":{\"desired\":{\"device\":%s}}}";
public static final String DATA_DURATION = "{\"state\":{\"desired\":{\"duration\":%d}}}";
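//Example payloads produced by the format strings above:
//String.format(DATA_LIGHT, 3) -> {"state":{"desired":{"light":3}}}
//String.format(DATA_DEVICE, "\"on\"") -> {"state":{"desired":{"device":"on"}}}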
public static final String IDENTITY_POOL_ID = "us-east-1:fdd912d6-7ba0-4ce9-8a7f-9d3398b21540";
public static final String USER_ID = "UserId";
public static final String DB_NAME = "FOX";
public static final String THING_DIR = "ThingDir";
public static final int SOCKET_UDP_PORT = 9001;
public static final String MY_OIT_CE = "myiotce";
public static final String ENDPOINT = "endpoint";
public static final String NAME = "name";
public static final String VERSION = "version";
public static final String WIFI = "wifi";
public static final String TYPE = "type";
public static final String DISCOVERY = "{\"discovery\":1}";
public static final String SEND_THING_NAME = "{\"endpoint\":\"%s\",\"thing\":\"%s\"}";
public static final String SEND_CA_ONE = "{\"ca0\":\"%s\"}";
public static final String SEND_CA_TWO = "{\"ca1\":\"%s\"}";
public static final String SEND_CERT_ONE = "{\"cert0\":\"%s\"}";
public static final String SEND_CERT_TWO = "{\"cert1\":\"%s\"}";
public static final String SEND_KEY_ONE = "{\"key0\":\"%s\"}";
public static final String SEND_KEY_TWO = "{\"key1\":\"%s\"}";
public static final String REBOOT = "{\"reboot\":1}";
public static final String FRIENDLY_NAME = "{\"friendlyname\":\"%s\"}";
public static final String REST_API = "a311cdvk7hqtsk-ats.iot.us-east-1.amazonaws.com";
public static final String POLICY_JSON = "{\n" +
" \"Version\": \"2012-10-17\",\n" +
" \"Statement\": [\n" +
" {\n" +
" \"Effect\": \"Allow\",\n" +
" \"Action\": \"iot:*\",\n" +
" \"Resource\": \"*\"\n" +
" }\n" +
" ]\n" +
"}";
public static final String CA = "-----BEGIN CERTIFICATE-----\n" +
"MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF\n" +
"ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6\n" +
"b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL\n" +
"MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv\n" +
"b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj\n" +
"ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM\n" +
"9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw\n" +
"IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6\n" +
"VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L\n" +
"93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm\n" +
"jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC\n" +
"AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA\n" +
"A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI\n" +
"U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs\n" +
"N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv\n" +
"o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU\n" +
"<KEY>" +
"rq<KEY>\n" +
"-----END CERTIFICATE-----\n";
}
<file_sep>package com.txtled.gp_a209.control.mqtt;
import com.amazonaws.services.iot.client.AWSIotMessage;
import com.amazonaws.services.iot.client.AWSIotQos;
import com.txtled.gp_a209.control.mqtt.listener.OnMessageListener;
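//AWSIotMessage subclass that forwards the publish outcome (success, failure,
//timeout) to an OnMessageListener so the presenter can update the UI.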
public class MyShadowMessage extends AWSIotMessage {
private OnMessageListener listener;
public void setListener(OnMessageListener listener){
this.listener = listener;
}
public MyShadowMessage(){
super(null,null);
}
public MyShadowMessage(String topic, AWSIotQos qos) {
super(topic, qos);
}
public MyShadowMessage(String topic, AWSIotQos qos, byte[] payload) {
super(topic, qos, payload);
}
public MyShadowMessage(String topic, AWSIotQos qos, String payload) {
super(topic, qos, payload);
}
@Override
public void onSuccess() {
listener.onSuccess();
super.onSuccess();
}
@Override
public void onFailure() {
listener.onFailure();
super.onFailure();
}
@Override
public void onTimeout() {
listener.onTimeout();
super.onTimeout();
}
}
<file_sep>package com.txtled.gp_a209.main.mvp;
import android.app.Activity;
import android.view.View;
import com.txtled.gp_a209.base.BasePresenter;
import com.txtled.gp_a209.base.BaseView;
import com.txtled.gp_a209.bean.DeviceInfo;
import com.txtled.gp_a209.bean.WWADeviceInfo;
import java.util.List;
/**
* Created by Mr.Quan on 2019/12/9.
*/
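//MVP contract for the device-list screen: the View renders refresh, discovery,
//delete and all-on/off states; the Presenter drives UDP discovery, DynamoDB
//reads/updates and the shadow publishes behind them.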
public interface MainContract {
interface View extends BaseView {
void getDeviceData(List<WWADeviceInfo> data);
void hidSnackBar();
void checkLocation();
void setNoWifiView();
void closeRefresh();
void setData(List<WWADeviceInfo> refreshData);
void noDevice();
void getWifiName(String ssid);
void deleteError();
void deleteSuccess();
void mqttInitFail();
void success(boolean allOff);
void fail();
void showLoading();
void showSearching();
}
interface Presenter extends BasePresenter<View> {
String init(Activity activity);
void onRefresh();
void deleteDevice(WWADeviceInfo data, String name);
void onClick(android.view.View v);
void discovery();
void onDestroy();
}
}
| 523dff559105bc83263d8e1112c0f62857cbc35c | [
"Java",
"Gradle"
] | 28 | Java | quanyan963/GP-A209 | c2f802be5cf0feaa1d473ced533caed00f492fef | e4310845fd0870925f08abfc2591210b538b99fb | |
refs/heads/main | <repo_name>creatorgaming/music-library<file_sep>/MUSIC.CPP
/***************************************************************************
** **
** **
** $ PROJECT BY <NAME> $ **
** **
** **
** **
***************************************************************************/
//HEADER FILES//
#include <graphics.h>
#include <fstream.h>
#include <process.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <conio.h>
#include <stdio.h>
#include <math.h>
#include <dos.h>
#include <dir.h>
//GLOBAL VARIABLES//
int crac = 0, sign = 0, crs = 0;
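//crac - count of user accounts created so far (persisted in tac.data)
//sign - 1 while a user is signed in, 0 otherwise
//crs  - count of saved library entries (persisted in tmu.dat)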
char admin_pass[20];
//CLASSES//
class managemu
{
public:
char name[10];
char account[20];
} mulist[500];
class manage
{
public:
char name[20];
char pass[20];
} list[10];
class music
{
public:
char script[500];
} music;
class account
{
public:
char name[20];
char pass[20];
} account;
//FUNCTIONS//
void welcome();
void admin();
void about();
//HOME FUNCTIONS//
/**/ /*************************************************************/ /**/
/**/ void home(); /**/
/**/ void home_arc(); /**/
/**/ void home_bubble(); /**/
/**/ void home_box(); /**/
/**/ void home_column(); /**/
/**/ void home_text(); /**/
/**/ void home_opt(); /**/
/**/ /*************************************************************/ /**/
//END HOME FUNCTIONS//
//ACCOUNTS AND LIBRARY//
/**/ /*************************************************************/ /**/
/**/ void account_sign_up(); /**/
/**/ void account_sign_in(); /**/
/**/ void account_sign_inout(); /**/
/**/ void account_edit(); /**/
/**/ void account_del(); /**/
/**/ void account_pch(); /**/
/**/ void reset_data(); /**/
/**/ void uninstall(); /**/
/**/ void sign_in_msg(); /**/
/**/ void library(); /**/
/**/ void library_add_new(); /**/
/**/ void library_disco(); /**/
/**/ void library_sound_play(char); /**/
/**/ void library_save(); /**/
/**/ void library_delete(); /**/
/**/ void library_play(); /**/
/**/ /*************************************************************/ /**/
//END ACCOUNTS AND LIBRARY//
//FUNCTIONS END//
void main()
{
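//First-run setup: create and enter the \music working directory, force the
//admin password to be set, then create the empty data files (manage.lis,
//managemu.lis, tac.data, tmu.dat) before showing the home screen.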
//CHANGE DIRECTORY//
mkdir("\\music");
chdir("\\music");
ifstream fin, fincr;
ofstream fout;
//GRAPHIC INITIALISATION//
int gd = DETECT, gm, x, y;
initgraph(&gd, &gm, "C:\\TURBOC3\\BGI"); //Initializing Graphics Mode
cleardevice();
welcome();
fin.open("admin.dat", ios::in | ios::binary);
if (!fin)
{
fin.close();
admin();
}
else
{
fin.read((char *)&admin_pass, sizeof(admin_pass));
if (!strcmp(admin_pass, ""))
{
fin.close();
admin();
}
else
fin.close();
}
fin.open("manage.lis", ios::in | ios::binary);
if (!fin)
{
fin.close();
fout.open("manage.lis", ios::out | ios::binary);
fout.write((char *)&list, sizeof(list));
fout.close();
}
else
fin.close();
fin.open("managemu.lis", ios::in | ios::binary);
if (!fin)
{
fin.close();
fout.open("managemu.lis", ios::out | ios::binary);
fout.write((char *)&mulist, sizeof(mulist));
fout.close();
}
else
fin.close();
fin.open("tac.data", ios::in | ios::binary);
if (!fin)
{
fin.close();
fout.open("tac.data", ios::out | ios::binary);
crac = 0;
fout.write((char *)&crac, sizeof(crac));
fout.close();
}
else
fin.close();
fincr.open("tmu.dat", ios::in | ios::binary);
if (!fincr)
{
fincr.close();
ofstream foutcr("tmu.dat", ios::out | ios::binary);
crs = 0;
foutcr.write((char *)&crs, sizeof(crs));
foutcr.close();
}
else
fincr.close();
home();
}
void welcome()
{
setbkcolor(12);
setcolor(12);
//HEADER//
for (int i = 5; i <= 600; i += 130)
{
circle(7 + i, 20, 25);
setfillstyle(5, YELLOW);
floodfill(7 + i, 20, 12);
circle(30 + i, 75, 28);
setfillstyle(1, CYAN);
floodfill(30 + i, 75, 12);
circle(70 + i, 15, 35);
setfillstyle(1, LIGHTGREEN);
floodfill(70 + i, 15, 12);
circle(90 + i, 75, 25);
setfillstyle(5, LIGHTCYAN);
floodfill(90 + i, 75, 12);
}
//FOOTER//
for (i = 5; i <= 600; i += 130)
{
int j = 385;
circle(7 + i, 20 + j, 25);
setfillstyle(5, YELLOW);
floodfill(7 + i, 20 + j, 12);
circle(30 + i, 75 + j, 28);
setfillstyle(1, CYAN);
floodfill(30 + i, 75 + j, 12);
circle(70 + i, 15 + j, 35);
setfillstyle(1, LIGHTGREEN);
floodfill(70 + i, 15 + j, 12);
circle(90 + i, 75 + j, 25);
setfillstyle(5, LIGHTCYAN);
floodfill(90 + i, 75 + j, 12);
}
//WELCOME TEXT//
settextstyle(6, 0, 25);
int j = 130;
for (i = 70; i < 85; i++, j++)
{
if (i < 77)
setcolor(BLUE);
else
setcolor(LIGHTCYAN);
outtextxy(i, j, "WELCOME");
delay(100);
}
getch();
}
void admin()
{
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
circle(320, 0, 350);
setfillstyle(1, LIGHTRED);
floodfill(320, 0, LIGHTRED);
//TITLE//
setcolor(WHITE);
settextstyle(0, 0, 4);
outtextxy(45, 5, " SET ADMIN PASS");
setcolor(CYAN);
settextstyle(0, 0, 2);
outtextxy(225, 82, "Pass:");
gotoxy(45, 6);
ifstream fincr, fin;
ofstream foutcr, fout;
foutcr.open("admin.dat", ios::out | ios::binary);
foutcr.close();
gets(admin_pass);
if (strlen(admin_pass) < 5 || strlen(admin_pass) > 20)
{
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 290);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, "PASS MUST HAVE 5-20 CHARS");
delay(1000);
admin();
}
fout.open("admin.dat", ios::out | ios::binary);
fout.write((char *)&admin_pass, sizeof(admin_pass));
fout.close();
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 290);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, " SETTING UP ADMIN ACCOUNT");
delay(1000);
}
void about()
{
char opt;
cleardevice();
setbkcolor(WHITE);
setcolor(LIGHTRED);
circle(320, 0, 350);
setfillstyle(1, LIGHTRED);
floodfill(320, 0, LIGHTRED);
//TITLE//
setcolor(WHITE);
settextstyle(0, 0, 4);
outtextxy(40, 5, "THE MUSIC LIBRARY");
outtextxy(170, 40, " PROJECT");
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(85, 112, " BY <NAME> ");
setcolor(WHITE);
settextstyle(0, 0, 1);
outtextxy(120, 150, "Visit - https://www.linkedin.com/in/divyanshutyagi/");
setcolor(CYAN);
rectangle(0, 450, 640, 480);
setfillstyle(1, CYAN);
floodfill(0 + 50, 450 + 5, CYAN);
setcolor(WHITE);
settextstyle(0,0,2);
outtextxy(0, 455, "[H]OME E[X]IT");
opt = getche();
tolower(opt);
switch (opt)
{
case 'h':
home();
break;
case 'x':
exit(0);
break;
default:
about();
break;
}
}
void home()
{
cleardevice();
home_arc();
home_text();
home_opt();
}
void home_arc()
{
setbkcolor(LIGHTRED);
//MENU ARC//
cleardevice();
setcolor(CYAN);
circle(35, 240, 200);
setfillstyle(1, YELLOW);
floodfill(35, 240, CYAN);
home_box();
home_bubble();
home_box(); //BOX CORRECTION//
home_column(); //COLUMN CORRECTION//
//END MENU ARC//
}
void home_bubble()
{
int i, j, k = 0;
for (i = 0; i <= 6 * 75; i += 75)
{
for (j = 0; j <= 1200; j += 150)
{
setcolor(YELLOW);
circle(-450 + j + k, 25 + i, 25);
setfillstyle(4, YELLOW);
floodfill(-450 + j + k, 25 + i, YELLOW);
}
k += 75;
}
}
void home_box()
{
for (int i = 0, j = 0; i < 5; i++, j++)
{
if (i < 4)
{
setcolor(CYAN);
rectangle(410 + i, 120 + j, 590 + i, 320 + j);
setfillstyle(1, YELLOW);
floodfill(410 + 50 + i, 120 + 5 + j, CYAN);
}
else
{
setcolor(12);
rectangle(410 + i, 120 + j, 590 + i, 320 + j);
setfillstyle(1, YELLOW);
floodfill(410 + 50 + i, 120 + 5 + j, LIGHTRED);
}
}
home_column();
}
void home_column()
{
if (sign == 0)
{
setcolor(CYAN);
//NAME//
rectangle(425, 175, 585, 195);
setfillstyle(1, WHITE);
floodfill(425 + 50, 175 + 5, CYAN);
//PASSWORD//
rectangle(425, 220, 585, 240);
setfillstyle(1, WHITE);
floodfill(425 + 50, 220 + 5, CYAN);
//SIGN UP//
rectangle(452, 270, 560, 295);
setfillstyle(1, CYAN);
floodfill(452 + 50, 270 + 5, CYAN);
}
else
{
//SIGN UP//
setcolor(CYAN);
rectangle(452, 270, 560, 295);
setfillstyle(1, CYAN);
floodfill(452 + 50, 270 + 5, CYAN);
}
//EXIT CIRCLE//
circle(630, 400, 40);
setfillstyle(1, CYAN);
floodfill(630, 400, CYAN);
}
void home_text()
{
//TITLE//
setcolor(CYAN);
settextstyle(0, 0, 4);
outtextxy(45, 5, "THE MUSIC LIBRARY");
//MENU TEXT//
setcolor(CYAN);
settextstyle(5, 0, 3);
outtextxy(5, 120, "[1] Library");
outtextxy(5, 180, "[2] About Us");
outtextxy(5, 240, "[3] Edit Account");
//END MENU TEXT//
if (sign == 0)
{
//SIGN IN COLUMN//
setcolor(LIGHTRED);
settextstyle(10, 0, 1);
outtextxy(445, 120, "SIGN [I]N");
setcolor(CYAN);
settextstyle(6, 0, 1);
outtextxy(428, 170, "NAME:");
outtextxy(428, 215, "PASS:");
setcolor(WHITE);
settextstyle(1, 0, 1);
outtextxy(457, 270, "SIGN [U]P");
//END SIGN IN COLUMN//
}
else
{
setcolor(CYAN);
settextstyle(0, 0, 2);
outtextxy(445, 130, "WELCOME");
setcolor(LIGHTRED);
settextstyle(0, 0, 2);
outtextxy(425, 150, account.name);
setcolor(WHITE);
settextstyle(1, 0, 1);
outtextxy(457, 270, "SIGN [O]UT");
}
//EXIT//
setcolor(WHITE);
settextstyle(1, 0, 1);
outtextxy(595, 388, "E[X]IT");
}
void home_opt()
{
char opt;
opt = getche();
if (isalpha(opt))
opt = tolower(opt);
switch (opt)
{
case 'u':
if (sign == 0)
account_sign_up();
break;
case 'i':
if (sign == 0)
account_sign_in();
break;
case 'o':
if (sign == 1)
{
account_sign_inout();
home();
}
break;
case '1':
if (sign == 1)
library();
else
sign_in_msg();
break;
case '2':
about();
break;
case '3':
account_edit();
break;
case 'x':
exit(0);
break;
default:
home();
break;
}
}
//END HOME FUNCTION DEFINATION//
void account_sign_up()
{
int i, j;
cleardevice();
ifstream fin, fincr;
ofstream foutcr;
fincr.open("tac.data", ios::in | ios::binary);
fincr.read((char *)&crac, sizeof(crac));
if (crac == 9)
{
fincr.close();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 310);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, " No More Accounts");
outtextxy(0, 275, " Limit Reached");
delay(1000);
home();
}
setbkcolor(YELLOW);
//INFO TEXT//
setcolor(CYAN);
settextstyle(0, 0, 2);
outtextxy(40, 140, "Add an account and get");
outtextxy(40, 160, "your own music library");
setcolor(YELLOW);
//HEADER//
for (i = 5; i <= 600; i += 130)
{
circle(7 + i, 20, 25);
setfillstyle(5, LIGHTRED);
floodfill(7 + i, 20, YELLOW);
circle(30 + i, 75, 28);
setfillstyle(1, CYAN);
floodfill(30 + i, 75, YELLOW);
circle(70 + i, 15, 35);
setfillstyle(1, LIGHTGREEN);
floodfill(70 + i, 15, YELLOW);
circle(90 + i, 75, 25);
setfillstyle(5, LIGHTCYAN);
floodfill(90 + i, 75, YELLOW);
}
//FOOTER//
for (i = 5; i <= 600; i += 130)
{
int j = 385;
circle(7 + i, 20 + j, 25);
setfillstyle(5, LIGHTRED);
floodfill(7 + i, 20 + j, YELLOW);
circle(30 + i, 75 + j, 28);
setfillstyle(1, CYAN);
floodfill(30 + i, 75 + j, YELLOW);
circle(70 + i, 15 + j, 35);
setfillstyle(1, LIGHTGREEN);
floodfill(70 + i, 15 + j, YELLOW);
circle(90 + i, 75 + j, 25);
setfillstyle(5, LIGHTCYAN);
floodfill(90 + i, 75 + j, YELLOW);
}
//BOX//
for (i = 0, j = 0; i < 5; i++, j++)
{
if (i < 4)
{
setcolor(CYAN);
rectangle(410 + i, 120 + j, 590 + i, 320 + j);
setfillstyle(1, YELLOW);
floodfill(410 + 50 + i, 120 + 5 + j, CYAN);
}
else
{
setcolor(12);
rectangle(410 + i, 120 + j, 590 + i, 320 + j);
setfillstyle(1, YELLOW);
floodfill(410 + 50 + i, 120 + 5 + j, LIGHTRED);
}
}
//COLUMN//
setcolor(CYAN);
//NAME//
rectangle(425, 175, 585, 195);
setfillstyle(1, WHITE);
floodfill(425 + 50, 175 + 5, CYAN);
//PASSWORD//
rectangle(425, 220, 585, 240);
setfillstyle(1, WHITE);
floodfill(425 + 50, 220 + 5, CYAN);
//SIGN UP//
rectangle(452, 270, 560, 295);
setfillstyle(1, CYAN);
floodfill(452 + 50, 270 + 5, CYAN);
//SIGN UP COLUMN TEXT//
setcolor(LIGHTRED);
settextstyle(10, 0, 1);
outtextxy(449, 120, "SIGN UP");
setcolor(CYAN);
settextstyle(6, 0, 1);
outtextxy(428, 170, "NAME:");
outtextxy(428, 215, "PASS:");
setcolor(WHITE);
settextstyle(1, 0, 1);
outtextxy(460, 270, " SUBMIT");
//END SIGN UP COLUMN TEXT//
gotoxy(61, 12);
gets(account.name);
gotoxy(61, 15);
gets(account.pass);
if (strlen(account.name) < 5 || strlen(account.pass) < 5)
{
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 290);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, " NAME/PASS MUST HAVE 5-14 CHARACTERS");
delay(1000);
home();
}
int found = 0;
fin.open("manage.lis", ios::in | ios::binary);
for (i = 0; i < 10; i++)
{
fin.read((char *)&list[i], sizeof(list[i]));
if (!strcmp(list[i].name, account.name))
{
found = 1;
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 290);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, " NAME ALREADY TAKEN");
delay(1000);
home();
}
}
fin.close();
if (found == 0)
fincr.open("tac.data", ios::in | ios::binary);
fincr.read((char *)&crac, sizeof(crac));
fincr.close();
strcpy(list[crac].name, account.name);
strcpy(list[crac].pass, account.pass);
ofstream fout;
fout.open("manage.lis", ios::out | ios::binary);
fout.write((char *)&list, sizeof(list));
fout.close();
crac++;
foutcr.open("tac.data", ios::out | ios::binary);
foutcr.write((char *)&crac, sizeof(crac));
foutcr.close();
fout.open(account.name, ios::out | ios::binary);
fout.write((char *)&account, sizeof(account));
fout.close();
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 290);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, " Your account is added");
delay(1000);
home();
}
void account_sign_in()
{
int found = 0, i;
gotoxy(61, 12);
gets(account.name);
gotoxy(61, 15);
gets(account.pass);
if (strlen(account.name) < 5 || strlen(account.pass) < 5)
{
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 290);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, " NAME/PASS MUST HAVE 5-14 CHARACTERS");
delay(1000);
home();
}
ifstream fin;
fin.open("manage.lis", ios::in | ios::binary);
for (i = 0; i < 100; i++)
{
fin.read((char *)&list[i], sizeof(list[i]));
if (!strcmp(list[i].name, account.name) && !strcmp(list[i].pass, account.pass))
{
found = 1;
ifstream finac(account.name, ios::in | ios::binary);
finac.read((char *)&account, sizeof(account));
finac.close();
account_sign_inout();
sign = 1;
break;
}
}
fin.close();
if (found == 1)
{
cleardevice();
setbkcolor(LIGHTRED);
setcolor(YELLOW);
rectangle(150, 210, 500, 270);
setfillstyle(1, YELLOW);
floodfill(150 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(170, 235, " SIGNING IN");
delay(1000);
home();
}
else
{
account_sign_inout();
cleardevice();
setbkcolor(LIGHTRED);
setcolor(YELLOW);
rectangle(60, 210, 590, 270);
setfillstyle(1, YELLOW);
floodfill(150 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(75, 235, "NAME/PASS NOT MATCHED");
delay(1000);
home();
}
}
void account_sign_inout()
{
cleardevice();
setbkcolor(LIGHTRED);
//SIGN OUT//
if (sign == 1)
{
sign = 0;
setcolor(YELLOW);
rectangle(150, 210, 500, 270);
setfillstyle(1, YELLOW);
floodfill(150 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(170, 235, " SIGNING OUT");
delay(1000);
home();
}
//SIGN IN//
else
{
cleardevice();
setcolor(YELLOW);
rectangle(150, 210, 500, 270);
setfillstyle(1, YELLOW);
floodfill(150 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(160, 235, " CHECKING");
delay(1000);
}
}
void sign_in_msg()
{
cleardevice();
setbkcolor(LIGHTRED);
setcolor(YELLOW);
rectangle(150, 210, 500, 270);
setfillstyle(1, YELLOW);
floodfill(150 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(160, 235, "SIGN IN FIRST!");
delay(1000);
home();
}
void library()
{
int i, j;
char opt;
cleardevice();
setbkcolor(WHITE);
setcolor(LIGHTRED);
rectangle(0, 0, 640, 50);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 0 + 5, LIGHTRED);
setcolor(WHITE);
settextstyle(0, 0, 4);
outtextxy(60, 5, "Your Music List");
setcolor(CYAN);
rectangle(0, 450, 640, 480);
setfillstyle(1, CYAN);
floodfill(0 + 50, 450 + 5, CYAN);
setcolor(WHITE);
settextstyle(0, 0, 2);
outtextxy(80, 455, "[H]OME E[X]IT");
settextstyle(0, 0, 1);
for (i = 0; i < 50; i++)
{
strcpy(mulist[i].name, "");
}
ifstream fin("managemu.lis", ios::in | ios::binary);
fin.read((char *)&mulist, sizeof(mulist));
int k = 0;
for (i = 0; i < 375; i += 75)
{
for (j = 0; j < 400; j += 40)
{
if (k >= 50)
{
break;
}
setcolor(GREEN);
if (!strcmp(account.name, mulist[k].account))
{
outtextxy(30 + i, 70 + j, mulist[k].name);
}
else
j -= 40;
k++;
}
if (k >= 50)
{
break;
}
}
fin.close();
//BOX//
for (i = 0, j = 0; i < 5; i++, j++)
{
if (i < 4)
{
setcolor(YELLOW);
rectangle(410 + i, 120 + j, 590 + i, 320 + j);
setfillstyle(1, LIGHTRED);
floodfill(410 + 50 + i, 120 + 5 + j, YELLOW);
}
else
{
setcolor(CYAN);
rectangle(410 + i, 120 + j, 590 + i, 320 + j);
setfillstyle(1, LIGHTRED);
floodfill(410 + 50 + i, 120 + 5 + j, CYAN);
}
}
//COLUMN//
setcolor(CYAN);
//PASSWORD//
rectangle(425, 130, 585, 150);
setfillstyle(1, WHITE);
floodfill(425 + 50, 130 + 5, CYAN);
//SEARCH BOX//
rectangle(452, 170, 560, 195);
rectangle(452, 220, 560, 245);
rectangle(452, 270, 560, 295);
setfillstyle(1, CYAN);
floodfill(452 + 50, 170 + 5, CYAN);
floodfill(452 + 50, 220 + 5, CYAN);
floodfill(452 + 50, 270 + 5, CYAN);
//SIGN UP COLUMN TEXT//
setcolor(CYAN);
settextstyle(6, 0, 1);
outtextxy(428, 125, "NAME:");
setcolor(WHITE);
settextstyle(1, 0, 1);
outtextxy(460, 170, "[A]DD NEW");
outtextxy(460, 220, " [D]ELETE");
outtextxy(460, 270, " [P]LAY");
//END SIGN UP COLUMN TEXT//
opt = getche();
opt = tolower(opt);
switch (opt)
{
case 'a':
library_add_new();
break;
case 'd':
library_delete();
break;
case 'p':
library_play();
break;
case 'h':
home();
break;
case 'x':
exit(0);
default:
library();
break;
}
}
void library_add_new()
{
int i;
char n;
cleardevice();
setbkcolor(LIGHTRED);
setcolor(YELLOW);
//FOOTER//
for (i = 5; i <= 600; i += 130)
{
int j = 385;
circle(7 + i, 20 + j, 25);
setfillstyle(5, YELLOW);
floodfill(7 + i, 20 + j, YELLOW);
circle(30 + i, 75 + j, 28);
setfillstyle(1, CYAN);
floodfill(30 + i, 75 + j, YELLOW);
circle(70 + i, 15 + j, 35);
setfillstyle(1, LIGHTGREEN);
floodfill(70 + i, 15 + j, YELLOW);
circle(90 + i, 75 + j, 25);
setfillstyle(5, LIGHTCYAN);
floodfill(90 + i, 75 + j, YELLOW);
}
textcolor(YELLOW);
for (i = 0; i < 500; i++)
{
music.script[i] = ' ';
}
for (i = 0; i < 500; i++)
{
n = getche();
if (((int)n >= 65 && (int)n <= 90) || ((int)n >= 97 && (int)n <= 122) || n == '0' || n == '1')
{
if (n == '0')
break;
music.script[i] = n;
library_sound_play(music.script[i]);
library_disco();
//DISCO TEXT//
setcolor(CYAN);
settextstyle(0, 0, 2);
outtextxy(525, 4, "SAVE[0]");
//END DISCO TEXT//
delay(100);
nosound();
}
else
i--;
}
if (i == 500)
{
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 290);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, " RECORD LIMIT REACHED!");
delay(1000);
}
library_save();
}
void library_disco()
{
clrscr();
for (int i = 70; i <= 570; i += 100)
{
for (int j = 50; j <= 350; j += 100)
{
setcolor(LIGHTRED);
circle(i, j, random(35 - 20 + 1) + 20);
setfillstyle(4, YELLOW);
floodfill(i, j, LIGHTRED);
}
}
}
void library_sound_play(char n)
{
switch (n)
{
case 'z':
sound(27.5000);
break;
case 'x':
sound(32.7032);
break;
case 'c':
sound(38.8909);
break;
case 'v':
sound(46.2493);
break;
case 'b':
sound(55.0000);
break;
case 'n':
sound(61.7354);
break;
case 'm':
sound(73.4162);
break;
case 'a':
sound(87.3071);
break;
case 's':
sound(103.826);
break;
case 'd':
sound(123.471);
break;
case 'f':
sound(146.8325);
break;
case 'g':
sound(174.6145);
break;
case 'h':
sound(207.6525);
break;
case 'j':
sound(246.9425);
break;
case 'k':
sound(293.6655);
break;
case 'l':
sound(359.2285);
break;
case 'q':
sound(415.3055);
break;
case 'w':
sound(493.8835);
break;
case 'e':
sound(587.3305);
break;
case 'r':
sound(698.4565);
break;
case 't':
sound(830.6095);
break;
case 'y':
sound(987.7675);
break;
case 'u':
sound(1174.6650);
break;
case 'i':
sound(1396.9150);
break;
case 'o':
sound(1567.9850);
break;
case 'p':
sound(1864.6650);
break;
case '1':
break;
}
}
void library_save()
{
char opt, csname[15];
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
circle(320, 0, 350);
setfillstyle(1, LIGHTRED);
floodfill(320, 0, LIGHTRED);
//TITLE//
setcolor(WHITE);
settextstyle(0, 0, 4);
outtextxy(45, 5, "THE MUSIC LIBRARY");
setcolor(CYAN);
settextstyle(0, 0, 2);
outtextxy(225, 82, "Name:");
gotoxy(45, 6);
for (int i = 0; i < 50; i++)
{
strcpy(mulist[i].name, "");
}
ifstream fincr, fin;
ofstream foutcr, fout;
fincr.open("tmu.dat", ios::in | ios::binary);
fincr.read((char *)&crs, sizeof(crs));
fincr.close();
fin.open("managemu.lis", ios::in | ios::binary);
fin.read((char *)&mulist, sizeof(mulist));
fin.close();
gets(mulist[crs].name);
if (strlen(mulist[crs].name) >= 10)
{
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 290);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, " NAME CAN HAVE MAX 10 CHARACTERS");
delay(1000);
library_save();
}
fout.open("managemu.lis", ios::out | ios::binary);
strcpy(mulist[crs].account, account.name);
fout.write((char *)&mulist, sizeof(mulist));
fout.close();
fout.open(mulist[crs].name, ios::out | ios::binary);
fout.write((char *)&music, sizeof(music));
fout.close();
foutcr.open("tmu.dat", ios::out | ios::binary);
crs++;
foutcr.write((char *)&crs, sizeof(crs));
foutcr.close();
outtextxy(10, 400, " The song is added to LIBRARY PLAY");
rectangle(0, 450, 640, 480);
setfillstyle(1, CYAN);
floodfill(0 + 50, 450 + 5, CYAN);
setcolor(WHITE);
outtextxy(0, 453, "[L]IBRARY [H]OME");
opt = getche();
opt = tolower(opt);
switch (opt)
{
case 'l':
library();
break;
case 'h':
home();
break;
default:
opt = getche();
break;
}
}
void library_delete()
{
int j = 0, n = 0, found = 0;
char sname[10];
gotoxy(61, 9);
gets(sname);
settextstyle(0, 0, 1);
ifstream fin("managemu.lis", ios::in | ios::binary);
fin.read((char *)&mulist, sizeof(mulist));
fin.close();
for (n = 0; n <= 50; n++)
{
if (!strcmp(mulist[n].name, sname))
{
found = 1;
break;
}
}
if (found == 0)
{
cleardevice();
setbkcolor(LIGHTRED);
setcolor(YELLOW);
rectangle(60, 210, 590, 270);
setfillstyle(1, YELLOW);
floodfill(150 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(75, 235, "Music Name Is Wrong");
delay(1000);
library();
}
fstream fiocr("tmu.dat", ios::in | ios::out | ios::binary);
fiocr.read((char *)&crs, sizeof(crs));
for (n = 0; n <= 50; n++)
{
if (!strcmp(mulist[n].name, sname))
{
remove(sname);
for (j = n + 1; j <= 51; j++)
{
mulist[j - 1] = mulist[j];
}
crs--;
fiocr.write((char *)&crs, sizeof(crs));
n--;
}
}
fiocr.close();
ofstream fout("managemu.lis", ios::out | ios::binary);
fout.write((char *)&mulist, sizeof(mulist));
fout.close();
cleardevice();
setbkcolor(LIGHTRED);
setcolor(YELLOW);
rectangle(60, 210, 590, 270);
setfillstyle(1, YELLOW);
floodfill(150 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(75, 235, "DELETING YOUR MUSIC");
delay(1000);
cleardevice();
setbkcolor(LIGHTRED);
setcolor(YELLOW);
rectangle(60, 210, 590, 270);
setfillstyle(1, YELLOW);
floodfill(150 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(75, 235, " MUSIC DELETED");
delay(1000);
library();
}
void library_play()
{
int i;
char sname[10], n;
for (i = 0; i < 500; i++)
{
music.script[i] = ' ';
}
gotoxy(61, 9);
gets(sname);
ifstream fin(sname, ios::in | ios::binary);
if (!fin)
{
fin.close();
cleardevice();
setbkcolor(LIGHTRED);
setcolor(YELLOW);
rectangle(60, 210, 590, 270);
setfillstyle(1, YELLOW);
floodfill(150 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(75, 235, "Music Name Is Wrong");
delay(1000);
library();
}
fin.read((char *)&music, sizeof(music));
fin.close();
cleardevice();
setbkcolor(LIGHTRED);
setcolor(YELLOW);
//FOOTER//
for (i = 5; i <= 600; i += 130)
{
int j = 385;
circle(7 + i, 20 + j, 25);
setfillstyle(5, YELLOW);
floodfill(7 + i, 20 + j, YELLOW);
circle(30 + i, 75 + j, 28);
setfillstyle(1, CYAN);
floodfill(30 + i, 75 + j, YELLOW);
circle(70 + i, 15 + j, 35);
setfillstyle(1, LIGHTGREEN);
floodfill(70 + i, 15 + j, YELLOW);
circle(90 + i, 75 + j, 25);
setfillstyle(5, LIGHTCYAN);
floodfill(90 + i, 75 + j, YELLOW);
}
textcolor(YELLOW);
for (i = 0; i < 500; i++)
{
n = music.script[i];
if (((int)n >= 65 && (int)n <= 90) || ((int)n >= 97 && (int)n <= 122) || n == '0' || n == '1')
{
if (n == '0')
break;
library_sound_play(music.script[i]);
library_disco();
delay(100);
nosound();
}
}
library();
}
void account_edit()
{
char opt;
cleardevice();
setbkcolor(WHITE);
setcolor(LIGHTRED);
circle(315, 0, 350);
setfillstyle(1, LIGHTRED);
floodfill(315, 0, LIGHTRED);
//TITLE//
setcolor(WHITE);
settextstyle(0, 0, 4);
outtextxy(60, 5, "EDIT YOUR ACCOUNT");
setcolor(CYAN);
settextstyle(0, 0, 2);
outtextxy(30, 92, "[1] Delete An Account");
outtextxy(30, 122, "[2] Change Pass Of An Account");
outtextxy(30, 152, "[3] Reset All Data");
outtextxy(30, 182, "[4] UNINSTALL");
rectangle(0, 450, 640, 480);
setfillstyle(1, CYAN);
floodfill(0 + 50, 450 + 5, CYAN);
setcolor(WHITE);
outtextxy(0, 455, "[H]OME E[X]IT");
opt = getche();
opt = tolower(opt);
switch (opt)
{
case '1':
account_del();
break;
case '2':
account_pch();
break;
case '3':
reset_data();
home();
break;
case '4':
uninstall();
break;
case 'h':
home();
break;
case 'x':
exit(0);
break;
}
}
void account_del()
{
int i, j, found = 0;
char name[20], pass[20];
cleardevice();
setbkcolor(YELLOW);
//INFO TEXT//
setcolor(CYAN);
settextstyle(0, 0, 2);
outtextxy(10, 140, " Your account will be ");
outtextxy(10, 160, "permanently deleted and ");
outtextxy(10, 180, " data will be erased");
setcolor(YELLOW);
//HEADER//
for (i = 5; i <= 600; i += 130)
{
circle(7 + i, 20, 25);
setfillstyle(5, LIGHTRED);
floodfill(7 + i, 20, YELLOW);
circle(30 + i, 75, 28);
setfillstyle(1, CYAN);
floodfill(30 + i, 75, YELLOW);
circle(70 + i, 15, 35);
setfillstyle(1, LIGHTGREEN);
floodfill(70 + i, 15, YELLOW);
circle(90 + i, 75, 25);
setfillstyle(5, LIGHTCYAN);
floodfill(90 + i, 75, YELLOW);
}
//FOOTER//
for (i = 5; i <= 600; i += 130)
{
int j = 385;
circle(7 + i, 20 + j, 25);
setfillstyle(5, LIGHTRED);
floodfill(7 + i, 20 + j, YELLOW);
circle(30 + i, 75 + j, 28);
setfillstyle(1, CYAN);
floodfill(30 + i, 75 + j, YELLOW);
circle(70 + i, 15 + j, 35);
setfillstyle(1, LIGHTGREEN);
floodfill(70 + i, 15 + j, YELLOW);
circle(90 + i, 75 + j, 25);
setfillstyle(5, LIGHTCYAN);
floodfill(90 + i, 75 + j, YELLOW);
}
//BOX//
for (i = 0, j = 0; i < 5; i++, j++)
{
if (i < 4)
{
setcolor(CYAN);
rectangle(410 + i, 120 + j, 590 + i, 320 + j);
setfillstyle(1, YELLOW);
floodfill(410 + 50 + i, 120 + 5 + j, CYAN);
}
else
{
setcolor(12);
rectangle(410 + i, 120 + j, 590 + i, 320 + j);
setfillstyle(1, YELLOW);
floodfill(410 + 50 + i, 120 + 5 + j, LIGHTRED);
}
}
//COLUMN//
setcolor(CYAN);
//NAME//
rectangle(425, 175, 585, 195);
setfillstyle(1, WHITE);
floodfill(425 + 50, 175 + 5, CYAN);
//PASSWORD//
rectangle(425, 220, 585, 240);
setfillstyle(1, WHITE);
floodfill(425 + 50, 220 + 5, CYAN);
//SIGN UP//
rectangle(452, 270, 560, 295);
setfillstyle(1, CYAN);
floodfill(452 + 50, 270 + 5, CYAN);
//SIGN UP COLUMN TEXT//
setcolor(LIGHTRED);
settextstyle(10, 0, 1);
outtextxy(449, 120, "DETAILS");
setcolor(CYAN);
settextstyle(6, 0, 1);
outtextxy(428, 170, "NAME:");
outtextxy(428, 215, "PASS:");
setcolor(WHITE);
settextstyle(1, 0, 1);
outtextxy(460, 270, " DELETE");
//END SIGN UP COLUMN TEXT//
gotoxy(61, 12);
gets(name);
gotoxy(61, 15);
gets(pass);
if (strlen(name) < 5 || strlen(pass) < 5)
{
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 290);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, " NAME/PASS MUST HAVE 5-14 CHARACTERS");
delay(1000);
account_del();
}
ifstream fin;
fin.open("manage.lis", ios::in | ios::binary);
fin.read((char *)&list, sizeof(list));
fin.close();
for (i = 0; i < 100; i++)
{
if ((strcmp(list[i].name, name) == 0) && (strcmp(list[i].pass, pass) == 0))
{
found = 1;
break;
}
}
if (found == 0)
{
cleardevice();
setbkcolor(LIGHTRED);
setcolor(YELLOW);
rectangle(60, 210, 590, 270);
setfillstyle(1, YELLOW);
floodfill(150 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(75, 235, "NAME/PASS NOT MATCHED");
delay(1000);
account_edit();
}
for (j = i + 1; j < 10; j++)
list[j - 1] = list[j];
ofstream fout;
fout.open("manage.lis", ios::out | ios::binary);
fout.write((char *)&list, sizeof(list));
fout.close();
fin.open("tac.dat", ios::in | ios::binary);
fin.read((char *)&crac, sizeof(crac));
fin.close();
crac--;
fout.open("tac.dat", ios::out | ios::binary);
fout.write((char *)&crac, sizeof(crac));
fout.close();
int l;
fin.open("managemu.lis", ios::in | ios::binary);
fin.read((char *)&mulist, sizeof(mulist));
fin.close();
fstream fiocr;
fiocr.open("tmu.dat", ios::in | ios::out | ios::binary);
fiocr.read((char *)&crs, sizeof(crs));
for (i = 0; i < 50; i++)
{
if (!strcmp(name, mulist[i].account))
{
remove(mulist[i].name);
for (int j = i + 1; j <= 51; j++)
{
mulist[j - 1] = mulist[j];
}
crs--;
fiocr.write((char *)&crs, sizeof(crs));
i--;
}
}
fiocr.close();
remove(name);
fout.open("managemu.lis", ios::out | ios::binary);
fout.write((char *)&mulist, sizeof(mulist));
fout.close();
//SIGN OUT MESSAGE//
if (sign == 1)
{
cleardevice();
setbkcolor(LIGHTRED);
sign = 0;
setcolor(YELLOW);
rectangle(150, 210, 500, 270);
setfillstyle(1, YELLOW);
floodfill(150 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(170, 235, " SIGNING OUT");
delay(1000);
}
//END SIGN OUT//
//DISPLAY MESSAGE//
cleardevice();
setbkcolor(LIGHTRED);
setcolor(YELLOW);
rectangle(0, 210, 640, 270);
setfillstyle(1, YELLOW);
floodfill(0 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(50, 235, "Your account is deleted");
delay(1000);
//END DISPLAY MESSAGE//
home();
}
void account_pch()
{
//VARIABLES//
int i, j, found = 0;
char name[20], pass[20], new_pass[20];
ofstream fout;
ifstream fin;
cleardevice();
setbkcolor(YELLOW);
setcolor(YELLOW);
//HEADER//
for (i = 5; i <= 600; i += 130)
{
circle(7 + i, 20, 25);
setfillstyle(5, LIGHTRED);
floodfill(7 + i, 20, YELLOW);
circle(30 + i, 75, 28);
setfillstyle(1, CYAN);
floodfill(30 + i, 75, YELLOW);
circle(70 + i, 15, 35);
setfillstyle(1, LIGHTGREEN);
floodfill(70 + i, 15, YELLOW);
circle(90 + i, 75, 25);
setfillstyle(5, LIGHTCYAN);
floodfill(90 + i, 75, YELLOW);
}
//FOOTER//
for (i = 5; i <= 600; i += 130)
{
int j = 385;
circle(7 + i, 20 + j, 25);
setfillstyle(5, LIGHTRED);
floodfill(7 + i, 20 + j, YELLOW);
circle(30 + i, 75 + j, 28);
setfillstyle(1, CYAN);
floodfill(30 + i, 75 + j, YELLOW);
circle(70 + i, 15 + j, 35);
setfillstyle(1, LIGHTGREEN);
floodfill(70 + i, 15 + j, YELLOW);
circle(90 + i, 75 + j, 25);
setfillstyle(5, LIGHTCYAN);
floodfill(90 + i, 75 + j, YELLOW);
}
//BOX//
for (i = 0, j = 0; i < 5; i++, j++)
{
if (i < 4)
{
setcolor(CYAN);
rectangle(410 + i, 120 + j, 590 + i, 320 + j);
setfillstyle(1, YELLOW);
floodfill(410 + 50 + i, 120 + 5 + j, CYAN);
}
else
{
setcolor(12);
rectangle(410 + i, 120 + j, 590 + i, 320 + j);
setfillstyle(1, YELLOW);
floodfill(410 + 50 + i, 120 + 5 + j, LIGHTRED);
}
}
//COLUMN//
setcolor(CYAN);
//NAME//
rectangle(425, 175, 585, 195);
setfillstyle(1, WHITE);
floodfill(425 + 50, 175 + 5, CYAN);
//PASSWORD//
rectangle(425, 220, 585, 240);
setfillstyle(1, WHITE);
floodfill(425 + 50, 220 + 5, CYAN);
//NEW PASSWORD//
rectangle(425, 268, 585, 288);
setfillstyle(1, WHITE);
floodfill(425 + 50, 268 + 5, CYAN);
//SIGN UP COLUMN TEXT//
setcolor(LIGHTRED);
settextstyle(10, 0, 1);
outtextxy(449, 120, "DETAILS");
setcolor(CYAN);
settextstyle(6, 0, 1);
outtextxy(428, 170, "NAME:");
outtextxy(428, 215, "PASS:");
outtextxy(428, 263, "NEW PASS:");
//END SIGN UP COLUMN TEXT//
gotoxy(61, 12);
gets(name);
gotoxy(61, 15);
gets(pass);
gotoxy(66, 18);
gets(new_pass);
//ACCOUNT LIST IN MEMORY//
fin.open("manage.lis", ios::in | ios::binary);
fin.read((char *)&list, sizeof(list));
fin.close();
//ACCOUNT LIST TAKEN//
//CHECK OLD PASS//
for (i = 0; i <= 10; i++)
{
if ((strcmp(list[i].name, name) == 0) && (strcmp(list[i].pass, pass) == 0))
{
found = 1;
break;
}
}
if (found == 0)
{
//DISPLAY MESSAGE//
cleardevice();
setbkcolor(LIGHTRED);
setcolor(YELLOW);
rectangle(60, 210, 590, 270);
setfillstyle(1, YELLOW);
floodfill(150 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(75, 235, "NAME/PASS NOT MATCHED");
delay(1000);
//END DISPLAY MESSAGE//
account_edit();
}
//OLD PASS CHECKED//
//CHECK NEW PASS//
if (strlen(new_pass) < 5)
{
//DISPLAY MESSAGE//
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 290);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, " NEW PASS MUST HAVE 5-14 CHARACTERS");
delay(1000);
//END DISPLAY MESSAGE//
account_edit();
}
//NEW PASS CHECKED//
//PASS UPDATED IN MEMORY//
strcpy(list[i].pass, new_pass);
//ACCOUNT LIST UPDATE//
fout.open("manage.lis", ios::out | ios::binary);
fout.write((char *)&list, sizeof(list));
fout.close();
//UPDATED//
//DISPLAY MESSAGE//
cleardevice();
setbkcolor(LIGHTRED);
setcolor(YELLOW);
rectangle(0, 210, 640, 270);
setfillstyle(1, YELLOW);
floodfill(0 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(50, 235, " Your pass is changed");
delay(1000);
//END DISPLAY MESSAGE//
home();
}
void reset_data()
{
//VARIABLES//
char pass[20];
ifstream fin;
ofstream fout;
cleardevice();
setbkcolor(YELLOW);
//TOP CIRCLE//
setcolor(LIGHTRED);
circle(320, 0, 350);
setfillstyle(1, LIGHTRED);
floodfill(320, 0, LIGHTRED);
//END CIRCLE//
//TITLE//
setcolor(WHITE);
settextstyle(0, 0, 4);
outtextxy(45, 5, " ENTER ADMIN PASS");
//END TITLE//
//TEXT//
setcolor(CYAN);
settextstyle(0, 0, 2);
outtextxy(225, 82, "Pass:");
//END TEXT//
gotoxy(45, 6);
//INPUT PASS//
gets(pass);
//ADMIN DATA IN MEMORY//
fin.open("admin.dat", ios::in | ios::binary);
fin.read((char *)&admin_pass, sizeof(admin_pass));
fin.close();
//ADMIN DATA TAKEN//
//CHECK PASS//
if (strcmp(pass, admin_pass))
{
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 290);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, " ADMIN PASS DID NOT MATCH");
delay(1000);
account_edit();
}
//END CHECK PASS//
fin.open("manage.lis", ios::in | ios::binary);
fin.read((char *)&list, sizeof(list));
for (int i = 0; i < 10; i++)
{
remove(list[i].name);
}
fin.close();
fin.open("managemu.lis", ios::in | ios::binary);
fin.read((char *)&mulist, sizeof(mulist));
for (i = 0; i < 10; i++)
{
remove(mulist[i].name);
}
fin.close();
//DELETE FILE SYSTEM//
remove("admin.dat");
remove("manage.lis");
remove("managemu.lis");
remove("tac.dat");
remove("tmu.dat");
//END DELETE FILE//
//RESET DATA IN MEMORY//
for (i = 0; i < 10; i++)
{
strcpy(list[i].name, "");
strcpy(list[i].pass, "");
}
strcpy(account.name, "");
strcpy(account.pass, "");
strcpy(admin_pass, "");
//RESET COMPLETE//
//SIGN OUT//
if (sign == 1)
{
cleardevice();
setbkcolor(LIGHTRED);
sign = 0;
setcolor(YELLOW);
rectangle(150, 210, 500, 270);
setfillstyle(1, YELLOW);
floodfill(150 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(170, 235, " SIGNING OUT");
delay(1000);
}
//END DISPLAY MESSAGE//
//DISPLAY MESSAGE//
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 290);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, " RESETTING ALL DATA");
delay(1000);
//PROGRAM RESTART//
main();
}
void uninstall()
{
//VARIABLES//
char pass[20];
ifstream fin;
ofstream fout;
cleardevice();
setbkcolor(YELLOW);
//TOP CIRCLE//
setcolor(LIGHTRED);
circle(320, 0, 350);
setfillstyle(1, LIGHTRED);
floodfill(320, 0, LIGHTRED);
//END CIRCLE//
//TITLE//
setcolor(WHITE);
settextstyle(0, 0, 4);
outtextxy(45, 5, " ENTER ADMIN PASS");
//END TITLE//
//TEXT//
setcolor(CYAN);
settextstyle(0, 0, 2);
outtextxy(225, 82, "Pass:");
//END TEXT//
gotoxy(45, 6);
//INPUT PASS//
gets(pass);
//ADMIN DATA IN MEMORY//
fin.open("admin.dat", ios::in | ios::binary);
fin.read((char *)&admin_pass, sizeof(admin_pass));
fin.close();
//ADMIN DATA TAKEN//
//CHECK PASS//
if (strcmp(pass, admin_pass))
{
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 290);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, " ADMIN PASS DID NOT MATCH");
delay(1000);
account_edit();
}
//END CHECK PASS//
fin.open("manage.lis", ios::in | ios::binary);
fin.read((char *)&list, sizeof(list));
for (int i = 0; i < 10; i++)
{
remove(list[i].name);
}
fin.close();
fin.open("managemu.lis", ios::in | ios::binary);
fin.read((char *)&mulist, sizeof(mulist));
for (i = 0; i < 10; i++)
{
remove(mulist[i].name);
}
fin.close();
//DELETE FILE SYSTEM//
remove("admin.dat");
remove("manage.lis");
remove("managemu.lis");
remove("tac.dat");
remove("tmu.dat");
//END DELETE FILE//
chdir("\\TC\\BIN");
rmdir("\\MUSIC");
//RESET DATA IN MEMORY//
for (i = 0; i < 10; i++)
{
strcpy(list[i].name, "");
strcpy(list[i].pass, "");
}
strcpy(account.name, "");
strcpy(account.pass, "");
strcpy(admin_pass, "");
//RESET COMPLETE//
//SIGN OUT//
if (sign == 1)
{
cleardevice();
setbkcolor(LIGHTRED);
sign = 0;
setcolor(YELLOW);
rectangle(150, 210, 500, 270);
setfillstyle(1, YELLOW);
floodfill(150 + 50, 210 + 5, YELLOW);
setcolor(CYAN);
settextstyle(0, 0, 3);
outtextxy(170, 235, " SIGNING OUT");
delay(1000);
}
//END DISPLAY MESSAGE//
//DISPLAY MESSAGE//
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 290);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, " UNINSTALLING ");
delay(1000);
cleardevice();
setbkcolor(YELLOW);
setcolor(LIGHTRED);
rectangle(0, 190, 640, 290);
setfillstyle(1, LIGHTRED);
floodfill(0 + 50, 190 + 5, LIGHTRED);
setcolor(YELLOW);
settextstyle(0, 0, 3);
outtextxy(0, 235, " UNINSTALL COMPLETE! ");
delay(1000);
//PROGRAM RESTART//
exit(0);
}
| 9ca37b17bddf872c3b80614dcabadbb4b40e399d | [
"C++"
] | 1 | C++ | creatorgaming/music-library | 9b6bf866e320d09c44b263b28bdfe138a6b51be7 | c2cdf19621e58d1e32560551fee931d34f1d3575 | |
refs/heads/master | <repo_name>codebeeper/stardb<file_sep>/src/components/app/app.js
import React from "react";
import Header from "../header";
import RandomPlanet from "../random-planet";
import SwapiService from "./../../services/swapi-service";
import "./app.css";
import ItemPage from "../item-page";
export default class App extends React.Component {
swapiService = new SwapiService();
render() {
return (
<div className="container-fluid">
<Header />
<RandomPlanet />
<ItemPage
getImage={this.swapiService.getPersonImage}
getData={this.swapiService.getAllPersons}
details={[["gender", "Gender"], ["eyeColor", "Eye color"]]}
/>
<ItemPage
getImage={this.swapiService.getStarshipImage}
getData={this.swapiService.getAllStarships}
details={[
["model", "Model"],
["length", "Length"],
["costInCredits", "Cost"]
]}
/>
<ItemPage
getImage={this.swapiService.getPlanetImage}
getData={this.swapiService.getAllPlanets}
details={[["population", "Population"], ["diameter", "Diameter"]]}
/>
</div>
);
}
}
<file_sep>/src/components/item-page/item-page.js
import React, { Component } from "react";
import ItemList from "../item-list";
import ItemDetails from "../item-details";
import Row from "../row";
import ErrorBoundary from "../ErrorBoundary";
const Record = ({ item, field, label }) => {
return (
<li className="list-group-item">
<span className="term">{label}</span>
<span>{item[field]}</span>
</li>
);
};
export { Record };
export default class ItemPage extends Component {
state = {
selectedItem: null
};
onItemSelected = item => {
this.setState({ selectedItem: item });
};
render() {
const { getData, getImage, details } = this.props;
const itemList = (
<ItemList
getData={getData}
onItemSelected={this.onItemSelected}
selectedItem={this.state.selectedItem}
>
{/* children */}
{(i, field) => `${i.name} (${i[field]})`}
</ItemList>
);
const itemFields = details.map(([field, label]) => {
return <Record key={field} field={field} label={label} />;
});
const itemDetails = (
<ItemDetails getImage={getImage} item={this.state.selectedItem}>
{itemFields}
</ItemDetails>
);
return (
<ErrorBoundary>
<Row left={itemList} right={itemDetails} />
</ErrorBoundary>
);
}
}
<file_sep>/src/services/swapi-service.js
export default class SwapiService {
_apiBase = "https://swapi.co/api";
async collectAllPages(url, callback) {
const request = async nextPage => {
const res = await this.getResource(nextPage || this._apiBase + url);
let results = [];
results = results.concat(res.results);
if (res.next) {
results = results.concat(await request(res.next));
}
return results;
};
return await request().then(res => res.map(callback));
}
async getResource(url) {
const res = await fetch(url);
if (!res.ok) {
throw new Error(`Could not fetch ${url}` + `, recieved ${res.status}`);
}
return await res.json();
}
getAllPersons = async () => {
return await this.collectAllPages(`/people/`, this._transformPerson);
};
async getPerson(id) {
const person = await this.getResource(this._apiBase + `/people/${id}`);
return this._transformPerson(person);
}
getAllPlanets = async () => {
return await this.collectAllPages("/planets/", this._transformPlanet);
};
async getPlanet(id) {
const planet = await this.getResource(this._apiBase + `/planets/${id}`);
return this._transformPlanet(planet);
}
getAllStarships = async () => {
return await this.collectAllPages("/starships/", this._transformStarship);
};
async getStarship(id) {
    const starship = await this.getResource(this._apiBase + `/starships/${id}`);
return this._transformStarship(starship);
}
_extractId = url => {
return url.match(/[^/]\d*(?=\/$)/g)[0];
};
getPersonImage(id) {
return `https://starwars-visualguide.com/assets/img/characters/${id}.jpg`;
}
getStarshipImage(id) {
return `https://starwars-visualguide.com/assets/img/starships/${id}.jpg`;
}
getPlanetImage(id) {
return `https://starwars-visualguide.com/assets/img/planets/${id}.jpg`;
}
_transformPlanet = planet => {
return {
id: this._extractId(planet.url),
name: planet.name,
population: planet.population,
rotationPeriod: planet.rotation_period,
diameter: planet.diameter
};
};
_transformStarship = starship => {
return {
id: this._extractId(starship.url),
name: starship.name,
model: starship.model,
manufacturer: starship.manufacturer,
costInCredits: starship.cost_in_credits,
length: starship.length,
crew: starship.crew,
passengers: starship.passengers,
cargoCapacity: starship.cargo_capacity
};
};
_transformPerson = person => {
console.log(person);
return {
id: this._extractId(person.url),
name: person.name,
gender: person.gender,
birthYear: person.birth_year,
eyeColor: person.eye_color
};
};
}
<file_sep>/src/components/item-details/item-details.js
import React, { Component } from "react";
import Spinner from "../spinner";
import "./item-details.css";
const ItemDetails = props => {
const { item } = props;
const itemInfo = item && (
<React.Fragment>
<img
className="item-image"
src={props.getImage(item.id)}
alt={item.name}
/>
<div className="card-body">
<h4>{item.name}</h4>
<ul className="list-group list-group-flush">
{React.Children.map(props.children, child => {
return React.cloneElement(child, { item });
})}
</ul>
</div>
</React.Fragment>
);
return (
<div className="item-details card">{item ? itemInfo : <Spinner />}</div>
);
};
export default ItemDetails;
<file_sep>/src/components/item-list/item-list.js
import React, { Component } from "react";
import "./item-list.css";
import Pagination from "../pagination/pagination";
import withData from "../../hoc-helpers/with-data";
class ItemList extends Component {
_perPage = 7;
state = {
active: 1,
pages: null,
itemsList: null,
currentVisible: null,
isLoading: true
};
componentDidUpdate(prevProps) {
const { itemsList } = this.props;
if (prevProps.itemsList !== itemsList) {
this.setState(
{
itemsList,
isLoading: false,
pages: Math.ceil(itemsList.length / this._perPage)
},
() => this.paginationClick()
);
}
}
paginationClick = (id = 1) => {
this.setState(({ currentVisible, itemsList }) => {
const peopleToShow = itemsList.slice(
(id - 1) * this._perPage,
id * this._perPage
);
if (currentVisible !== peopleToShow) {
// set the first item active each navigation
this.props.onItemSelected(peopleToShow[0]);
// set visible items (pagination chunk)
return { currentVisible: peopleToShow, active: id };
}
return;
});
};
renderItems(arr) {
const { selectedItem, children, onItemSelected } = this.props;
return arr.map(item => {
// use props function to set label
const label = children(item, Object.keys(item)[3]);
const classes =
selectedItem && item.id === selectedItem.id
? "list-group-item active"
: "list-group-item";
return (
<li
onClick={() => onItemSelected(item)}
key={item.name}
className={classes}
>
{label}
</li>
);
});
}
render() {
const { pages, currentVisible, active, itemsList } = this.state;
const elementsList = currentVisible && this.renderItems(currentVisible);
if (this.state.isLoading) {
return <ul className="item-list list-group">{`Loading...`}</ul>;
}
return (
<React.Fragment>
<ul className="item-list list-group">{elementsList}</ul>
<Pagination
total={itemsList && itemsList.length}
step={currentVisible && currentVisible.length}
perPage={this._perPage}
pages={pages}
active={active}
navigate={this.paginationClick}
/>
</React.Fragment>
);
}
}
export default withData(ItemList);
| 483dfc203952ce5d4b00ed371f749f7cb71e1a7b | [
"JavaScript"
] | 5 | JavaScript | codebeeper/stardb | 8545422bca9ad9641d1eedc56cb395e330391c00 | b1d10bdfdb637c77ba3384564c26eca817e8266a | |
refs/heads/master | <repo_name>oswavare14/Lab6_OswaldoVarela_P3<file_sep>/v.cpp
#include "v.h"
#include <iostream>
#include <string>
using namespace std;
v::v():
Bombas(){
}
<file_sep>/Normal.h
#include "Bombas.h"
#include <iostream>
#include <string>
using namespace std;
#ifndef NORMAL
#define NORMAL
class Normal: public Bombas{
public:
int alcance;
Normal();
};
#endif
<file_sep>/Jugador.h
#include <iostream>
#include <string>
#include "Items.h"
using namespace std;
#ifndef PERSONAJE
#define PERSONAJE
class Jugador: public Items{
public:
string nombre;
bool verdad;
bool tipo;
Jugador();
Jugador(string,bool,bool);
~Jugador();
};
#endif
<file_sep>/makefile
run: main.o Escenarios.o Items.o Jugador.o Bombas.o Normal.o Espina.o v.o Invisible.o Tren.o
g++ main.o Escenarios.o Items.o Jugador.o Bombas.o Normal.o Espina.o v.o Invisible.o Tren.o -o run -lncurses
main.o: main.cpp Items.h Escenarios.h Jugador.h Bombas.h Normal.h Espina.h v.h Invisible.h Tren.h
g++ -c main.cpp
Items.o: Items.h Items.cpp
g++ -c Items.cpp
Escenarios.o: Escenarios.h Items.h Escenarios.cpp
g++ -c Escenarios.cpp
Jugador.o: Jugador.h Items.h Jugador.cpp
g++ -c Jugador.cpp
Bombas.o: Bombas.h Items.h Bombas.cpp
g++ -c Bombas.cpp
Normal.o: Normal.h Bombas.h Normal.cpp
g++ -c Normal.cpp
Espina.o: Espina.h Bombas.h Espina.cpp
g++ -c Espina.cpp
v.o: v.h Bombas.h v.cpp
g++ -c v.cpp
Invisible.o: Invisible.h Escenarios.h Invisible.cpp
g++ -c Invisible.cpp
Tren.o: Tren.h Escenarios.h Tren.cpp
g++ -c Tren.cpp
<file_sep>/Bombas.cpp
#include "Bombas.h"
#include <string>
#include <iostream>
using namespace std;
Bombas::Bombas(){
}
Bombas::Bombas(int numero):
Items(x,y){
this->numero=numero;
}
void Bombas::setmovio(){
numero=numero-1;
}
Bombas::~Bombas(){
}
<file_sep>/main.cpp
#include <iostream>
#include <string>
#include <ncurses.h>
#include "Items.h"
#include "Escenarios.h"
#include "Jugador.h"
#include "Bombas.h"
#include "Normal.h"
#include "Espina.h"
#include "v.h"
#include "Invisible.h"
#include "Tren.h"
using namespace std;
int main(){
int entrar = 0;
do{
initscr();
refresh();
printw("SOLO Falto Aplicar todo de NCURSES");
move(1,0);
refresh();
printw("HAHAHA");
refresh();
getch();
endwin();
}while(entrar!=0);
return 0;
}
<file_sep>/Bombas.h
#include "Items.h"
#include <iostream>
#include <string>
using namespace std;
#ifndef BOMBA
#define BOMBA
class Bombas: public Items{
public:
int numero;
bool movio;
Bombas();
Bombas(int);
void setmovio();
~Bombas();
};
#endif
<file_sep>/Invisible.cpp
#include "Invisible.h"
#include <iostream>
#include <string>
using namespace std;
Invisible::Invisible(){
}
Invisible::Invisible(int numero, int valor):
Escenarios(nombre){
this->numero=numero;
this->valor=valor;
}
<file_sep>/v.h
#include "Bombas.h"
#include <iostream>
#include <string>
using namespace std;
#ifndef V
#define V
class v: public Bombas{
public:
v();
};
#endif
<file_sep>/Invisible.h
#include "Escenarios.h"
#include <iostream>
#include <string>
using namespace std;
#ifndef INVISIBLE
#define INVISIBLE
class Invisible: public Escenarios{
public:
int numero;
int valor;
Invisible();
Invisible(int,int);
};
#endif
<file_sep>/Tren.h
#include "Escenarios.h"
#include <iostream>
#include <string>
#include <vector>
using namespace std;
#ifndef TREN
#define TREN
class Tren: public Escenarios{
public:
vector<string> posicion;
vector<int> x;
vector<int> y;
Tren();
void setx(int);
void sety(int);
vector<string> getLista();
};
#endif
<file_sep>/Items.h
#include <iostream>
#include <string>
using namespace std;
#ifndef ITEM
#define ITEM
class Items{
public:
int x;
int y;
Items();
Items(int,int);
~Items();
};
#endif
<file_sep>/Items.cpp
#include "Items.h"
#include <string>
using namespace std;
Items::Items(){
}
Items::Items(int x, int y){
this->x=x;
this->y=y;
}
Items::~Items(){
}
<file_sep>/Escenarios.h
#include <iostream>
#include <string>
#include <vector>
#include "Items.h"
using namespace std;
#ifndef ESENARIO
#define ESENARIO
class Escenarios{
public:
string nombre;
Items** matriz;
vector<string> bombas;
Escenarios();
Escenarios(string);
void setBombas(string);
Items** setMatriz();
~Escenarios();
};
#endif
<file_sep>/Espina.h
#include "Bombas.h"
#include <iostream>
#include <string>
using namespace std;
#ifndef ESPINA
#define ESPINA
class Espina: public Bombas{
public:
int cantidad;
Espina();
};
#endif
<file_sep>/Escenarios.cpp
#include "Escenarios.h"
#include <string>
#include <vector>
using namespace std;
Escenarios::Escenarios(){
}
Escenarios::Escenarios(string nombre){
this->nombre=nombre;
// bombas = new vector();
}
void Escenarios::setBombas(string bomba){
bombas.push_back(bomba);
}
Items** Escenarios::setMatriz(){
matriz = new Items*[11];
for(int i = 0; i<10; i++){
matriz[i]=new Items[13];
}
return matriz;
}
Escenarios::~Escenarios(){
}
<file_sep>/Normal.cpp
#include "Normal.h"
#include <iostream>
#include <string>
using namespace std;
Normal::Normal():
Bombas(){
numero = 2;
}
<file_sep>/Espina.cpp
#include "Espina.h"
#include <iostream>
#include <string>
using namespace std;
Espina::Espina():
Bombas(){
}
<file_sep>/Tren.cpp
#include "Tren.h"
#include <iostream>
#include <string>
#include <vector>
using namespace std;
Tren::Tren():
Escenarios(nombre){
}
void Tren::setx(int xx){
x.push_back(xx);
// cast for the list
}
void Tren::sety(int yy){
y.push_back(yy);
// cast for the list
}
vector<string> Tren::getLista(){
return posicion;
}
<file_sep>/Jugador.cpp
#include "Jugador.h"
#include <iostream>
#include <string>
using namespace std;
Jugador::Jugador(){
}
Jugador::Jugador(string nombre, bool verdad, bool tipo):
Items(x,y){
this->nombre=nombre;
this->verdad=verdad;
this->tipo=tipo;
}
Jugador::~Jugador(){
}
| 73c8cf7d88551d0a058f90edb1737f0d47f55766 | [
"Makefile",
"C++"
] | 20 | C++ | oswavare14/Lab6_OswaldoVarela_P3 | 772e2aff4fd367a52b8efa308039a5f48886758f | f9fb3cde663f9f6e39ccbcfc7e273add8d5f329c | |
refs/heads/master | <file_sep># encoding: UTF-8
# Copyright © 2013, 2014, Watu
require "rubygems"
# Test coverage
require "simplecov"
require "coveralls"
SimpleCov.start do
add_filter "/test/"
end
Coveralls.wear! # Comment out this line to have the local coverage generated.
require "minitest/autorun"
require "minitest/reporters"
MiniTest::Reporters.use!
# This class is here only to trick shoulda into attaching itself to MiniTest due to: https://github.com/thoughtbot/shoulda-context/issues/38
module ActiveSupport
class TestCase < MiniTest::Unit::TestCase
end
end
require "shoulda"
require "shoulda-context"
require "shoulda-matchers"
require "mocha/setup"
# Database setup
require "active_record"
require "logger"
ActiveRecord::Base.logger = Logger.new(STDERR)
ActiveRecord::Base.logger.level = Logger::WARN
ActiveRecord::Base.configurations = {"sqlite3" => {adapter: "sqlite3", database: ":memory:"}}
ActiveRecord::Base.establish_connection("sqlite3")
# Make the code to be tested easy to load.
$LOAD_PATH.unshift(File.dirname(__FILE__))
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), "..", "lib"))
<file_sep># encoding: UTF-8
# Copyright © 2013, 2014, Watu
module RandomUniqueId
VERSION = "0.2.1"
end
<file_sep># encoding: UTF-8
# Copyright © 2011, 2012, 2013, 2014, Watu
require_relative "test_helper"
require "random_unique_id"
ActiveRecord::Schema.define(version: 0) do
create_table :blogs do |t|
t.string :rid
t.string :name
end
add_index :blogs, :rid, unique: true
create_table :posts do |t|
t.string :rid
t.string :type
t.integer :blog_id
end
add_index :posts, :rid, unique: true
end
class Blog < ActiveRecord::Base
has_many :posts
has_random_unique_id
end
class Post < ActiveRecord::Base
belongs_to :blog
has_random_unique_id
end
class TextPost < Post
end
class ImagePost < Post
end
class RandomUniqueIdTest < MiniTest::Unit::TestCase
context "With a record with random id" do
setup { @text_post = TextPost.create! }
should "generate a random id" do
assert @text_post.rid
end
should "return random id as param" do
assert_equal @text_post.rid, @text_post.to_param
end
should "resolve random id collision" do
# Mock RandomUniqueId to return a collision on the first call, and hopefully a non collision on the second, expecting n to grow by one.
RandomUniqueId.expects(:generate_random_id).with(5).returns(@text_post.rid)
new_rid = @text_post.rid + "i"
RandomUniqueId.expects(:generate_random_id).with(6).returns(new_rid)
new_record = TextPost.create! # No exception should be raised.
assert_equal new_rid, new_record.rid
end
should "resolve random id collision in different classes of the same table (due to STI)" do
# Mock RandomUniqueId to return a collision on the first call, and hopefully a non collision on the second, expecting n to grow by one.
RandomUniqueId.expects(:generate_random_id).with(5).returns(@text_post.rid)
new_rid = @text_post.rid + "i"
RandomUniqueId.expects(:generate_random_id).with(6).returns(new_rid)
new_record = ImagePost.create! # No exception should be raised.
assert_equal new_rid, new_record.rid
end
should "have automatic *_rid= and *_rid methods" do
blog = Blog.create!
@text_post.blog_rid = blog.rid
@text_post.save!
assert_equal blog, @text_post.blog
assert_equal blog.rid, @text_post.blog_rid
end
should "populate a table with rids" do
# Create a bunch of blogs without rid by manually inserting them into the talbe.
rid_less_records = 10
5.times { Blog.create! }
existing_rids = Blog.all.map(&:rid).compact
rid_less_records.times { Blog.connection.execute("INSERT INTO blogs (name) VALUES ('Blag')") }
assert_equal rid_less_records, Blog.where(:rid => nil).count # Just to be sure this test is being effective.
rids_populated = 0
Blog.populate_random_unique_ids { |_, rid_just_populated| rids_populated += 1 if rid_just_populated }
assert_equal rid_less_records, rids_populated
assert_equal 0, Blog.where(:rid => nil).count
assert_equal existing_rids.count, Blog.where(:rid => existing_rids).count # Make sure the existing rids where not touched.
end
end
end
<file_sep># encoding: UTF-8
# Copyright © 2013, 2014, Watu
source "https://rubygems.org"
gem "bundler", "~> 1.3"
gem "coveralls", require: false
gem "minitest", "~> 4.7.5"
gem "minitest-reporters"
gem "mocha"
gem "rake"
gem "simplecov"
gem "shoulda"
gem "sqlite3"
gem "activesupport", "~> 3.2.0"
gem "activerecord", "~> 3.2.0"
<file_sep># Random Unique ID
[](https://travis-ci.org/watu/random_unique_id)
[](https://coveralls.io/r/watu/random_unique_id?branch=master)
[](https://codeclimate.com/github/watu/random_unique_id)
[](http://inch-ci.org/github/watu/random_unique_id)
[](http://badge.fury.io/rb/random_unique_id)
This gem will generate a random unique id for your active record records that you can use instead of their actual ID for
all external interactions with users. The goal is for you to be able to hide how many records you have, for business
purposes, but also to make IDs non-predictable.
This gem is built to work with Ruby 1.9, 2.0, 2.1 as well as with Rails 3.2 and 4.0. All of these cases are
[continuously tested for](https://travis-ci.org/watu/random_unique_id.png?branch=master).
## Installation
Add this line to your application's Gemfile:
gem "random_unique_id"
And then execute:
$ bundle
Or install it yourself as:
$ gem install random_unique_id
## Usage
The usage is very simple. For each record where you want to have a random id generated, add the following line to the
class:
has_random_unique_id
For example:
class Post < ActiveRecord::Base
has_random_unique_id
end
You need to also add a column, called `rid` of type string/varchar. It is recommended that you also add a unique index
on that column, for example:
def up
add_column :posts, :rid, :string
add_index :posts, :rid, :unique
end
The method `to_param` will be overridden to return the `rid` instead of the `id`. The method `belongs_to` gets extended
to define `_rid` methods similar to the `_id` method, like: `blog_rid` and `blog_rid=`. If you don't want to define
those, pass `rid: false`, for example:
class Post
      belongs_to :blog, rid: false
end
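For instance, assuming `Post` belongs to `Blog` and both models have a `rid` column, the generated accessors can
be used like this (a minimal, illustrative sketch):
    blog = Blog.create!
    post = Post.new
    post.blog_rid = blog.rid # looks up the blog by its rid and assigns the association
    post.blog_rid            # => blog.rid
    post.blog                # => blog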
Classes that have rids also get a method called `populate_random_unique_ids` to help you populate the rid of existing
records. For example:
def up
add_column :posts, :rid, :string
add_index :posts, :rid, :unique
say_with_time "Post.populate_random_unique_ids" do
Post.reset_column_information
Post.populate_random_unique_ids { print "."}
end
end
## Changelog
### Version 0.2.0
- Added method populate_random_unique_ids.
- Improved documentation
- Started testing with Ruby 2.1.
### Version 0.1.0
- Initial release of the code extracted from [Watu](http://github.com/watu).
## Contributing
1. Fork it
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am "Add some feature"`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request
<file_sep># encoding: UTF-8
# Copyright © 2011, 2012, 2013, 2014, Watu
require "random_unique_id/version"
require "securerandom"
require "active_support"
require "active_record"
module RandomUniqueId
extend ActiveSupport::Concern
module ClassMethods
# Mark a model as containing a random unique id. A field called rid of type string is required. It's recommended
# that it's indexed and unique. For example, you could add it to a migration like this:
# def up
# add_column :posts, :rid, :string
# add_index :posts, :rid, :unique
# end
#
# and then to the model like this:
# class Post
# has_random_unique_id
# # ... other stuff
# end
def has_random_unique_id
validates :rid, presence: true, uniqueness: true
before_validation :generate_random_unique_id, if: Proc.new { |r| r.rid.blank? }
define_method(:to_param) { rid }
end
def belongs_to(*attrs)
define_rid_method = attrs[1].try(:delete, :rid)
super.tap do
if define_rid_method != false
relationship_name = attrs[0]
rel = reflections[relationship_name]
return if rel.options[:polymorphic] # If we don't know the class, we cannot find the record by rid.
class_name = rel.options[:class_name] || relationship_name.to_s.classify
related_class = class_name.constantize
define_rid_accessors(related_class, relationship_name) if related_class.attribute_names.include? "rid"
end
end
end
# Populate all the blank rids in a table. This is useful when adding rids to a table that already has data in it.
# For example:
# def up
# add_column :posts, :rid, :string
# add_index :posts, :rid, :unique
# say_with_time "Post.populate_random_unique_ids" do
# Post.reset_column_information
# Post.populate_random_unique_ids { print "."}
# end
# end
#
# This method uses update_column to avoid running validations and callbacks. It will not change existing rids, so
# it's safe to call several times and a failure (even without a transaction) is not catastrophic.
def populate_random_unique_ids
find_each do |record|
rid_just_populated = false
if record.rid.blank?
record.generate_random_unique_id
record.update_column(:rid, record.rid)
rid_just_populated = true
end
yield(record, rid_just_populated) if block_given?
end
end
private
def define_rid_accessors(related_class, relationship_name)
define_method("#{relationship_name}_rid") do
self.send(relationship_name).try(:rid)
end
define_method("#{relationship_name}_rid=") do |rid|
record = related_class.find_by_rid(rid)
self.send("#{relationship_name}=", record)
record
end
end
end
def generate_random_unique_id(n=5, field="rid")
# Find the topmost class before ActiveRecord::Base so that when we do queries, we don't end up with type=Whatever in the where clause.
klass = self.class
self.class.ancestors.each do |k|
if k == ActiveRecord::Base
break # we reached the bottom of this barrel
end
if k.is_a? Class
klass = k
end
end
begin
self.send("#{field}=", RandomUniqueId.generate_random_id(n))
n += 1
end while klass.unscoped.where(field => self.send(field)).exists?
end
def self.generate_random_id(n=10)
# IMPORTANT: don't ever generate dashes or underscores in the RIDs as they are likely to end up in the UI in Rails
# and they'll be converted to something else by jquery ujs or something like that.
generated_rid = ""
while generated_rid.length < n
generated_rid = (generated_rid + SecureRandom.urlsafe_base64(n * 3).downcase.gsub(/[^a-z0-9]/, ""))[0..(n-1)]
end
return generated_rid
end
end
ActiveRecord::Base.send(:include, RandomUniqueId)
| b904973a272124369a78bb62edcd5bd443f50c47 | [
"Markdown",
"Ruby"
] | 6 | Ruby | jjconti/random_unique_id | 495e6355c899512c157ef97de8fc41dbd1840367 | de84930edb164df749613cc1a5199a8648ddb105 | |
refs/heads/master | <file_sep>import createSliderWithTooltip from 'rc-slider/lib/createSliderWithTooltip';
import '../style';
export default createSliderWithTooltip;
| 1ec8e0d79110e80dc3cfd75542476d517edac919 | [
"JavaScript"
] | 1 | JavaScript | gag-mext/create-tooltip | c15ff326ec1f1ee76cd6c2302e32d70ce3038bf8 | 1d612b253473990b302fc46b572097ec44b1c4d5 | |
refs/heads/master | <repo_name>samuelmolina/richCharts<file_sep>/src/main/java/com/bchetty/charts/model/Series.java
package com.bchetty.charts.model;
import java.util.List;
/**
*
* @author <NAME>, Chetty
*/
public class Series {
private String name;
private List<Long> data;
public Series() {}
public Series(String name, List<Long> data) {
this.name = name;
this.data = data;
}
}
<file_sep>/README.md
richCharts
==========
Demo project showing the Javascript charting library (Highcharts) in action.
Technology Stack: Java, JSF, Richfaces, Gson, Highcharts
The demo application polls the server for data related to JVM's heap memory usage and displays the statistics in a chart, generated by the Javascript framework - Highcharts. | a7ef16777b347de75df5d38dc73be6b1a57da5ea | [
"Markdown",
"Java"
] | 2 | Java | samuelmolina/richCharts | 27ae7aac95c23f67986e13438bb248415ed13dec | cab56384c21e54ea35635f7f265bb880f6f132ec | |
refs/heads/master | <repo_name>yuanshaowu/logpolice-spring-boot-starter<file_sep>/src/main/java/com/logpolice/infrastructure/enums/NoticeSendEnum.java
package com.logpolice.infrastructure.enums;
/**
 * Notification push type
*
* @author huang
* @date 2019/8/28
*/
public enum NoticeSendEnum {
/**
     * MAIL: email
     * DING_DING: DingTalk
     * FEI_SHU: Feishu (Lark)
*/
MAIL,
DING_DING,
FEI_SHU;
}
<file_sep>/src/main/java/com/logpolice/infrastructure/dto/DingDingAtCommand.java
package com.logpolice.infrastructure.dto;
import lombok.*;
import java.util.Set;
/**
 * DingTalk @-mention command object
*
* @author huang
* @date 2019/8/28
*/
@ToString
@NoArgsConstructor
@AllArgsConstructor
public class DingDingAtCommand {
/**
     * Mobile numbers of the users to be @mentioned
*/
@Getter
@Setter
private Set<String> atMobiles;
/**
     * true when everyone should be @mentioned (@all), otherwise false
*/
@Getter
@Setter
private boolean isAtAll;
}<file_sep>/src/main/java/com/logpolice/infrastructure/enums/RedisClientTypeEnum.java
package com.logpolice.infrastructure.enums;
/**
 * Redis client type
*
* @author huang
* @date 2019/8/28
*/
public enum RedisClientTypeEnum {
/**
 * REDIS_TEMPLATE: Spring RedisTemplate client
 * JEDIS: Jedis client
*/
REDIS_TEMPLATE,
JEDIS;
}
<file_sep>/src/main/java/com/logpolice/infrastructure/utils/RedisFactory.java
package com.logpolice.infrastructure.utils;
import com.logpolice.infrastructure.enums.RedisClientTypeEnum;
/**
* redis工厂
*
* @author huang
* @date 2020/5/27
*/
public interface RedisFactory {
/**
* 获取类型
*
* @return 类型
*/
RedisClientTypeEnum getType();
/**
* 获取值
*
* @param key 键
* @return 值
*/
String get(String key);
/**
* 设置值
*
* @param key 键
* @param value 值
* @param seconds 秒
* @return 状态
*/
boolean setex(String key, String value, int seconds);
/**
* 锁
*
* @param key 键
* @param value 值
* @param seconds 秒
* @return 状态
*/
boolean lock(String key, String value, int seconds);
/**
* 解锁
*
* @param key 键
* @return 状态
*/
boolean unlock(String key);
}
<file_sep>/src/main/java/com/logpolice/infrastructure/properties/LogpoliceProperties.java
package com.logpolice.infrastructure.properties;
import com.logpolice.domain.entity.NoticeFrequencyType;
import com.logpolice.infrastructure.enums.NoticeSendEnum;
import java.util.HashSet;
import java.util.Set;
/**
* 日志报警配置
*
* @author huang
* @date 2019/8/29
*/
public interface LogpoliceProperties {
/**
* 获取工程名
*/
String getAppCode();
/**
* 获取工程地址
*/
String getLocalIp();
/**
* 日志报警是否打开
*/
Boolean getEnabled();
/**
* 日志报警清除时间
*/
default Long getCleanTimeInterval() {
return Long.valueOf(LogpoliceConstant.CLEAN_TIME_INTERVAL);
}
/**
* 通知频率类型:按时间或按次数
*/
default NoticeFrequencyType getFrequencyType() {
return NoticeFrequencyType.TIMEOUT;
}
/**
* 此次出现相同的异常时,与上次通知的时间做对比,假如超过此设定的值,则再次通知
*/
default Long getTimeInterval() {
return LogpoliceConstant.TIME_INTERVAL;
}
/**
* 此次出现相同异常时,与上次通知的出现次数作对比,假如超过此设定的值,则再次通知
*/
default Long getShowCount() {
return LogpoliceConstant.SHOW_COUNT;
}
/**
* 钉钉推送类型
*/
default NoticeSendEnum getNoticeSendType() {
return NoticeSendEnum.DING_DING;
}
/**
* 日志报警是否打开redis
*/
default Boolean getEnableRedisStorage() {
return LogpoliceConstant.ENABLE_REDIS_STORAGE;
}
/**
* 异常redisKey
*/
default String getExceptionRedisKey() {
return LogpoliceConstant.EXCEPTION_STATISTIC_REDIS_KEY;
}
/**
* 异常白名单
*/
default Set<String> getExceptionWhiteList() {
return new HashSet<>();
}
/**
* 类白名单
*/
default Set<String> getClassWhiteList() {
return new HashSet<>();
}
/**
* 获取模板格式
*/
default String getLogPattern() {
return LogpoliceConstant.PROFILES_ACTIVE;
}
}
<file_sep>/src/main/java/com/logpolice/infrastructure/rpc/LockUtilsRedis.java
package com.logpolice.infrastructure.rpc;
import com.logpolice.infrastructure.enums.NoticeDbTypeEnum;
import com.logpolice.infrastructure.utils.LockUtils;
import com.logpolice.infrastructure.utils.RedisFactory;
import java.util.List;
/**
* redis锁
*
* @author huang
* @date 2019/9/3
*/
public class LockUtilsRedis implements LockUtils {
private final List<RedisFactory> redisFactories;
public LockUtilsRedis(List<RedisFactory> redisFactories) {
this.redisFactories = redisFactories;
}
@Override
public NoticeDbTypeEnum getType() {
return NoticeDbTypeEnum.REDIS;
}
@Override
public boolean lock(String key) {
return redisFactories.get(0).lock(key, "1", 1);
}
@Override
public void unlock(String key) {
redisFactories.get(0).unlock(key);
}
}
<file_sep>/src/main/java/com/logpolice/infrastructure/exception/DingDingTokeNotExistException.java
package com.logpolice.infrastructure.exception;
/**
* 钉钉token空异常
*
* @author huang
* @date 2019/8/29
*/
public class DingDingTokeNotExistException extends RuntimeException {
public DingDingTokeNotExistException(String message, Object... args) {
super(String.format(message, args));
}
}
<file_sep>/src/main/java/com/logpolice/infrastructure/rpc/ExceptionStatisticRedis.java
package com.logpolice.infrastructure.rpc;
import com.alibaba.fastjson.JSONObject;
import com.logpolice.domain.entity.ExceptionStatistic;
import com.logpolice.domain.repository.ExceptionStatisticRepository;
import com.logpolice.infrastructure.enums.NoticeDbTypeEnum;
import com.logpolice.infrastructure.properties.LogpoliceConstant;
import com.logpolice.infrastructure.utils.RedisFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.util.StringUtils;
import java.util.List;
import java.util.Optional;
/**
 * Redis-backed exception statistics store
*
* @author huang
* @date 2019/9/11
*/
@Slf4j
public class ExceptionStatisticRedis implements ExceptionStatisticRepository {
private final List<RedisFactory> redisFactories;
public ExceptionStatisticRedis(List<RedisFactory> redisFactories) {
this.redisFactories = redisFactories;
}
@Override
public NoticeDbTypeEnum getType() {
return NoticeDbTypeEnum.REDIS;
}
@Override
public Optional<ExceptionStatistic> findByOpenId(String openId) {
ExceptionStatistic exceptionStatistic = null;
String result = redisFactories.get(0).get(openId);
if (!StringUtils.isEmpty(result)) {
exceptionStatistic = JSONObject.parseObject(result, ExceptionStatistic.class);
}
return Optional.ofNullable(exceptionStatistic);
}
@Override
public boolean save(String openId, ExceptionStatistic exceptionStatistic) {
redisFactories.get(0).setex(openId, JSONObject.toJSONString(exceptionStatistic), LogpoliceConstant.CLEAN_TIME_INTERVAL);
return true;
}
}
<file_sep>/src/main/java/com/logpolice/infrastructure/utils/DateUtils.java
package com.logpolice.infrastructure.utils;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
/**
* 日期工具类
*
* @author huang
* @date 2019/9/3
*/
public class DateUtils {
/**
* 格式化时间
*/
private final static String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
/**
* 时间戳转换时间格式
*
* @param timeStamp 时间戳
* @return 时间格式
*/
public static LocalDateTime getLocalDateTime(Long timeStamp) {
Instant instant = Instant.ofEpochMilli(timeStamp);
return LocalDateTime.ofInstant(instant, ZoneId.systemDefault());
}
/**
* 时间格式化
*
* @param localDateTime 时间
* @return 格式化时间
*/
public static String format(LocalDateTime localDateTime) {
DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern(DATE_FORMAT);
return dateTimeFormatter.format(localDateTime);
}
}
<file_sep>/src/main/java/com/logpolice/infrastructure/config/LogpoliceServiceAutoConfiguration.java
package com.logpolice.infrastructure.config;
import com.logpolice.application.NoticeService;
import com.logpolice.application.NoticeServiceFactory;
import com.logpolice.domain.repository.ExceptionNoticeRepository;
import com.logpolice.domain.repository.ExceptionStatisticRepository;
import com.logpolice.infrastructure.utils.LockUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.util.List;
/**
* 本地缓存自动装配
*
* @author huang
* @date 2019/8/28
*/
@Configuration
@AutoConfigureAfter({LogpoliceRedisAutoConfiguration.class, LogpoliceLoclCacheAutoConfiguration.class,
LogpoliceMailAutoConfiguration.class, LogpoliceDingDingAutoConfiguration.class})
public class LogpoliceServiceAutoConfiguration {
private final List<ExceptionNoticeRepository> exceptionNoticeRepositories;
private final List<ExceptionStatisticRepository> exceptionStatisticRepositories;
private final List<LockUtils> lockUtils;
@Autowired
public LogpoliceServiceAutoConfiguration(List<ExceptionNoticeRepository> exceptionNoticeRepositories,
List<ExceptionStatisticRepository> exceptionStatisticRepositories,
List<LockUtils> lockUtils) {
this.exceptionNoticeRepositories = exceptionNoticeRepositories;
this.exceptionStatisticRepositories = exceptionStatisticRepositories;
this.lockUtils = lockUtils;
}
@Bean
public NoticeServiceFactory noticeServiceFactory() {
return new NoticeServiceFactory(exceptionNoticeRepositories, exceptionStatisticRepositories, lockUtils);
}
@Bean
public NoticeService noticeService() {
return new NoticeService(noticeServiceFactory());
}
}
<file_sep>/src/main/java/com/logpolice/infrastructure/dto/DingDingCommand.java
package com.logpolice.infrastructure.dto;
import lombok.*;
import java.util.Set;
/**
* 钉钉命令
*
* @author huang
* @date 2019/8/27
*/
@ToString
@NoArgsConstructor
@AllArgsConstructor
public class DingDingCommand {
/**
 * Message type
*/
@Getter
@Setter
private String msgtype;
/**
* 钉钉文本命令
*/
@Getter
@Setter
private DingDingTextCommand text;
/**
* 钉钉对象命令
*/
@Getter
@Setter
private DingDingAtCommand at;
/**
* 创建钉钉命令(自定义构造)
*
* @param content 消息内容
* @param msgtype 此消息类型为固定text
* @param atMobiles 被@人的手机号
* @param isAtAll 所有人@时:true,否则为false
*/
public DingDingCommand(String content, String msgtype, Set<String> atMobiles, Boolean isAtAll) {
this.msgtype = msgtype;
this.at = new DingDingAtCommand(atMobiles, isAtAll);
this.text = new DingDingTextCommand(content);
}
}
<file_sep>/src/main/java/com/logpolice/infrastructure/config/LogpoliceJedisAutoConfiguration.java
package com.logpolice.infrastructure.config;
import com.logpolice.infrastructure.utils.redis.JedisUtils;
import com.logpolice.infrastructure.utils.redis.RedisTemplateUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import redis.clients.jedis.JedisSentinelPool;
/**
* redis配置自动装配
*
* @author huang
* @date 2019/8/28
*/
@Configuration
@AutoConfigureAfter({LogpoliceMailAutoConfiguration.class, LogpoliceDingDingAutoConfiguration.class, LogpoliceRedisTemplateAutoConfiguration.class})
@ConditionalOnClass(JedisSentinelPool.class)
@ConditionalOnMissingBean(RedisTemplateUtils.class)
public class LogpoliceJedisAutoConfiguration {
private final JedisSentinelPool jedisSentinelPool;
@Autowired
public LogpoliceJedisAutoConfiguration(JedisSentinelPool jedisSentinelPool) {
this.jedisSentinelPool = jedisSentinelPool;
}
@Bean
public JedisUtils jedisUtils() {
return new JedisUtils(jedisSentinelPool);
}
}
<file_sep>/src/main/java/com/logpolice/domain/repository/ExceptionNoticeRepository.java
package com.logpolice.domain.repository;
import com.logpolice.domain.entity.ExceptionNotice;
import com.logpolice.infrastructure.enums.NoticeSendEnum;
/**
* 消息仓储层
*
* @author huang
* @date 2019/8/28
*/
public interface ExceptionNoticeRepository {
/**
* 获取消息类型
*
* @return 消息类型
*/
NoticeSendEnum getType();
/**
* 推送消息
*
* @param exceptionNotice 异常信息
*/
void send(ExceptionNotice exceptionNotice);
}
<file_sep>/src/main/java/com/logpolice/infrastructure/config/LogpoliceRedisTemplateAutoConfiguration.java
package com.logpolice.infrastructure.config;
import com.logpolice.infrastructure.utils.redis.RedisTemplateUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.data.redis.RedisAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.data.redis.core.RedisTemplate;
/**
* redis配置自动装配
*
* @author huang
* @date 2019/8/28
*/
@Configuration
@AutoConfigureAfter({LogpoliceMailAutoConfiguration.class, LogpoliceDingDingAutoConfiguration.class})
@ConditionalOnClass(RedisTemplate.class)
@Import(RedisAutoConfiguration.class)
public class LogpoliceRedisTemplateAutoConfiguration {
private final RedisTemplate redisTemplate;
@Autowired
public LogpoliceRedisTemplateAutoConfiguration(RedisTemplate redisTemplate) {
this.redisTemplate = redisTemplate;
}
@Bean
public RedisTemplateUtils redisTemplateUtils() {
return new RedisTemplateUtils(redisTemplate);
}
}
<file_sep>/src/main/java/com/logpolice/infrastructure/properties/LogpoliceFeiShuProperties.java
package com.logpolice.infrastructure.properties;
/**
* 飞书报警钉钉配置
*
* @author huang
* @date 2019/8/29
*/
public interface LogpoliceFeiShuProperties {
/**
* webhook
*/
String getFeiShuWebHook();
}
<file_sep>/src/main/java/com/logpolice/infrastructure/config/LogpoliceDingDingAutoConfiguration.java
package com.logpolice.infrastructure.config;
import com.logpolice.infrastructure.properties.LogpoliceDingDingProperties;
import com.logpolice.infrastructure.rpc.DingDingNoticeRpc;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
/**
* 钉钉配置自动装配
*
* @author huang
* @date 2019/8/28
*/
@Configuration
@AutoConfigureAfter(LogpoliceAutoConfiguration.class)
public class LogpoliceDingDingAutoConfiguration {
private final LogpoliceDingDingProperties logpoliceDingDingProperties;
@Autowired
public LogpoliceDingDingAutoConfiguration(LogpoliceDingDingProperties logpoliceDingDingProperties) {
this.logpoliceDingDingProperties = logpoliceDingDingProperties;
}
@Bean
public DingDingNoticeRpc dingDingNoticeRpc() {
return new DingDingNoticeRpc(logpoliceDingDingProperties);
}
}
<file_sep>/src/main/java/com/logpolice/infrastructure/rpc/LockUtilsLocalCache.java
package com.logpolice.infrastructure.rpc;
import com.logpolice.infrastructure.enums.NoticeDbTypeEnum;
import com.logpolice.infrastructure.utils.LockUtils;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicInteger;
/**
* jvm锁
*
* @author huang
* @date 2019/9/3
*/
public class LockUtilsLocalCache implements LockUtils {
private final Map<String, AtomicInteger> versionMap;
public LockUtilsLocalCache(Map<String, AtomicInteger> versionMap) {
this.versionMap = versionMap;
}
@Override
public NoticeDbTypeEnum getType() {
return NoticeDbTypeEnum.LOCAL_CACHE;
}
@Override
public boolean lock(String key) {
        // initialize at 0 so the first incrementAndGet() yields 1 and the first caller can acquire the lock
        AtomicInteger version = versionMap.computeIfAbsent(key, v -> new AtomicInteger(0));
        return Objects.equals(version.incrementAndGet(), 1);
}
@Override
public void unlock(String key) {
AtomicInteger version = versionMap.get(key);
if (Objects.nonNull(version)) {
version.set(0);
}
}
}
<file_sep>/README.md
# logpolice-spring-boot-starter: a spring-boot-starter framework for log exception alert notifications
## Note:
This version supports <dynamically loaded> alert configuration; if you do not need that, switch to the master branch, which supports application.properties
## Background:
Bugs cannot be avoided in any project. A production environment cannot be debugged as conveniently as a local one, so you cannot learn about online incidents right away. We therefore need an exception notification feature so that developers can find problem spots before users run into the bug, avoiding unnecessary production incidents.
If you capture all global exceptions, some of them are not ones we care about. In that case, consider using log-based log.error() calls to actively alert developers and capture the exact exception stack trace. To avoid message bombardment when pushing exception notifications, the push strategy can be driven by custom configuration.
This project was built around the requirements above, combined with DDD (domain-driven design), to make message alerting easy to integrate
## Features
1. Listens to log.error(), pushes stack traces asynchronously, quick to integrate
2. Provides push strategies to avoid message bombardment (over-frequency/over-time)
3. Provides local and redis data storage, configurable as needed (local by default)
4. Provides DingTalk and email push types, configurable as needed (DingTalk by default)
5. Provides an exception filtering whitelist
6. This version provides dynamic configuration of alert properties
## Requirements



## Current version

## Quick start (local cache & DingTalk push by default)
(DingTalk push and local cache by default. If needed, the configuration can be changed to email or redis exception storage)
1. Run ``mvn clean install`` in this project to package it into your local repository.
2. Add the following dependency to the ``pom.xml`` of the consuming project
```
<dependency>
<groupId>com.logpolice</groupId>
<artifactId>logpolice-spring-boot-starter</artifactId>
<version>1.2.2-hot</version>
</dependency>
```
3. Add a new class to your project implementing LogpoliceProperties, LogpoliceDingDingProperties:
```
@Component
public class LogpoliceAcmProperties implements LogpoliceProperties, LogpoliceDingDingProperties {
@Override
public String getAppCode() {
return "工程名";
}
@Override
public String getLocalIp() {
return "工程地址";
}
@Override
public Boolean getEnabled() {
return true;
}
@Override
public Long getTimeInterval() {
return 60 * 5;
}
@Override
public Boolean getEnableRedisStorage() {
return true;
}
@Override
public NoticeSendEnum getNoticeSendType() {
return NoticeSendEnum.DING_DING;
}
@Override
public Set<String> getExceptionWhiteList() {
return new HashSet<>();
}
@Override
public Set<String> getClassWhiteList() {
return new HashSet<>();
}
}
```
4. DingTalk configuration: [DingTalk robot](https://open-doc.dingtalk.com/microapp/serverapi2/krgddi "custom robot")
5. Once the above is configured you can write a demo test. First create logback.xml
```
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<!-- encoder 默认配置为PatternLayoutEncoder -->
<!-- encoder defaults to PatternLayoutEncoder -->
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<appender name="LogDingDingAppender" class="com.logpolice.port.LogSendAppender"/>
<root level="ERROR">
<appender-ref ref="LogDingDingAppender"/>
</root>
</configuration>
```
Note: if you already have a logback.xml, just reference the LogSendAppender class
```
<appender name="LogDingDingAppender" class="com.logpolice.port.LogSendAppender"/>
<root level="ERROR">
<appender-ref ref="LogDingDingAppender"/>
</root>
```
6. Then write a test class. The exception stack trace must be logged explicitly, otherwise the log cannot capture it:
```
@RunWith(SpringRunner.class)
@SpringBootTest(classes = DemoApplication.class)
public class DemoApplicationTest1s {
private Logger log = LoggerFactory.getLogger(DemoApplicationTest1s.class);
@Test
public void test1() {
try {
int i = 1 / 0;
} catch (Exception e) {
log.error("哈哈哈哈,param:{}, error:{}", 1, e);
}
}
}
```
Push result when log.error() includes the exception (DingTalk/email)



Push result when log.error() does not include the exception (DingTalk/email)


## Full configuration
1. Common configuration (* required), implement the LogpoliceProperties interface
```
* 1. Application name
* 2. Application address
3. Log alert switch (default: off)
4. Log alert clean-up interval (default: 21600 seconds = 6 hours)
5. Notification frequency type: over-time/over-frequency (default: over-time)
6. Over-time interval (default: 300 seconds = 5 minutes)
7. Over-frequency count (default: 10 times)
8. Message push type: DingTalk/email (default: DingTalk)
9. Redis switch (default: off)
10. Redis cache key (default: logpolice_exception_statistic:)
11. Exception whitelist (default: none)
12. Class file whitelist (default: none)
13. Log pattern (default: none)
```
2. DingTalk configuration (* required if used), implement the LogpoliceDingDingProperties interface
```
* DingTalk webhook
Mobile numbers of the users to @ (default: none)
Message type, fixed to text (default: text)
true when @-ing everyone, otherwise false (default: false)
```
3. Email configuration (* required if used), implement the LogpoliceMailProperties interface
```
#Alert configuration, implemented via the interface
* Sender (must be consistent with the spring mail configuration)
* Recipients
CC (default: none)
BCC (default: none)
#application.properties spring mail configuration
* spring.mail.host=smtp.qq.com
* spring.mail.username=<EMAIL>
* spring.mail.password=<PASSWORD>
* spring.mail.default-encoding=UTF-8
* spring.mail.properties.mail.smtp.ssl.enable=true
* spring.mail.properties.mail.imap.ssl.socketFactory.fallback=false
* spring.mail.properties.mail.smtp.ssl.socketFactory.class=com.fintech.modules.base.util.mail.MailSSLSocketFactory
* spring.mail.properties.mail.smtp.auth=true
* spring.mail.properties.mail.smtp.starttls.enable=true
* spring.mail.properties.mail.smtp.starttls.required=true
```
4. Feishu configuration (* required if used), implement the LogpoliceFeiShuProperties interface
```
* Feishu webhook
```
## Common configuration
1. Push type (DingTalk/email, DingTalk by default)
```
@Override
public NoticeSendEnum getNoticeSendType() {
return NoticeSendEnum.DING_DING;
}
```
2. Push strategy (over-time interval / over-frequency count, over-time by default)
```
@Override
public NoticeFrequencyType getFrequencyType() {
return NoticeFrequencyType.SHOW_COUNT;
}
@Override
public Long getTimeInterval() {
return 30;
}
```
```
@Override
public NoticeFrequencyType getFrequencyType() {
return NoticeFrequencyType.TIMEOUT;
}
@Override
public Long getShowCount() {
return 10;
}
```
3. Log data reset interval, exception whitelist
```
@Override
public Long getCleanTimeInterval() {
return 10800;
}
@Override
public Set<String> getExceptionWhiteList() {
return Arrays.stream("java.lang.ArithmeticException".split(",")).collect(Collectors.toSet());
}
```
## Redis integration
1. Modify the exception redis switch (see application.properties / interface implementation)
```
@Override
public Boolean getEnableRedisStorage() {
return true;
}
@Override
public String getExceptionRedisKey() {
return xxx_xxxx_xxxx;
}
```
2. spring-boot-starter-data-redis must be added
```
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
```
3. Add the redis configuration to application.properties
```
spring.redis.database=0
spring.redis.host=xx.xx.xx.xxx
spring.redis.port=6379
spring.redis.password=<PASSWORD>
```
## Email integration
1. For email notifications, add the following dependency to ``pom.xml``
```
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-mail</artifactId>
</dependency>
```
2. Implement LogpoliceMailProperties
```
public class LogpoliceAcmProperties implements LogpoliceProperties,LogpoliceMailProperties {
......common configuration omitted
@Override
public String getFrom() {
return "<EMAIL>";
}
@Override
public String[] getTo() {
        return new String[]{"<EMAIL>"};
}
@Override
public String[] getCc() {
return new String[0];
}
@Override
public String[] getBcc() {
return new String[0];
}
}
```
3. Add the mail configuration to application.properties (163, qq and other providers may differ)
```
spring.mail.host=smtp.qq.com
spring.mail.username=<EMAIL>
spring.mail.password=<PASSWORD>
spring.mail.default-encoding=UTF-8
spring.mail.properties.mail.smtp.ssl.enable=true
spring.mail.properties.mail.imap.ssl.socketFactory.fallback=false
spring.mail.properties.mail.smtp.ssl.socketFactory.class=com.fintech.modules.base.util.mail.MailSSLSocketFactory
spring.mail.properties.mail.smtp.auth=true
spring.mail.properties.mail.smtp.starttls.enable=true
spring.mail.properties.mail.smtp.starttls.required=true
```
注:有任何好的建议可以联系 qq:379198812,感谢支持 | af01be26b1b3bdbe8f9f0eb437864ebfb86a7bea | [
"Markdown",
"Java"
] | 18 | Java | yuanshaowu/logpolice-spring-boot-starter | 34a9c9e327b9a7d7be4478bd608cc5fe49841f05 | 9a39ba10fcebb7bdb5bb41194b460888f7ff3e83 | |
refs/heads/master | <repo_name>razemint/statistical-distribution<file_sep>/source.py
import pip
import seaborn as sns
from scipy import stats
import numpy as np
import matplotlib.pyplot as plt
from IPython import get_ipython
np.random.seed(2019)
sns.set_palette("deep", desat=.6)
sns.set_context(rc={"figure.figsize": (8, 4)})
mu, sigma = 0, 0.2
datos = np.random.normal(mu, sigma, 1000)
cuenta, cajas, ignorar = plt.hist(datos, 20)
plt.ylabel('memo')
plt.xlabel('irvinho')
plt.title('memo sobre irvinho')
plt.show()
<file_sep>/crab_a2.py
from tkinter import *
#border = the window frame, edges and everything
border = Tk()
border.title("Distribucion de probabilidad")
border.resizable(0, 0)
border.geometry("800x650")
border.iconbitmap("sources/bordericon_a.ico")
border.config(bg="#3F3F3F")
#variables
box_text_x_num = StringVar()
box_text_y_num = StringVar()
#mainFrame = the main window
mainFrame = Frame(border)
mainFrame.config(bg="#4c4c4c")
mainFrame.config(width="790", height="645")
mainFrame.pack()
#mainFrameImg = PhotoImage(file = "sources/mainframeimg.png")
#mainLabel = FES title
mainLabel = Label(mainFrame, text="Facultad de Estudios Superiores\nAragón")
mainLabel.place(x=212, y=250)
mainLabel.config(fg="#FFFFFF", bg="#3F3F3F", font=("Verdana", 20))
mainLabel.pack()
#crabFrame = frame for the text boxes
crabFrame = Frame(mainFrame)
crabFrame.config(bg="#4c4c4c")
crabFrame.pack()
#box_text_x = box one
box_text_x = Entry(crabFrame, textvariable = box_text_x_num)
box_text_x.grid(row=0, column=1)
box_text_x_label = Label(crabFrame, text="crab that goes in X:")
box_text_x_label.grid(row=0, column=0, pady=10)
box_text_x_label.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 10))
#box_text_y = box two
box_text_y = Entry(crabFrame, textvariable = box_text_y_num)
box_text_y.grid(row=1, column=1)
box_text_y_label = Label(crabFrame, text="crab that goes in Y:")
box_text_y_label.grid(row=1, column=0, pady=10)
box_text_y_label.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 10))
#def get_x = stores the X value
def box_text_x_get():
box_text_x_num.get()
#def get_y = stores the Y value
def box_text_y_get():
box_text_y_num.get()
#button_end = button to solve; command must be a single callable, so both getters are wrapped in a lambda
button_end=Button(mainFrame, text="Resolver", command=lambda: (box_text_x_get(), box_text_y_get()))
button_end.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 15))
border.mainloop()<file_sep>/inicio.py
import numpy as np
from cal.simpson import simpson13, fx
n = int(input("enter the number of iterations\n"))
a = float(input("enter the lower bound of the interval\n"))
b = float(input("enter the upper bound of the interval\n"))
f = str(input("enter the function\n"))
print("the result of the integral is: ")
print(simpson13(n, a, b, f))
<file_sep>/README.md
# statistical-distribution
statistical distribution with python
<file_sep>/test_.py
print("will you do a sum or a subtraction?")
inputa = str(input())
if inputa == ("suma"):
    resultado = 1+1
    print(resultado)
elif inputa == ("resta"):
    resultado = 1-1
    print(resultado)
else:
    print("invalid option")
<file_sep>/crab_a1.py
from tkinter import *
#border = the window frame, edges and everything
border = Tk()
border.title("Distribucion de probabilidad")
border.resizable(0, 0)
border.geometry("800x650")
border.iconbitmap("sources/bordericon_a.ico")
border.config(bg="#3F3F3F")
#mainFrame = the main window
mainFrame = Frame(border)
mainFrame.config(bg="#4c4c4c")
mainFrame.config(width="790", height="645")
mainFrame.pack()
mainFrameImg = PhotoImage(file = "sources/mainframeimg.png")
#mainLabel = FES title
mainLabel = Label (mainFrame, text="Facultad de Estudios Superiores\nAragon", justify="center")
mainLabel.place(x=212, y=30)
mainLabel.place(bordermode="inside")
mainLabel.config(fg="#FFFFFF")
mainLabel.config(bg="#4c4c4c")
mainLabel.config(font=("Verdana", 18))
border.mainloop()<file_sep>/crab_a3.py
from tkinter import *
#border = the window frame, edges and everything
border = Tk()
border.title("Distribucion de probabilidad")
border.resizable(0, 0)
border.geometry("800x650")
border.iconbitmap("sources/bordericon_a.ico")
border.config(bg="#3F3F3F")
#variables
box_text_simp_int_inf_num = StringVar()
box_text_simp_int_sup_num = StringVar()
box_text_simp_init_num = StringVar()
box_text_simp_func_num = StringVar()
#mainFrame = the main window
mainFrame = Frame(border)
mainFrame.config(bg="#4c4c4c")
mainFrame.config(width="790", height="645")
mainFrame.pack()
#mainFrameImg = PhotoImage(file = "sources/mainframeimg.png")
#mainLabel = FES title
mainLabel = Label(mainFrame, text="Facultad de Estudios Superiores\nAragón")
mainLabel.place(x=212, y=250)
mainLabel.config(fg="#FFFFFF", bg="#3F3F3F", font=("Verdana", 20))
mainLabel.pack()
#crabFrame = frame for the text boxes
crabFrame = Frame(mainFrame)
crabFrame.config(bg="#4c4c4c")
crabFrame.pack()
#simpFrame = frame for the text boxes
simpFrame = Frame(crabFrame)
simpFrame.config(bg="#4c4c4c")
simpFrame.pack()
#----------------Simpson 1/3------------------
#box_text_simp_label = "Metodo Simpson 1/3"
box_text_simp_label = Label(simpFrame, text="Metodo Simpson 1/3")
box_text_simp_label.grid(row=0, column=0)
box_text_simp_label.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 18))
#box_text_simp_int_inf = box one
box_text_simp_int_inf = Entry(simpFrame, textvariable = box_text_simp_int_inf_num)
box_text_simp_int_inf.grid(row=1, column=1)
box_text_simp_int_inf_label = Label(simpFrame, text="Intervalo Inferior:")
box_text_simp_int_inf_label.grid(row=1, column=0, pady=10)
box_text_simp_int_inf_label.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 10))
#box_text_simp_int_sup = box two
box_text_simp_int_sup = Entry(simpFrame, textvariable = box_text_simp_int_sup_num)
box_text_simp_int_sup.grid(row=2, column=1)
box_text_simp_int_sup_label = Label(simpFrame, text="Intervalo Superior:")
box_text_simp_int_sup_label.grid(row=2, column=0, pady=10)
box_text_simp_int_sup_label.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 10))
#box_text_simp_init = box three
box_text_simp_init = Entry(simpFrame, textvariable = box_text_simp_init_num)
box_text_simp_init.grid(row=3, column=1)
box_text_simp_init_label = Label(simpFrame, text="Iteraciones:")
box_text_simp_init_label.grid(row=3, column=0, pady=10)
box_text_simp_init_label.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 10))
#box_text_simp_func = box four
box_text_simp_func = Entry(simpFrame, textvariable = box_text_simp_func_num)
box_text_simp_func.grid(row=4, column=1)
box_text_simp_func_label = Label(simpFrame, text="Funcion:")
box_text_simp_func_label.grid(row=4, column=0, pady=10)
box_text_simp_func_label.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 10))
def box_text_simp_int_inf_get():
box_text_simp_int_inf_num.get()
def box_text_simp_int_sup_get():
box_text_simp_int_sup_num.get()
def box_text_simp_init_get():
box_text_simp_init_num.get()
def box_text_simp_func_get():
box_text_simp_func_num.get()
#button_simp = button to solve
button_simp=Button(simpFrame, text="Resolver")
button_simp.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 15))
button_simp.grid(row=5, column=0, pady=10)
#box_text_simp_ans = box five (result)
box_text_simp_ans = Entry(simpFrame, text="")
box_text_simp_ans.grid(row=5, column=1)
#----------------Simpson 1/3------------------
border.mainloop()<file_sep>/test.py
import scipy.stats as ss
import numpy as np
import matplotlib.pyplot as plt
a = int(input("enter mu (mean)"))
b = int(input("enter sigma (standard deviation)"))
c = int(input("enter the sample size"))
x = np.random.normal(a, b, c)
print(x)
print(np.var(x))
print(np.average(x))
<file_sep>/Untitled-2.py
from tkinter import *
root = Tk()
def testVal(inStr,acttyp):
if acttyp == '1': #insert
if not inStr.isdigit():
return False
return True
entry = Entry(root, validate="key")
entry['validatecommand'] = (entry.register(testVal),'%P','%d')
entry.pack()
root.mainloop()<file_sep>/simpson_ship.py
from math import *
import numpy as np
import matplotlib.pyplot as plt
def simpson13(n, a, b, f):
h = (b - a) / n
suma = 0.0
for i in range(1, n):
x = a + i * h
if(i % 2 == 0):
suma = suma + 2 * fx(x, f)
else:
suma = suma + 4 * fx(x, f)
suma = suma + fx(a, f) + fx(b, f)
rest = suma * (h / 3)
return (rest)
def fx(x, f):
return eval(f)
<file_sep>/cal/normal.py
import scipy.stats as ss
import numpy as np
import matplotlib.pyplot as plt
media, desviacion_estandar = 16.666, 3.7267
normal = ss.norm(media, desviacion_estandar)
x = np.linspace(normal.ppf(0.01),
normal.ppf(0.99), 100)
fp = normal.pdf((x))
plt.plot(x, fp)
plt.title('Distribución de probabilidad Normal')
plt.ylabel('probabilidad')
plt.xlabel('valores')
plt.show()
<file_sep>/crab_a5_1.py
from tkinter import *
import os
#buttonSimpPress = solves the problem (launches the solver script)
def buttonSimpPress():
simpCommand = '\"\"inicio.py"\"'
os.system(simpCommand)
def buttonNormPress():
normCommand = '\"\"cal\\normal.py"\"'
os.system(normCommand)
#border = the window frame, edges and everything
border = Tk()
border.title("Distribucion de probabilidad")
border.resizable(0, 0)
border.geometry("800x650")
border.iconbitmap("sources/bordericon_a.ico")
border.config(bg="#3F3F3F")
#mainFrame = the main window
mainFrame = Frame(border)
mainFrame.config(bg="#4c4c4c")
mainFrame.config(width="790", height="645")
mainFrame.pack()
#mainFrameImg = PhotoImage(file = "sources/mainframeimg.png")
#mainLabel = FES title
mainLabel = Label(mainFrame, text="Facultad de Estudios Superiores\nAragón")
mainLabel.place(x=212, y=250)
mainLabel.config(fg="#FFFFFF", bg="#3F3F3F", font=("Verdana", 20))
mainLabel.pack()
#crabFrame = frame for the text boxes
crabFrame = Frame(mainFrame)
crabFrame.config(bg="#4c4c4c")
crabFrame.pack()
#simpFrame = frame for the text boxes
simpFrame = Frame(crabFrame)
simpFrame.config(bg="#4c4c4c")
simpFrame.pack()
#----------------Simpson 1/3------------------
#box_text_simp_label = "Metodo Simpson 1/3"
box_text_simp_label = Label(simpFrame, text="Metodo Simpson 1/3")
box_text_simp_label.grid(row=0, column=0)
box_text_simp_label.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 18))
#button_simp = button to solve
button_simp=Button(simpFrame, text="Introducir Valores", command=buttonSimpPress)
button_simp.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 15))
button_simp.grid(row=1, column=0, pady=10)
#----------------Simpson 1/3------------------
#------------Distribución normal--------------
#box_text_norm_label = "Distribución Normal"
box_text_norm_label = Label(simpFrame, text="Metodo de Distribución Normal")
box_text_norm_label.grid(row=2, column=0)
box_text_norm_label.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 18))
#button_norm = button to solve
button_norm=Button(simpFrame, text="Introducir Valores", command=buttonNormPress)
button_norm.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 15))
button_norm.grid(row=3, column=0, pady=10)
#------------Distribución normal--------------
#-----------------Varianza--------------------
#box_text_norm_label = "Distribución Normal"
box_text_vari_label = Label(simpFrame, text="Varianza")
box_text_vari_label.grid(row=4, column=0)
box_text_vari_label.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 18))
#button_vari = button to solve
button_vari=Button(simpFrame, text="Introducir Valores", command=buttonNormPress)
button_vari.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 15))
button_vari.grid(row=5, column=0, pady=10)
#-----------------Varianza--------------------
#--------------Valor Esperado-----------------
#box_text_norm_label = "Distribución Normal"
box_text_exp_label = Label(simpFrame, text="Valor Esperado")
box_text_exp_label.grid(row=6, column=0)
box_text_exp_label.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 18))
#button_exp = button to solve
button_exp=Button(simpFrame, text="Introducir Valores", command=buttonNormPress)
button_exp.config(fg="#FFFFFF", bg="#4c4c4c", font=("Verdana", 15))
button_exp.grid(row=7, column=0, pady=10)
#--------------Valor Esperado-----------------
border.mainloop() | d2786bf5ba0875776ea67d369148bf5eee99f7f5 | [
"Markdown",
"Python"
] | 12 | Python | razemint/statistical-distribution | a1c754d41cfb5a3e047afb4ba41c7e8abbaafbc6 | 70df839e527d07eeb6f4a4b02a76781076d2104c | |
refs/heads/master | <repo_name>jasonsahl/wgfast<file_sep>/README.md
#### *WG-FAST*
The whole genome focused array SNP typing (*WG-FAST*) pipeline
written by: <NAME>
Email: jasonsahl at gmail dot com
for a more comprehensive overview, look at the Manual
#### overview
The goal of *WG-FAST* is to phylogenetically genotype an unknown
sample in the context of a well-studied pathogen. This sample
can either be a metagenomics dataset, a metatranscriptomics dataset,
or a single-isolate sequencing dataset.
#### Installation
1. The easiest way to install is through conda:
```conda create -n wgfast python=3.6```
```source activate wgfast```
```conda install -c bioconda gatk4=4.2.6.1 picard=2.27.4 raxml samtools bbmap dendropy minimap2 biopython```
#You might need to install Biopython with: pip install biopython
2. Download the wgfast github repository, install:
```git clone https://github.com/jasonsahl/wgfast.git```
```python setup.py build```
```python setup.py install```
3. Open the script (wgfast.py) with a text editor and change the path to your *WG-FAST* installation directory.
For example:
WGFAST_PATH="/Users/jsahl/wgfast"
4. To verify your installation, enter the wgfast directory and type the command below. If everything
is working correctly, all tests should pass:
```python tests/test_all_functions.py```
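#### Example run (illustrative)
Once installed, *WG-FAST* is pointed at a reference directory (SNP matrix, tree, and reference fasta) and a directory of gzipped fastq reads. The flag names below are assumptions for illustration only — run ```python wgfast.py -h``` to see the options supported by your version:
```python wgfast.py -r /path/to/reference_directory -d /path/to/fastq_directory -p 4```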
<file_sep>/wg_fast/util.py
from __future__ import division
import os,os.path
import re
import logging
import subprocess
import sys
from subprocess import Popen
import time
try:
from Bio import SeqIO
from Bio import Phylo
except:
print("BioPython is not in your PYTHONPATH, but needs to be")
sys.exit()
try:
import dendropy
from dendropy import treecalc
except:
print("dendropy is not installed, but needs to be")
sys.exit()
import glob
from operator import itemgetter
import threading
import collections
import random
#logPrint stuff
OUTSTREAM = sys.stdout
ERRSTREAM = sys.stderr
DEBUG = False
def logPrint(msg, stream=None):
if stream is None:
stream = OUTSTREAM
stream.write('LOG: %s - %s\n' % (timestamp(), removeRecursiveMsg(msg)))
stream.flush()
def errorPrint(msg, stream=None):
if stream is None:
stream = ERRSTREAM
stream.write('ERROR: %s - %s\n' % (timestamp(), removeRecursiveMsg(msg)))
stream.flush()
def debugPrint(fmsg, stream=None):
    """In this case fmsg is a function, so the work is only done if debugging is on"""
if DEBUG:
if stream is None:
stream = ERRSTREAM
stream.write('DEBUG: %s - %s\n' % (timestamp(), removeRecursiveMsg(fmsg())))
stream.flush()
def timestamp():
return time.strftime('%Y/%m/%d %H:%M:%S')
def removeRecursiveMsg(msg):
"""
This takes a message and if it starts with something that looks like
a message generated with these tools it chops it off. Useful if using
one of these logging functions to print output from a program using
the same logging functions
"""
if msg.startswith('ERROR: ') or msg.startswith('DEBUG: ') or msg.startswith('LOG: '):
return msg.split(' - ', 1)[1]
else:
return msg
def mp_shell(func, params, numProc):
from multiprocessing import Pool
p = Pool(numProc)
out = p.map(func, params)
p.terminate()
return out
def report_stats(results,name,output):
outfile = open(output, "w")
total_size = []
mapped_size = []
with open(results) as infile:
for line in infile:
newline = line.strip()
fields = newline.split()
"""this just makes sure that the file looks correct"""
if len(fields)==3:
total_size.append(float(fields[1]))
mapped_size.append(float(fields[2]))
else:
print("coverage file for %s is malformed" % name)
print("-------------------------")
try:
total_summed = sum(total_size)
total_mapped = sum(mapped_size)
mapped_value = (total_mapped/total_summed)*100
outfile.write(str(name)+"\t"+str.format('{0:.4f}',mapped_value)+"\n")
outfile.close()
except:
pass
def merge_files_by_column(column,file_1,file_2,out_file):
    """Takes two files and merges their columns based on the given column index. It is assumed
    that line ordering in the files may not match, so we read both files into memory
    and join them"""
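    # Example with hypothetical inputs: if file_1 contains 'chrom1 100' and file_2 contains
    # 'chrom1 55.5', joining on column 0 writes 'chrom1<TAB>100<TAB>55.5' to out_file.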
join_map = {}
with open(file_1) as my_file_1:
for line in my_file_1:
line.strip()
row = line.split()
column_value = row.pop(column)
join_map[column_value] = row
with open(file_2) as my_file_2:
for line in my_file_2:
line.strip()
row = line.split()
column_value = row.pop(column)
if column_value in join_map:
join_map[column_value].extend(row)
fout = open(out_file, 'w')
for k, v in join_map.items():
fout.write('\t'.join([k] + v) + '\n')
fout.close()
def sum_coverage(coverage,cov,name):
outfile = open("%s.amount_covered.txt" % name, "w")
another_outfile = open("%s.sum_covered.txt" % name, "w")
all = []
my_dict = {}
cov_dict = {}
with open(coverage) as my_file:
for line in my_file:
fields=line.split()
fields = map(lambda s: s.strip(), fields)
all.append(fields)
for x, y in all:
"""Here we're only counting it if it is above the given coverage threshold"""
try:
cov_dict[x].append(int(y))
except KeyError:
cov_dict[x] = [int(y)]
if int(y)>=int(cov):
try:
my_dict[x].append(y)
except KeyError:
my_dict[x] = [y]
else:
pass
for k,v in my_dict.items():
outfile.write(str(k)+"\t"+str(len(v))+"\n")
for k,v in cov_dict.items():
another_outfile.write(str(k)+"\t"+str(sum(v))+"\n")
outfile.close()
another_outfile.close()
def test_file(option, opt_str, value, parser):
try:
with open(value): setattr(parser.values, option.dest, value)
except IOError:
print('%s file cannot be opened' % option)
sys.exit()
def test_dir(option, opt_str, value, parser):
if os.path.exists(value):
setattr(parser.values, option.dest, value)
else:
print("directory of fastqs cannot be found")
sys.exit()
def test_filter(option, opt_str, value, parser):
if "F" in value:
setattr(parser.values, option.dest, value)
elif "T" in value:
setattr(parser.values, option.dest, value)
else:
print("option not supported. Only select from T and F")
sys.exit()
def test_methods(option, opt_str, value, parser):
if "ML" in value:
setattr(parser.values, option.dest, value)
elif "MP" in value:
setattr(parser.values, option.dest, value)
else:
print("option not supported. Only select from MP or ML")
sys.exit()
def test_models(option, opt_str, value, parser):
if "GTRGAMMA" in value:
setattr(parser.values, option.dest, value)
elif "ASC_GTRGAMMA" in value:
setattr(parser.values, option.dest, value)
else:
print("substitution model is not supported")
sys.exit()
def get_seq_length(ref, name):
    """uses BioPython in order to calculate the length of
each fasta entry in the reference fasta"""
outfile = open("%s.tmp.txt" % name, "w")
for record in SeqIO.parse(open(ref), "fasta"):
outfile.write(str(record.id)+"\t"+str(len(record.seq))+"\n")
outfile.close()
def remove_column(temp_file, name):
outfile = open("%s.coverage.out" % name, "w")
my_fields = []
with open(temp_file) as my_file:
for line in my_file:
fields=line.split()
del fields[1]
my_fields.append(fields)
for x in my_fields:
outfile.write("\t".join(x))
outfile.write("\n")
outfile.close()
def get_seq_name(in_fasta):
"""used for renaming the sequences - tested"""
return os.path.basename(in_fasta)
def get_readFile_components(full_file_path):
"""function adapted from:
https://github.com/katholt/srst2 - tested"""
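    # e.g. '/data/sample_R1.fastq.gz' -> ('/data', 'sample_R1', '.fastq.gz'),
    # while an uncompressed '/data/sample_R1.fastq' -> ('/data', 'sample_R1', '.fastq')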
(file_path,file_name) = os.path.split(full_file_path)
m1 = re.match("(.*).gz",file_name)
ext = ""
if m1 is not None:
ext = ".gz"
file_name = m1.groups()[0]
(file_name_before_ext,ext2) = os.path.splitext(file_name)
full_ext = ext2+ext
return file_path,file_name_before_ext,full_ext
def read_file_sets(dir_path):
"""match up pairs of sequence data, adapted from
https://github.com/katholt/srst2 will be tough to test
with variable names and read paths"""
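    # Pairs are detected from Illumina-style names, e.g.:
    #   sample1_S1_L001_R1_001.fastq.gz + sample1_S1_L001_R2_001.fastq.gz -> one paired set
    #   sample1_R1.fastq.gz + sample1_R2.fastq.gz                         -> one paired set
    # Anything without a recognizable _R1/_R2 mate is kept as a single-end set.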
fileSets = {}
forward_reads = {}
reverse_reads = {}
num_paired_readsets = 0
num_single_readsets = 0
for infile in glob.glob(os.path.join(dir_path, "*.fastq.gz")):
(file_path,file_name_before_ext,full_ext) = get_readFile_components(infile)
m=re.match("(.*)(_S.*)(_L.*)(_R.*)(_.*)", file_name_before_ext)
if m is None:
#m=re.match("(.*)("+"_R1"+")(_.*)$",file_name_before_ext)
m=re.match("(.*)("+"_R1"+")(.*)$",file_name_before_ext)
if m is not None:
(baseName,read) = m.groups()[0], m.groups()[1]
forward_reads[baseName] = infile
else:
#m=re.match("(.*)("+"_R2"+")(_.*)$",file_name_before_ext)
m=re.match("(.*)("+"_R2"+")(.*)$",file_name_before_ext)
if m is not None:
(baseName,read) = m.groups()[0], m.groups()[1]
reverse_reads[baseName] = infile
else:
print("Could not determine forward/reverse read status for input file %s" % infile)
else:
baseName, read = m.groups()[0], m.groups()[3]
if read == "_R1":
forward_reads[baseName] = infile
elif read == "_R2":
reverse_reads[baseName] = infile
else:
print("Could not determine forward/reverse read status for input file")
fileSets[file_name_before_ext] = infile
num_single_readsets += 1
for sample in forward_reads:
if sample in reverse_reads:
fileSets[sample] = [forward_reads[sample],reverse_reads[sample]] # store pair
num_paired_readsets += 1
else:
fileSets[sample] = [forward_reads[sample]] # no reverse found
num_single_readsets += 1
logging.info('Warning, could not find pair for read:' + forward_reads[sample])
for sample in reverse_reads:
if sample not in fileSets:
fileSets[sample] = reverse_reads[sample] # no forward found
num_single_readsets += 1
logging.info('Warning, could not find pair for read:' + reverse_reads[sample])
if num_paired_readsets > 0:
logging.info('Total paired readsets found:' + str(num_paired_readsets))
if num_single_readsets > 0:
logging.info('Total single reads found:' + str(num_single_readsets))
return fileSets
def get_sequence_length(fastq_in):
from itertools import islice
from gzip import GzipFile
with GzipFile("%s" % fastq_in) as file:
head = list(islice(file, 2))
return len(head[1])
def _perform_workflow_run_loop_dev(data):
"""sample ID"""
idx = data[0]
"""path to data"""
f = data[1]
dir_path = data[2]
reference = data[3]
ref_coords = data[4]
coverage = data[5]
proportion = data[6]
matrix = data[7]
scratch_dir = data[8]
doc = data[9]
tmp_dir = data[10]
"""This is now the PATH to bbduk.sh"""
wgfast_path = data[11]
processors = data[12]
ploidy = data[13]
if os.path.isfile("%s.tmp.xyx.matrix" % idx):
pass
else:
"""This means that the data is paired end"""
if len(f)>1:
if os.path.isfile("%s.F.paired.fastq.gz" % idx):
pass
else:
length = int(get_sequence_length(f[0])/2)
try:
subprocess.check_call("bbduk.sh in=%s in2=%s ref=%s/bin/illumina_adapters_all.fasta out=%s.F.paired.fastq.gz out2=%s.R.paired.fastq.gz minlen=%s overwrite=true > /dev/null 2>&1" % (f[0],f[1],wgfast_path,idx,idx,length), shell=True)
except:
print("Read trimmer did not finish correctly")
sys.exit()
subprocess.check_call("minimap2 -ax sr %s/reference.fasta %s.F.paired.fastq.gz %s.R.paired.fastq.gz | samtools sort -l 0 -@ %s - | samtools view -F 4 -Su -o %s_renamed.bam -" % (scratch_dir,idx,idx,processors,idx),stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
else:
"""Single end read support"""
length = int(get_sequence_length(f[0])/2)
try:
subprocess.check_call("bbduk.sh -Xmx2g in=%s ref=%s/bin/illumina_adapters_all.fasta out=%s.single.fastq.gz minlen=%s overwrite=true ignorebadquality" % (f[0],wgfast_path,idx,length), stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
except:
print("Read trimmer did not finish correctly")
sys.exit()
subprocess.check_call("minimap2 -ax sr %s/reference.fasta %s.single.fastq.gz | samtools sort -l 0 -@ %s - | samtools view -F 4 -Su -o %s_renamed.bam -" % (scratch_dir,idx,processors,idx),stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
"""inserts read group information, required by new versions of GATK"""
try:
subprocess.check_call("picard AddOrReplaceReadGroups I=%s_renamed.bam O=%s_renamed_header.bam SORT_ORDER=coordinate RGID=%s RGLB=%s RGPL=illumina RGSM=%s RGPU=name CREATE_INDEX=true VALIDATION_STRINGENCY=SILENT" % (idx,idx,idx,idx,idx),stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
except:
time.sleep(5)
subprocess.check_call("picard AddOrReplaceReadGroups I=%s_renamed.bam O=%s_renamed_header.bam SORT_ORDER=coordinate RGID=%s RGLB=%s RGPL=illumina RGSM=%s RGPU=name CREATE_INDEX=true VALIDATION_STRINGENCY=SILENT" % (idx,idx,idx,idx,idx),stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
try:
subprocess.check_call("samtools index %s_renamed_header.bam > /dev/null 2>&1" % idx,stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
except:
print("problem indexing file with samtools..exiting")
sys.exit()
#TODO: be able to change the ploidy, which will affect the number of calls
subprocess.check_call("gatk HaplotypeCaller -R %s/reference.fasta -I %s_renamed_header.bam -O %s.test.vcf -ERC BP_RESOLUTION -ploidy %s" % (scratch_dir,idx,idx,ploidy), stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
subprocess.check_call("gatk GenotypeGVCFs -O %s.vcf.out -R %s/reference.fasta -V %s.test.vcf --include-non-variant-sites" % (idx,scratch_dir,idx), stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
#subprocess.check_call("bcftools filter -G 5 -g 5 %s.test.vcf2 > %s.vcf.out" % (idx,idx),stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
if "T" == doc:
subprocess.check_call("samtools depth -aa %s_renamed_header.bam > %s.coverage" % (idx,idx), shell=True)
remove_column("%s.coverage" % idx, idx)
sum_coverage("%s.coverage.out" % idx, coverage, idx)
merge_files_by_column(0,"ref.genome_size.txt", "%s.amount_covered.txt" % idx, "%s.results.txt" % idx)
merge_files_by_column(0,"ref.genome_size.txt", "%s.sum_covered.txt" % idx, "%s.cov.results.txt" % idx)
report_stats("%s.results.txt" % idx, idx, "%s_breadth.txt" % idx)
report_stats("%s.cov.results.txt" % idx, idx, "%s_sum_cov.txt" % idx)
else:
pass
#filtered.vcf will be created in this function
good_calls = process_vcf("%s.vcf.out" % idx, ref_coords, coverage, proportion, idx)
if int(good_calls) > 0:
make_temp_matrix("%s.filtered.vcf" % idx, matrix, idx)
else:
print("sample %s had no SNP calls and will not be inserted into the tree" % idx)
print("-------------------------")
def run_loop_dev(fileSets,dir_path,reference,processors,ref_coords,coverage,proportion,
matrix,scratch_dir,doc,tmp_dir,wgfast_path,ploidy):
files_and_temp_names = []
for idx, f in fileSets.items():
files_and_temp_names.append([idx,f,dir_path,reference,ref_coords,coverage,proportion,
matrix,scratch_dir,doc,tmp_dir,wgfast_path,processors,ploidy])
mp_shell(_perform_workflow_run_loop_dev,files_and_temp_names,processors)
def process_vcf(vcf, ref_coords, coverage, proportion, name):
"""finds SNPs that pass user-defined thresholds
for coverage and proportion - needs to look at tests"""
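    # A genotyped sample column such as 'GT:AD:DP:... -> 1:0,25:25:...' yields AD=[0,25] and DP=25,
    # so the alternate-allele proportion is 25/25; the SNP is kept only when DP >= coverage and that
    # proportion >= the user-supplied threshold, otherwise the position is written as 'N'.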
vcf_out = open("%s.filtered.vcf" % name, "w")
good_snps = []
mixed_snps = []
mixed_refs = []
ref_set = set(ref_coords)
with open(vcf) as vcf_in:
for line in vcf_in:
newline=line.strip("\n")
if newline.startswith('#'):
pass
elif newline.startswith('Java'):
pass
elif newline.startswith('/tmp'):
pass
elif newline.startswith('Try'):
pass
elif newline.startswith('INFO'):
pass
elif newline in ['\n', '\r\n']:
pass
else:
fields=newline.split()
"""for GATK, a period signifies a reference call.
First we want to look at the situation where this is
not the case"""
merged_fields=fields[0]+"::"+fields[1]
if merged_fields in ref_set:
#This indicates that the position is a SNP
if "." != fields[4] and len(fields[4]) == 1 and len(fields[3])==1 and fields[8]!="GT:AD:PGT:PID:PS":
if fields[6] == "LowQual":
pass
else:
snp_fields=fields[9].split(':')
if int(len(snp_fields))>2:
prop_fields=snp_fields[1].split(',')
fixed_coverage = int(float(snp_fields[2]))
if fixed_coverage>=coverage:
if int(prop_fields[1])/int(snp_fields[2])>=float(proportion):
vcf_out.write(fields[0]+"::"+fields[1]+"\t"+fields[4]+"\n")
good_snps.append("1")
else:
#Changed out a gap character with an N
vcf_out.write(fields[0]+"::"+fields[1]+"\t"+"N"+"\n")
mixed_snps.append("1")
"""if problems are encountered, throw in a gap. Could be too conservative"""
else:
vcf_out.write(fields[0]+"::"+fields[1]+"\t"+"N"+"\n")
else:
pass
#This is if the position is reference
elif "." in fields[4] and len(fields[4])==1 and len(fields[3])==1:
if fields[6] == "LowQual":
pass
else:
if len(fields) == 10:
if "DP" in fields[7]:
nosnp_fields=fields[7].split(';')
#This will provide the coverage
if "DP" in nosnp_fields[0]:
cov_fields=nosnp_fields[0].replace("DP=","")
elif "DP" in nosnp_fields[1]:
cov_fields=nosnp_fields[1].replace("DP=","")
fixed_coverage = int(float(cov_fields))
if fixed_coverage>=coverage:
vcf_out.write(fields[0]+"::"+fields[1]+"\t"+fields[3]+"\n")
else:
vcf_out.write(fields[0]+"::"+fields[1]+"\t"+"N"+"\n")
mixed_refs.append("1")
else:
vcf_out.write(fields[0]+"::"+fields[1]+"\t"+"N"+"\n")
mixed_refs.append("1")
#If it can't determine the status of the position, add an N
else:
vcf_out.write(fields[0]+"::"+fields[1]+"\t"+"N"+"\n")
vcf_out.close()
print("number of SNPs in genome %s = %s" % (name, str(len(good_snps))))
print("number of discarded SNPs in genome %s = %s" % (name, str(len(mixed_snps))))
print("number of discarded Reference positions in genome due to no coverage %s = %s" % (name, str(len(mixed_refs))))
print("-------------------------")
return len(good_snps)
def sort_information(x):
"""simple sort - tested"""
try:
fields = x.split("::")
return int(fields[1])
except:
raise TypeError("problem encountered parsing fields")
def matrix_to_fasta(matrix_in, outfile):
"""function to convert a SNP matrix to a multi-fasta file - tested"""
reduced = []
out_fasta = open(outfile, "w")
redux = []
with open(matrix_in) as my_file:
for line in my_file:
newline = line.strip()
fields = newline.split()
reduced.append(fields[1:])
test=map(list, zip(*reduced))
for x in test:
out_fasta.write(">"+str(x[0])+"\n")
out_fasta.write("".join(x[1:]))
out_fasta.write("\n")
redux.append(">"+str(x[0])+"".join(x[1:3]))
out_fasta.close()
return redux
def run_raxml(fasta_in, tree, out_class_file, insertion_method, parameters, model, suffix):
"""untested function, system calls"""
if "NULL" == parameters:
if "ASC_GTRGAMMA" == model:
args = ['raxmlHPC-SSE3', '-f', '%s' % insertion_method,
'-s', '%s' % fasta_in, '-m', '%s' % model, '-n', '%s' % suffix, '-t',
'%s' % tree, '--asc-corr=lewis', '--no-bfgs', '>', '/dev/null 2>&1']
else:
args = ['raxmlHPC-SSE3', '-f', '%s' % insertion_method,
'-s', '%s' % fasta_in, '-m', '%s' % model, '-n', '%s' % suffix, '-t',
'%s' % tree, '--no-bfgs', '>', '/dev/null 2>&1']
else:
if "ASC_GTRGAMMA" == model:
args = ['raxmlHPC-SSE3', '-f', '%s' % insertion_method,
'-s', '%s' % fasta_in, '-m', '%s' % model, '-n', '%s' % suffix, '-R', parameters, '-t',
'%s' % tree, '--asc-corr=lewis', '--no-bfgs', '>', '/dev/null 2>&1']
else:
args = ['raxmlHPC-SSE3', '-f', '%s' % insertion_method,
'-s', '%s' % fasta_in, '-m', '%s' % model, '-n', '%s' % suffix, '-R', parameters, '-t',
'%s' % tree, '--no-bfgs', '>', '/dev/null 2>&1']
try:
vcf_fh = open('%s.raxml.out' % suffix, 'w')
except:
logPrint('could not open raxml file')
try:
log_fh = open('%s.raxml.log' % suffix, 'w')
except:
logPrint('could not open log file')
#TODO: See why this doesn't catch many RAxML errors
try:
raxml_run = Popen(args, stderr=log_fh, stdout=vcf_fh)
raxml_run.wait()
except:
logPrint("sequence(s) were not inserted into tree!!!!!")
try:
os.system("sed 's/\[[^]]*\]//g' RAxML_labelledTree.%s > %s.tree_including_unknowns_noedges.tree" % (suffix, suffix))
subprocess.check_call("mv RAxML_labelledTree.%s %s_tree_including_unknowns_edges.tree" % (suffix, suffix) , shell=True)
except:
logPrint("error encountered running RAxML...check log file")
sys.exit()
try:
subprocess.check_call("cat RAxML_classificationLikelihoodWeights.%s >> %s" % (suffix, out_class_file), shell=True)
except:
pass
os.system("rm RAxML_*.%s" % suffix)
return suffix
def subsample_snps(matrix, dist_sets, used_snps, subnums):
"""get a list of all possible positions, depending
on those positions in the original matrix - tested"""
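    # For each query sample, draw `subnums` random position subsets the same size as the number of
    # SNPs actually used for that sample, mask the sample's calls at all other positions with '-',
    # and write each subset as <sample>.<replicate>.<neighbor>.tmp.matrix for re-insertion.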
allSNPs = [ ]
with open(matrix) as my_file:
for line in my_file:
if line.startswith("LocusID"):
pass
else:
fields=line.split()
allSNPs.append(fields[0])
for k,v in used_snps.items():
for z in dist_sets:
if len(z) == 0:
pass
else:
if z[0]==k:
for x in range(1,int(subnums)+1):
kept_snps=random.sample(set(allSNPs), int(v))
outfile = open("%s.%s.%s.tmp.matrix" % (k,x,z[1]), "w")
in_matrix=open(matrix)
firstLine = in_matrix.readline()
outfile.write(firstLine)
outfile.write("\n")
first_fields = firstLine.split()
fixed_fields = []
for y in first_fields:
fixed_fields.append(re.sub('[:,]', '', y))
gindex=fixed_fields.index(z[1])
for line in in_matrix:
matrix_fields=line.split()
if matrix_fields[0] in kept_snps:
outfile.write(line)
outfile.write("\n")
else:
outfile.write("\t".join(matrix_fields[:gindex])+"\t"+"-"+"\t"+"\t".join(matrix_fields[gindex+1:])+"\n")
in_matrix.close()
outfile.close()
return allSNPs
def find_used_snps():
"""report how many SNPs were used in a given sample. This is
then used for the sub-sampling routine - tested"""
curr_dir= os.getcwd()
used_SNPs = {}
for infile in glob.glob(os.path.join(curr_dir, "*.filtered.vcf")):
name=get_seq_name(infile)
reduced=name.replace('.filtered.vcf', '')
good_snps=[]
with open(infile) as my_file:
for line in my_file:
fields=line.split()
try:
if fields[1] != "N":
good_snps.append("1")
else:
pass
except:
raise TypeError("abnormal number of fields observed")
used_SNPs[str(reduced)] = int(len(good_snps))
return used_SNPs
def branch_lengths_2_decimals(str_newick_tree):
"""replaces branch lengths in scientific notation with decimals = tested"""
colon_s = 0
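    # e.g. '(A:1e-06,B:2.5e-05);' -> '(A:0.000001,B:0.000025);'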
comma_back_paren_s = 0
num = ''
new_tree = ''
for count, char in enumerate(str_newick_tree):
if char == ':':
colon_s = count
continue
if char in (')', ','):
comma_back_paren_s = 1
num = '%f' % float(num)
new_tree += ":" + num
colon_s = 0
num = ''
if colon_s != 0:
num = num + char
if colon_s == 0:
new_tree += char
new_tree = new_tree.strip('\'').strip('\"').strip('\'') + ";"
return new_tree
def fasta_to_tab(fasta):
"""tested"""
outfile = open("out.tab", "w")
with open(fasta) as my_fasta:
for record in SeqIO.parse(my_fasta,"fasta"):
"""this list is just for testing,
and is ok if it's overwritten for each
fasta"""
for_test = []
outfile.write(str(record.id)+"\t"+str(record.seq)+"\n")
for_test.append(record.id)
for_test.append(str(record.seq))
outfile.close()
return for_test
def tab_to_fasta(new_tab):
"""tested"""
outfile = open("out.fasta", "w")
with open(new_tab) as infile:
for line in infile:
to_test = []
fields = line.split()
outfile.write(">"+fields[0]+"\n")
outfile.write(fields[1].upper())
to_test.append(fields[0])
to_test.append(fields[1].upper())
outfile.close()
return to_test
def tab_to_matrix(tab):
"""tested"""
reduced = []
out_matrix = open("tab_matrix", "w")
with open(tab) as my_file:
for line in my_file:
tmp_list = []
fields = line.split()
tmp_list.append(fields[0])
for nucs in fields[1]:
tmp_list.append(nucs.upper())
reduced.append(tmp_list)
test=list(map(list,zip(*reduced)))
to_return = []
for x in test:
out_matrix.write("\t".join(x))
out_matrix.write("\n")
to_return.append(list(x))
out_matrix.close()
return to_return
def filter_alignment(tab):
"""tested"""
outfile = open("tab.filtered", "w")
with open(tab) as infile:
firstLine = infile.readline()
test_fields = []
outfile.write(firstLine)
outfile.write("\n")
for line in infile:
valid_fields = []
fields = line.split()
for field in fields:
"""skip fields that might be present when missing data are included"""
if field != "-" and field != "N" and field != "X":
valid_fields.append(field)
else:
pass
counter=collections.Counter(valid_fields)
values=counter.values()
#sorted_values = values.sort(key=int)
sorted_values = sorted(values)
if len(sorted_values)>int(1):
outfile.write(line)
outfile.write("\n")
else:
pass
test_fields.append(valid_fields)
outfile.close()
return test_fields
def raxml_calculate_base_tree(in_fasta, model, name):
"""not tested, all system calls"""
    #stdout/stderr are redirected through Popen below, so no shell redirection tokens are passed
    args = ['raxmlHPC-SSE3', '-f', 'd', '-p', '12345',
            '-s', '%s' % in_fasta, '-m', '%s' % model, '-n', '%s' % name, "--no-bfgs"]
try:
vcf_fh = open('raxml.out', 'w')
except:
logPrint('could not open raxml file')
try:
log_fh = open('raxml.log', 'w')
except:
logPrint('could not open log file')
try:
raxml_run = Popen(args, stderr=log_fh, stdout=vcf_fh)
raxml_run.wait()
except:
print("could not infer base pruned tree")
sys.exit()
def file_to_fasta(matrix, out_fasta):
"""almost identical to matrix_to_fasta. Not tested"""
reduced = [ ]
out_matrix = open(out_fasta, "w")
with open(matrix) as my_file:
for line in my_file:
fields = line.strip().split()
reduced.append(fields)
test=map(list, zip(*reduced))
for x in test:
out_matrix.write(">"+str(x[0])+"\n")
out_matrix.write("".join(x[1:]))
out_matrix.write("\n")
out_matrix.close()
def prune_fasta(to_prune, infile, outfile):
"""tested"""
my_out = open(outfile, "w")
seqrecords = [ ]
ids = [ ]
with open(infile) as my_fasta:
for record in SeqIO.parse(my_fasta, "fasta"):
if record.id not in to_prune:
seqrecords.append(record)
ids.append(record.id)
SeqIO.write(seqrecords, my_out, "fasta")
my_out.close()
return ids
def remove_invariant_sites(in_fasta, out_fasta):
"""only keep invarint sites, all functions are tested"""
fasta_to_tab(in_fasta)
tab_to_matrix("out.tab")
filter_alignment("tab_matrix")
file_to_fasta("tab.filtered", out_fasta)
def compare_subsample_results(outnames,distances,fudge):
"""needs testing"""
curr_dir= os.getcwd()
for infile in glob.glob(os.path.join(curr_dir, "*.subsample.distances.txt")):
name=get_seq_name(infile)
split_fields=name.split(".")
genomes_used = []
all_dists=[]
dists_greater_than_true=[]
dists_equal_to_true=[]
dists_less_than_true=[]
"""Here, I get the list of the genomes that are being analyzed"""
try:
with open(infile) as my_file:
for line in my_file:
fields = line.split()
all_dists.append(float(fields[7]))
except:
print("problem parsing input file: %s" % infile)
if len(all_dists)>=1:
max_dist=max(all_dists)
print("")
print("maximum subsample distance between %s and %s = %.2f" % (fields[3],fields[5],float(max_dist)))
else:
print("problem grabbing distances- make sure that subsample.distances.txt files aren't empty")
true_dists = [ ]
for distance in distances:
if distance[1] == split_fields[1]:
true_dists.append(distance[2])
for all_dist in all_dists:
if "%.2f" % float(all_dist)> "%.2f" % (float(true_dists[0])+float(fudge)):
dists_greater_than_true.append("1")
elif "%.2f" % float(all_dist)<"%.2f" % (float(true_dists[0])-float(fudge)):
dists_less_than_true.append("1")
else:
dists_equal_to_true.append("1")
try:
greaters = int(len(dists_greater_than_true))
equals = int(len(dists_equal_to_true))
lessers = int(len(dists_less_than_true))
print("True distance between Reference and %s = %.2f" % (split_fields[1],float(true_dists[0])))
print("Sample: %s" % split_fields[0])
print("Subsample distances between Reference and %s greater than true value = %s" % (split_fields[2],greaters))
print("Subsample distances between Reference and %s equal to true value = %s" % (split_fields[2],equals))
print("Subsample distances between Reference and %s less than true value = %s" % (split_fields[2],lessers))
p = (greaters+lessers)/(greaters+lessers+equals)
print("Placement p value = %.3f" % float(p))
print("-------------------------")
except:
pass
def transform_tree(tree):
"""converts a Newick tree into a Nexus-formatted
tree that can be visualized with FigTree - needs testing"""
#infile = open(tree, "U")
tree_string = []
with open(tree) as infile:
for line in infile:
tree_string.append(line)
#infile.close()
mytree = Phylo.read(tree, 'newick')
tree_names = [ ]
for clade in mytree.find_clades():
if clade.name:
tree_names.append(clade.name)
outfile = open("transformed.tree", "w")
outfile.write("#NEXUS"+"\n")
outfile.write("begin taxa;"+"\n")
outfile.write("\t"+"dimensions ntax="+str(len(tree_names))+";"+"\n")
outfile.write("\t"+"taxlabels"+"\n")
    nr=[x for i, x in enumerate(tree_names) if x not in tree_names[i+1:]]
for tree_name in nr:
if "QUERY__" in tree_name:
outfile.write("\t"+"'%s'" % tree_name+"[&!color=#-3407821]"+"\n")
else:
outfile.write("\t"+tree_name+"\n")
outfile.write(";"+"\n")
outfile.write("end;"+"\n")
outfile.write(""+"\n")
outfile.write("begin trees;"+"\n")
for x in tree_string:
outfile.write("\t"+"tree tree_1 = [&R] "+str(x)+"\n")
outfile.write("end;"+"\n")
infile.close()
outfile.close()
def write_reduced_matrix(matrix):
"""This function takes a NASP formatted
SNP matrix and writes a temporary matrix
that can be easily combined with temporary files - tested"""
in_matrix = open(matrix)
outfile = open("temp.matrix", "w")
outdata = [ ]
firstLine = in_matrix.readline()
first_fields=firstLine.split()
last=first_fields.index("#SNPcall")
outfile.write("\t".join(first_fields[:last]))
outfile.write("\n")
for line in in_matrix:
fields = line.split()
outfile.write("\t".join(fields[:last]))
outfile.write("\n")
outdata.append(len(fields[:last]))
outfile.close()
in_matrix.close()
return outdata
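#Example (mirrors the write_reduced_matrix unit test): for a header of
#"LocusID Reference genome1 genome2 #SNPcall ...", only the columns before
#"#SNPcall" are copied into temp.matrix, and the returned list holds the
#number of retained fields per data row, e.g. [4, 4].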
def make_temp_matrix(vcf, matrix, name):
"""these are all of the screened SNPs - tested"""
matrix_ids=[]
with open(matrix) as in_matrix:
firstLine = in_matrix.readline()
for line in in_matrix:
newline = line.strip("\n")
mfields=newline.split()
matrix_ids.append(mfields[0])
value_dict={}
new_dicts={}
with open(vcf) as my_file:
for line in my_file:
newline = line.strip("\n")
if newline.startswith("#"):
pass
else:
fields=newline.split()
value_dict.update({fields[0]:fields[1]})
#value_dict could only contain Ns
if len(value_dict)>=1:
"""here is where value_dict_set is populated"""
keys = []
for k,v in value_dict.items():
keys.append(k)
value_dict_set = set(keys)
new_dicts = value_dict
for x in matrix_ids:
if x not in value_dict_set:new_dicts.update({x:"N"})
else:
print("no usable information in vcf file, did you use the correct reference?")
#new_dicts contains all calls, good and bad
value_dict = {}
"""variety will contain a complete set of SNPs"""
variety = []
#This ensures that the values are in order
for x in matrix_ids:
for k,v in new_dicts.items():
if x == k:
variety.append(v)
variety_set = set(variety)
#Changed this from a to w on 12/5/19
out_matrix = open("%s.tmp.xyx.matrix" % name,"w")
if len(variety)>=1:
if "A" or "T" or "G" or "C" in value_dict_set:
out_matrix.write('%s\n' % name)
for x in matrix_ids:
if x in new_dicts:
out_matrix.write("%s\n" % new_dicts.get('%s' % x))
else:
print("sample %s had no usable positions!!!" % name)
else:
print("sample %s has problems" % name)
print("-------------------------")
out_matrix.close()
value_dict_set = []
return new_dicts
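#Worked example (mirrors the make_temp_matrix unit test): given a matrix
#listing loci ADK::1-ADK::3 and a filtered VCF containing only "ADK::1<TAB>G",
#the function returns {'ADK::1': 'G', 'ADK::2': 'N', 'ADK::3': 'N'}; every
#matrix locus missing from the VCF is padded with an "N" call, and the calls
#are also written, one per line, to <name>.tmp.xyx.matrix.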
def grab_names():
"""These names will be used for future iterations - tested"""
curr_dir= os.getcwd()
outnames = [ ]
for infile in glob.glob(os.path.join(curr_dir, "*.filtered.vcf")):
name=get_seq_name(infile)
reduced=name.replace(".filtered.vcf","")
outnames.append(reduced)
return outnames
def parse_likelihoods(infile):
"""This function parses the likelihoods output from RAxML - tested"""
like_dict = {}
with open(infile) as my_file:
for line in my_file:
fields = line.split()
try:
like_dict[fields[0]].append(fields[2])
except KeyError:
like_dict[fields[0]] = [fields[2]]
print("sample_name"+"\t"+"insertion_likelihood"+"\t"+"number of potential insertion nodes")
for k,v in like_dict.items():
print(k+"\t"+v[0]+"\t"+str(len(v)))
return like_dict
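#Example input (taken from the parse_likelihoods unit test): a whitespace-
#delimited file whose first column is the sample name and whose third column
#is the insertion likelihood, e.g.
#   ECOLI   I6  0.298716    0.298716
#   ECOLI   I5  0.298714    0.597430
#would print one summary row for ECOLI and return
#   {'ECOLI': ['0.298716', '0.298714']}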
def calculate_pairwise_tree_dists(intree, output):
"""uses dendropy function to calculate all pairwise distances between tree - tested"""
tree = dendropy.Tree.get_from_path(intree, "newick", preserve_underscores=True)
outfile = open("%s" % output, "w")
distances = tree.phylogenetic_distance_matrix()
distances_sets = [ ]
for i,t1 in enumerate(tree.taxon_namespace):
for t2 in tree.taxon_namespace[i+1:]:
distances_sets.append(distances(t1, t2))
try:
for i, t1 in enumerate(tree.taxon_namespace):
for t2 in tree.taxon_namespace[i+1:]:
outfile.write("Distance between '%s' and '%s': %s\n" % (t1.label, t2.label, distances(t1, t2)))
except:
print("problem iterating through tree. Tree is empty or not Newick format")
outfile.close()
return distances_sets
def get_closest_dists_new(final_sets, outnames):
"""tested"""
results = []
for final_set in final_sets:
        if len(final_set) == 0:
            continue
        results.append(final_set[1]+final_set[2])
return results
def find_two_new(infile,outnames):
"""find two closest genomes to each query genome,
return the names and distances (sorted), for the
two genomes - tested"""
distances = ()
output_tuples = ()
for outname in outnames:
outname_tuple = ()
with open(infile) as my_file:
for line in my_file:
fields=line.split()
new_fields=[ ]
for x in fields:
new_fields.append(re.sub('[:,]', '', x))
final_fields=[ ]
for y in new_fields:
final_fields.append(y.replace("QUERY___",""))
"""We need the distance of all samples, compared to the reference"""
if "Reference" in final_fields[2].replace("'",""):
distances=((final_fields[2].replace("'",""),final_fields[4].replace("'",""),final_fields[5].replace("'","")),)+distances
elif "Reference" in final_fields[4].replace("'",""):
distances=((final_fields[4].replace("'",""),final_fields[2].replace("'",""),final_fields[5].replace("'","")),)+distances
if final_fields[4].replace("'","") == outname:
if "Reference" not in final_fields[2].replace("'","") and final_fields[2].replace("'","") not in outnames:
outname_tuple=((final_fields[4].replace("'",""),final_fields[2].replace("'",""),final_fields[5].replace("'","")),)+outname_tuple
elif final_fields[2].replace("'","") == outname:
if "Reference" not in final_fields[4].replace("'","") and final_fields[4].replace("'","") not in outnames:
outname_tuple=((final_fields[2].replace("'",""),final_fields[4].replace("'",""),final_fields[5].replace("'","")),)+outname_tuple
for result in sorted(outname_tuple,key=lambda x: float(x[2]))[:2]:
output_tuples=((result),)+output_tuples
return output_tuples,distances
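#Shape of the return value (taken from the find_two_new unit tests, with
#placeholder names): a pair of tuple-of-tuples. The first element holds, for
#each query, its two closest non-reference genomes as (query, neighbor,
#distance) triples; the second holds the (Reference, sample, distance) triples
#read from the same distance file, e.g.
#   ((('sampleA', 'neighbor2', '4.53'), ('sampleA', 'neighbor1', '1.39')),
#    (('Reference', 'sampleA', '1.29'),))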
def _perform_workflow_subsample_snps_dev(data):
final_set = data[0]
"""final_set is a tuple of sample, NN, value"""
used_snps = data[1]
"""used_snps is a dictionary"""
subnums = data[2]
"""subnums is an integer"""
allsnps = data[3]
"""allsnps is a list of all SNPs used"""
matrix = data[4]
for k,v in used_snps.items():
if final_set[0]==k:
for x in range(1,int(subnums)+1):
kept_snps=random.sample(set(allsnps), int(v))
solids = set(kept_snps)
if os.path.isfile("%s.%s.%s.tmp.matrix" % (k,x,final_set[1])):
pass
else:
outfile = open("%s.%s.%s.tmp.matrix" % (k,x,final_set[1]), "w")
with open(matrix) as my_matrix:
firstLine = my_matrix.readline()
outfile.write(firstLine)
first_fields = firstLine.split()
fixed_fields = []
for y in first_fields:
fixed_fields.append(re.sub('[:,]', '', y))
gindex=fixed_fields.index(final_set[1])
for line in my_matrix:
matrix_fields=line.split()
if matrix_fields[0] in solids:
outfile.write(line)
else:
outfile.write("\t".join(matrix_fields[:gindex])+"\t"+"-"+"\t"+"\t".join(matrix_fields[gindex+1:])+"\n")
outfile.close()
else:
pass
def subsample_snps_2(final_sets,used_snps,subnums,allsnps,processors,matrix):
files_and_temp_names = []
for f in final_sets:
files_and_temp_names.append([f,used_snps,subnums,allsnps,matrix])
mp_shell(_perform_workflow_subsample_snps_dev, files_and_temp_names, processors)
def subsample_snps_dev(matrix, final_set, used_snps, subnums, allsnps):
"""needs testing"""
for k,v in used_snps.items():
if final_set[0]==k:
for x in range(1,int(subnums)+1):
kept_snps=random.sample(set(allsnps), int(v))
solids = set(kept_snps)
if os.path.isfile("%s.%s.%s.tmp.matrix" % (k,x,final_set[1])):
pass
else:
outfile = open("%s.%s.%s.tmp.matrix" % (k,x,final_set[1]), "w")
in_matrix=open(matrix)
firstLine = in_matrix.readline()
outfile.write(firstLine)
first_fields = firstLine.split()
fixed_fields = []
for y in first_fields:
fixed_fields.append(re.sub('[:,]', '', y))
gindex=fixed_fields.index(final_set[1])
for line in in_matrix:
matrix_fields=line.split()
if matrix_fields[0] in solids:
outfile.write(line)
else:
outfile.write("\t".join(matrix_fields[:gindex])+"\t"+"-"+"\t"+"\t".join(matrix_fields[gindex+1:])+"\n")
in_matrix.close()
outfile.close()
else:
pass
def get_all_snps(matrix):
"""tested"""
allSNPs = [ ]
with open(matrix) as my_matrix:
for line in my_matrix:
if line.startswith("LocusID"):
pass
else:
fields=line.split()
allSNPs.append(fields[0])
return allSNPs
def create_params_files(id, to_prune_set, full_tree, full_matrix, dist_sets, processors):
"""not currently tested, but needs to be"""
if int(processors)<=2:
my_processors = 2
else:
my_processors = int(int(processors)/2)
for item in to_prune_set:
new_name = str(id)+str(item)
if os.path.isfile("%s-PARAMS" % new_name):
continue
else:
tmptree = open("%s.tmp.tree" % new_name, "w")
to_prune = []
for x in dist_sets:
if x[0] == id:
if x[1] == item or x[2] == item:
to_prune.append(x[1])
to_prune_fixed=[]
for x in to_prune:
to_prune_fixed.append(re.sub('[:,]', '', x))
tree_full = dendropy.Tree.get_from_path(full_tree,schema="newick",preserve_underscores=True)
tree_full.prune_taxa_with_labels(to_prune_fixed)
final_tree = branch_lengths_2_decimals(tree_full.as_string("newick"))
tmptree.write(final_tree)
tmptree.close()
tmptree2 = open("%s.tree" % new_name, "w")
with open("%s.tmp.tree" % new_name) as my_file:
for line in my_file:
if line.startswith("[&U]"):
fields = line.split()
fixed_fields = [ ]
for x in fields:
fixed_fields.append(x.replace("'",""))
tmptree2.write(fixed_fields[1])
else:
pass
tmptree2.close()
"""result is a pruned tree that is ready for RAxML"""
matrix_to_fasta(full_matrix, "%s.fasta" % new_name)
os.system("sed 's/://g' %s.fasta | sed 's/,//g' > %s_in.fasta" % (new_name, new_name))
prune_fasta(to_prune, "%s_in.fasta" % new_name, "%s_pruned.fasta" % new_name)
try:
subprocess.check_call("rm RAxML*%s-PARAMS" % new_name, shell=True, stderr=open(os.devnull, 'w'))
except:
pass
#forcing GTRGAMMA
try:
subprocess.check_call("raxmlHPC-PTHREADS-SSE3 -T %s -f e -m GTRGAMMA -s %s_pruned.fasta -t %s.tree -n %s-PARAMS > /dev/null 2>&1" % (my_processors, new_name, new_name, new_name), shell=True)
os.system("mv RAxML_binaryModelParameters.%s-PARAMS %s-PARAMS" % (new_name, new_name))
except:
continue
def process_temp_matrices_dev(dist_sets, sample, tree, processors, patristics, insertion_method, parameters, model):
"""not currently tested, but needs to be"""
    if int(processors) <= 2:
        my_processors = 2
    else:
        my_processors = int(int(processors)/2)
    name=get_seq_name(sample)
split_fields=name.split(".")
outfile=open("%s.%s.subsample.distances.txt" % (split_fields[0],split_fields[2]), "a")
name_fixed = []
name_fixed.append(re.sub('[:,]', '', split_fields[2]))
to_prune = []
for x in dist_sets:
if x[0] == split_fields[0]:
if x[1] == split_fields[2] or x[2] == split_fields[2]:
to_prune.append(x[1])
to_prune_fixed=[]
for x in to_prune:
to_prune_fixed.append(re.sub('[:,]', '', x))
#full_context includes sample, near neighbor, and replicate
full_context = split_fields[0]+split_fields[1]+split_fields[2]
new_name = split_fields[0]+split_fields[2]
if os.path.isfile("%s.tree_including_unknowns_noedges.tree" % full_context):
print("tree already present, skipping")
pass
else:
tree_full = dendropy.Tree.get_from_path(tree,schema="newick",preserve_underscores=True)
tree_full.prune_taxa_with_labels(to_prune_fixed)
tmptree = open("%s.tmp.tree" % full_context, "w")
final_tree = branch_lengths_2_decimals(tree_full.as_string("newick"))
tmptree.write(final_tree)
tmptree.close()
tmptree2 = open("%s.tree" % full_context, "w")
#for line in open("%s.tmp.tree" % full_context, "U"):
with open("%s.tmp.tree" % full_context) as my_file:
            for line in my_file:
if line.startswith("[&U]"):
fields = line.split()
fixed_fields = [ ]
for x in fields:
fixed_fields.append(x.replace("'",""))
tmptree2.write(fixed_fields[1])
else:
pass
tmptree2.close()
matrix_to_fasta(sample, "%s.fasta" % full_context)
os.system("sed 's/://g' %s.fasta | sed 's/,//g' > %s_in.fasta" % (full_context, full_context))
if os.path.isfile("%s-PARAMS" % new_name):
try:
run_raxml("%s_in.fasta" % full_context, "%s.tree" % full_context, "%s.subsampling_classifications.txt" % full_context, insertion_method, "%s-PARAMS" % new_name, "GTRGAMMA", "%s" % full_context)
except:
pass
else:
try:
subprocess.check_call("raxmlHPC-PTHREADS-SSE3 -T %s -f e -m GTRGAMMA -s %s_pruned.fasta -t %s.tree -n %s-PARAMS --no-bfgs > /dev/null 2>&1" % (my_processors, new_name, new_name, new_name), shell=True)
os.system("mv RAxML_binaryModelParameters.%s-PARAMS %s-PARAMS" % (new_name, new_name))
run_raxml("%s_in.fasta" % full_context, "%s.tree" % full_context, "%s.subsampling_classifications.txt" % full_context, insertion_method, "%s-PARAMS" % (split_fields[0]+split_fields[2]), "GTRGAMMA", "%s" % full_context)
except:
pass
if os.path.isfile("%s.resampling_distances.txt" % full_context):
pass
else:
try:
calculate_pairwise_tree_dists("%s.tree_including_unknowns_noedges.tree" % full_context, "%s.resampling_distances.txt" % full_context)
for line in open("%s.resampling_distances.txt" % full_context,"U"):
resample_fields = line.split()
myid = re.sub("[:']", "",resample_fields[4])
fixedid = myid.replace("QUERY___","")
newid = re.sub("[:']","",resample_fields[2])
fixedid2 = newid.replace("QUERY___","")
if resample_fields[2] == "'Reference'" and fixedid in name_fixed:
outfile.write("resampled distance between Reference and %s = %s\n" % (fixedid, resample_fields[5]))
elif resample_fields[4] == "'Reference':" and fixedid2 in name_fixed:
outfile.write("resampled distance between Reference and %s = %s\n" % (fixedid2, resample_fields[5]))
else:
pass
except:
pass
def check_input_files(matrix,reference):
ref_names = []
with open(reference) as my_ref:
for record in SeqIO.parse(my_ref,"fasta"):
ref_names.append(record.id)
with open(matrix) as f:
for line in f.readlines()[:2]:
if line.startswith("LocusID"):
pass
else:
fields = line.split()
name_fields=fields[0].split("::")
if name_fields[0] in ref_names:
pass
else:
print("The IDs in your Reference don't match the names in your SNP matrix! Please fix and re-start...exiting...")
sys.exit()
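#check_input_files assumes NASP-style locus IDs of the form
#"<contig_id>::<position>" (for example "gi|16120353|ref|NC_003143.1|::35501",
#the format used elsewhere in the test data); everything before the "::" must
#match a record ID in the reference FASTA, otherwise the function exits with an error.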
def create_merged_vcf():
out_file = open("merged.vcf", "w")
start_dir = os.getcwd()
lists = []
for infile in glob.glob(os.path.join(start_dir, "*.tmp.xyx.matrix")):
data = open(infile).read().splitlines()
lists.append(data)
test=map(list, zip(*lists))
for x in test:
out_file.write("\t".join(x))
out_file.write("\n")
out_file.close()
def _perform_workflow_create_params(data):
id = data[0]
to_prune_set = data[1]
full_tree = data[2]
full_matrix = data[3]
dist_sets = data[4]
processors = data[5]
if int(processors)<=2:
my_processors = 2
else:
my_processors = int(int(processors)/2)
for item in to_prune_set:
new_name = str(id)+str(item)
if os.path.isfile("%s-PARAMS" % new_name):
pass
else:
tmptree = open("%s.tmp.tree" % new_name, "w")
to_prune = []
for x in dist_sets:
if x[0] == id:
if x[1] == item or x[2] == item:
to_prune.append(x[1])
to_prune_fixed=[]
for x in to_prune:
to_prune_fixed.append(re.sub('[:,]', '', x))
tree_full = dendropy.Tree.get_from_path(full_tree,schema="newick",preserve_underscores=True)
tree_full.prune_taxa_with_labels(to_prune_fixed)
final_tree = branch_lengths_2_decimals(tree_full.as_string("newick"))
tmptree.write(final_tree)
tmptree.close()
tmptree2 = open("%s.tree" % new_name, "w")
with open("%s.tmp.tree" % new_name) as my_file:
for line in my_file:
line = line.replace("'","")
tmptree2.write(line)
tmptree2.close()
"""result is a pruned tree that is ready for RAxML"""
matrix_to_fasta(full_matrix, "%s.fasta" % new_name)
os.system("sed 's/://g' %s.fasta | sed 's/,//g' > %s_in.fasta" % (new_name, new_name))
prune_fasta(to_prune,"%s_in.fasta" % new_name,"%s_pruned.fasta" % new_name)
try:
subprocess.check_call("rm RAxML*%s-PARAMS" % new_name, shell=True, stderr=open(os.devnull, 'w'))
except:
pass
#forcing GTRGAMMA
try:
subprocess.check_call("raxmlHPC-PTHREADS-SSE3 -T %s -f e -m GTRGAMMA -s %s_pruned.fasta -t %s.tree -n %s-PARAMS > /dev/null 2>&1" % (my_processors, new_name, new_name, new_name), shell=True)
os.system("mv RAxML_binaryModelParameters.%s-PARAMS %s-PARAMS" % (new_name, new_name))
except:
print("problem creating PARAMS file for %s.tree, exiting" % new_name)
sys.exit()
def create_params_files_dev(new_sample_dicts,tree,matrix,final_sets,processors):
to_run = []
for k,v in new_sample_dicts.items():
to_run.append([k,v,tree,matrix,final_sets,processors])
mp_shell(_perform_workflow_create_params,to_run,processors)
def process_temp_matrices_2(final_sets,final_matrices,tree,processors,patristic_distances, V, parameters, model):
to_run = []
for matrix in final_matrices:
to_run.append([final_sets,matrix,tree,processors,patristic_distances,V,parameters,model])
mp_shell(_perform_workflow_temp_matrices, to_run, processors)
def _perform_workflow_temp_matrices(data):
dist_sets = data[0]
sample = data[1]
tree = data[2]
    processors = data[3]
    if int(processors) <= 2:
        my_processors = 2
    else:
        my_processors = int(int(processors)/2)
patristics = data[4]
insertion_method = data[5]
parameters = data[6]
model = data[7]
name=get_seq_name(sample)
split_fields=name.split(".")
outfile=open("%s.%s.subsample.distances.txt" % (split_fields[0],split_fields[2]), "a")
name_fixed = []
name_fixed.append(re.sub('[:,]', '', split_fields[2]))
to_prune = []
for x in dist_sets:
if x[0] == split_fields[0]:
if x[1] == split_fields[2] or x[2] == split_fields[2]:
to_prune.append(x[1])
to_prune_fixed=[]
for x in to_prune:
to_prune_fixed.append(re.sub('[:,]', '', x))
#full_context includes sample, near neighbor, and replicate
full_context = split_fields[0]+split_fields[1]+split_fields[2]
new_name = split_fields[0]+split_fields[2]
if os.path.isfile("%s.tree_including_unknowns_noedges.tree" % full_context):
print("tree already present, skipping")
pass
else:
tree_full = dendropy.Tree.get_from_path(tree,schema="newick",preserve_underscores=True)
tree_full.prune_taxa_with_labels(to_prune_fixed)
tmptree = open("%s.tmp.tree" % full_context, "w")
final_tree = branch_lengths_2_decimals(tree_full.as_string("newick"))
tmptree.write(final_tree)
tmptree.close()
tmptree2 = open("%s.tree" % full_context, "w")
with open("%s.tmp.tree" % full_context) as my_file:
for line in my_file:
line = line.replace("'","")
tmptree2.write(line)
tmptree2.close()
matrix_to_fasta(sample, "%s.fasta" % full_context)
os.system("sed 's/://g' %s.fasta | sed 's/,//g' > %s_in.fasta" % (full_context, full_context))
if os.path.isfile("%s-PARAMS" % new_name):
try:
run_raxml("%s_in.fasta" % full_context, "%s.tree" % full_context, "%s.subsampling_classifications.txt" % full_context, insertion_method, "%s-PARAMS" % new_name, "GTRGAMMA", "%s" % full_context)
except:
pass
else:
try:
subprocess.check_call("raxmlHPC-PTHREADS-SSE3 -T %s -f e -m GTRGAMMA -s %s_pruned.fasta -t %s.tree -n %s-PARAMS --no-bfgs > /dev/null 2>&1" % (my_processors, new_name, new_name, new_name), shell=True)
os.system("mv RAxML_binaryModelParameters.%s-PARAMS %s-PARAMS" % (new_name, new_name))
run_raxml("%s_in.fasta" % full_context, "%s.tree" % full_context, "%s.subsampling_classifications.txt" % full_context, insertion_method, "%s-PARAMS" % (split_fields[0]+split_fields[2]), "GTRGAMMA", "%s" % full_context)
except:
pass
if os.path.isfile("%s.resampling_distances.txt" % full_context):
pass
else:
try:
calculate_pairwise_tree_dists("%s.tree_including_unknowns_noedges.tree" % full_context, "%s.resampling_distances.txt" % full_context)
for line in open("%s.resampling_distances.txt" % full_context,"U"):
resample_fields = line.split()
myid = re.sub("[:']", "",resample_fields[4])
fixedid = myid.replace("QUERY___","")
newid = re.sub("[:']","",resample_fields[2])
fixedid2 = newid.replace("QUERY___","")
if resample_fields[2] == "'Reference'" and fixedid in name_fixed:
outfile.write("resampled distance between Reference and %s = %s\n" % (fixedid, resample_fields[5]))
elif resample_fields[4] == "'Reference':" and fixedid2 in name_fixed:
outfile.write("resampled distance between Reference and %s = %s\n" % (fixedid2, resample_fields[5]))
else:
pass
except:
pass
outfile.close()
def qc_files(fasta,tree):
#first grab the IDs from the FASTA file:
fasta_ids = []
with open(fasta) as my_fasta:
        for record in SeqIO.parse(my_fasta,"fasta"):
fasta_ids.append(record.id)
#Now parse the names from the tree:
tree_ids = []
mytree = Phylo.read(tree,'newick')
for clade in mytree.find_clades():
if clade.name:
tree_ids.append(clade.name)
tree_set = set(tree_ids)
fasta_set = set(fasta_ids)
if len(fasta_set) == len(tree_set):
print("new samples didn't get added correctly...exiting")
sys.exit()
else:
pass
<file_sep>/tools/subsample_reads_and_place.py
#!/usr/bin/env python
"""From a SNP matrix, sub-sample
SNPs from a given genome, then find
the SNP level where the unknown genome
is placed correctly >90% of the time"""
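#A typical invocation (hypothetical file names), using the options defined at
#the bottom of this script:
#   python subsample_reads_and_place.py -m nasp_snp.matrix -t input.tree \
#       -n genome_to_test -s 50 -p 100 -e 100000 -o 4 -i 10 -d 0.05 -r T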
from optparse import OptionParser
import sys
import random
import re
import os
import subprocess
import dendropy
import collections
try:
from dendropy import treecalc
except:
print("Dendropy needs to be installed for this script to run")
sys.exit()
from subprocess import Popen
try:
from Bio import SeqIO
except:
print("BioPython needs to be installed for this script to run")
sys.exit()
def test_file(option, opt_str, value, parser):
try:
with open(value): setattr(parser.values, option.dest, value)
except IOError:
print('%s file cannot be opened' % option)
sys.exit()
def subsample_snps(matrix, name, start):
"""get a list of all possible positions, depending
on those positions in the original matrix. Similar
to method in the main script"""
allSNPs = []
with open(matrix) as my_matrix:
for line in my_matrix:
if line.startswith("LocusID"):
pass
else:
fields=line.split()
allSNPs.append(fields[0])
kept_snps=random.sample(set(allSNPs), int(start))
outfile = open("%s.%s.tmp.matrix" % (name,start), "w")
with open(matrix) as my_matrix:
firstLine = my_matrix.readline()
outfile.write(firstLine)
first_fields = firstLine.split()
last=first_fields.index("#SNPcall")
mygenome=first_fields.index(name)
fixed_fields = []
for x in first_fields[:last]:
fixed_fields.append(re.sub('[:,]', '', x))
gindex = []
for x in first_fields[:last]:
gindex.append(first_fields.index(x))
for line in my_matrix:
matrix_fields=line.split()
if matrix_fields[0] in kept_snps:
outfile.write(line)
else:
outfile.write("\t".join(matrix_fields[:mygenome])+"\t"+"-"+"\t"+"\t".join(matrix_fields[mygenome+1:])+"\n")
outfile.close()
return last
def subsample_snps_keep(matrix, name, start):
"""get a list of all possible positions, depending
on those positions in the original matrix. Similar
to method in the main script"""
allSNPs = []
with open(matrix) as my_matrix:
for line in my_matrix:
if line.startswith("LocusID"):
pass
else:
fields=line.split()
allSNPs.append(fields[0])
kept_snps=random.sample(set(allSNPs), int(start))
outfile = open("%s.%s.tmp.matrix" % (name,start), "w")
with open(matrix) as in_matrix:
firstLine = in_matrix.readline()
first_fields = firstLine.split()
last=first_fields.index("#SNPcall")
first_fields.insert(last,"QUERY_%s" % name)
outfile.write("\t".join(first_fields)+"\n")
"""mygenome is the index of the genome that we want to subsample"""
mygenome=first_fields.index(name)
fixed_fields = []
for x in first_fields[:last]:
fixed_fields.append(re.sub('[:,]', '', x))
gindex = [ ]
for x in first_fields[:last]:
gindex.append(first_fields.index(x))
for line in in_matrix:
matrix_fields=line.split()
if matrix_fields[0] in kept_snps:
outfile.write("\t".join(matrix_fields[:last])+"\t"+"\t".join(matrix_fields[mygenome])+"\t"+"".join(matrix_fields[last:])+"\n")
else:
outfile.write("\t".join(matrix_fields[:last])+"\t"+"-"+"\t"+"\t".join(matrix_fields[last:])+"\n")
outfile.close()
return last
def matrix_to_fasta(matrix_in,name,last):
"""converts a SNP matrix to fasta format.
Again, slightly different output compared to tested
function"""
reduced = []
out_fasta = open("%s.fasta" % name, "w")
with open(matrix_in) as my_matrix:
for line in my_matrix:
fields = line.split("\t")
reduced.append(fields[1:last])
test=map(list, zip(*reduced))
for x in test:
out_fasta.write(">"+str(x[0])+"\n"+"".join(x[1:])+"\n")
out_fasta.close()
def branch_lengths_2_decimals(str_newick_tree):
"""replaces branch lengths in scientific notation with decimals
Identical to tested function in main script"""
colon_s = 0
comma_back_paren_s = 0
num = ''
new_tree = ''
for count, char in enumerate(str_newick_tree):
if char == ':':
colon_s = count
continue
if char in (')', ','):
comma_back_paren_s = 1
num = '%f' % float(num)
new_tree += ":" + num
colon_s = 0
num = ''
if colon_s != 0:
num = num + char
if colon_s == 0:
new_tree += char
new_tree = new_tree.strip('\'').strip('\"').strip('\'') + ";"
return new_tree
def insert_sequence(in_fasta, tree, fixed_name, parameters, processors):
    #stdout/stderr are redirected through Popen below, so no shell redirection tokens are passed
    args = ['raxmlHPC-PTHREADS-SSE3', '-f', 'V',
            '-s', '%s' % in_fasta, '-m', 'GTRGAMMA', '-n', '%s' % fixed_name, '-t',
            '%s' % tree, '-T', '%s' % processors, '-R', '%s' % parameters, '--no-bfgs']
try:
vcf_fh = open('raxml.out', 'w')
except:
print('could not open raxml file')
try:
log_fh = open('raxml.log', 'w')
except:
print('could not open log file')
try:
raxml_run = Popen(args, stderr=log_fh, stdout=vcf_fh)
raxml_run.wait()
os.system("sed 's/\[[^]]*\]//g' RAxML_labelledTree.%s > %s.tree_including_unknowns_noedges.tree" % (fixed_name,fixed_name))
subprocess.check_call("rm RAxML*.%s" % fixed_name, shell=True)
except:
print("sequence(s) were not inserted into tree!!!!!")
def prune_tree(fixed_name,tree):
tree_full = dendropy.Tree.get_from_path(tree,schema="newick",preserve_underscores=True)
tree_full.prune_taxa_with_labels(["%s" % fixed_name])
final_tree = branch_lengths_2_decimals(tree_full.as_string("newick"))
tmptree = open("%s.tmpx.tree" % fixed_name, "w")
tmptree.write(final_tree)
tmptree.close()
tmptree2 = open("%s.tmpxz.tree" % fixed_name, "w")
with open("%s.tmpx.tree" % fixed_name) as my_tree:
for line in my_tree:
fields = line.split()
fixed_fields = []
for x in fields:
fixed_fields.append(x.replace("'",""))
tmptree2.write("".join(fixed_fields))
tmptree2.close()
def calculate_pairwise_tree_dists(intree, output):
tree = dendropy.Tree.get_from_path(intree, "newick", preserve_underscores=True)
outfile = open("%s" % output, "w")
distances = tree.phylogenetic_distance_matrix()
for i, t1 in enumerate(tree.taxon_namespace):
for t2 in tree.taxon_namespace[i+1:]:
outfile.write("Distance between '%s' and '%s': %s\n" % (t1.label, t2.label, distances(t1, t2)))
outfile.close()
def parse_distances(distance_file,fixed_name):
true_value = []
with open(distance_file) as my_file:
for line in my_file:
if len(fixed_name)>1:
for name in fixed_name:
#print(name)
fields = line.split()
if fields[2] == "'%s'" % name and fields[4] == "'Reference':":
print(fields)
true_value.append(fields[5])
elif fields[2] =="'Reference'" and fields[4] =="'%s':" % name:
true_value.append(fields[5])
else:
fields = line.split()
if fields[2] == "'%s'" % ''.join(fixed_name) and fields[4] == "'Reference':":
true_value.append(fields[5])
elif fields[2] =="'Reference'" and fields[4] =="'%s':" % ''.join(fixed_name):
true_value.append(fields[5])
else:
pass
return true_value
def get_name_by_ID(in_fasta, ID, out_fasta):
output_handle = open(out_fasta, "w")
seqrecords=[]
with open(in_fasta) as my_fasta:
for record in SeqIO.parse(my_fasta, "fasta"):
if record.id == ID:
seqrecords.append(record)
SeqIO.write(seqrecords, output_handle, "fasta")
output_handle.close()
def rename_fasta(in_fasta, name, out_fasta):
output_handle = open(out_fasta, "w")
seqrecords=[]
with open(in_fasta) as my_fasta:
for record in SeqIO.parse(my_fasta, "fasta"):
seqrecords.append(record.seq)
output_handle.write(">"+name+"\n")
for seqrecord in seqrecords:
output_handle.write(str(seqrecord)+"\n")
output_handle.close()
def get_field_index(matrix_in):
firstLine = open(matrix_in).readline()
first_fields = firstLine.split("\t")
last=first_fields.index("#SNPcall")
return last
def remove_sequence(in_fasta,name,out_fasta):
seqrecords = []
output_handle = open(out_fasta, "w")
with open(in_fasta) as my_fasta:
for record in SeqIO.parse(my_fasta, "fasta"):
if record.id != name:
seqrecords.append(record)
SeqIO.write(seqrecords, output_handle, "fasta")
output_handle.close()
def main(matrix,tree,name,start,step,end,processors,iterations,deviation,remove):
aa = subprocess.call(['which', 'raxmlHPC-PTHREADS-SSE3'])
if aa == 0:
pass
else:
print("RAxML must be in your path as raxmlHPC-PTHREADS-SSE3")
sys.exit()
"""get starting information"""
start_dir = os.getcwd()
start_path = os.path.abspath("%s" % start_dir)
matrix_path = os.path.abspath("%s" % matrix)
tree_path = os.path.abspath("%s" % tree)
"""done with getting starting information"""
os.system("mkdir %s/%s.tmp" % (start_path,name))
fixed_name = []
#Changing directory into temporary one
os.chdir("%s/%s.tmp" % (start_path,name))
os.system("sed 's/://g' %s | sed 's/,//g' > REF.matrix" % matrix_path)
fixed_name.append(re.sub('[:,]', '', name))
calculate_pairwise_tree_dists(tree_path, "%s.all_snps_patristic_distances.txt" % "".join(fixed_name))
last=get_field_index(matrix_path)
matrix_to_fasta("REF.matrix","REF",last)
remove_sequence("REF.fasta", "".join(fixed_name), "REF_pruned.fasta")
true_value = parse_distances("%s.all_snps_patristic_distances.txt" % "".join(fixed_name),fixed_name)
outfile = open("%s.results.out" % ''.join(fixed_name), "w")
if remove == "T":
prune_tree(''.join(fixed_name),tree_path)
else:
pass
print("creating parameters file")
if remove == "T":
subprocess.check_call("raxmlHPC-PTHREADS-SSE3 -f e -m GTRGAMMA -s REF_pruned.fasta -t %s.tmpxz.tree -n PARAMS --no-bfgs -T %s > /dev/null 2>&1" % ("".join(fixed_name),processors) , shell=True)
subprocess.check_call("mv RAxML_binaryModelParameters.PARAMS %s.PARAMS" % "".join(fixed_name), shell=True)
elif remove == "F":
subprocess.check_call("raxmlHPC-PTHREADS-SSE3 -f e -m GTRGAMMA -s REF.fasta -t %s -n PARAMS --no-bfgs -T %s > /dev/null 2>&1" % (tree_path,processors) , shell=True)
subprocess.check_call("mv RAxML_binaryModelParameters.PARAMS %s.PARAMS" % "".join(fixed_name), shell=True)
else:
print("you need to choose between 'T' and 'F' for remove value. Exiting...")
sys.exit()
print("starting loop")
for i in range(start, end+1, step):
hits = []
for j in range(1,iterations+1):
if remove == "T":
last=subsample_snps("REF.matrix", "".join(fixed_name), i)
else:
last=subsample_snps_keep("REF.matrix", "".join(fixed_name), i)
os.system("sed 's/://g' %s.%s.tmp.matrix | sed 's/,//g' > %s.%s.tmp.fixed.matrix" % ("".join(fixed_name),i,"".join(fixed_name),i))
last=get_field_index("%s.%s.tmp.fixed.matrix" % ("".join(fixed_name),i))
matrix_to_fasta("%s.%s.tmp.fixed.matrix" % ("".join(fixed_name),i), "%s.%s" % ("QUERY_"+"".join(fixed_name),i), last)
get_name_by_ID("%s.%s.fasta" % ("QUERY_"+"".join(fixed_name),i), ''.join(fixed_name), "%s.%s.%s.tmp.fasta" % ("QUERY_"+"".join(fixed_name),i,j))
tmp_name = "QUERY_"+''.join(fixed_name)+str(j)
rename_fasta("%s.%s.%s.tmp.fasta" % ("QUERY_"+"".join(fixed_name),i,j), tmp_name,"%s.%s.%s.zzyzz.fasta" % ("QUERY_"+"".join(fixed_name),i,j))
if remove == "T":
os.system("cat %s.*.zzyzz.fasta REF.fasta > %s.joined.fasta" % ("QUERY_"+"".join(fixed_name),"".join(fixed_name)))
else:
os.system("cat %s.*.zzyzz.fasta REF.fasta > %s.joined.fasta" % ("QUERY_"+"".join(fixed_name),"QUERY_"+"".join(fixed_name)))
os.system("rm %s.*.tmp.fasta %s.*.zzyzz.fasta" % ("QUERY_"+"".join(fixed_name),"QUERY_"+"".join(fixed_name)))
if remove =="T":
insert_sequence("%s.joined.fasta" % "".join(fixed_name), "%s.tmpxz.tree" % "".join(fixed_name), ''.join(fixed_name), "%s.PARAMS" % "".join(fixed_name), processors)
else:
insert_sequence("%s.joined.fasta" % ("QUERY_"+"".join(fixed_name)), "%s" % tree_path, ''.join(fixed_name), "%s.PARAMS" % "".join(fixed_name), processors)
calculate_pairwise_tree_dists("%s.tree_including_unknowns_noedges.tree" % "".join(fixed_name),"%s.all_patristic_distances.txt" % "".join(fixed_name))
os.system("cp %s.tree_including_unknowns_noedges.tree %s.%s.%s.tree" % ("".join(fixed_name),"".join(fixed_name),i,j))
query_names = []
for j in range(1,iterations+1):
#query_names.append("QUERY__"+"QUERY_"+"".join(fixed_name)+str(j))
query_names.append("QUERY___"+"QUERY_"+"".join(fixed_name)+str(j))
subsampled_values = parse_distances("%s.all_patristic_distances.txt" % "".join(fixed_name), query_names)
for value in subsampled_values:
if (float(value)/float(''.join(true_value)))<(1+float(deviation)):
hits.append("1")
else:
pass
outfile.write(str(i)+"\t"+str(len(hits))+"\n")
if int(len(hits))>=int(0.95*iterations):
print("optimal value is for %s is %s" % ("".join(fixed_name),i))
break
if len(hits) == 0:
print("no results at your level of sampling. Change settings and try again")
os.system("mv %s.results.out %s" % (''.join(fixed_name), start_path))
os.chdir("%s" % start_path)
os.system("rm -rf %s/%s.tmp" % (start_path,name))
if __name__ == "__main__":
usage="usage: %prog [options]"
parser = OptionParser(usage=usage)
parser.add_option("-m", "--snp_matrix", dest="matrix",
help="path to NASP snp_matrix [REQUIRED]",
action="callback", callback=test_file, type="string")
parser.add_option("-t", "--tree", dest="tree",
help="path to input tree [REQUIRED]",
action="callback", callback=test_file, type="string")
parser.add_option("-n", "--name", dest="name",
help="name of genome to test [REQUIRED]",
action="store", type="string")
parser.add_option("-s", "--start", dest="start",
help="starting number of SNPs to sample, defaults to 50",
action="store", type="int", default="50")
parser.add_option("-p", "--step", dest="step",
help="step through SNPs at this level, defaults to 100",
action="store", type="int", default="100")
parser.add_option("-e", "--end", dest="end",
help="ending number of SNPs to sample, defaults to 100000",
action="store", type="int", default="100000")
parser.add_option("-o", "--processors", dest="processors",
help="number of processors to use with RAxML, defaults to 4",
action="store", type="int", default="4")
parser.add_option("-i", "--iterations", dest="iterations",
help="number of iterations at each level, defaults to 10",
action="store", type="int", default="10")
parser.add_option("-d", "--deviation", dest="deviation",
help="deviation from 1, to determine correct placement, defaults to 0.05",
action="store", type="float", default="0.05")
parser.add_option("-r", "--remove", dest="remove",
help="remove original from tree and place? Defaults to T",
action="store", type="string", default="T")
options, args = parser.parse_args()
mandatories = ["matrix", "tree", "name"]
for m in mandatories:
if not options.__dict__[m]:
print("\nMust provide %s.\n" %m)
parser.print_help()
exit(-1)
main(options.matrix,options.tree,options.name,options.start,options.step,options.end,
options.processors,options.iterations,options.deviation,options.remove)
<file_sep>/tests/test_all_functions.py
#!/usr/bin/env python
"""test each function in the wg-fast
code"""
import unittest
import os
import tempfile
import shutil
import re
import sys
from wg_fast.util import *
curr_dir=os.getcwd()
class Test1(unittest.TestCase):
def test_get_seq_name_basic_function(self):
self.assertEqual(get_seq_name("/path/to/test.fasta"), "test.fasta")
"""tests the condition where you use a tilda instead of full path"""
def test_get_seq_name_tilda(self):
self.assertEqual(get_seq_name("~/test.fasta"), "test.fasta")
"""tests the case where no path is passed"""
def test_get_seq_name_empty(self):
self.assertEqual(get_seq_name(""), "")
"""tests the case where something weird is passed"""
def test_get_seq_name_wrong_slash(self):
self.assertEqual(get_seq_name("\wrong\way"), "\\wrong\\way")
class Test2(unittest.TestCase):
def test_get_readFile_components_basic_function(self):
self.assertEqual(get_readFile_components("/path/to/file.gz"), ('/path/to', 'file', '.gz'))
def test_get_readFile_components_tilda(self):
self.assertEqual(get_readFile_components("~/path/to/file.gz"), ('~/path/to', 'file', '.gz'))
def test_get_readFile_components_non_gz(self):
self.assertEqual(get_readFile_components("~/path/to/file.fasta"), ('~/path/to', 'file', '.fasta'))
def test_get_readFile_components_wrong_slash(self):
self.assertEqual(get_readFile_components("~\path\to\file.fasta"), ('', '~\\path\to\x0cile', '.fasta'))
def test_get_readFile_components_empty(self):
self.assertEqual(get_readFile_components(""), ('', '', ''))
class Test5(unittest.TestCase):
def test_sort_information_basic_function(self):
self.assertEqual(sort_information("ADK1::460"), 460)
def test_sort_information_cant_parse(self):
self.assertRaises(TypeError, sort_information, "ADK1__460")
def test_sort_information_no_input(self):
self.assertRaises(TypeError, sort_information, None)
class Test6(unittest.TestCase):
def test_matrix_to_fasta_basic_function(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.filtered.vcf")
fp = open(fpath, "w")
fp.write("LocusID\tReference\tgenome1\tgenome2\n")
fp.write("ADK::1\tA\tT\tT\n")
fp.write("ADK::2\tT\tT\tT\n")
fp.close()
self.assertEqual(matrix_to_fasta(fpath, "%s/outfile.txt" % tdir), [">ReferenceAT", ">genome1TT", ">genome2TT"])
shutil.rmtree(tdir)
def test_matrix_to_fasta_unequal_fields(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.filtered.vcf")
fp = open(fpath, "w")
fp.write("LocusID\tReference\tgenome1\tgenome2\n")
fp.write("ADK::1\tA\tT\tT\n")
fp.write("ADK::2\tT\tT\n")
fp.close()
self.assertEqual(matrix_to_fasta(fpath, "%s/outfile.txt" % tdir), [">ReferenceAT", ">genome1TT"])
shutil.rmtree(tdir)
def test_matrix_to_fasta_multiple_states(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.filtered.vcf")
fp = open(fpath, "w")
fp.write("LocusID\tReference\tgenome1\tgenome2\n")
fp.write("ADK::1\tA\tT\tT\n")
fp.write("ADK::2\tT\tT\tTT\n")
fp.close()
self.assertEqual(matrix_to_fasta(fpath, "%s/outfile.txt" % tdir), [">ReferenceAT", ">genome1TT", ">genome2TTT"])
shutil.rmtree(tdir)
class Test7(unittest.TestCase):
def test_write_reduced_matrix_basic_function(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.filtered.vcf")
fp = open(fpath, "w")
fp.write("LocusID\tReference\tgenome1\tgenome2\t#SNPcall\n")
fp.write("ADK::1\tA\tT\tT\n")
fp.write("ADK::2\tT\tT\tT\n")
fp.close()
self.assertEqual(write_reduced_matrix(fpath), [4, 4])
shutil.rmtree(tdir)
def test_write_reduced_matrix_odd_field_numbers(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.filtered.vcf")
fp = open(fpath, "w")
fp.write("LocusID\tReference\tgenome1\tgenome2\t#SNPcall\n")
fp.write("ADK::1\tA\tT\tT\n")
fp.write("ADK::2\tT\tT\n")
fp.close()
self.assertEqual(write_reduced_matrix(fpath), [4, 3])
shutil.rmtree(tdir)
class Test8(unittest.TestCase):
def test_make_temp_matrix_basic_function(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.matrix")
fp = open(fpath, "w")
fp.write("LocusID\tReference\tgenome1\tgenome2\n")
fp.write("ADK::1\tA\tT\tT\n")
fp.write("ADK::2\tT\tT\tT\n")
fp.write("ADK::3\tG\tG\tT\n")
fp.close()
vpath = os.path.join(tdir,"testfile.filtered.vcf")
vp = open(vpath, "w")
vp.write("ADK::1\tG\n")
vp.close()
self.assertEqual(make_temp_matrix(vpath,fpath,"test"), {'ADK::1': 'G', 'ADK::2': 'N', 'ADK::3': 'N'})
shutil.rmtree(tdir)
def test_make_temp_matrix_no_matches(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.matrix")
fp = open(fpath, "w")
fp.write("LocusID\tReference\tgenome1\tgenome2\n")
fp.write("ADK::1\tA\tT\tT\n")
fp.write("ADK::2\tT\tT\tT\n")
fp.write("ADK::3\tG\tG\tT\n")
fp.close()
vpath = os.path.join(tdir,"testfile.filtered.vcf")
vp = open(vpath, "w")
vp.write("ADK::6\tA\n")
vp.close()
self.assertEqual(make_temp_matrix(vpath,fpath,"test"), {'ADK::1': 'N', 'ADK::2': 'N', 'ADK::3': 'N', 'ADK::6': 'A'})
shutil.rmtree(tdir)
class Test9(unittest.TestCase):
def test_grab_names_basic_function(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"test.filtered.vcf")
fpath2 = os.path.join(tdir,"name_1.filtered.vcf")
os.chdir("%s" % tdir)
fp = open(fpath, "w")
fp2 = open(fpath2, "w")
fp.close()
fp2.close()
self.assertEqual(grab_names(), ['name_1', 'test'])
os.chdir("%s" % curr_dir)
shutil.rmtree(tdir)
#class Test10(unittest.TestCase):
# def test_process_coverage_basic_function(self):
# tdir = tempfile.mkdtemp(prefix="filetest_",)
# fpath = os.path.join(tdir,"ECOLI_coverage.sample_summary")
# os.chdir("%s" % tdir)
# fp = open(fpath,"w")
# fp.write("sample_id total mean granular_third_quartile granular_median granular_first_quartile %_bases_above_15\n")
# fp.write("ECOLI 2050 3.82 6 5 4 0.0\n")
# fp.write("Total 2050 3.82 N/A N/A N/A")
# fp.close()
# self.assertEqual(process_coverage("ECOLI"), {'ECOLI':'3.82'})
# os.chdir("%s" % curr_dir)
# shutil.rmtree(tdir)
# def test_process_coverage_missing_match(self):
# tdir = tempfile.mkdtemp(prefix="filetest_",)
# fpath = os.path.join(tdir,"ECOLI_coverage.sample_summary")
# os.chdir("%s" % tdir)
# fp = open(fpath,"w")
# fp.write("sample_id total mean granular_third_quartile granular_median granular_first_quartile %_bases_above_15\n")
# fp.write("EOLI 2050 3.82 6 5 4 0.0\n")
# fp.write("Total 2050 3.82 N/A N/A N/A")
# fp.close()
# self.assertRaises(TypeError, process_coverage, "ECOLI")
# os.chdir("%s" % curr_dir)
# shutil.rmtree(tdir)
class Test11(unittest.TestCase):
def test_find_two_report_error(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"all_patristic_distances.txt")
os.chdir("%s" % tdir)
fp = open(fpath,"w")
fp.write("Distance between 'E2348_69_allexternalnucmer' and 'H10407_allexternalnucmer': 1.39030683167\n")
fp.close()
self.assertRaises(TypeError, find_two_new, ['E2348_69_allexternalnucmer'], ['H10407_allexternalnucmer','E2348_69_allexternalnucmer'])
os.chdir("%s" % curr_dir)
shutil.rmtree(tdir)
def test_find_two_basic_function(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"all_patristic_distances.txt")
os.chdir("%s" % tdir)
fp = open(fpath,"w")
fp.write("Distance between 'E2348_69_allexternalnucmer' and 'H10407_allexternalnucmer': 1.39030683167\n")
fp.write("Distance between 'E2348_69_allexternalnucmer' and 'O157_H7_sakai_allexternalnucmer': 4.53192608862\n")
fp.write("Distance between 'E2348_69_allexternalnucmer' and 'Reference': 1.29611949657")
fp.close()
self.assertEqual(find_two_new(fpath, ["E2348_69_allexternalnucmer"]),((('E2348_69_allexternalnucmer', 'O157_H7_sakai_allexternalnucmer', '4.53192608862'), ('E2348_69_allexternalnucmer', 'H10407_allexternalnucmer', '1.39030683167')),(('Reference', 'E2348_69_allexternalnucmer', '1.29611949657'),)))
os.chdir("%s" % curr_dir)
shutil.rmtree(tdir)
def test_find_two_reversed(self):
tdir = tempfile.mkdtemp(prefix="reversed_",)
fpath = os.path.join(tdir,"all_patristic_distances_reversed.txt")
os.chdir("%s" % tdir)
fp = open(fpath,"w")
fp.write("Distance between 'E2348_69_allexternalnucmer' and 'H10407_allexternalnucmer': 1.39030683167\n")
fp.write("Distance between 'E2348_69_allexternalnucmer' and 'O157_H7_sakai_allexternalnucmer': 4.53192608862\n")
fp.write("Distance between 'Reference' and 'E2348_69_allexternalnucmer': 0.0941892612547")
fp.close()
self.assertEqual(find_two_new(fpath, ["E2348_69_allexternalnucmer"]),((('E2348_69_allexternalnucmer', 'O157_H7_sakai_allexternalnucmer', '4.53192608862'), ('E2348_69_allexternalnucmer', 'H10407_allexternalnucmer', '1.39030683167')),(('Reference', 'E2348_69_allexternalnucmer', '0.0941892612547'),)))
os.chdir("%s" % curr_dir)
shutil.rmtree(tdir)
class Test12(unittest.TestCase):
def test_get_closest_dists_basic_function(self):
self.assertEqual(get_closest_dists_new((('ECOLI_ISO2', 'SSON_046_allexternalnucmer', '0.08198920048'), ('ECOLI_ISO2', 'H10407_allexternalnucmer', '1.3087194675e-06')),['ECOLI', 'ECOLI_IS03_L007', 'ECOLI_ISO2']),(['SSON_046_allexternalnucmer0.08198920048', 'H10407_allexternalnucmer1.3087194675e-06']))
class Test13(unittest.TestCase):
def test_calculate_pairwise_tree_dists_basic_function(self):
"""distances were taken directly from Dendropy, run outside of the pipeline"""
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"test.tree")
os.chdir("%s" % tdir)
fp = open(fpath,"w")
fp.write("((H10407_all:0.00000095154411648831,SSON_046_all:0.08004748973386473232):0.09609983129754622044,(E2348_69_all:1.63492542157114217893,O157_H7_sakai_all:4.52711175943011490119):0.00000095154411648831,Reference:0.00000095154411648831):0.0;")
fp.close()
self.assertEqual(calculate_pairwise_tree_dists(fpath, "tmp.out"),[0.08004844127798122, 1.7310271559569212, 4.623213493815894, 0.0961017343857792, 1.8110736941466694, 4.703260032005642, 0.17614827257552745, 6.162037181001257, 1.634927324659375, 4.527113662518348])
os.chdir("%s" % curr_dir)
shutil.rmtree(tdir)
class Test14(unittest.TestCase):
def test_subsample_snps(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"test.matrix")
os.chdir("%s" % tdir)
fp = open(fpath,"w")
fp.write("LocusID\tReference\tgenome1\tgenome2\n")
fp.write("ADK::1\tA\tT\tT\n")
fp.write("ADK::2\tT\tT\tT\n")
fp.write("ADK::3\tG\tG\tT\n")
fp.close()
self.assertEqual(subsample_snps(fpath,{'ECOLI': ['SSON_046_allexternalnucmer', 'H10407_allexternalnucmer', 'E2348_69_allexternalnucmer', 'O157_H7_sakai_allexternalnucmer'], 'ECOLI_ISO2': ['H10407_allexternalnucmer', 'SSON_046_allexternalnucmer', 'E2348_69_allexternalnucmer', 'O157_H7_sakai_allexternalnucmer'], 'ECOLI_IS03_L007': ['SSON_046_allexternalnucmer', 'H10407_allexternalnucmer', 'E2348_69_allexternalnucmer', 'O157_H7_sakai_allexternalnucmer']}, {'ECOLI': 11, 'ECOLI_ISO2': 14, 'ECOLI_IS03_L007': 11},4),['ADK::1','ADK::2','ADK::3'])
os.chdir("%s" % curr_dir)
shutil.rmtree(tdir)
class Test15(unittest.TestCase):
def test_process_temp_matrices_dev(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir, "tmp.tree")
fpath2 = os.path.join(tdir, "all.distances")
os.chdir("%s" % tdir)
fp = open(fpath,"w")
fp2 = open(fpath2, "w")
fp.write("(E2348_69_allexternalnucmer:1.29611826169204280568,O157_H7_sakai_allexternalnucmer:3.23580782692392832089,(Reference:0.00000061743893499046,((QUERY___ECOLI_ISO2:0.000001,H10407_allexternalnucmer:0.00000030871946749523):0.00000030871946749523,((QUERY___ECOLI:0.000001,QUERY___ECOLI_IS03_L007:0.000001):0.0,SSON_046_allexternalnucmer:0.04099394588025907088):0.04099394588025907088):0.09418733509629113876):0.00000061743893499046);")
fp.close()
fp2.write("Distance between 'E2348_69_allexternalnucmer' and 'O157_H7_sakai_allexternalnucmer': 4.53192608862\n")
fp2.write("Distance between 'E2348_69_allexternalnucmer' and 'Reference': 1.29611949657\n")
fp2.write("Distance between 'E2348_69_allexternalnucmer' and 'QUERY___ECOLI_ISO2': 1.39030752295\n")
fp2.write("Distance between 'E2348_69_allexternalnucmer' and 'H10407_allexternalnucmer': 1.39030683167\n")
fp2.write("Distance between 'E2348_69_allexternalnucmer' and 'QUERY___ECOLI': 1.43130116011\n")
fp2.write("Distance between 'E2348_69_allexternalnucmer' and 'QUERY___ECOLI_IS03_L007': 1.43130116011\n")
fp2.write("Distance between 'E2348_69_allexternalnucmer' and 'SSON_046_allexternalnucmer': 1.47229410599\n")
fp2.write("Distance between 'O157_H7_sakai_allexternalnucmer' and 'Reference': 3.2358090618\n")
fp2.write("Distance between 'O157_H7_sakai_allexternalnucmer' and 'QUERY___ECOLI_ISO2': 3.32999708818\n")
fp2.write("Distance between 'O157_H7_sakai_allexternalnucmer' and 'H10407_allexternalnucmer': 3.3299963969\n")
fp2.write("Distance between 'O157_H7_sakai_allexternalnucmer' and 'QUERY___ECOLI': 3.37099072534\n")
fp2.write("Distance between 'O157_H7_sakai_allexternalnucmer' and 'QUERY___ECOLI_IS03_L007': 3.37099072534\n")
fp2.write("Distance between 'O157_H7_sakai_allexternalnucmer' and 'SSON_046_allexternalnucmer': 3.41198367122\n")
fp2.write("Distance between 'Reference' and 'QUERY___ECOLI_ISO2': 0.0941892612547\n")
fp2.write("Distance between 'Reference' and 'H10407_allexternalnucmer': 0.0941885699742\n")
fp2.write("Distance between 'Reference' and 'QUERY___ECOLI': 0.135182898415\n")
fp2.write("Distance between 'Reference' and 'QUERY___ECOLI_IS03_L007': 0.135182898415\n")
fp2.write("Distance between 'Reference' and 'SSON_046_allexternalnucmer': 0.176175844296\n")
fp2.write("Distance between 'QUERY___ECOLI_ISO2' and 'H10407_allexternalnucmer': 1.3087194675e-06\n")
fp2.write("Distance between 'QUERY___ECOLI_ISO2' and 'QUERY___ECOLI': 0.0409962545997\n")
fp2.write("Distance between 'QUERY___ECOLI_ISO2' and 'QUERY___ECOLI_IS03_L007': 0.0409962545997\n")
fp2.write("Distance between 'QUERY___ECOLI_ISO2' and 'SSON_046_allexternalnucmer': 0.08198920048\n")
fp2.write("Distance between 'H10407_allexternalnucmer' and 'QUERY___ECOLI': 0.0409955633192\n")
fp2.write("Distance between 'H10407_allexternalnucmer' and 'QUERY___ECOLI_IS03_L007': 0.0409955633192\n")
fp2.write("Distance between 'H10407_allexternalnucmer' and 'SSON_046_allexternalnucmer': 0.0819885091995\n")
fp2.write("Distance between 'QUERY___ECOLI' and 'QUERY___ECOLI_IS03_L007': 2e-06\n")
fp2.write("Distance between 'QUERY___ECOLI' and 'SSON_046_allexternalnucmer': 0.0409949458803\n")
fp2.write("Distance between 'QUERY___ECOLI_IS03_L007' and 'SSON_046_allexternalnucmer': 0.0409949458803")
fp2.close()
#self.assertEqual(process_temp_matrices(({'ECOLI': ['SSON_046_allexternalnucmer', 'H10407_allexternalnucmer', 'E2348_69_allexternalnucmer', 'O157_H7_sakai_allexternalnucmer'], 'ECOLI_ISO2': ['H10407_allexternalnucmer', 'SSON_046_allexternalnucmer', 'E2348_69_allexternalnucmer', 'O157_H7_sakai_allexternalnucmer'], 'ECOLI_IS03_L007': ['SSON_046_allexternalnucmer', 'H10407_allexternalnucmer', 'E2348_69_allexternalnucmer', 'O157_H7_sakai_allexternalnucmer']},fpath,2,fpath2),(
class Test16(unittest.TestCase):
def test_get_all_snps(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.filtered.vcf")
fp = open(fpath, "w")
fp.write("LocusID\tReference\tgenome1\tgenome2\n")
fp.write("ADK::1\tA\tT\tT\n")
fp.write("ADK::2\tT\tT\n")
fp.close()
self.assertEqual(get_all_snps(fpath), ['ADK::1','ADK::2'])
shutil.rmtree(tdir)
class Test17(unittest.TestCase):
def test_find_used_snps_basic(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.filtered.vcf")
fp = open(fpath, "w")
fp.write("gi|16120353|ref|NC_003143.1|::35501\tG\n")
fp.write("gi|16120353|ref|NC_003143.1|::52924\tC\n")
fp.write("gi|16120353|ref|NC_003143.1|::55551\tN\n")
fp.close()
os.chdir("%s" % tdir)
self.assertEqual(find_used_snps(),{'testfile':2})
os.chdir("%s" % curr_dir)
shutil.rmtree(tdir)
def test_find_used_snps_all_missing(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.filtered.vcf")
fp = open(fpath, "w")
fp.write("gi|16120353|ref|NC_003143.1|::35501\tN\n")
fp.write("gi|16120353|ref|NC_003143.1|::52924\tN\n")
fp.write("gi|16120353|ref|NC_003143.1|::55551\tN\n")
fp.close()
os.chdir("%s" % tdir)
self.assertEqual(find_used_snps(),{'testfile':0})
os.chdir("%s" % curr_dir)
shutil.rmtree(tdir)
def test_find_used_snps_blank_line(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.filtered.vcf")
fp = open(fpath, "w")
fp.write("gi|16120353|ref|NC_003143.1|::35501\tG\n")
fp.write("gi|16120353|ref|NC_003143.1|::52924\tC\n")
fp.write("gi|16120353|ref|NC_003143.1|::55551\t-\n")
fp.write(" n")
fp.close()
os.chdir("%s" % tdir)
self.assertRaises(TypeError, find_used_snps, ())
os.chdir("%s" % curr_dir)
shutil.rmtree(tdir)
class Test18(unittest.TestCase):
def test_branch_lengths_to_decimals(self):
self.assertEqual(branch_lengths_2_decimals("('E2348_69_allexternalnucmer':1.63492542157,'O157_H7_sakai_allexternalnucmer':4.52711175943):9.51544116488e-07,Reference:9.51544116488e-07)"),("('E2348_69_allexternalnucmer':1.634925,'O157_H7_sakai_allexternalnucmer':4.527112):0.000001,Reference:0.000001);"))
class Test19(unittest.TestCase):
def test_parse_likelihoods(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.filtered.vcf")
fp = open(fpath, "w")
fp.write("ECOLI\tI6\t0.298716\t0.298716\n")
fp.write("ECOLI\tI5\t0.298714\t0.597430\n")
fp.write("ECOLI\tI4\t0.298714\t0.896143\n")
fp.close()
self.assertEqual(parse_likelihoods(fpath), {'ECOLI':['0.298716','0.298714','0.298714']})
shutil.rmtree(tdir)
class Test20(unittest.TestCase):
def test_fasta_to_tab(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.fasta")
fp = open(fpath, "w")
fp.write(">id\n")
fp.write("ATGC")
fp.close()
self.assertEqual(fasta_to_tab(fpath), ["id","ATGC"])
shutil.rmtree(tdir)
class Test21(unittest.TestCase):
def test_tab_to_fasta(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.tab")
fp = open(fpath, "w")
fp.write(">id\tATCG\n")
fp.close()
self.assertEqual(tab_to_fasta(fpath), [">id","ATCG"])
shutil.rmtree(tdir)
class Test22(unittest.TestCase):
def test_tab_to_matrix(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.tab")
fp = open(fpath, "w")
fp.write("id\tATCG\n")
fp.write("id2\tTTCG\n")
fp.close()
self.assertEqual(tab_to_matrix(fpath), [['id', 'id2'], ['A', 'T'], ['T', 'T'], ['C', 'C'], ['G', 'G']])
shutil.rmtree(tdir)
class Test23(unittest.TestCase):
def test_prune_fasta(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.tab")
fp = open(fpath, "w")
fp.write(">id\nATCG\n")
fp.write(">id2\nTTCG\n")
fp.close()
self.assertEqual(prune_fasta("id",fpath,"%s/outfile.txt" % tdir), ['id2'])
shutil.rmtree(tdir)
class Test24(unittest.TestCase):
def test_filter_alignment(self):
"""tests to make sure that this function
throws out all characters that it is supposed to"""
tdir = tempfile.mkdtemp(prefix="filetest_",)
fpath = os.path.join(tdir,"testfile.tab")
fp = open(fpath, "w")
fp.write("throw away line\n")
fp.write("X\tA\tN\tX\t-\n")
fp.close()
self.assertEqual(filter_alignment(fpath), [['A']])
shutil.rmtree(tdir)
class Test25(unittest.TestCase):
def test_compare_subsample_results(self):
tdir = tempfile.mkdtemp(prefix="filetest_",)
os.chdir("%s" % tdir)
fpath = os.path.join(tdir,"sample..testfile.tab")
fp = open(fpath, "w")
fp.write("Cluster0 Cluster0 100.00 15 0 0 1 15 1 15 1e-07 30.2")
fp.close()
os.chdir("%s" % curr_dir)
shutil.rmtree(tdir)
os.system("rm tab_matrix tab.filtered out.tab out.fasta")
if __name__ == "__main__":
unittest.main()
main()
<file_sep>/setup.py
#!/usr/bin/env python
from distutils.core import setup
import re
__author__ = "<NAME>"
__credits__ = ["<NAME>"]
__license__ = "GPL v3"
__version__ = "1.0"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
long_description = """WG-FAST, genotype
phylogenetically from a NASP formatted SNP
matrix
"""
setup(name='wg_fast',
version=__version__,
description='whole genome focused array SNP typing',
author=__maintainer__,
author_email=__email__,
maintainer=__maintainer__,
maintainer_email=__email__,
packages=['wg_fast'],
long_description=long_description
)
<file_sep>/wgfast_manual.md
## The whole genome focused array SNP typing (*WG-FAST*) pipeline
### Updated 7/16/2019
#### Citation:
<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME> (2015).
Phylogenetically typing bacterial strains from partial SNP genotypes observed from direct
sequencing of clinical specimen metagenomic data
#### Contact:
Please address queries, concerns, and improvements to jasonsahl at gmail dot com
#### What does *WG-FAST* do?
WG-FAST was designed as a tool to phylogenetically genotype unknown samples, even those
with extremely low read coverage, in the context of a well-studied dataset.
#### What does *WG-FAST* not do?
*WG-FAST* is not intended to identify new SNPs in a dataset (i.e., replace a de novo analysis).
If too many samples are processed with WG-FAST, a phylogenetic discovery bias can exist.
#### Installation
See Readme
#### Unit tests
-To test that all functions are working correctly in *WG-FAST*, type:
```python tests/test_all_functions.py```
#### Required input files
1. **Directory of sequence reads ("-d")**. The reads must be named according to Illumina HiSeq
or MiSeq conventions. Reads must be in the Illumina 1.9+ FastQ format. If you have old
Illumina FASTQ encodings, they must be converted before running *WG-FAST*. **Important: names
must not have periods ".", brackets "[]", or other weird characters "=:" in the header.**
2. **Directory of reference files ("-r")**. This directory should only contain the following
files:
a. **SNP matrix (must end in ".tsv")**. The easiest way to generate this is by using NASP
(https://github.com/TGenNorth/NASP). If other SNP matrix formats are used, the first column must
contain the locus ID (contig::coordinate) and the column following the SNP calls
must be (#SNPcall). For the sub-sampling routine to complete, a genome called 'Reference' must be present
in your matrix (an illustrative snippet follows this list). **Important: sample names must not have periods in the header.**
b. **Phylogeny (must end in .tree)**. A script is included with *WG-FAST* that can generate an appropriate
phylogeny for a NASP matrix (see below). This script also generates a 'Parameters' file, which can be used
with *WG-FAST* and cuts down on the computational time required for each subsequent run. The best way to guarantee downstream
compatibility is to generate the phylogeny with RAxML, which is used by the wgfast_prep.py script described below.
c. **Reference genome in FASTA format (must end in .fasta)**. This should be the same FASTA that was used to call SNPs with NASP.
d. **RAxML parameters file (must end in .PARAMS) (optional)**. If provided, RAxML will run faster because the likelihood has already
been calculated. Make sure that you use the conda version of RAxML for full compatibility.
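For orientation, the fragment below sketches what the top of such a tab-delimited matrix might look like (the locus IDs, sample names, and values are invented, and real NASP matrices carry additional annotation columns; the parts *WG-FAST* relies on are the locus ID column, the sample columns including 'Reference', and the #SNPcall marker column):
```
LocusID	Reference	sample1	sample2	#SNPcall
contig1::1045	A	A	T	1
contig1::2310	G	G	C	2
```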
#### Complete list of arguments:
-h, --help show this help message and exit
-r, REFERENCE_DIR, path to reference directory [REQUIRED]
-d, READ_DIRECTORY, path to directory of fastq.gz files [REQUIRED]
-p, PROCESSORS, # of processors to use - defaults to 2
-c, COVERAGE, minimum SNP coverage required to be called, defaults to 3x
-o, PROPORTION, proportion of alleles to be corrected, defaults to 0.9
-k, KEEP, keep temporary files? Defaults to F (T or F)
-s, SUBSAMPLE, run subsample routine? Defaults to T (T or F)
-n, SUBNUMS, number of subsamples to process, defaults to 100
-g, DOC, run depth of coverage on all files? Defaults to T (T or F)
-e, TMP_DIR, temporary directory path for GATK, default = '/tmp'
-z, insertion method (MP or ML), defaults to ML, MP not well tested
-f, How close does a subsample have to be from true placement, Defaults to 0.1 (Float)
-y, Only run sub-sample routine and exit? Defaults to F (T or F)
-j, MODEL, which model to run with raxml (GTRGAMMA, ASC_GTRGAMMA)
#### Test Data:
1. To give *WG-FAST* a try to make sure everything is installed correctly, check out the test_data
directory.
2. The following command was run from a “run” directory created within the WG-FAST
installation:
```python ../wgfast.py -r ../test_data -d ../test_data/reads/ -p 4 -c 1 -s F```
#### What is *WG-FAST* doing?
*WG-FAST* is a pipeline that wraps other tools together to place the samples into a phylogenetic
context, based on a well-characterized dataset. The order of functions conducted by *WG-FAST*
include:
1. Adapters are trimmed using BBduk. A list of adapters is included in the “bin” directory
(“illumina_adapters_all.txt”) within the *WG-FAST* distribution. This file includes all of the
standard Illumina adapters that I could find, plus additional ones that we use in our
laboratory. If you have different adapters than these, add the sequences into this file when
you run *WG-FAST*. The minimum length of sequences to keep is hard coded as 50, but I
could change this value to reflect different lengths if needed.
2. A dictionary is created from the Reference fasta with Picard Tools and the reference fasta is
indexed with SAMtools.
3. Reads are paired into a single sample based on their names, although single-end reads are
also supported. *WG-FAST* assumes that names are something like "S1_R1_001.fastq.gz" or
"R1_001.fastq.gz". If the name pairings aren't recognized, the reads will be run as single ended.
4. Reads are mapped to the reference with minimap2, using default settings. SNPs are then
called with the Haplotype caller method in GATK v4.
5. If selected, coverage across the reference is calculated with the “DepthOfCoverage” method
in SAMtools.
6. SNPs are compared against the SNPs in your initial SNP matrix. If the SNP is missing from
your VCF file, a “-“ is inserted. Furthermore, if a position fails either the user-defined
thresholds for depth or proportion, the position is replaced with a “-“. The number of
discarded SNPs, or those that didn’t pass the filters, is reported.
7. The new matrix is converted into a multi-fasta and processed with the evolutionary placement
algorithm in RAxML v8, placing the unknown into a user-provided phylogeny. The resulting
tree is converted into Nexus, such that the unknowns can be easily visualized (Red) in
FigTree (http://tree.bio.ed.ac.uk/software/figtree/).
8. For the optional sub-sampling routine, the following functions are performed:
**a.** For each query, the two closest genomes are identified, based on lowest pairwise
patristic distances calculated by DendroPy
**b.** Two matrices are created for each query, one for each of the nearest neighbors. Each
of these contains the name of the query and the name of the neighbor, ending in
“tmp.matrix”. If you generate these files (and keep them), then run *WG-FAST* in “-y T”
mode, these files will be skipped and will not need to be created again.
**c.** Each matrix is then converted into a FASTA file and a parameters file is created with
RAxML. The thought process here is that once the parameters files are created, they
can be used for future comparisons. If you run *WG-FAST* with “-y T”, you can re-use
these parameters files and won’t need to generate again. This method uses the
PTHREADS method to take advantage of multiple processors. Currently, only four
separate parameters files can be generated concurrently; this value is hardcoded into
the script. This is due to the large computational demand required by this method, but
could be made into a tunable parameter, if needed.
**d.** The initial tree is pruned of the genome to be re-inserted. Each sub-sampled neighbor
is then inserted back into the pruned phylogeny, using the GTRGAMMA method, to
stay consistent with the parameters file. The patristic distance is then calculated
between the neighbor and the “Reference”. This is done because if a NASP-formatted
matrix is generated, the reference FASTA file will always be called “Reference”. The
“true” distance is also identified based on the original SNP matrix using all of the
original source data. The sub-sampled distance is then divided by the true distance in
order to get a ratio of placement replication. If this value falls within a user-defined
threshold, then the placement is considered to be correct. For each unknown, the
number of incorrect placements is divided by the total number of placements in order
to get an idea of how stable the placement is in that region of the tree at the level of
SNP density provided by the unknown. The resulting p-value can then be used to
assess the robustness of a placement, based on significance values chosen by the
user.
Try running the script with the subsample routine with:
```python ../wgfast.py -r ../test_data -d ../test_data/reads/ -p 4 -c 1```
#### Output printed to screen:
1. The parameters that you called. Default values are also printed so the run can be
reproduced.
2. Number of callable positions, including polymorphic and monomorphic positions. These are
all positions called in each sample, compared to the reference. This is the number prior to
any filtering due to mixed SNP positions.
3. Number of SNPs. These are the number of observed polymorphisms, based on calls made
by GATK.
4. Number of discarded SNPs. These are polymorphisms that were called by GATK, but were
thrown out because they failed to meet the depth and/or proportion filters.
5. Insertion likelihood values. The higher the likelihood value and the fewer the number of
possible insertion nodes, the more trusted the placement, although caveats exist (see
Manuscript).
6. If sub-sample routine is invoked, information is also available for how often the sub-sample
was placed correctly. A sub-sample is considered to be “correct” by comparing the patristic
distance from the sub-sampled genome to the “Reference”, then comparing that to the
distance between the un-sub-sampled genome to the “Reference”. If this ratio falls within the
“fudge-factor” range, then it is considered to be correct. The number of times that the subsample
falls within this range is divided by the total number of iterations and a p-value is
reported.
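As a purely illustrative walk-through of those sub-sample numbers (all values invented): if a sub-sampled genome is re-inserted at a patristic distance of 0.0102 from "Reference" while the un-sub-sampled genome sits at 0.0100, the ratio is 0.0102/0.0100 = 1.02, which lies inside a 0.1 fudge-factor window around 1.0 and would be scored as a replicated placement; if 95 of 100 iterations land inside that window, the reported p-value is derived from those 95/100 replicated placements. The exact windowing and p-value bookkeeping live in the pipeline code, so treat these numbers only as an orientation.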
### Files generated by *WG-FAST*:
1. “transformed.tree”. This file is a nexus file of the starting tree with the unknowns placed. If
this file is opened with figtree (http://tree.bio.ed.ac.uk/software/figtree/), the unknown isolates
are shown in red.
2. “nasp_matrix_with_unknowns.txt”. This file is a modification of the original NASP matrix with
the unknowns inserted, so the user can identify specific SNPs. Information is lost from this
matrix, so it cannot be used with additional *WG-FAST* runs.
3. “coverage_out.txt”. If you choose to have genome coverage information, the coverage across
the reference genome is provided in this file.
4. “all_patristic_distances.txt”. This is the patristic distance, or tree path distance, between all
pairwise comparisons in your analysis.
### Additional scripts included with *WG-FAST*:
1. wgfast_prep.py
-What does it do? Given a NASP matrix, this script will generate a maximum likelihood
phylogeny with RAxML and will also generate a “parameters” file that can be used for future
*WG-FAST* runs. The use of a parameters cuts down on the computational requirements when
running additional *WG-FAST* runs using the same input files.
-What do you need for the script to run? Requirements include a NASP matrix
-What does the output look like? Two files are produced:
a. “nasp_raxml.tree”. Your tree. Names have been fixed to work with WG-FAST. Make sure
that names do not include periods.
b. “nasp.PARAMS”. Parameters file. Use this with the “-X” flag described above.
c. Example usage:
```python wgfast_prep.py -m nasp.matrix```
-Note: if you run this script with a model separate from ASC_GTRGAMMA, you will also need to
use this model when running the main wgfast script, if you use the parameters file. If you have a
large file, you can run the script using the GTRGAMMA model and multiple processors.
```python wgfast_prep.py -m nasp_matrix.tsv -o GTRGAMMA -p 4```
2. subsample_snps_pearson.py
-What does it do? Given a NASP matrix, the script generates a new matrix over a given number
of iterations at a given level of SNP sampling. The script then conducts a Mantel test using the
Pearson correlation between the original similarity matrix and a matrix sampled at a given SNP
density.
-What do you need for the script to run?
a. NASP nasp_matrix
b. The mothur executable in your path. Mothur can be freely obtained from:
http://www.mothur.org/wiki/Download_mothur
-What does the output look like? One file is generated “results.txt”, that is new-line delimited,
with each line containing a Pearson correlation value (0 to 1).
-Example usage:
```python subsample_snps_pearson.py -m nasp.matrix -s 100```
3. subsample_reads_and_place.py
-What does it do? This script helps sub-sample your SNP matrix and identify how robust specific
regions of the phylogeny are. This script will prune the genome from the phylogeny, randomly
subsample the SNPs at a given level n separate times, then will re-insert into the tree. The
distance between the “correct” placement will be compared to the re-sampled placement. This
is a computationally intensive method and works best with a job management system (see
below).
-What do you need for the script to run? Requirements include:
a. NASP matrix
b. Corresponding phylogeny
c. Name of genome to test
-What does the output look like? For each genome, you will get an output file, showing each
iteration (1st column), the number of correct placements (2nd column) and the best SNP level, if
one is identified. For example:
```100 1```
```200 10```
```optimal value is for Salmonella_enterica_subsp_enterica_serovar_Uganda_str_R8-3404nucmer is 200```
-Example usage:
```python subsample_reads_and_place.py -m matrix.txt -t nasp_raxml.tree -n name -s 100 -o 6 -e 10000```
<file_sep>/wgfast.py
#!/usr/bin/env python
"""
WG-FAST
written by <NAME>
correspondence: <EMAIL>
"""
from optparse import OptionParser
import subprocess
import os
import sys
import errno
import glob
import tempfile
"""modify line below to reflect your installation directory"""
WGFAST_PATH="/Users/jasonsahl/tools/wgfast"
if os.path.exists(WGFAST_PATH):
sys.path.append("%s" % WGFAST_PATH)
else:
print("your WG-FAST path is not correct. Edit the path in wgfast.py and try again")
sys.exit()
try:
from wg_fast.util import *
except:
print("wgfast path needs to be modified in the wgfast.py file")
sys.exit()
def main(reference_dir,read_directory,processors,coverage,proportion,keep,subsample,
subnums,doc,tmp_dir,fudge,only_subs,model,ploidy):
ref_path=os.path.abspath("%s" % reference_dir)
dir_path=os.path.abspath("%s" % read_directory)
"""Test to make sure all required files are present"""
tree = "".join(glob.glob(os.path.join(ref_path, "*.tree")))
tree_list = glob.glob(os.path.join(ref_path, "*.tree"))
if len(tree_list) == 0:
print("You need to provide a tree in your reference directory ending in '.tree'")
sys.exit()
elif len(tree_list) == 1:
pass
elif len(tree_list) > 1:
print("More than one tree ending in '.tree' found in your reference directory...exiting")
sys.exit()
matrix = "".join(glob.glob(os.path.join(ref_path, "*.tsv")))
matrix_list = glob.glob(os.path.join(ref_path, "*.tsv"))
if len(matrix_list) == 0:
print("You need to provide a NASP formatted matrix ending in '.tsv'")
sys.exit()
elif len(matrix_list) > 1:
print("More than one file in your reference directory ending in '.tsv'...exiting")
sys.exit()
reference = "".join(glob.glob(os.path.join(ref_path, "*.fasta")))
reference_list = glob.glob(os.path.join(ref_path, "*.fasta"))
if len(reference_list) == 0:
print("You must provide a REFERENCE FASTA file")
sys.exit()
elif len(reference_list) > 1:
print("More than one file in your reference directory ending in '.fasta'...exiting")
sys.exit()
"""Get the reference information. Outfile is named $name.tmp.txt"""
get_seq_length(reference,"ref")
subprocess.check_call("tr ' ' '\t' < ref.tmp.txt > ref.genome_size.txt", shell=True)
try:
parameters = "".join(glob.glob(os.path.join(ref_path, "*.PARAMS")))
parameters_list = glob.glob(os.path.join(ref_path, "*.PARAMS"))
if len(parameters_list)>1:
print("More than one RAxML parameters file found in your reference directory...exiting")
sys.exit()
elif len(parameters_list) == 0:
parameters = "NULL"
except:
parameters = "NULL"
#check for binary dependencies
logPrint('testing the paths of all dependencies')
ap=os.path.abspath("%s" % os.getcwd())
#This is now part of the conda install
aa = subprocess.call(['which', 'raxmlHPC-SSE3'])
if aa == 0:
pass
else:
print("RAxML must be in your path as raxmlHPC-SSE3")
sys.exit()
print("*citation: '<NAME>. RAxML version 8: a tool for phylogenetic analysis and post-analysis of large phylogenies. Bioinformatics (2014).'")
print("*citation: '<NAME>, <NAME>, <NAME>. Performance, accuracy, and Web server for evolutionary placement of short sequence reads under maximum likelihood. Syst Biol. 2011;60(3):291-302'")
ab = subprocess.call(['which', 'samtools'])
if ab == 0:
pass
else:
print("samtools must be in your path")
sys.exit()
print("*citation: '<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Genome Project Data Processing S. The Sequence Alignment/Map format and SAMtools. Bioinformatics. 2009;25(16):2078-9'")
ac = subprocess.call(['which','minimap2'])
if ac == 0:
pass
else:
print("minimap2 must be in your path")
sys.exit()
"""This is the new test for bbduk.sh"""
ac = subprocess.call(['which','bbduk.sh'])
if ac == 0:
pass
else:
print("bbduk need to be in your path as bbduk.sh")
sys.exit()
#test for new dependencies
variant_tools = ['gatk','picard']
for tool in variant_tools:
ab = subprocess.call(['which', tool])
if ab == 0:
pass
else:
print("%s must be in your path" % tool)
sys.exit()
print("*citation: 'Li H. Aligning sequence reads, clone sequences and assembly contigs with BWA-MEM. arXivorg. 2013(arXiv:1303.3997 [q-bio.GN])'")
print("Patristic distances calculated with DendroPy")
print("*citation: 'Sukumaran J, Holder MT. DendroPy: a Python library for phylogenetic computing. Bioinformatics. 2010;26(12):1569-71. Epub 2010/04/28. doi: 10.1093/bioinformatics/btq228. PubMed PMID: 20421198'")
print("Also uses GATK for variant calling")
print("*citation: '<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, De<NAME>. The Genome Analysis Toolkit: a MapReduce framework for analyzing next-generation DNA sequencing data. Genome research. 2010;20(9):1297-303'")
print("Uses BioPython for FASTA parsing")
print("*citation :Cock PJ, <NAME>, <NAME>T, <NAME>, <NAME>J, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>J. Biopython: freely available Python tools for computational molecular biology and bioinformatics. Bioinformatics. 2009;25(11):1422-3")
print("")
#done checking for dependencies"""
logPrint('WG-FAST pipeline starting')
logPrint("WG-FAST was invoked with the following parameters:")
print("-m %s \\" % "".join(matrix))
print("-t %s \\" % "".join(tree))
print("-r %s \\" % "".join(reference))
print("-d %s \\" % read_directory)
print("-x %s \\" % "".join(parameters))
print("-p %s \\" % processors)
print("-c %s \\" % coverage)
print("-o %s \\" % proportion)
print("-k %s \\" % keep)
print("-s %s \\" % subsample)
print("-n %s \\" % subnums)
print("-g %s \\" % doc)
print("-e %s \\" % tmp_dir)
print("-f %s \\" % fudge)
print("-y %s \\" % only_subs)
print("-j %s \\" % model)
print("-l %s" % ploidy)
print("-------------------------")
#makes a temporary directory
scratch_dir = tempfile.mkdtemp()
check_input_files(matrix,reference)
########Real work starts here############
if only_subs == "T":
pass
else:
subprocess.check_call("cp %s %s/reference.fasta" % (reference,scratch_dir), stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
#index reference file. GATK appears to do this incorrectly"""
subprocess.check_call("samtools faidx %s/reference.fasta" % scratch_dir, stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
subprocess.check_call("picard CreateSequenceDictionary R=%s/reference.fasta" % scratch_dir, stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
#First checkpoint, not sure this saves much time
if os.path.isfile("temp.matrix"):
pass
else:
#write reduced matrix with only the SNP data"""
write_reduced_matrix(matrix)
ref_name=get_seq_name(reference)
if only_subs == "T":
pass
else:
fileSets=read_file_sets(dir_path)
if len(fileSets) == 0:
print("No usable file sets found...exiting")
try:
os.system("rm temp.matrix")
except:
pass
sys.exit()
else:
ref_coords = get_all_snps(matrix)
logPrint("Loop starting")
print("-------------------------")
run_loop_dev(fileSets,dir_path,"%s/reference.fasta" % scratch_dir,processors,
ref_coords,coverage,proportion,matrix,scratch_dir,doc,tmp_dir,WGFAST_PATH,ploidy)
logPrint("Loop finished")
print("-------------------------")
"""will subsample based on the number of SNPs reported by the following function"""
if "T" in doc:
os.system("cat *breadth.txt > breadth_over_%sx_out.txt" % coverage)
os.system("cat *sum_cov.txt> coverage_out.txt")
else:
pass
used_snps=find_used_snps()
#Outnames is required for the sub-sampling routine, even with -y T
outnames=grab_names()
for name in outnames:
for k,v in used_snps.items():
if name == k:
logPrint("number of callable positions in genome %s = %s" % (k,v))
if only_subs == "T":
try:
#Starts with a clean slate, to replace with new EPA algorithm
os.system("rm RAxML*")
except:
pass
pass
else:
create_merged_vcf()
subprocess.check_call("paste temp.matrix merged.vcf > combined.matrix", shell=True)
matrix_to_fasta("combined.matrix", "all.fasta")
os.system("mv combined.matrix %s/nasp_matrix.with_unknowns.txt" % ap)
"""this fixes the SNP output to conform with RAxML"""
os.system("sed 's/://g' all.fasta | sed 's/,//g' > out.fasta")
#QC step here
qc_files("out.fasta",tree)
suffix = run_raxml("out.fasta",tree,"out.classification_results.txt","V",parameters,model,"out")
transform_tree("%s.tree_including_unknowns_noedges.tree" % suffix)
print("")
logPrint("Insertion likelihood values:")
parse_likelihoods("out.classification_results.txt")
print("")
calculate_pairwise_tree_dists("%s.tree_including_unknowns_noedges.tree" % suffix,"all_patristic_distances.txt")
if subsample=="T":
aa = subprocess.call(['which','raxmlHPC-PTHREADS-SSE3'])
if aa == 0:
pass
else:
print("for sub-sample routine, RAxML must be in your path as raxmlHPC-PTHREADS-SSE3")
sys.exit()
print("*citation: '<NAME>. RAxML-VI-HPC: maximum likelihood-based phylogenetic analyses with thousands of taxa and mixed models. Bioinformatics. 2006;22(21):2688-90'")
try:
if os.path.isfile("tmp_patristic_distances.txt"):
pass
else:
os.system("sort -g -k 6 all_patristic_distances.txt | sed 's/://g' > tmp_patristic_distances.txt")
except:
print("all_patrisitic_distances.txt must be in your analysis directory!")
sys.exit()
final_sets,distances=find_two_new("tmp_patristic_distances.txt", outnames)
results = get_closest_dists_new(final_sets,outnames)
logPrint("running subsample routine, forcing GTRGAMMA model")
"""mpshell on this function"""
allsnps = get_all_snps(matrix)
subsample_snps_2(final_sets,used_snps,subnums,allsnps,processors,"temp.matrix")
temp_matrices = glob.glob(os.path.join(ap, "*tmp.matrix"))
final_matrices = []
for matrix in temp_matrices:
final_matrices.append(matrix.replace("%s/" % ap,""))
sample_sets = {}
for matrix in final_matrices:
entries = matrix.split(".")
if entries[0] in sample_sets:
sample_sets[entries[0]].append(entries[2])
else:
sample_sets[entries[0]]=[entries[2]]
new_sample_dicts = {}
for k,v in sample_sets.items():
uniques = []
[uniques.append(item) for item in v if item not in uniques]
new_sample_dicts.update({k:uniques})
logPrint('creating PARAMS file')
create_params_files_dev(new_sample_dicts,tree,"temp.matrix",final_sets,processors)
try:
"""Must make sure that remove previous RAxML files"""
subprocess.check_call("rm RAxML*", shell=True, stderr=open(os.devnull, 'w'))
except:
pass
"""final_matrices does indeed have all of the temp matrices loaded"""
logPrint('adding unknowns to tree')
#TODO: enter progress bar instead of printing out lots of text
process_temp_matrices_2(final_sets,final_matrices,tree,processors,"all_patristic_distances.txt", "V", parameters, model)
print("-------------------------")
compare_subsample_results(outnames,distances,fudge)
else:
pass
#Clean up temporary files
if keep == "T":
pass
else:
for outname in outnames:
try:
subprocess.check_call("rm %s* RAxML* temp.matrix all.fasta out*" % outname, shell=True, stderr=open(os.devnull, 'w'))
except:
pass
os.chdir("%s" % ap)
subprocess.check_call("rm -rf %s" % scratch_dir, shell=True)
logPrint("all done")
if __name__ == "__main__":
parser = OptionParser(usage="usage: %prog [options]",version="%prog 1.3")
parser.add_option("-r", "--reference_directory", dest="reference_dir",
help="path to reference file directory [REQUIRED]",
action="callback", callback=test_dir, type="string")
parser.add_option("-d", "--read_directory", dest="read_directory",
help="path to directory of fastq files [REQUIRED]",
action="callback", callback=test_dir, type="string")
parser.add_option("-p", "--processors", dest="processors",
help="# of processors to use - defaults to 2",
default="2", type="int")
parser.add_option("-c", "--coverage", dest="coverage",
help="minimum SNP coverage required to be called a SNP; defaults to 3",
default="3", type="int")
parser.add_option("-o", "--proportion", dest="proportion",
help="proportion of alleles to be called a SNP, defaults to 0.9",
default="0.9", type="float")
parser.add_option("-k", "--keep", dest="keep",
help="keep temp files? Defaults to F",
action="callback", callback=test_filter, type="string", default="F")
parser.add_option("-s", "--subsample", dest="subsample",
help="Run subsample routine? Defaults to T",
action="callback", callback=test_filter, type="string", default="T")
parser.add_option("-n", "--subnums", dest="subnums",
help="number of subsamples to process, defaults to 100",
action="store", type="int", default="100")
parser.add_option("-g", "--doc", dest="doc",
help="run depth of coverage on all files? Defaults to T",
action="callback", callback=test_filter, type="string", default="T")
parser.add_option("-e", "--temp", dest="tmp_dir",
help="temporary directory for GATK analysis, defaults to /tmp",
action="store", type="string", default="/tmp")
parser.add_option("-f", "--fudge_factor", dest="fudge",
help="How close does a subsample have to be from true placement? Defaults to 0.1",
action="store", type="float", default="0.1")
parser.add_option("-y", "--only_subs", dest="only_subs",
help="Only run sub-sample routine and exit? Defaults to F",
action="callback", callback=test_filter, type="string", default="F")
parser.add_option("-j", "--model", dest="model",
help="which model to run with raxml:GTRGAMMA,ASC_GTRGAMMA",
action="callback", callback=test_models, type="string", default="ASC_GTRGAMMA")
parser.add_option("-l", "--ploidy", dest="ploidy",
help="ploidy to use with GATK, choose from 1 or 2 [DEFAULT]",
action="store", type="int", default="2")
options, args = parser.parse_args()
mandatories = ["reference_dir","read_directory"]
for m in mandatories:
if not options.__dict__[m]:
print("\nMust provide %s.\n" %m)
parser.print_help()
exit(-1)
main(options.reference_dir,options.read_directory,
options.processors,options.coverage,options.proportion,options.keep,options.subsample,
options.subnums,options.doc,options.tmp_dir,options.fudge,
options.only_subs,options.model,options.ploidy)
<file_sep>/tools/wgfast_prep.py
#!/usr/bin/env python
"""from a NASP matrix, creates the tree
necessary for wg-fast to run"""
from optparse import OptionParser
import subprocess
import os
import sys
def test_file(option, opt_str, value, parser):
try:
with open(value): setattr(parser.values, option.dest, value)
except IOError:
print('%s file cannot be opened' % option)
sys.exit()
def test_models(option, opt_str, value, parser):
if "GTRGAMMA" in value:
setattr(parser.values, option.dest, value)
elif "ASC_GTRGAMMA" in value:
setattr(parser.values, option.dest, value)
else:
print("substitution model is not supported")
sys.exit()
def get_field_index(matrix_in):
"""untested function"""
with open(matrix_in) as my_matrix:
firstLine = open(matrix_in).readline().rstrip()
first_fields = firstLine.split("\t")
last=first_fields.index("#SNPcall")
return last
def matrix_to_fasta(matrix_in, last):
"""converts a NASP matrix to fasta format.
Similar to tested function in main script,
but slightly different output"""
reduced = [ ]
out_fasta = open("all.fasta", "w")
with open(matrix_in) as my_matrix:
for line in my_matrix:
fields = line.split("\t")
reduced.append(fields[1:last])
test=map(list, zip(*reduced))
for x in test:
out_fasta.write(">"+str(x[0])+"\n")
out_fasta.write("".join(x[1:])+"\n")
out_fasta.close()
def main(matrix,model,processors,algorithm):
"""determines whether or not raxml is in your path"""
if algorithm == "raxml-ng":
ab = subprocess.call(['which', 'raxml-ng'])
if ab == 0:
pass
else:
print("RAxML must be in your path as raxml-ng")
sys.exit()
elif algorithm == "raxml-HPC":
ab = subprocess.call(['which', 'raxmlHPC-PTHREADS-SSE3'])
if ab == 0:
pass
else:
print("RAxML must be in your path as raxmlHPC-PTHREADS-SSE3")
sys.exit()
last=get_field_index(matrix)
matrix_to_fasta(matrix, last)
#Prep the creation of the FASTA file, removing odd characters
os.system("sed 's/://g' all.fasta | sed 's/,//g' > out.fasta")
if model == "ASC_GTRGAMMA":
subprocess.check_call("raxmlHPC-SSE3 -f d -p 12345 -m %s -s out.fasta -n nasp --asc-corr=lewis --no-bfgs > /dev/null 2>&1" % model, stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
subprocess.check_call("raxmlHPC-SSE3 -f e -m %s -s out.fasta -t RAxML_bestTree.nasp -n PARAMS --asc-corr=lewis --no-bfgs > /dev/null 2>&1" % model, stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
else:
if algorithm == "raxml-HPC":
subprocess.check_call("raxmlHPC-PTHREADS-SSE3 -T %s -f d -p 12345 -m %s -s out.fasta -n nasp --no-bfgs > /dev/null 2>&1" % (processors,model), stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
subprocess.check_call("raxmlHPC-PTHREADS-SSE3 -T %s -f e -m %s -s out.fasta -t RAxML_bestTree.nasp -n PARAMS --no-bfgs > /dev/null 2>&1" % (processors,model), stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
elif algorithm == "raxml-ng":
subprocess.check_call("raxml-ng --msa out.fasta --model GTR+G --threads %s --prefix nasp" % processors,stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
if algorithm == "raxml-HPC":
subprocess.check_call("mv RAxML_bestTree.nasp nasp_raxml.tree", shell=True)
subprocess.check_call("mv RAxML_binaryModelParameters.PARAMS nasp.PARAMS", shell=True)
subprocess.check_call("rm RAxML_* out.fasta all.fasta", shell=True)
else:
subprocess.check_call("mv nasp.raxml.bestTree nasp_raxml.tree", stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
subprocess.check_call("rm nasp.raxml.startTree out.fasta all.fasta", stdout=open(os.devnull, 'wb'),stderr=open(os.devnull, 'wb'),shell=True)
print("Model used: %s" % model)
if __name__ == "__main__":
usage="usage: %prog [options]"
parser = OptionParser(usage=usage)
parser.add_option("-m", "--snp_matrix", dest="matrix",
help="path to NASP snp_matrix [REQUIRED]",
action="callback", callback=test_file, type="string")
parser.add_option("-o", "--model", dest="model",
help="model for RAxML, can be ASC_GTRGAMMA or GTRGAMMA [DEFAULT]",
action="callback", callback=test_models, type="string", default="GTRGAMMA")
parser.add_option("-p", "--processors", dest="processors",
help="number of processors to use with GTRGAMMA, defaults to 2",
action="store", type="int", default="2")
parser.add_option("-a", "--algorithm", dest="algorithm",
help="algorithm to use, either raxml-ng or raxml-HPC",
action="store", type="string", default="raxml-ng")
options, args = parser.parse_args()
mandatories = ["matrix"]
for m in mandatories:
if not options.__dict__[m]:
print("\nMust provide %s.\n" %m)
parser.print_help()
exit(-1)
main(options.matrix,options.model,options.processors,options.algorithm)
<file_sep>/wg_fast/__init__.py
#!/usr/bin/env python
__author__ = "<NAME>"
__credits__ = ["<NAME>"]
__license__ = "GPL"
__version__ = "3"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
__all__ = ['util']
| 774bb03111eb76abcfb7d726e1cfc7939ae1df1e | [
"Markdown",
"Python"
] | 9 | Markdown | jasonsahl/wgfast | 040e68652bfc67e0bb9409758e4c17d8862fd8e0 | 17e76c56cd50cd23ac9950c0dcab84b5493ac694 | |
refs/heads/master | <file_sep># Be sure to restart your server when you modify this file.
ScaffoldingTodo::Application.config.session_store :cookie_store, key: '_Scaffolding_Todo_session'
<file_sep>class UsersController < ApplicationController
before_filter :authenticate_user!
private
def correct_user
@user = User.find_by_id(params[:id])
redirect_to(root_path) unless current_user?(@user)
end
end<file_sep>module SessionsHelper1
def current_user?(user)
current_user==user
end
def correct_user(user_id)
@user = User.find(user_id)
redirect_to root_path unless current_user?(@user)
end
end<file_sep>json.extract! @todo, :id, :todo_item, :created_at, :updated_at
<file_sep>todoapp
=======
Intention:
- A user can sign up and sign in
- A user can create a new ToDo
- A user can see only his own ToDos
- A user can mark a ToDo as "done"
Bonus:
- A user can see all his own todos and mark one as done on a single page by js/ajax
- There is ActiveAdmin as an admin Interface to manage all Todos of all users
Requirements:
- Devise should be used for logging in and registration
- A user "A" cannot see the ToDos of user "B" (see the sketch below)
"Markdown",
"Ruby"
] | 5 | Ruby | ninabreznik/todoapp | 08492d6955876ab19c49580a1d4882038008c5cd | 1d4e1c8f367124abfb0d29b28f7fcc4158401c8e | |
refs/heads/main | <repo_name>pypdeveloper/counter-app<file_sep>/src/App.js
import { useState } from "react";
import logo from "./logo.svg";
import "./App.css";
function App() {
const [counter, setCounter] = useState(0);
return (
<div className="App">
<img src={logo} className="App-logo" alt="logo" />
<h1>React</h1>
<p>{counter}</p>
<button className="button" onClick={() => setCounter(counter + 1)}>
Increase counter
</button>
<button className="button" onClick={() => setCounter(counter - counter)}>
Reset counter
</button>
</div>
);
}
export default App;
| 545b61c85c2f54ee99ec62042c326147dc4996fe | [
"JavaScript"
] | 1 | JavaScript | pypdeveloper/counter-app | a27debbbdd5e3169b80e8317fbf941f0e37b2a9e | 2dde2dbf0716265e3d83599eadaee88be4e21725 | |
refs/heads/main | <file_sep>const Engine = Matter.Engine;
const World = Matter.World;
const Bodies = Matter.Bodies;
const Constraint = Matter.Constraint;
var backImg;
var supHero,b1,b2,b3,b4,b5,b6,b7,b8,b9,b10,b11,b12,b13,b14,b15,b16,b17,b18,b19,b20,m1,m2;
function preload()
{
//preload the images here
backImg = loadImage("GamingBackground.png");
// m1Img = loadImage("Monster-01.png");
// m2Img = loadImage("Monster-02.png");
}
function setup()
{
createCanvas(3000, 800);
rectMode(CENTER);
engine = Engine.create();
world = engine.world;
// create sprites here
ground = new Ground(600,600,1200,20);
supHero = new Hero(200,500,150);
m1 = new Monster(500,500,50,50);
b1 = new Block(500,520,60,60);
//b1.addImage(m1Img);
b2 = new Block(500,535,60,60);
// b2.addImage(m1Img);
b3 = new Block(500,545,60,60);
// b3.addImage(m1Img);
b4 = new Block(500,555,60,60);
// b4.addImage(m1Img);
b5 = new Block(500,565,60,60);
// b5.addImage(m1Img);
b6 = new Block(500,575,60,60);
// b6.addImage(m1Img);
b7 = new Block(500,585,60,60);
// b7.addImage(m1Img);
b8 = new Block(515,520,60,60);
// b8.addImage(m1Img);
b9 = new Block(515,535,60,60);
// b9.addImage(m1Img);
b10 = new Block(515,545,60,60);
// b10.addImage(m1Img);
b11 = new Block(515,555,60,60);
// b11.addImage(m2Img);
b12 = new Block(515,565,60,60);
// b12.addImage(m2Img);
b13 = new Block(515,575,60,60);
// b13.addImage(m2Img);
b14 = new Block(515,585,60,60);
// b14.addImage(m2Img);
b15 = new Block(530,520,60,60);
// b15.addImage(m2Img);
b16 = new Block(530,535,60,60);
//b16.addImage(m2Img);
b17 = new Block(530,545,60,60);
// b17.addImage(m2Img);
b18 = new Block(530,555,60,60);
// b18.addImage(m2Img);
b19 = new Block(530,565,60,60);
// b19.addImage(m2Img);
b20 = new Block(530,575,60,60);
// b20.addImage(m2Img);
}
function draw()
{
background(backImg);
Engine.update(engine);
ground.display();
supHero.display();
m1.display();
b1.display();
b2.display();
b3.display();
b4.display();
b5.display();
b6.display();
b7.display();
b8.display();
b9.display();
b10.display();
b11.display();
b12.display();
b13.display();
b14.display();
b15.display();
b16.display();
b17.display();
b18.display();
b19.display();
b20.display();
}
function mouseDragged()
{
Matter.Body.setPosition(supHero.body, {x:mouseX, y:mouseY});
}
| c2861830139cdadbd329420b6138e1fbc67b3db8 | [
"JavaScript"
] | 1 | JavaScript | JVSRUTHVIK/PROJECT34 | 91320c76c8f061043e9336868de819526846ac86 | 18f8b25efc38eeabf4ddbd9917461a2aaa0d621b | |
refs/heads/master | <repo_name>moisesb/ImageLoader<file_sep>/app/src/main/java/br/com/moisesborges/imageloader/imagesearch/ImagesAdapter.java
package br.com.moisesborges.imageloader.imagesearch;
import android.support.annotation.NonNull;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import com.squareup.picasso.Callback;
import com.squareup.picasso.Picasso;
import java.util.ArrayList;
import java.util.List;
import br.com.moisesborges.imageloader.R;
import br.com.moisesborges.imageloader.models.Image;
/**
* Created by moises.anjos on 15/12/2016.
*/
public class ImagesAdapter extends RecyclerView.Adapter<ImagesAdapter.ViewHolder> {
private List<Image> mImages = new ArrayList<>();
private Picasso mPicasso;
public ImagesAdapter(Picasso picasso) {
mPicasso = picasso;
}
public void clearImages() {
mImages.clear();
notifyDataSetChanged();
}
public void addImage(@NonNull Image image) {
mImages.add(image);
notifyItemInserted(mImages.size() - 1);
}
public boolean isEmpty() {
return mImages.size() == 0;
}
@Override
public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View layout = LayoutInflater.from(parent.getContext())
.inflate(R.layout.image_view_group_item, parent, false);
return new ViewHolder(layout);
}
@Override
public void onBindViewHolder(ViewHolder holder, int position) {
final Image image = mImages.get(position);
mPicasso.load(image.getThumbnailUrl())
.fit()
.centerCrop()
.error(R.drawable.ic_image_broken_variant_grey600_36dp)
.into(holder.mImageView, new Callback() {
@Override
public void onSuccess() {
}
@Override
public void onError() {
Log.d("Adapter", "could not load " + image.getContentUrl());
}
});
}
@Override
public int getItemCount() {
return mImages.size();
}
public class ViewHolder extends RecyclerView.ViewHolder {
final ImageView mImageView;
public ViewHolder(View itemView) {
super(itemView);
mImageView = (ImageView) itemView.findViewById(R.id.image_view);
}
}
}
<file_sep>/app/src/main/java/br/com/moisesborges/imageloader/di/ServiceModule.java
package br.com.moisesborges.imageloader.di;
import javax.inject.Singleton;
import br.com.moisesborges.imageloader.network.BingImageApi;
import br.com.moisesborges.imageloader.network.PixabayApi;
import br.com.moisesborges.imageloader.services.ImageService;
import dagger.Module;
import dagger.Provides;
/**
* Created by moises.anjos on 14/12/2016.
*/
@Module
public class ServiceModule {
@Provides
@Singleton
public ImageService providesImageService(BingImageApi bingImageApi, PixabayApi pixabayImageService) {
return new ImageService(bingImageApi, pixabayImageService);
}
}
| 8064707ce95f723eaadeba38645914bfb83def0e | [
"Java"
] | 2 | Java | moisesb/ImageLoader | e57e4b1000733f96917b855aa3071209be1cef6b | 209e6b51a26ab72a2d145cd96792d094df8c6de9 | |
refs/heads/master | <file_sep># Chika-CA-SS
My graduate thesis project, developed in cooperation with Chika Corporation 智家 (チカ).
📍 With 4 members (<NAME>, <NAME>, <NAME>, Hoai Phuc), Chika Smart Home Joint Stock Company - 智 家 (チ カ) specializes in developing solutions and devices for smart homes.
👉 Some product list:
- Infrared controller: CA - IRX.
- Touch switch: CA - SWW and CA - SWR
- Central control unit: CA - HC.
- Sensors system: CA - SS.


⚙️ My sample product's schematics - CA-SS 💎
















Author: <NAME>
<file_sep>#include <Arduino.h>
#include <ESP8266WiFi.h>
#include <PubSubClient.h>
#include <Ticker.h>
#include <ArduinoJson.h>
#include <SPI.h>
/*
****** From SS01 to MQTT ******
+ Topic MQTT: CA-SS01’s id.
+ Message format:
{"alert":boolean, "state":boolean}
****** From MQTT callback to SS01 ******
+ Topic MQTT: CA-SS01’s id.
+ Message format: 1/0
1: turn alert on and if state door == 1 --> turn on warning
0: turn alert off and dont care about state door --> just send to MQTT state of door
*/
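// Illustrative traffic (example values only): opening the door while the alert is armed makes this
// sketch publish {"alert":true,"state":true} on the topic equal to SS01_id, and publishing the single
// character '1' or '0' back to that same topic arms or disarms the alert remotely.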
#define ledR 16
#define ledB 5
#define btn_config 4
#define mc35 9 //SD2
#define btn_allow 14
#define alarm 12
#define alarmLed 13
uint32_t timer = 0;
uint16_t longPressTime = 6000;
boolean buttonActive = false;
boolean sensorVal = false;
boolean sensorLoop = false;
boolean initialSensor = false;
boolean allowAlarm = false;
boolean buttonValue = false;
boolean door = false;
boolean doorLoop = false;
const char *mqtt_server = "chika.gq";
const int mqtt_port = 2502;
const char *mqtt_user = "chika";
const char *mqtt_pass = "<PASSWORD>";
const char *SS01_id = "bc84a8b9-e11a-4cd1-bc5c-b6d957880cbe";
//const char *doorState = "CA-Security";
//const char *doorCommand = "CA-Security/control";
Ticker ticker;
WiFiClient esp;
PubSubClient client(esp);
void tick();
void tick2();
void exitSmartConfig();
boolean startSmartConfig();
void longPress();
void callback(char *topic, byte *payload, unsigned int length);
void reconnect();
boolean checkSensor();
void checkButton();
void tickAlarm();
void setup()
{
Serial.begin(115200);
WiFi.setAutoConnect(true); // auto connect when start
WiFi.setAutoReconnect(true); // auto reconnect the old WiFi when leaving internet
pinMode(ledR, OUTPUT); // led red set on
pinMode(ledB, OUTPUT); // led blue set on
pinMode(btn_config, INPUT); // btn_config is ready
pinMode(btn_allow, INPUT_PULLUP); // button allow alarm ready
pinMode(alarm, OUTPUT);
pinMode(alarmLed, OUTPUT);
ticker.attach(1, tick2); // initial led show up
Serial.println("Waiting for Internet ...");
uint16_t i = 0;
digitalWrite(alarm, LOW);
digitalWrite(alarmLed, LOW);
while (!WiFi.isConnected()) // check WiFi is connected
{
i++;
delay(100);
if (i >= 100) // timeout and break while loop
break;
}
if (!WiFi.isConnected()) // still not connected
{
startSmartConfig();
}
else
{
ticker.detach(); // shutdown ticker
digitalWrite(ledR, LOW); // show led
digitalWrite(ledB, HIGH);
Serial.println("WIFI CONNECTED");
Serial.println(WiFi.SSID());
Serial.print("IP: ");
Serial.println(WiFi.localIP());
}
delay(1000);
client.setServer(mqtt_server, mqtt_port);
client.setCallback(callback);
pinMode(mc35, INPUT_PULLUP); // sensor is ready
}
void loop()
{
longPress();
if (WiFi.status() == WL_CONNECTED)
{
digitalWrite(ledB, HIGH);
digitalWrite(ledR, LOW);
if (client.connected())
{
client.loop();
// do something here
checkButton(); // Turn on or off alert?!
door = checkSensor(); // Check state of door?!
Serial.print("Door state: ");
Serial.println(door);
if (allowAlarm)
{
digitalWrite(alarmLed, HIGH); //turn on led
if (door || doorLoop)
{
doorLoop = true;
digitalWrite(alarm, HIGH);
delay(200);
}
}
else
{
doorLoop = false;
ticker.detach();
digitalWrite(alarm, LOW);
digitalWrite(alarmLed, LOW); //turn off led
}
}
else
{
reconnect();
}
}
else
{
Serial.println("WiFi Connected Fail");
WiFi.reconnect();
digitalWrite(ledB, LOW);
boolean state = digitalRead(ledR);
digitalWrite(ledR, !state);
delay(500);
}
delay(100);
}
void tickAlarm()
{
boolean state = digitalRead(alarm);
digitalWrite(alarm, !state);
}
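// Reads the reed switch and, only when the state changes, publishes the new {"alert","state"} JSON to MQTT; returns the current door state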
boolean checkSensor()
{
String sendMQTT;
char payload_sendMQTT[300];
StaticJsonDocument<300> JsonDoc;
sensorVal = digitalRead(mc35);
if (sensorVal != sensorLoop)
{
if (sensorVal)
{
JsonDoc["alert"] = allowAlarm;
JsonDoc["state"] = true;
serializeJson(JsonDoc, sendMQTT);
sendMQTT.toCharArray(payload_sendMQTT, sendMQTT.length() + 1);
client.publish(SS01_id, payload_sendMQTT, true);
sensorLoop = sensorVal;
return true;
}
else
{
JsonDoc["alert"] = allowAlarm;
JsonDoc["state"] = false;
serializeJson(JsonDoc, sendMQTT);
sendMQTT.toCharArray(payload_sendMQTT, sendMQTT.length() + 1);
client.publish(SS01_id, payload_sendMQTT, true);
sensorLoop = sensorVal;
return false;
}
}
else
{
return sensorVal;
}
}
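// Waits for the local arm/disarm button to be released, toggles allowAlarm, and reports the new state over MQTT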
void checkButton()
{
buttonValue = digitalRead(btn_allow);
//Serial.println(buttonValue);
if (!buttonValue)
{
while (!buttonValue)
{
buttonValue = digitalRead(btn_allow);
delay(100);
}
allowAlarm = !allowAlarm;
String sendMQTT;
char payload_sendMQTT[300];
StaticJsonDocument<300> JsonDoc;
JsonDoc["state"] = door;
JsonDoc["alert"] = allowAlarm;
serializeJson(JsonDoc, sendMQTT);
sendMQTT.toCharArray(payload_sendMQTT, sendMQTT.length() + 1);
client.publish(SS01_id, payload_sendMQTT, true);
}
}
void reconnect()
{
Serial.println("Attempting MQTT connection ...");
String clientId = "ESP8266Client-testX";
clientId += String(random(0xffff), HEX);
if (client.connect(clientId.c_str(), mqtt_user, mqtt_pass))
{
Serial.println("connected");
client.subscribe(SS01_id);
}
else
{
Serial.print("MQTT Connected Fail, rc = ");
Serial.print(client.state());
Serial.println("try again in 5 seconds");
}
}
void callback(char *topic, byte *payload, unsigned int length)
{
Serial.print("Message arrived [");
Serial.print(topic);
Serial.print("] ");
  String payload_toStr;
String mtopic = (String)topic;
for (uint16_t i = 0; i < length; i++)
{
payload_toStr += (char)payload[i];
}
Serial.println(payload_toStr);
if (String(topic).equals(SS01_id))
{
if (payload[0] == '1')
{
allowAlarm = true;
}
else if (payload[0] == '0')
{
allowAlarm = false;
}
}
}
void tick()
{
boolean state = digitalRead(ledR);
digitalWrite(ledR, !state);
}
void tick2()
{
boolean state = digitalRead(ledR);
digitalWrite(ledR, !state);
digitalWrite(ledB, !state);
}
void exitSmartConfig()
{
WiFi.stopSmartConfig();
ticker.detach();
digitalWrite(ledR, LOW);
digitalWrite(ledB, HIGH);
}
boolean startSmartConfig()
{
uint16_t t = 0;
Serial.println("On SmartConfig ");
WiFi.beginSmartConfig();
delay(500);
ticker.attach(0.1, tick);
while (WiFi.status() != WL_CONNECTED)
{
t++;
Serial.print(".");
delay(500);
if (t > 100)
{
Serial.println("Smart Config fail");
ticker.attach(0.5, tick);
delay(3000);
exitSmartConfig();
return false;
}
}
Serial.println("WiFi connected ");
Serial.print("IP :");
Serial.println(WiFi.localIP());
Serial.println(WiFi.SSID());
exitSmartConfig();
return true;
}
void longPress()
{
// Serial.println(digitalRead(btn_config));
if (digitalRead(btn_config) == HIGH)
{
if (buttonActive == false)
{
buttonActive = true;
timer = millis();
Serial.println(timer);
}
if (millis() - timer > longPressTime)
{
Serial.println("SmartConfig Start");
digitalWrite(ledB, LOW);
startSmartConfig();
}
}
else
{
buttonActive = false;
}
}<file_sep>#include <Arduino.h>
#include <MQ135.h>
#include <Adafruit_Sensor.h>
#include <DHT.h>
#include <SPI.h>
#include <RF24.h>
#define MQ135_pin A2
#define dht_pin A1
#define dht_type DHT11
#define ledR 3
#define ledG 5
#define ledB 6
#define CE 9
#define CSN 10
RF24 radio(CE, CSN);
MQ135 mq135_sensor = MQ135(MQ135_pin);
DHT dht(dht_pin, dht_type);
const uint64_t address = 1002502019005;
// const byte address[6] = "00008";
float sensorValue[3];
uint16_t timer = 0;
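// The show* helpers below fade the RGB status LED between colours; loop() uses them to signal the air-quality band (blue = low ppm, green = mid, red = high)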
void showBlue()
{
analogWrite(ledR, 0);
analogWrite(ledG, 255);
analogWrite(ledB, 0);
for (int i = 0; i <= 255; i++)
{
analogWrite(ledB, i);
analogWrite(ledG, 255 - i);
delay(3);
}
}
void showRed()
{
analogWrite(ledR, 0);
analogWrite(ledG, 255);
analogWrite(ledB, 0);
for (int i = 0; i <= 255; i++)
{
analogWrite(ledR, i);
analogWrite(ledG, 255 - i);
delay(3);
}
}
void showRed2Green()
{
analogWrite(ledR, 255);
analogWrite(ledG, 0);
analogWrite(ledB, 0);
for (int i = 0; i <= 255; i++)
{
analogWrite(ledG, i);
analogWrite(ledR, 255 - i);
analogWrite(ledB, 0);
delay(3);
}
}
void showBlue2Green()
{
analogWrite(ledR, 0);
analogWrite(ledG, 0);
analogWrite(ledB, 255);
for (int i = 0; i <= 255; i++)
{
analogWrite(ledG, i);
analogWrite(ledB, 255 - i);
analogWrite(ledR, 0);
delay(3);
}
}
void setup()
{
Serial.begin(9600);
dht.begin();
pinMode(ledR, OUTPUT);
pinMode(ledG, OUTPUT);
pinMode(ledB, OUTPUT);
//=================RF=====================
SPI.begin();
radio.begin();
radio.setRetries(15, 15);
radio.setPALevel(RF24_PA_MAX);
radio.openWritingPipe(address);
//========================================
}
void loop()
{
float h = dht.readHumidity();
float t = dht.readTemperature();
String output;
output += F("temperature : ");
output += t;
output += F("\t");
output += F("humidity :");
output += h;
Serial.println(output);
  float rzero = mq135_sensor.getRZero();                       // baseline calibration resistance (RZero)
  float correctedRZero = mq135_sensor.getCorrectedRZero(t, h); // RZero corrected for temperature and humidity
  float resistance = mq135_sensor.getResistance();             // current sensor resistance
  float ppm = mq135_sensor.getPPM();                           // uncorrected ppm reading
  float correctedPPM = mq135_sensor.getCorrectedPPM(t, h);     // ppm corrected for temperature and humidity
Serial.print("MQ135 RZero: ");
Serial.print(rzero);
Serial.print("\t Corrected RZero: ");
Serial.print(correctedRZero);
Serial.print("\t Resistance: ");
Serial.print(resistance);
Serial.print("\t PPM: ");
Serial.print(ppm);
Serial.print("\t Corrected PPM: ");
Serial.print(correctedPPM);
Serial.println("ppm");
Serial.println();
sensorValue[0] = t;
sensorValue[1] = h;
sensorValue[2] = correctedPPM;
timer++;
if(timer > 80){
Serial.println("send ...");
radio.write(sensorValue, sizeof(sensorValue));
timer = 0;
delay(1000);
}
if (correctedPPM < 1.5)
{
if (!digitalRead(ledB))
{
radio.write(sensorValue, sizeof(sensorValue));
showBlue();
}
}
else if (correctedPPM >= 1.5 && correctedPPM < 4.2)
{
if (!digitalRead(ledG))
{
radio.write(sensorValue, sizeof(sensorValue));
if (!digitalRead(ledR))
showBlue2Green();
else if (!digitalRead(ledB))
showRed2Green();
}
}
else if (correctedPPM >= 4.2)
{
if (!digitalRead(ledR))
{
radio.write(sensorValue, sizeof(sensorValue));
showRed();
}
}
delay(100);
}
<file_sep>#include <Arduino.h>
#include <ESP8266WiFi.h>
#include <PubSubClient.h>
#include <Ticker.h>
#define ledR 16
#define ledB 5
#define btn_config 4
#define mc35 0
#define btn_allow 14
#define alarm 12
#define alarmLed 13
uint32_t timer = 0;
uint16_t longPressTime = 6000;
boolean buttonActive = false;
boolean sensorVal = false;
boolean sensorLoop = false;
boolean initialSensor = false;
boolean allowAlarm = false;
boolean buttonValue = false;
boolean door = false;
boolean doorLoop = false;
const char *mqtt_server = "chika.gq";
const int mqtt_port = 2502;
const char *mqtt_user = "chika";
const char *mqtt_pass = "<PASSWORD>";
const char *doorState = "CA-Security";
const char *doorCommand = "CA-Security/control";
Ticker ticker;
WiFiClient esp;
PubSubClient client(esp);
void tick();
void tick2();
void exitSmartConfig();
boolean startSmartConfig();
void longPress();
void callback(char *topic, byte *payload, unsigned int length);
void reconnect();
boolean checkSensor();
void checkButton();
void tickAlarm();
void setup()
{
Serial.begin(115200);
WiFi.setAutoConnect(true); // auto connect when start
WiFi.setAutoReconnect(true); // auto reconnect the old WiFi when leaving internet
pinMode(ledR, OUTPUT); // led red set on
pinMode(ledB, OUTPUT); // led blue set on
pinMode(btn_config, INPUT); // btn_config is ready
pinMode(mc35, INPUT_PULLUP); // sensor is ready
pinMode(btn_allow, INPUT_PULLUP); // button allow alarm ready
pinMode(alarm, OUTPUT);
pinMode(alarmLed, OUTPUT);
ticker.attach(1, tick2); // initial led show up
Serial.println("Waiting for Internet");
uint16_t i = 0;
digitalWrite(alarm, HIGH);
digitalWrite(alarmLed, HIGH);
while (!WiFi.isConnected()) // check WiFi is connected
{
i++;
delay(100);
if (i >= 100) // timeout and break while loop
break;
}
if (!WiFi.isConnected()) // still not connected
{
startSmartConfig(); // start Smartconfig
}
else
{
ticker.detach(); // shutdown ticker
digitalWrite(ledR, LOW); // show led
digitalWrite(ledB, HIGH);
Serial.println("WIFI CONNECTED");
Serial.println(WiFi.SSID());
Serial.print("IP: ");
Serial.println(WiFi.localIP());
}
client.setServer(mqtt_server, mqtt_port);
client.setCallback(callback);
}
void loop()
{
longPress();
if (WiFi.status() == WL_CONNECTED)
{
digitalWrite(ledB, HIGH);
digitalWrite(ledR, LOW);
if (client.connected())
{
client.loop();
// do something here
door = checkSensor();
checkButton();
if (allowAlarm)
{
digitalWrite(alarmLed, LOW); //turn on led
if (door || doorLoop)
{
doorLoop = true;
tickAlarm();
delay(200);
}
}
else
{
doorLoop = false;
ticker.detach();
digitalWrite(alarm, HIGH);
digitalWrite(alarmLed, HIGH); //turn off led
}
}
else
{
reconnect();
}
}
else
{
Serial.println("WiFi Connected Fail");
WiFi.reconnect();
digitalWrite(ledB, LOW);
boolean state = digitalRead(ledR);
digitalWrite(ledR, !state);
delay(500);
}
delay(100);
}
void tickAlarm()
{
boolean state = digitalRead(alarm);
digitalWrite(alarm, !state);
}
boolean checkSensor()
{
  sensorVal = digitalRead(mc35);
  if (!initialSensor)
  {
    // first reading after boot: publish the initial door state once
    initialSensor = true;
    if (sensorVal)
    {
      sensorLoop = true;
      client.publish(doorState, "OPEN");
      return true;
    }
    else
    {
      sensorLoop = false;
      client.publish(doorState, "CLOSE");
      return false;
    }
  }
  if (sensorVal && !sensorLoop)
  {
    client.publish(doorState, "OPEN");
    sensorLoop = true;
    return true;
  }
  else if (!sensorVal && sensorLoop)
  {
    client.publish(doorState, "CLOSE");
    sensorLoop = false;
    return false;
  }
  return sensorVal; // no change since the last reading
}
void checkButton()
{
buttonValue = digitalRead(btn_allow);
Serial.println(buttonValue);
if (!buttonValue)
{
while (!buttonValue)
{
buttonValue = digitalRead(btn_allow);
delay(100);
}
allowAlarm = !allowAlarm;
}
}
void reconnect()
{
Serial.println("Attempting MQTT connection ...");
String clientId = "ESP8266Client-testX";
clientId += String(random(0xffff), HEX);
if (client.connect(clientId.c_str(), mqtt_user, mqtt_pass))
{
Serial.println("connected");
client.subscribe(doorCommand);
}
else
{
Serial.print("MQTT Connected Fail, rc = ");
Serial.print(client.state());
Serial.println("try again in 5 seconds");
}
}
void callback(char *topic, byte *payload, unsigned int length)
{
Serial.print("Message arrived [");
Serial.print(topic);
Serial.print("] ");
String data;
String mtopic = (String)topic;
for (uint16_t i = 0; i < length; i++)
{
data += (char)payload[i];
}
Serial.println(data);
  if (String(topic).equals(doorCommand)){
if(data == "ON") allowAlarm = true;
else allowAlarm = false;
}
}
void tick()
{
boolean state = digitalRead(ledR);
digitalWrite(ledR, !state);
}
void tick2()
{
boolean state = digitalRead(ledR);
digitalWrite(ledR, !state);
digitalWrite(ledB, !state);
}
void exitSmartConfig()
{
WiFi.stopSmartConfig();
ticker.detach();
digitalWrite(ledR, LOW);
digitalWrite(ledB, HIGH);
}
boolean startSmartConfig()
{
uint16_t t = 0;
Serial.println("On SmartConfig ");
WiFi.beginSmartConfig();
delay(500);
ticker.attach(0.1, tick);
while (WiFi.status() != WL_CONNECTED)
{
t++;
Serial.print(".");
delay(500);
if (t > 100)
{
Serial.println("Smart Config fail");
ticker.attach(0.5, tick);
delay(3000);
exitSmartConfig();
return false;
}
}
Serial.println("WiFi connected ");
Serial.print("IP :");
Serial.println(WiFi.localIP());
Serial.println(WiFi.SSID());
exitSmartConfig();
return true;
}
void longPress()
{
if (digitalRead(btn_config) == HIGH)
{
if (buttonActive == false)
{
buttonActive = true;
timer = millis();
Serial.println(timer);
}
if (millis() - timer > longPressTime)
{
Serial.println("SmartConfig Start");
digitalWrite(ledB, LOW);
startSmartConfig();
}
}
else
{
buttonActive = false;
}
}<file_sep>#include <Arduino.h>
#include <RF24.h>
#include <SPI.h>
#define PIR_sensor 3
#define control_Device 4
#define signal_Led 5
#define default_delayTime 3000
RF24 radio(9, 10); //nRF24L01 (CE,CSN) connections PIN
const uint64_t address = 1002502019004; //CA-SS02: 40 + Timestamp
uint16_t delayTime = default_delayTime;
uint32_t data[3]; //data[0]: ON/OFF - data[1]: new_delayTime - data[2]: state_Device
void setup()
{
SPI.begin();
    Serial.begin(9600);
    Serial.println("\nCA-SS02 say hello to your home <3 ! ");
pinMode(PIR_sensor, INPUT);
pinMode(control_Device, OUTPUT);
pinMode(signal_Led, OUTPUT);
radio.begin();
radio.setRetries(15, 15);
radio.setPALevel(RF24_PA_MAX);
}
void checkCommand()
{
radio.openReadingPipe(1, address);
radio.startListening();
if (radio.available())
{
boolean state = digitalRead(signal_Led);
memset(&data, ' ', sizeof(data));
radio.read(&data, sizeof(data));
delayTime = data[1];
Serial.println("__________________");
Serial.print("Mode: ");
Serial.println(data[0]);
Serial.print("New delay time: ");
Serial.println(data[1]);
Serial.print("State device: ");
Serial.println(data[2]);
digitalWrite(signal_Led, !state);
delay(500);
digitalWrite(signal_Led, state);
}
}
void sendData()
{
radio.stopListening();
radio.openWritingPipe(address);
radio.write(&data, sizeof(data));
Serial.print("Data sent - State of device: ");
Serial.println(data[2]);
}
void loop()
{
checkCommand();
if (data[0])
{
digitalWrite(signal_Led, HIGH);
checkCommand();
boolean check_PIRSensor = digitalRead(PIR_sensor);
if (check_PIRSensor)
{
digitalWrite(control_Device, HIGH);
data[2] = 1;
sendData();
while (check_PIRSensor)
{
check_PIRSensor = digitalRead(PIR_sensor);
checkCommand();
delay(50);
}
delay(delayTime);
}
else
{
digitalWrite(control_Device, LOW);
if (digitalRead(control_Device) != data[2])
{
data[2] = 0;
sendData();
}
}
checkCommand();
}
else
{
digitalWrite(control_Device, LOW);
digitalWrite(signal_Led, LOW);
if (digitalRead(control_Device) != data[2])
{
data[2] = 0;
sendData();
}
}
delay(100);
}<file_sep>#include <Arduino.h>
#include <SPI.h>
#include <RF24.h>
#define pinFlame A0
#define pinGas A1
#define CE 9
#define CSN 10
RF24 radio(CE, CSN);
const uint64_t address = 1002502019006;
float sensorValue[2];
float flame, gas;
uint16_t timer = 0;
void setup() {
Serial.begin(9600);
//=================RF=====================
SPI.begin();
radio.begin();
radio.setRetries(15, 15);
radio.setPALevel(RF24_PA_MAX);
radio.openWritingPipe(address);
//========================================
}
void loop() {
    flame = analogRead(pinFlame);
    gas = analogRead(pinGas);
String output;
output += F("Flame : ");
output += flame;
output += F("\t");
output += F("gas :");
output += gas;
Serial.println(output);
sensorValue[0] = flame;
sensorValue[1] = gas;
timer++;
if(timer > 50){
Serial.println("send ...");
radio.write(sensorValue, sizeof(sensorValue));
timer = 0;
}
delay(100);
}<file_sep>#include <Arduino.h>
#include <RF24.h>
#include <SPI.h>
#include <ESP8266WiFi.h>
#include <PubSubClient.h>
#include <Ticker.h>
Ticker ticker;
RF24 radio(2, 15); //nRF24L01 (CE,CSN) connections PIN
const uint64_t address = 1002502019004; //CA-SS02: 40 + Timestamp
boolean smartConfigStart = false;
const char *ssid = "username wifi";
const char *password = "<PASSWORD>";
//data[0]: ON/OFF - data[1]: new_delayTime - data[2]: device_State
uint32_t data[3];
uint32_t data_SS02[3];
const int smartConfig_LED = 16;
//Topic: product_id/button_id char[10] = "l" / "O"
const char *CA_SS02_delayTime = "CA-SS02/delayTime";
const char *CA_SS02_deviceState = "CA-SS02/deviceState";
const char *CA_SS02_ONOFF = "CA-SS02/ONOFF";
//Config MQTT broker information:
const char *mqtt_server = "chika.gq";
const int mqtt_port = 2502;
const char *mqtt_user = "chika";
const char *mqtt_pass = "<PASSWORD>";
//Setup MQTT - Wifi ESP12F:
WiFiClient esp_12F;
PubSubClient client(esp_12F);
void setup_Wifi()
{
delay(100);
Serial.println();
Serial.print("Connecting to ... ");
Serial.println(ssid);
WiFi.begin(ssid, password);
while (WiFi.status() != WL_CONNECTED)
{
delay(500);
Serial.print(".");
}
Serial.println("");
Serial.println("WiFi connected!");
Serial.println("IP address: ");
Serial.println(WiFi.localIP());
}
/*--------------NEEDED FUNCTIONS--------------*/
void blinking()
{
bool state = digitalRead(smartConfig_LED);
digitalWrite(smartConfig_LED, !state);
}
void exitSmartConfig()
{
WiFi.stopSmartConfig();
ticker.detach();
}
boolean startSmartConfig()
{
int t = 0;
Serial.println("Smart Config Start");
WiFi.beginSmartConfig();
delay(500);
ticker.attach(0.1, blinking);
while (WiFi.status() != WL_CONNECTED)
{
t++;
Serial.print(".");
delay(500);
if (t > 120)
{
Serial.println("Smart Config Fail");
smartConfigStart = false;
ticker.attach(0.5, blinking);
delay(3000);
exitSmartConfig();
return false;
}
}
smartConfigStart = true;
Serial.println("WIFI CONNECTED");
Serial.print("IP: ");
Serial.println(WiFi.localIP());
Serial.println(WiFi.SSID());
exitSmartConfig();
return true;
}
void reconnect_mqtt()
{
while (!client.connected())
{
Serial.print("Attempting MQTT connection...");
String clientId = "CA-SS02 - ";
clientId += String(random(0xffff), HEX);
Serial.println(clientId);
if (client.connect(clientId.c_str(), mqtt_user, mqtt_pass))
{
Serial.println("Connected");
client.subscribe(CA_SS02_delayTime);
client.subscribe(CA_SS02_ONOFF);
}
else
{
Serial.print("Failed, rc=");
Serial.print(client.state());
Serial.println("Try again in 1 second");
delay(1000);
}
}
}
void callback(char *topic, byte *payload, unsigned int length)
{
    // Topics handled here are the CA_SS02_* command topics subscribed to in reconnect_mqtt()
Serial.print("Topic [");
Serial.print(topic);
Serial.print("]: ");
    uint16_t calculate_delayTime = 0;
    for (unsigned int i = 0; i < length; i++)
    {
        Serial.print((char)payload[i]);
        // accumulate decimal digits: each new digit shifts the running value by one place
        calculate_delayTime = calculate_delayTime * 10 + ((uint16_t)payload[i] - 48);
    }
if ((char)topic[10] == 'l')
{
data[1] = calculate_delayTime * 1000;
Serial.print("\n\nNew delay time send to CA-SS02: ");
Serial.println(data[1]);
radio.stopListening();
radio.openWritingPipe(address);
radio.write(&data, sizeof(data));
}
if ((char)topic[10] == 'O')
{
if ((char)payload[0] == '1')
{
data[0] = true;
}
else
{
data[0] = false;
}
radio.stopListening();
radio.openWritingPipe(address);
radio.write(&data, sizeof(data));
}
}
void setup()
{
SPI.begin();
Serial.begin(115200);
pinMode(smartConfig_LED, OUTPUT);
WiFi.setAutoConnect(true);
WiFi.setAutoReconnect(true);
WiFi.mode(WIFI_STA);
// setup_Wifi();
delay(6000);
if (WiFi.status() != WL_CONNECTED)
{
startSmartConfig();
}
radio.begin();
radio.setRetries(15, 15);
radio.setPALevel(RF24_PA_MAX);
Serial.println("WIFI CONNECTED");
Serial.println(WiFi.SSID());
Serial.print("IP: ");
Serial.println(WiFi.localIP());
Serial.println("\nCA-HC-SS02 say hello to your home <3 !");
Serial.println("Trying connect MQTT ...");
client.setServer(mqtt_server, mqtt_port);
client.setCallback(callback);
data[1] = 0;
}
void loop()
{
if (WiFi.status() == WL_CONNECTED)
{
if (!client.connected())
{
reconnect_mqtt();
}
else
client.loop();
}
radio.openReadingPipe(1, address);
radio.startListening();
if (radio.available())
{
memset(&data_SS02, ' ', sizeof(data_SS02));
radio.read(&data_SS02, sizeof(data_SS02));
data[2] = data_SS02[2];
Serial.println("__________________");
Serial.print("Mode: ");
Serial.println(data[0]);
Serial.print("New delay time: ");
Serial.println(data[1]);
Serial.print("State device: ");
Serial.println(data[2]);
if (data[2])
client.publish(CA_SS02_deviceState, "1", true);
else
client.publish(CA_SS02_deviceState, "0", true);
}
delay(100);
} | 687ce0f7f210c245738152a6395da963e666cd2f | [
"Markdown",
"C++"
] | 7 | Markdown | thanhtung2105/Chika-CA-SS | 9c8e70afb9d9e1b082f8301655b4fd9bc142f437 | b430dad2b2cc305621894109f94c8096a69e848c | |
refs/heads/master | <repo_name>bubdm/Broker<file_sep>/samples/Broker.Samples/Handlers/BaseHandler.cs
using System.Threading.Tasks;
using Broker.Samples.Messages;
namespace Broker.Samples.Handlers
{
public abstract class BaseHandler : IHandle<GreetingMessage>
{
public abstract Task HandleAsync(GreetingMessage message);
}
}
<file_sep>/src/Broker/IPipeline.cs
using System;
using System.Threading.Tasks;
namespace Broker
{
public interface IPipeline<in TMessage>
{
Task ExecuteAsync(TMessage message, Func<Task> next);
}
public interface IPipeline<in TMessage, TResult>
{
Task<TResult> ExecuteAsync(TMessage message, Func<Task<TResult>> next);
}
}
<file_sep>/samples/Broker.Samples/Pipelines/GenericPipeline.cs
using System;
using System.Threading.Tasks;
namespace Broker.Samples.Pipelines
{
public class GenericPipeline<TMessage> : IPipeline<TMessage>
{
public async Task ExecuteAsync(TMessage message, Func<Task> next)
{
Console.WriteLine("Before generic");
await next().ConfigureAwait(false);
Console.WriteLine("After generic");
}
}
}
<file_sep>/samples/Broker.Samples/Messages/GreetingMessage.cs
namespace Broker.Samples.Messages
{
public class GreetingMessage : IAudit
{
public string Name { get; set; }
public string User { get; set; }
}
}
<file_sep>/samples/Broker.Samples/Registars/AutofacRegistar.cs
using Autofac;
using Broker.Extensions.Autofac.DependencyInjection;
using Broker.Samples.Messages;
using Broker.Samples.Pipelines;
namespace Broker.Samples.Registars
{
internal class AutofacRegistar : IRegistar
{
public IBroker Register()
{
var builder = new ContainerBuilder();
builder.AddBroker();
builder.RegisterGeneric(typeof(GenericPipeline<>)).As(typeof(IPipeline<>));
builder.RegisterType(typeof(GreetingPipeline)).As(typeof(IPipeline<GreetingMessage>));
builder.RegisterGeneric(typeof(GenericQueryPipeline<,>)).As(typeof(IPipeline<,>));
builder.RegisterType(typeof(GreetingQueryPipeline)).As(typeof(IPipeline<GreetingMessage, string>));
var container = builder.Build();
var broker = container.Resolve<IBroker>();
return broker;
}
}
}
<file_sep>/src/Broker.Extensions.Microsoft.DependencyInjection/ServiceFactory.cs
using System;
using System.Collections.Generic;
using Microsoft.Extensions.DependencyInjection;
namespace Broker.Extensions.Microsoft.DependencyInjection
{
internal class ServiceFactory : IServiceFactory
{
private readonly IServiceProvider _provider;
public ServiceFactory(IServiceProvider provider)
{
_provider = provider;
}
public T GetService<T>()
{
return _provider.GetService<T>();
}
public IEnumerable<T> GetServices<T>()
{
return _provider.GetServices<T>();
}
}
}
<file_sep>/src/Broker.Extensions.Autofac.DependencyInjection/ServiceFactory.cs
using System.Collections.Generic;
using Autofac;
namespace Broker.Extensions.Autofac.DependencyInjection
{
public class ServiceFactory : IServiceFactory
{
private readonly IComponentContext _context;
public ServiceFactory(IComponentContext context)
{
_context = context;
}
public T GetService<T>()
{
return _context.Resolve<T>();
}
public IEnumerable<T> GetServices<T>()
{
return _context.Resolve<IEnumerable<T>>();
}
}
}
<file_sep>/src/Broker.Extensions.Autofac.DependencyInjection/ContainerBuilderExtensions.cs
using Autofac;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
namespace Broker.Extensions.Autofac.DependencyInjection
{
public static class ContainerBuilderExtensions
{
public static ContainerBuilder AddBroker(this ContainerBuilder builder) =>
AddBrokerInternal(builder, AppDomain.CurrentDomain.GetAssemblies().Where(a => !a.IsDynamic));
public static ContainerBuilder AddBroker(this ContainerBuilder builder, params Assembly[] assemblies) =>
AddBrokerInternal(builder, assemblies.AsEnumerable());
public static ContainerBuilder AddBroker(this ContainerBuilder builder, IEnumerable<Assembly> assemblies) =>
AddBrokerInternal(builder, assemblies);
private static ContainerBuilder AddBrokerInternal(ContainerBuilder builder, IEnumerable<Assembly> assemblies)
{
var assembliesToScan = (assemblies as Assembly[] ?? assemblies).Distinct().ToArray();
builder.RegisterModule(new BrokerAutofacModule(assembliesToScan));
return builder;
}
}
}
<file_sep>/src/Broker/Broker.cs
using System;
using System.Linq;
using System.Threading.Tasks;
namespace Broker
{
public class Broker : IBroker
{
private readonly IServiceFactory _factory;
public Broker(IServiceFactory factory)
{
_factory = factory;
}
public async Task SendAsync<TMessage>(TMessage message)
{
if (message == null)
{
throw new ArgumentNullException(nameof(message));
}
var handler = _factory.GetService<IHandle<TMessage>>();
if (handler == null)
{
throw new InvalidOperationException($"Message {message.GetType()} has no handlers registered");
}
var pipelines = _factory.GetServices<IPipeline<TMessage>>();
Task HandlerAction() => handler.HandleAsync(message);
var runner = pipelines
.Reverse()
.Aggregate((Func<Task>) HandlerAction,
(next, pipeline) => () => pipeline.ExecuteAsync(message, next));
await runner().ConfigureAwait(false);
}
public async Task PublishAsync<TMessage>(TMessage message)
{
if (message == null)
{
throw new ArgumentNullException(nameof(message));
}
var handlers = _factory.GetServices<IHandle<TMessage>>();
var pipelines = _factory.GetServices<IPipeline<TMessage>>().Reverse().ToList();
foreach (var handler in handlers)
{
Task HandlerAction() => handler.HandleAsync(message);
var runner = pipelines
.Aggregate((Func<Task>) HandlerAction,
(next, pipeline) => () => pipeline.ExecuteAsync(message, next));
await runner().ConfigureAwait(false);
}
}
public async Task<TResult> SendAsync<TMessage, TResult>(TMessage message)
{
if (message == null)
{
throw new ArgumentNullException(nameof(message));
}
var handler = _factory.GetService<IHandle<TMessage, TResult>>();
if (handler == null)
{
throw new InvalidOperationException($"Message {message.GetType()} has no handlers registered");
}
var pipelines = _factory.GetServices<IPipeline<TMessage, TResult>>();
Task<TResult> HandlerAction() => handler.HandleAsync(message);
var runner = pipelines
.Reverse()
.Aggregate((Func<Task<TResult>>) HandlerAction,
(next, pipeline) => () => pipeline.ExecuteAsync(message, next));
return await runner().ConfigureAwait(false);
}
}
}
<file_sep>/samples/Broker.Samples/Handlers/DerivedHandler.cs
using System;
using System.Threading.Tasks;
using Broker.Samples.Messages;
namespace Broker.Samples.Handlers
{
public class DerivedHandler : BaseHandler
{
public override Task HandleAsync(GreetingMessage message)
{
Console.WriteLine("Derived");
return Task.CompletedTask;
}
}
}
<file_sep>/src/Broker/IServiceFactory.cs
using System.Collections.Generic;
namespace Broker
{
public interface IServiceFactory
{
T GetService<T>();
IEnumerable<T> GetServices<T>();
}
}
<file_sep>/samples/Broker.Samples/Program.cs
using System;
using System.Threading.Tasks;
using Broker.Samples.Messages;
using Broker.Samples.Registars;
namespace Broker.Samples
{
class Program
{
static async Task Main()
{
var registars = new IRegistar[]
{
new ServiceCollectionRegistar(),
new AutofacRegistar()
};
var greetingMessage = new GreetingMessage { Name = "World", User = "User" };
foreach (var registar in registars)
{
Console.WriteLine($"Running {registar.GetType().Name}");
Console.WriteLine();
var broker = registar.Register();
await broker.SendAsync(greetingMessage);
await broker.PublishAsync(greetingMessage);
await broker.SendAsync<IAudit>(greetingMessage);
var result = await broker.SendAsync<GreetingMessage, string>(greetingMessage);
Console.WriteLine(result);
Console.WriteLine();
Console.WriteLine();
}
Console.ReadKey();
}
}
}
<file_sep>/samples/Broker.Samples/Registars/IRegistar.cs
namespace Broker.Samples.Registars
{
internal interface IRegistar
{
IBroker Register();
}
}
<file_sep>/samples/Broker.Samples/Pipelines/GenericQueryPipeline.cs
using System;
using System.Threading.Tasks;
namespace Broker.Samples.Pipelines
{
public class GenericQueryPipeline<TMessage, TResult> : IPipeline<TMessage, TResult>
{
public async Task<TResult> ExecuteAsync(TMessage message, Func<Task<TResult>> next)
{
Console.WriteLine("Before generic");
var result = await next();
Console.WriteLine("After generic");
return result;
}
}
}
<file_sep>/samples/Broker.Samples/Registars/ServiceCollectionRegistar.cs
using Broker.Extensions.Microsoft.DependencyInjection;
using Broker.Samples.Messages;
using Broker.Samples.Pipelines;
using Microsoft.Extensions.DependencyInjection;
namespace Broker.Samples.Registars
{
internal class ServiceCollectionRegistar : IRegistar
{
public IBroker Register()
{
var services = new ServiceCollection();
services.AddBroker();
services.AddTransient(typeof(IPipeline<>), typeof(GenericPipeline<>));
services.AddTransient(typeof(IPipeline<GreetingMessage>), typeof(GreetingPipeline));
services.AddTransient(typeof(IPipeline<,>), typeof(GenericQueryPipeline<,>));
services.AddTransient(typeof(IPipeline<GreetingMessage, string>), typeof(GreetingQueryPipeline));
var provider = services.BuildServiceProvider();
var broker = provider.GetService<IBroker>();
return broker;
}
}
}
<file_sep>/src/Broker/IBroker.cs
using System.Threading.Tasks;
namespace Broker
{
public interface IBroker
{
Task SendAsync<TMessage>(TMessage message);
Task<TResult> SendAsync<TMessage, TResult>(TMessage message);
Task PublishAsync<TMessage>(TMessage message);
}
}
<file_sep>/samples/Broker.Samples/Handlers/SecondHandler.cs
using System;
using System.Threading.Tasks;
using Broker.Samples.Messages;
namespace Broker.Samples.Handlers
{
public class SecondHandler : IHandle<GreetingMessage>
{
public Task HandleAsync(GreetingMessage message)
{
Console.WriteLine("Second");
return Task.CompletedTask;
}
}
}
<file_sep>/src/Broker/IHandle.cs
using System.Threading.Tasks;
namespace Broker
{
public interface IHandle<in TMessage>
{
Task HandleAsync(TMessage message);
}
public interface IHandle<in TMessage, TResult>
{
Task<TResult> HandleAsync(TMessage message);
}
}
<file_sep>/samples/Broker.Samples/Handlers/AuditHandler.cs
using System;
using System.Threading.Tasks;
using Broker.Samples.Messages;
namespace Broker.Samples.Handlers
{
public class AuditHandler : IHandle<IAudit>
{
public Task HandleAsync(IAudit message)
{
Console.WriteLine($"User: {message.User}");
return Task.CompletedTask;
}
}
}
<file_sep>/samples/Broker.Samples/Pipelines/GreetingPipeline.cs
using System;
using System.Threading.Tasks;
using Broker.Samples.Messages;
namespace Broker.Samples.Pipelines
{
public class GreetingPipeline : IPipeline<GreetingMessage>
{
public async Task ExecuteAsync(GreetingMessage message, Func<Task> next)
{
Console.WriteLine("Before greeting");
await next().ConfigureAwait(false);
Console.WriteLine("After greeting");
}
}
}
<file_sep>/samples/Broker.Samples/Handlers/GreetingQueryHandler.cs
using System.Threading.Tasks;
using Broker.Samples.Messages;
namespace Broker.Samples.Handlers
{
    public class GreetingQueryHandler : IHandle<GreetingMessage, string>
{
public Task<string> HandleAsync(GreetingMessage message)
{
return Task.FromResult($"Hello, {message.Name}");
}
}
}
<file_sep>/samples/Broker.Samples/Pipelines/GreetingQueryPipeline.cs
using System;
using System.Threading.Tasks;
using Broker.Samples.Messages;
namespace Broker.Samples.Pipelines
{
public class GreetingQueryPipeline : IPipeline<GreetingMessage, string>
{
public async Task<string> ExecuteAsync(GreetingMessage message, Func<Task<string>> next)
{
Console.WriteLine("Before typed");
var result = await next();
result += "_Added";
Console.WriteLine(result);
Console.WriteLine("After typed");
return result;
}
}
}
<file_sep>/samples/Broker.Samples/Messages/IAudit.cs
namespace Broker.Samples.Messages
{
public interface IAudit
{
string User { get; set; }
}
}
<file_sep>/src/Broker.Extensions.Microsoft.DependencyInjection/ServiceCollectionExtensions.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Microsoft.Extensions.DependencyInjection;
namespace Broker.Extensions.Microsoft.DependencyInjection
{
public static class ServiceCollectionExtensions
{
public static IServiceCollection AddBroker(this IServiceCollection services) =>
services.AddBroker(AppDomain.CurrentDomain.GetAssemblies().Where(a => !a.IsDynamic));
public static IServiceCollection AddBroker(this IServiceCollection services, params Assembly[] assemblies) =>
services.AddBroker(assemblies.AsEnumerable());
public static IServiceCollection AddBroker(this IServiceCollection services, IEnumerable<Assembly> assemblies)
{
RegisterRequiredServices(services);
RegisterHandlers(services, assemblies);
return services;
}
private static void RegisterRequiredServices(IServiceCollection services)
{
services.AddScoped<IServiceFactory>(p => new ServiceFactory(p));
services.AddScoped<IBroker, Broker>();
}
private static void RegisterHandlers(IServiceCollection services, IEnumerable<Assembly> assemblies)
{
assemblies = (assemblies as Assembly[] ?? assemblies).Distinct().ToArray();
var handlerTypes = new[] { typeof(IHandle<>), typeof(IHandle<,>) };
var descriptors =
from a in assemblies
from t in a.DefinedTypes
where t.IsClass && !t.IsAbstract
from i in t.ImplementedInterfaces
where i.IsGenericType && handlerTypes.Contains(i.GetGenericTypeDefinition())
select new { ServiceType = i, ImplementationType = t };
foreach (var descriptor in descriptors)
{
services.AddTransient(descriptor.ServiceType, descriptor.ImplementationType);
}
}
}
}
<file_sep>/src/Broker.Extensions.Autofac.DependencyInjection/BrokerAutofacModule.cs
using System.Reflection;
using Autofac;
using Module = Autofac.Module;
namespace Broker.Extensions.Autofac.DependencyInjection
{
public class BrokerAutofacModule : Module
{
private readonly Assembly[] _assemblies;
public BrokerAutofacModule(Assembly[] assemblies)
{
_assemblies = assemblies;
}
protected override void Load(ContainerBuilder builder)
{
RegisterRequiredServices(builder);
RegisterHandlers(builder, _assemblies);
}
private void RegisterRequiredServices(ContainerBuilder builder)
{
builder.RegisterType<Broker>().As<IBroker>().InstancePerLifetimeScope();
builder.RegisterType<ServiceFactory>().As<IServiceFactory>().InstancePerLifetimeScope();
}
private void RegisterHandlers(ContainerBuilder builder, Assembly[] assemblies)
{
var handlerTypes = new[] { typeof(IHandle<>), typeof(IHandle<,>) };
foreach (var handlerType in handlerTypes)
{
builder.RegisterAssemblyTypes(assemblies)
.AsClosedTypesOf(handlerType)
.AsImplementedInterfaces();
}
}
}
}
| 18c0252ca78d34da87110acadf3d591a66412122 | [
"C#"
] | 25 | C# | bubdm/Broker | 29fc94d741c299567c58e8aaab9c3246cbb908a2 | aec48e0c549fef45da5c44a96273f90c0e1fc4ea | |
refs/heads/master | <repo_name>Dreaded-Gnu/playground<file_sep>/phaser/iso tmx map/assets/javascript/state/login.js
var client = client || {};
client.state = client.state || {};
client.state.login = function ( game ) {
this.game = game;
this.map = null;
this.tileset = null;
this.groups = [];
this.cursors = null;
};
client.state.login.prototype = {
preload: function() {
// load map file from server
this.game.load.json( "map", "assets/map/test/test.json" );
this.game.load.spritesheet( "tiles", "assets/map/test/test.png", 64, 64, 24 );
},
create: function() {
var layer, tmp, currentLayerData, xIndex, yIndex, tilesetIndex,
foundTilesetIndex, tileIndex, tile, destX, destY, tileset,
maxX, maxY, minX, minY;
// cursor handling
this.cursors = this.game.input.keyboard.createCursorKeys();
// cache map data
this.map = this.game.cache.getJSON( "map" );
// determine max x and y for set of bounds
maxX = this.map.width * this.map.tilewidth - this.map.tilewidth * .75;
maxY = this.map.height * this.map.tileheight / 2 - this.map.tileheight * 2 * .75;
if ( "isometric" === this.map.orientation ) {
maxX += this.map.tilewidth * .5;
maxY = this.map.height * this.map.tileheight - this.map.tileheight * .25;
}
// determine min x and y
minX = 0;
minY = 0;
// handle diamond isometric special calculation
if ( "isometric" === this.map.orientation ) {
minX = -1 * maxX / 2 - this.map.tilewidth * .25;
minY = -1 * this.map.tileheight * 1.25;
}
// set bounds
this.game.world.setBounds( minX, minY, maxX, maxY );
        // handle special centering for diamond isometric maps
if ( "isometric" === this.map.orientation ) {
this.game.camera.x = minX / 2;
this.game.camera.y = -1 * minY;
} else {
// center camera
this.game.camera.x = this.game.world.width / 2 - this.game.width / 2;
this.game.camera.y = this.game.world.height / 2 - this.game.height / 2;
}
// cache tileset
this.tileset = this.map.tilesets;
// loop through map for rendering
for ( layer = 0; layer < this.map.layers.length; ++layer ) {
// skip no tile layers
if ( this.map.layers[ layer ].type !== "tilelayer" ) {
continue;
}
// create new group for layer
tmp = this.game.add.group();
// cache current layer
currentLayerData = this.map.layers[ layer ].data;
// loop through map height
for ( yIndex = 0; yIndex < this.map.height; ++yIndex ) {
// loop through map width
for ( xIndex = 0; xIndex < this.map.width; ++xIndex ) {
// default no tileset found
foundTilesetIndex = null;
// get tile id
tileIndex = currentLayerData[ yIndex * this.map.width + xIndex ];
// determine tileset
for ( tilesetIndex = 0 ; tilesetIndex < this.tileset.length; ++tilesetIndex ) {
// tileset matching? cache it, and check following tilesets
if ( tileIndex >= this.tileset[ tilesetIndex ].firstgid ) {
foundTilesetIndex = tilesetIndex;
}
}
// no tileset found? => skip
if ( null === foundTilesetIndex ) {
console.log( "Skipping (" + xIndex + ", " + yIndex + ")" );
continue;
}
// cache found tileset
tileset = this.tileset[ foundTilesetIndex ];
if ( this.map.orientation === "isometric" ) {
// calculate destination coordinates
destX = ( xIndex - yIndex ) * ( tileset.tilewidth / 2 );
destY = ( xIndex + yIndex ) * ( tileset.tileheight / 2 );
}
// determine destination coordinates for staggered iso maps
else if ( this.map.orientation === "staggered" ) {
// default x and y position
destX = xIndex * tileset.tilewidth;
destY = yIndex * tileset.tileheight;
// consider per line adjustment at staggered iso maps depending on staggering axis and index
if ( "y" === this.map.staggeraxis ) {
destY = parseInt( destY / 2, 10 );
if ( "odd" === this.map.staggerindex ) {
destX += parseInt( ( yIndex & 1 ) * parseInt( tileset.tilewidth / 2, 10 ), 10 );
} else if ( "even" === this.map.staggerindex ) {
destX -= parseInt( ( yIndex & 1 ) * parseInt( tileset.tilewidth / 2, 10 ), 10 );
}
} else if ( "x" === this.map.staggeraxis ) {
destX = parseInt( destX / 2, 10 );
if ( "odd" === this.map.staggerindex ) {
destY += parseInt( ( xIndex & 1 ) * parseInt( tileset.tileheight / 2, 10 ), 10 );
} else if ( "even" === this.map.staggerindex ) {
destY -= parseInt( ( xIndex & 1 ) * parseInt( tileset.tileheight / 2, 10 ), 10 );
}
}
}
// consider difference for y, when tileset tile height is greater than map tile height
if ( tileset.tileheight > this.map.tileheight )
{
destY /= ( tileset.tileheight / this.map.tileheight );
}
// consider difference for X, when tileset tile width is greater than map tile width
if ( tileset.tilewidth > this.map.tilewidth )
{
destX /= ( tileset.tilewidth / this.map.tilewidth );
}
// consider rendering offsets
if ( "undefined" !== typeof tileset.tileoffset )
{
destX -= tileset.tileoffset.x;
destY -= tileset.tileoffset.y;
}
// add sprite to group
tile = this.game.add.sprite( destX, destY, "tiles", tileIndex - 1, tmp );
tile.anchor.set( .75, .75 );
}
}
// push back group
this.groups.push( tmp );
}
},
update: function () {
if ( this.cursors.up.isDown )
{
this.game.camera.y -= 4;
}
else if ( this.cursors.down.isDown )
{
this.game.camera.y += 4;
}
if ( this.cursors.left.isDown )
{
this.game.camera.x -= 4;
}
else if ( this.cursors.right.isDown )
{
this.game.camera.x += 4;
}
},
render: function () {
this.game.debug.text( this.game.time.fps || "--", 2, 14, "#a7aebe" );
},
shutdown: function() {
}
};
<file_sep>/phaser/iso tmx map/assets/javascript/state/boot.js
var client = client || {};
client.state = client.state || {};
client.state.boot = function ( game ) {
this.game = game;
};
client.state.boot.prototype = {
create: function() {
// set advanced timing
this.game.time.advancedTiming = true;
// kickstart game with login state
this.game.state.start( 'login' );
},
};
| 5221b70a96e14b58cfaf50ffa31ee032edde8d85 | [
"JavaScript"
] | 2 | JavaScript | Dreaded-Gnu/playground | b7a2998c8f1f367f1e9508010571edf3a725c472 | 4e3ba59ac27a956143ad3892892304711fc00f5e | |
refs/heads/master | <repo_name>nycjv321/reflection<file_sep>/src/test/java/com/nycjv321/reflection/MessageTest.java
package com.nycjv321.reflection;
import com.nycjv321.reflection.exceptions.MessageMissingException;
import org.testng.annotations.Test;
import javax.jws.WebParam;
import static org.testng.Assert.*;
/**
* Created by <NAME> <<EMAIL>> on 12/9/16.
*/
public class MessageTest {
private final Receiver<TestClass> receiver = Receiver.of(TestClass.class);
private Message message;
@Test(dependsOnGroups = "receiver", groups = "messages")
public void on() {
message = receiver.on("stuff");
assertNotNull(message);
}
@Test(dependsOnMethods = "on", dependsOnGroups = "receiver", groups = "messages", expectedExceptions = MessageMissingException.class)
public void onNull() {
receiver.on("Meep!");
}
@Test(dependsOnMethods = "on", dependsOnGroups = "receiver", groups = "messages", expectedExceptions = MessageMissingException.class)
public void onNullWithArguments() {
receiver.on("Meep!", boolean.class);
}
@Test(dependsOnMethods = "on", dependsOnGroups = "receiver", groups = "messages")
public void annotation() {
assertFalse(
message
.get(Test.class)
.enabled()
);
}
@Test(dependsOnMethods = "on", dependsOnGroups = "receiver", groups = "messages")
public void isFinal() {
assertTrue(
Receiver
.of(TestClass.class)
.isFinal("stuff",String.class)
);
}
/**
* Once we know {@code Receiver#on} works on methods without arguments,
* let's update our test message to represent messages that include arguments
* (∩`-´)⊃━☆゚.*・。゚
*/
@Test(dependsOnMethods = "on", dependsOnGroups = "receiver", groups = "messages")
public void onWithArgument() {
message = Receiver
.of(TestClass.class)
.on("stuff", String.class);
assertNotNull(message);
}
@Test(dependsOnMethods = "onWithArgument", dependsOnGroups = "receiver", groups = "messages")
public void argumentAnnotation() {
assertTrue(
message
.argumentAnnotation(WebParam.class)
.header()
);
}
@Test(dependsOnMethods = "onWithArgument", dependsOnGroups = "receiver", groups = "messages")
public void argumentAnnotationPredicate() {
assertNull(message.argumentAnnotation(WebParam.class, webParam -> !webParam.header()));
}
@Test(dependsOnMethods = "onWithArgument", dependsOnGroups = "receiver", groups = "messages")
public void argument() {
assertNotNull(message.criterion("arg"));
}
}<file_sep>/README.md
# Reflection
This project provides a set of fluent APIs for performing reflection.
## About
This project aims to provide a straightforward, human-friendly API for performing reflective operations.
The goal of this project is to provide the necessary support to test annotation configuration.
Keeping this in mind, you may notice the current API focuses around Class, Method, and Parameter annotations.
## Dependencies
- `com.intellij.annotations` - used to establish potential nullity
- `Java 8`
## Example Usage
Take a look at the unit tests. Current coverage is at `100%`!
If you're too impatient to look at the unit tests (shame on you), see below:
// Extract the "arg" parameter's WebParam annotation.
// This metadata is found on the TestClass#stuff(String arg) method.
Receiver
.of(TestClass.class)
.on("stuff", String.class)
.criterion("arg").get(WebParam.class);
### Domain Language
In this API, abstractions of classes (T) are referred to Receivers of type T.
new Receiver<T>(clazz); // class representing a definition of T
Methods and their parameters are referred to as messages. To better understand methods as messages, take a look [here](https://en.wikipedia.org/wiki/Object-oriented_programming#Dynamic_dispatch.2Fmessage_passing).
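For example, mirroring the `MessageTest` unit test in this repo (a minimal sketch — `TestClass` and its `stuff()` method are the test fixtures those tests rely on):

    Receiver<TestClass> receiver = Receiver.of(TestClass.class); // wrap the class definition
    Message message = receiver.on("stuff");                      // select the no-arg stuff() message
    message.get(Test.class).enabled();                           // read the method-level @Test annotation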
Method parameters are referred to as "criterion". If the code is compiled with Java 8 and the `-parameters` compiler flag, you can refer to parameters by name, as in the first example.
Alternatively, you may "search" for arguments. For example:
Receiver.of(TestClass.class)
.on("stuff", String.class)
.argumentAnnotation(WebParam.class, webParam -> !webParam.header());
In the above example, we search for an argument's WebParam annotation where the WebParam#header() is false.
The `Predicate` is an optional argument.
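Omitting the `Predicate` simply returns the annotation of the first matching argument — a sketch based on `MessageTest#argumentAnnotation` (the `stuff(String arg)` method and its `@WebParam` annotation come from the test fixtures):

    Receiver.of(TestClass.class)
        .on("stuff", String.class)
        .argumentAnnotation(WebParam.class) // no predicate: first WebParam found
        .header();                          // true for the "arg" parameter in the tests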
<file_sep>/src/main/java/com/nycjv321/reflection/Criterion.java
package com.nycjv321.reflection;
import org.jetbrains.annotations.Nullable;
import java.lang.annotation.Annotation;
import java.lang.reflect.Parameter;
/**
* Created by <NAME> <<EMAIL>> on 12/9/16.
*/
public final class Criterion implements Describable {
private final Parameter parameter;
Criterion(Parameter parameter) {
this.parameter = parameter;
}
@Override
@Nullable
public <T extends Annotation> T get(Class<T> annotation) {
return this.parameter.getAnnotation(annotation);
}
}
<file_sep>/src/main/java/com/nycjv321/reflection/Describable.java
package com.nycjv321.reflection;
import java.lang.annotation.Annotation;
/**
* Created by <NAME> <<EMAIL>> on 12/18/16.
*/
public interface Describable {
<T extends Annotation> T get(Class<T> annotation);
}
<file_sep>/src/test/java/com/nycjv321/reflection/ReceiverTest.java
package com.nycjv321.reflection;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
/**
* Created by <NAME> <<EMAIL>> on 12/9/16.
*/
public class ReceiverTest {
private Receiver<TestClass> receiver;
@Test(groups = "receiver")
public void of() throws Exception {
receiver = Receiver.of(TestClass.class);
assertNotNull(receiver);
}
@Test(dependsOnMethods = "of", groups = "receiver")
public void isFinal() throws Exception {
assertTrue(receiver.isFinal());
}
@Test(dependsOnMethods = "of", groups = "receiver")
public void get() throws Exception {
Test test = receiver.get(Test.class);
assertNotNull(test);
assertFalse(test.enabled());
}
@Test(dependsOnMethods = "get", groups = "receiver")
public void getNull() throws Exception {
assertNull(receiver.get(Deprecated.class));
}
} | 1e4b335f5d7100c2f18c7efd9aade2c3f78b53b8 | [
"Markdown",
"Java"
] | 5 | Java | nycjv321/reflection | 1802ea606971cfd8372a6d00f561acf9aa70097f | 16e8450451df12a7dbc52f4599ebd9a0595f4f32 | |
refs/heads/master | <file_sep>Pod::Spec.new do |s|
s.name = "CNLiveFrameworks"
s.version = "0.0.5"
  s.summary = "CNLive iOS SDK collection library" # project summary
s.homepage = "https://github.com/oldSixMrZhang/CNLiveFrameworks"
s.source = { :git => "https://github.com/oldSixMrZhang/CNLiveFrameworks.git", :tag => "#{s.version}" }
  s.license = "MIT" # open-source license
s.author = { "张旭" => "<EMAIL>" }
  s.platform = :ios, "9.0" # platform and minimum supported iOS version
  s.frameworks = "UIKit", "Foundation" # supported system frameworks
s.subspec 'CNLiveBaseKit' do |sp|
sp.vendored_frameworks = 'BaseLayer/CNLiveBaseKit.framework'
sp.dependency 'MJExtension','~> 3.0.15.1'
end
s.subspec 'CNLiveStat' do |sp|
sp.vendored_frameworks = 'CNLiveSDKs/CNLiveStat.framework'
end
s.subspec 'CNLiveUserSystemSDK' do |sp|
sp.vendored_frameworks = 'CNLiveSDKs/CNLiveUserSystemSDK.framework'
end
s.subspec 'CNLiveMapKit' do |sp|
sp.vendored_frameworks = 'MapLayer/CNLiveMapKit.framework'
end
end
| 2284648613a9c1aae418a55909f393e3195f11c4 | [
"Ruby"
] | 1 | Ruby | dawanzi/CNLiveFrameworks | 2c886c2dd2649b0af596928dcb8ee95fdc2af60f | 4a570e0deb01ddf98219127931547bcf4be22a7a | |
refs/heads/master | <file_sep># javascript-problems
Simple toy problems for fun and profit in JavaScript.
Actually, there is no profit.
This repo is a collection of simple to interesting challenges (usually algorithmic) that I've come across.
They are solved in JavaScript with an attempt at functional or dynamic programming concepts.
Please feel free to use or study any of the material. I don't claim to be an expert by any means, but if any of this helps you it'd be awesome to know!
If you find an issue, feel free to make a pull request!
<file_sep>// NOTES: Standard Function Definition
// REQUIRE: lodash/underscore
// FUNCTION: Find all the peaks in a given array.
// INPUT: Array
// RETURN: index, peak/value within array
// EX: [2,5]
// ERROR: false
// TIME_COMPLEXITY: O(n)
var peakFinder = function(array) {
// instantiate an array as result
var result = [];
// iterate over input
_.each(array, function(val,key,col){
// check left and right neighbors
if(col[key+1] < val && col[key-1] < val) {
// add information to results array
result.push([key,val]);
}
});
// ternary check: if results array is not empty give result array, else give false
return result.length ? result : false;
};
// NOTES: Prototypical Inheritance
// REQUIRE: lodash/underscore
// FUNCTION: Find all the peaks in a given array.
// INPUT: context/this
// RETURN: index, peak/value within array
// EX: [2,5]
// ERROR: false
// TIME_COMPLEXITY: O(n)
Array.prototype.findPeaks = function() {
// instantiate an array as result
var result = [];
// iterate over input
_.each(this, function(val,key,col){
// check left and right neighbors
if(col[key+1] < val && col[key-1] < val) {
// add information to results array
result.push([key,val]);
}
});
// ternary check: if results array is not empty give result array, else give false
return result.length ? result : false;
}; | 23747619eb858a187d3db5260fc03721f35de881 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | jimmyhsu/javascript-problems | fd03d18051e30ec0bb56de4295e6687828ffad43 | 8d4e680996890fa3c3028d05ab869b0f1ae55b06 | |
refs/heads/master | <file_sep># --------------
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# Code starts here
df = pd.read_csv(path)
df['state']=df['state'].map(lambda x: x.lower())
df['total'] = df['Jan']+df['Feb']+df['Mar']
sum_row = df[['Jan','Feb','Mar','total']].sum()
df_final = df.append(sum_row,ignore_index=True)
df_final
# Code ends here
# --------------
import requests
# Code starts here
url = 'https://en.wikipedia.org/wiki/List_of_U.S._state_abbreviations'
response = requests.get(url)
df1 = pd.read_html(response.content)[0]
df1=df1.iloc[11:,:]
df1=df1.rename(columns=df1.iloc[0,:]).iloc[1:,:]
df1['United States of America'] = df1['United States of America'].str.strip()
# Code ends here
# --------------
df1['United States of America'] = df1['United States of America'].astype(str).apply(lambda x: x.lower())
df1['US'] = df1['US'].astype(str)
mapping=df1.set_index('United States of America')['US'].to_dict()
df_final.insert(6, 'abbr',value='Nan')
df_final['abbr']=df_final['state'].map(mapping)
# --------------
# Code stars here
#add the df name to both below
df_final.loc[(df_final.state=='mississipi'),'abbr']='MS'
df_final.loc[(df_final.state=='tenessee'),'abbr']='TN'
# Code ends here
# --------------
# Code starts here
df_sub=df_final[['abbr','Jan','Feb','Mar','total']].groupby('abbr').sum()
print(df_sub.shape)
formatted_df=df_sub.applymap(lambda x: '$'+str(x))
print(formatted_df.shape)
# Code ends here
# --------------
# Code starts here
sum_row=pd.DataFrame(df_final[['Jan','Feb','Mar','total']].sum())
df_sub_sum= sum_row.transpose()
df_sub_sum=df_sub_sum.applymap(lambda x: '$'+str(x))
final_table = df_sub_sum.append(formatted_df)
# label the grand-total row (the transposed sum row has index 0)
final_table.rename(index={0: 'Total'}, inplace=True)
# Code ends here
# --------------
# Code starts here
# df_sub['total']=df[['Jan','Feb','Mar']].sum(axis=1)
# df_sub
df_sub['total'].plot(kind='pie')
plt.show()
# Code ends here
| 58f97110c1c677c3ea0e3d5b15ed36277c30cb77 | [
"Python"
] | 1 | Python | VarunNangalia/reconcile-a-report-using-pandas | a9ccd3e89d74e8cdeaeff767a0936a9fb1186d44 | a639ba17ad746896babaaab2c416e53cb837acb9 | |
refs/heads/master | <file_sep><?php
$connect = mysqli_connect("localhost","root","","nextech");
if ($connect->connect_error) {
die("Connection failed: " . $connect->connect_error);
}
if ($_SERVER["REQUEST_METHOD"] == "POST") {
    $fname=$_POST['firstname'];
    $lname=$_POST['lastname'];
    $s_email=$_POST['email'];
    $c_number=$_POST['contactnumber'];
    $r_number=$_POST['rollnumber'];
    $dep=$_POST['department'];
    $sem=$_POST['semester'];
    $s_psd=md5($_POST['password']);

    // Use a prepared statement so quotes or other characters in the form input cannot break or inject into the SQL
    $stmt = $connect->prepare("INSERT INTO studentregistrationdetails(firstname, lastname, email, contactnumber, rollnumber, department, semester, password) VALUES (?, ?, ?, ?, ?, ?, ?, ?)");
    $stmt->bind_param("ssssssss", $fname, $lname, $s_email, $c_number, $r_number, $dep, $sem, $s_psd);

    if ($stmt->execute()) {
        echo "New record created successfully";
    } else {
        echo "Error: " . $stmt->error;
    }
    $stmt->close();
}
$connect->close();
?>
<file_sep># frontend-and-backend
This repository contains code written from scratch for both the server side and the client side.
<file_sep><!DOCTYPE html>
<html>
<head>
<title>Devslopes - Learn to Code</title>
</head>
<body>
<h1>Welcome to Devslopes!</h1>
<h2>Want to learn to code?</h2>
<h3>Then you have come to the right place!</h3>
<h4>So get started now!</h4>
<p>If you want to learn to code then there is no better place than Devslopes.
Devslopes is the world's most <b>effective</b> and <i>affordable</i> platform to learn to code.</p>
    <p>Also join our email list to get awesome and <em>FREE</em> stuff!</p>
</body>
</html> | 1ba8d68abcfd4f7884d6e32fc5ca8785d3c974b6 | [
"Markdown",
"HTML",
"PHP"
] | 3 | PHP | techemayo/frontend-and-backend | fc73bf08e4be346a1e5374d6361c006d116f4e76 | 581336c536bc607600b4c64958a8eb68f46860bc | |
refs/heads/main | <file_sep>package comprehensive;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Random;
public class InputGrammarFile {
// Data Members
ArrayList<String> sentenceReps;
HashMap<String, ArrayList<String>> nonTerminals;
//MersenneTwisterFast twister;
// XORSRandom xrand;
Random rand;
// Constructor
// public GrammarFile(File file)
public InputGrammarFile(String filename)
{
//twister = new MersenneTwisterFast();
//twister.setSeed(System.currentTimeMillis());
//xrand = new XORSRandom(System.currentTimeMillis());
rand = new Random();
Object[] grammarData = GFileProcessor.ProcessGrammarFile(filename);
sentenceReps = ((ArrayList<String>) grammarData[0]);
nonTerminals = ((HashMap<String, ArrayList<String>>) grammarData[1]);
/* DEBUG */
// System.out.println("<Sentence Frmats>: " + grammarData[0]);
// System.out.println("<GrammarFile Constructor, before cast>: "+ grammarData[1] + "\n");
}
// Method Members
public ArrayList<String> sentenceFormats() {
return this.sentenceReps;
}
/* DEBUG METHOD */
protected void outputNonTerminalLists() {
Iterator it = this.nonTerminals.entrySet().iterator();
while (it.hasNext()) {
Map.Entry pair = (Map.Entry) it.next();
System.out.println("[" + pair.getKey() + ", " + pair.getValue() + "]");
}
}
private int randomIndex(int highest) {
//return twister.nextInt(highest + 1);
// return Math.abs(this.xrand.nextInt()) % ((highest > 0) ? highest : 1);
return rand.nextInt(highest+1);
}
public String getRandomProductionRule() {
return sentenceReps.get(randomIndex(sentenceReps.size() - 1));
}
/*
* NOTE: RandomPhraseGenerator should loop, calling this method, until all
* Non-Terminals are replaced with terminals!
*/
/**
* Randomly indexes an ArrayList of Terminals associated with the Non-Terminal
* parameter as a key to our map that associates Non-Terminals and Terminals of
* a grammar input file.
*
* PUNCTUATION NOTE: Must/does handle any punctuation attached to an argument.
* For example, if this method got passed "<plea>," , it would trim off the ','
* and hold onto it, replace "<plea>", and then concatenate the ',' back onto
* the replacement.
*
* @param nonTerminalKey
* @return
*/
public String getRandomReplacement(String nonTerminalKey) {
// Check for punctuation
// String punctuation = nonTerminalPunctuation(nonTerminalKey);
// Remove the punctuation from the Non-Terminal (~ map key)
nonTerminalKey = nonTerminalKey.substring(0, nonTerminalKey.indexOf('>') + 1);
// Get the list of terminals for the Non-Terminal passed/found
ArrayList<String> values = this.nonTerminals.get(nonTerminalKey);
/* DEBUG */
// System.out.println("<NTK>: " + nonTerminalKey);
// Generate a random index within the bounds of that list
int randomIndex = 0;
// Is the Production-Rule being referenced?
if (nonTerminalKey.compareTo("<start>") == 0)
return this.getRandomProductionRule();
else
randomIndex = this.randomIndex(values.size() - 1);
// Return the randomly-indexed replacement
return values.get(randomIndex);
}
/**
* Pulls off any end to the Non-Terminal tag that is punctuation.
*
* @param nt
* @return
*/
protected String nonTerminalPunctuation(String nt) {
// Find the closing '>'
int closeIndex = nt.indexOf('>');
// Substring the end from the close
String str = nt.substring(closeIndex).trim();
return str;
}
}
| 8af1b9dab0712290391ed5f90dc68f7546469fbe | [
"Java"
] | 1 | Java | kirstenlspringer/sentenceGenerator | 25155c78d1064c980d12d7082a03aca3a2d762cd | 6b6826074c050e9b5ab267f95e3dbdd1cf8555c4 | |
refs/heads/master | <repo_name>vikramkumarofficial/PixelArtMaker<file_sep>/designs.js
//*********PIXEL ART MAKER*********
function makeGrid(row, col) {
    // Build the grid markup for the requested number of rows and columns
    let grid = "";
    for (let i = 0; i < row; i++) {
        grid += "<tr>";
        for (let j = 0; j < col; j++) { // declare j so it is not an implicit global
            grid += "<td></td>";
        }
        grid += "</tr>";
    }
    document.getElementById('pixelCanvas').innerHTML = grid;
}
$(document.body).on('click','td',function(e){
//filling grid with colors
let color=document.getElementById('colorPicker').value;
$(e.target).css('background-color', color);
});
$("#submit").click(function(e){
e.preventDefault();
//picking Height and Width
let height=document.getElementById('inputHeight').value;
let width=document.getElementById('inputWidth').value;
makeGrid(height,width);
});
| 6aafeaa35d257910a7339687287c4f905bd4d4e1 | [
"JavaScript"
] | 1 | JavaScript | vikramkumarofficial/PixelArtMaker | 620f35c13a66c363cf6b96ceb669e679d0ab6036 | 6ffc804987a11d0cc578f096a4351f948f205926 | |
refs/heads/master | <repo_name>SherrybabyOne/Koa-learn<file_sep>/z.js
function takeLongTime() {
return new Promise(resolve => {
setTimeout(() => resolve('long time gone'), 1000)
})
}
async function test() {
const res = await takeLongTime()
console.log(res)
}
test()
new Promise(resolve => console.log('aaa')) | 3d073435ceff35d4563c09d2df6f640ea31f8901 | [
"JavaScript"
] | 1 | JavaScript | SherrybabyOne/Koa-learn | 5637b6dd9db5df999fcc02cafb103263163842e2 | 30c8c8b0522a9afd5fe25b1779716239dfe2af91 | |
refs/heads/master | <repo_name>carriemathieu/reverse-each-word-online-web-sp-000<file_sep>/reverse_each_word.rb
def reverse_each_word(string)
  # reverse each word while keeping the original word order
  string.split.collect { |word| word.reverse }.join(" ")
end
"Ruby"
] | 1 | Ruby | carriemathieu/reverse-each-word-online-web-sp-000 | be8f2cc9f0a977d08e28f6aaf3a3e5b0e597f4bb | 09d19ff6e99e79e84db1d7f7237ed731e3a1f7e3 | |
refs/heads/master | <repo_name>SamyCoenen/.NetEncryptCommWPF<file_sep>/BasicSec/BasicSec/ContactVerwijderen.xaml.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Shapes;
namespace BasicSec
{
/// <summary>
/// Interaction logic for ContactVerwijderen.xaml
/// </summary>
public partial class ContactVerwijderen : Window
{
public List<string> contacten = new List<string>(); //lijst van alle zenders en ontvangers
public ContactVerwijderen()
{
ContactenUpdate();
InitializeComponent();
ButtonCheck();
listBoxContacten.ItemsSource = contacten;
}
private void buttonVerwijderen_Click(object sender, RoutedEventArgs e)
{
Directory.Delete(@".\Contacten\" + listBoxContacten.SelectedItem.ToString(), true);
contacten.Clear();
ContactenUpdate();
listBoxContacten.Items.Refresh();
ButtonCheck();
}
public void ContactenUpdate()
{
foreach (string naam in Directory.GetDirectories(@".\Contacten"))
{
contacten.Add(naam.Remove(0, 12));
}
}
public void ButtonCheck()
{
if (contacten.Count == 0)
{
buttonVerwijderen.IsEnabled = false;
}
else
{
buttonVerwijderen.IsEnabled = true;
listBoxContacten.SelectedIndex = 0;
}
}
}
}
<file_sep>/BasicSec/BasicSec/TcpServer.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Media;
using System.Windows.Threading;
namespace BasicSec
{
class TcpServer
{
private TcpListener _server;
private AutoResetEvent connectionWaitHandle = new AutoResetEvent(false);
private Boolean _isRunning;
private string path = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile);
public TcpServer(int port,TextBox statusTextBox,TextBox textTextBox)
{
            Console.WriteLine("TcpServer: starting listener on port " + port);
_server = new TcpListener(IPAddress.Any, port);
_server.Start();
_isRunning = true;
while (true)
{
//IAsyncResult result = _server.BeginAcceptTcpClient(HandleAsyncConnection, _server);
//connectionWaitHandle.WaitOne(); // Wait until a client has begun handling an event
//connectionWaitHandle.Reset(); // Reset wait handle or the loop goes as fast as it can (after first request)
TcpClient client = _server.AcceptTcpClient();
StreamReader sReader = new StreamReader(client.GetStream(), Encoding.UTF8);
String sData = null;
while (client.Connected)
{
// reads from stream
while (sReader.EndOfStream == false)
{
sData += sReader.ReadLine();
}
if (sData.Equals("De text hash is OK"))
{
Application.Current.Dispatcher.Invoke(
DispatcherPriority.Background,
new Action(() =>
// shows content on the console.
textTextBox.Text = File.ReadAllText(path+"\\CryptoSavedChatFile.txt")
));
}
Application.Current.Dispatcher.Invoke(
DispatcherPriority.Background,
new Action(() =>
// shows content on the console.
statusTextBox.Text = System.DateTime.Now +": "+ sData + Environment.NewLine + statusTextBox.Text
));
client.Close();
}
// to write something back.
// sWriter.WriteLine("Meaningfull things here");
// sWriter.Flush();
}
}
//public void HandleAsyncConnection(IAsyncResult obj)
//{
// // retrieve client from parameter passed to thread
// TcpClient client = _server.AcceptTcpClient();
// // sets two streams
// StreamReader sReader = new StreamReader(client.GetStream(), Encoding.UTF8);
// // you could use the NetworkStream to read and write,
// // but there is no forcing flush, even when requested
// Boolean bClientConnected = true;
// String sData = null;
// while (bClientConnected)
// {
// // reads from stream
// sData = sReader.ReadLine();
// // shows content on the console.
// ((MainWindow) System.Windows.Application.Current.MainWindow).listStatus.Text +=Environment.NewLine + new DateTime().TimeOfDay+sData;
// Console.WriteLine("Client > " + sData);
// // to write something back.
// // sWriter.WriteLine("Meaningfull things here");
// // sWriter.Flush();
// }
//}
}
}
<file_sep>/BasicSec/BasicSec/ContactToevoegen.xaml.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.IO;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Shapes;
namespace BasicSec
{
/// <summary>
/// Interaction logic for ContactenToevoegen.xaml
/// </summary>
public partial class ContactenToevoegen : Window
{
public ContactenToevoegen()
{
InitializeComponent();
}
private void button_Click(object sender, RoutedEventArgs e)
{
string naam = textBoxNaam.Text;
string ip = textBoxIP.Text;
if (Directory.Exists(@".\Contacten\" + naam))
{
MessageBox.Show("Dit contact bestaat al");
}
else
{
Directory.CreateDirectory(@".\Contacten\" + naam);
using (StreamWriter outputFile = new StreamWriter(@".\Contacten\" + naam + @"\IP.txt"))
{
outputFile.WriteLine(ip);
MessageBox.Show("Dit contact is goed toegevoegd!");
}
}
}
}
}
<file_sep>/BasicSec/BasicSec/MainWindow.xaml.cs
using Microsoft.Win32;
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using System.Windows.Threading;
using MahApps.Metro.Controls;
namespace BasicSec
{
/// <summary>
/// Interaction logic for MainWindow.xaml
/// </summary>
public partial class MainWindow : MetroWindow
{
private string filepath=null;
private List<string> contacten = new List<string>(); //lijst van alle zenders en ontvangers
private NetworkStream stream;
private byte[] buf;
public MainWindow()
{
InitializeComponent();
Thread thread = new Thread(() =>
{
TcpServer server = new TcpServer(8889, listStatus,textBoxBoodschap);
//System.Windows.Threading.Dispatcher.Run();
});
thread.Start();
if (!Directory.Exists(@".\Contacten"))
{
Directory.CreateDirectory(@".\Contacten");
}
ContactenUpdate();
listBoxZenders.ItemsSource = contacten;
listBoxZenders.SelectedIndex = 0;
//listBoxOntvangers.ItemsSource = contacten;
//listBoxOntvangers.SelectedIndex = 1;
// IPHostEntry ipHost = Dns.GetHostEntry(Dns.GetHostName());
}
//RadioButtons
//private void radioButton(object sender, RoutedEventArgs e)
//{
// if (radioButtonEncrypteren.IsChecked == true)
// {
// radioButtonDecrypteren.IsChecked = false;
// buttonEncrypterenDecrypteren.Content = "Encrypteren";
// labelHashCheck.Visibility = System.Windows.Visibility.Hidden;
// }
// else if (radioButtonDecrypteren.IsChecked == true)
// {
// radioButtonEncrypteren.IsChecked = false;
// buttonEncrypterenDecrypteren.Content = "Decrypteren";
// labelHashCheck.Visibility = System.Windows.Visibility.Visible;
// }
//}
//encrypteren en decrypteren
private void buttonSend_Click(object sender, RoutedEventArgs e)
{
// if (radioButtonDecrypteren.IsChecked == true)
// {
// if (File.Exists(@".\Boodschap.txt"))
// {
// File.Delete(@".\Boodschap.txt");
// }
// StreamWriter outputFile = new StreamWriter(@".\Boodschap.txt");
// outputFile.WriteLine(textBoxBoodschap.Text);
// outputFile.Close();
// outputFile.Dispose();
//string destinationip = GetIP(listBoxOntvangers.SelectedItem.ToString());
//string destinationusername = listBoxOntvangers.SelectedItem.ToString();
// NaarServerSturen(sourceip + ";" + destinationip + ";" + filelocation + ";" + sourceusername + ";" + destinationusername);
// }
// else if (radioButtonEncrypteren.IsChecked == true)
// {
// if (File.Exists(@".\Boodschap.txt"))
// {
// File.Delete(@".\Boodschap.txt");
// }
// StreamWriter outputFile = new StreamWriter(@".\Boodschap.txt");
// outputFile.WriteLine(textBoxBoodschap.Text);
// outputFile.Close();
// outputFile.Dispose();
// string sourceip = GetIP(listBoxZenders.SelectedItem.ToString());
// string destinationip = GetIP(listBoxOntvangers.SelectedItem.ToString());
// string filelocation = System.IO.Path.GetFullPath(@".\Boodschap.txt");
// string sourceusername = listBoxZenders.SelectedItem.ToString();
// string destinationusername = listBoxOntvangers.SelectedItem.ToString();
//string filelocation = System.IO.Path.GetFullPath(@".\Boodschap.txt");
//string destinationusername = listBoxZenders.SelectedItem.ToString();
string sourceip = GetIP(listBoxZenders.SelectedItem.ToString());
string textPath = System.IO.Path.GetFullPath("./CryptoSavedChatFile.txt");
if (textBoxBoodschap.IsEnabled == true)
{
File.WriteAllText(textPath, textBoxBoodschap.Text);
NaarServerSturen(sourceip + ";" + textPath);
}
else if (textBoxBoodschap.IsEnabled==false)
{
NaarServerSturen(sourceip +";" + filepath );
}
}
public string GetIP(string naam)
{
StreamReader reader = new StreamReader(@".\Contacten\" + naam + @"\IP.txt");
string ip = reader.ReadLine();
reader.Close();
reader.Dispose();
return ip;
}
public void NaarServerSturen(string text)
{
            TcpClient client = new TcpClient("127.0.0.1", 8888); //connect to the local relay server
            stream = client.GetStream();
            buf = Encoding.UTF8.GetBytes(text + "\n");
            stream.Write(buf, 0, buf.Length); //write the encoded byte count, not the character count
stream.Close();
}
private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
}
public void ContactenUpdate()
{
foreach (string naam in Directory.GetDirectories(@".\Contacten"))
{
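                // strip the leading ".\Contacten\" folder prefix (12 characters) so only the contact name is kept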
contacten.Add(naam.Remove(0, 12));
}
buttonCheck();
}
public void buttonCheck()
{
if (contacten.Count >= 1)
{
listBoxZenders.SelectedIndex = 0;
//listBoxOntvangers.SelectedIndex = 1;
buttonEncrypterenDecrypteren.IsEnabled = true;
}
else buttonEncrypterenDecrypteren.IsEnabled = false;
}
void ContactenUpdaten(object sender, EventArgs e)
{
contacten.Clear();
ContactenUpdate();
listBoxZenders.Items.Refresh();
//listBoxOntvangers.Items.Refresh();
}
private void BoodschapOpenen_Click(object sender, RoutedEventArgs e)
{
OpenFileDialog openFileDialog1 = new OpenFileDialog();
//openFileDialog1.Filter = "txt files (*.txt)|*.txt";
openFileDialog1.Multiselect = false;
if (openFileDialog1.ShowDialog() == true)
{
filepath = openFileDialog1.FileName;
textBoxBoodschap.Text = filepath;
textBoxBoodschap.IsEnabled = false;
}
}
private void BoodschapOpslaan_Click(object sender, RoutedEventArgs e)
{
SaveFileDialog saveFileDialog = new SaveFileDialog();
saveFileDialog.Filter = "Text file (*.txt)|*.txt";
saveFileDialog.InitialDirectory = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
if (saveFileDialog.ShowDialog() == true)
File.WriteAllText(saveFileDialog.FileName, textBoxBoodschap.Text);
}
private void Afsluiten_Click(object sender, RoutedEventArgs e)
{
this.Close();
}
private void ContactToevoegen_Click(object sender, RoutedEventArgs e)
{
ContactenToevoegen nieuwContact = new ContactenToevoegen();
nieuwContact.Show();
nieuwContact.Closed += new EventHandler(ContactenUpdaten);
}
private void ContactVerwijderen_Click(object sender, RoutedEventArgs e)
{
if (contacten.Count > 0)
{
ContactVerwijderen wegContact = new ContactVerwijderen();
wegContact.Show();
wegContact.Closed += new EventHandler(ContactenUpdaten);
}
else MessageBox.Show("Maak eerst contacten aan");
}
private void MetroWindow_Closed(object sender, EventArgs e)
{
NaarServerSturen("EXIT");
Application.Current.Shutdown();
Environment.Exit(0);
}
}
}
| 8209b4d0e2f82d3855a3384bc525c35f56613b05 | [
"C#"
] | 4 | C# | SamyCoenen/.NetEncryptCommWPF | 998e7ab0176b797a922c1ed01271c967ddaef4ed | 1f48b3ed6864de06da5a771386eb571ff038b591 | |
refs/heads/main | <file_sep>import * as fbadmin from 'firebase-admin';
import * as functions from 'firebase-functions';
import * as fireorm from 'fireorm';
fbadmin.initializeApp(functions.config().firebase)
const firestore = fbadmin.firestore()
const storage = fbadmin.storage().bucket()
fireorm.initialize(firestore);
export const db = firestore;
export const fbStorage = storage
export const getRepository = fireorm.getRepository;
<file_sep>import { logger } from "../utilities/logger";
import * as querystring from "querystring";
import * as functions from "firebase-functions";
import * as crypto from "crypto";
export const verifyHmac = function (providedHmacString: string, queryObj: any): boolean {
const LOG = logger('shopify-verifyHmac');
const conf = functions.config().shopify;
const map = Object.assign({}, queryObj);
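    // Shopify signs every query parameter except "hmac" itself and the legacy "signature" field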
delete map["signature"];
delete map["hmac"];
const message = querystring.stringify(map);
const providedHmac = Buffer.from(providedHmacString, "utf-8");
const generatedHash = Buffer.from(
crypto
.createHmac("sha256", conf.api_secret)
.update(message)
.digest("hex"),
"utf-8"
);
let hashEquals = false;
// timingSafeEqual will prevent any timing attacks. Arguments must be buffers
try {
hashEquals = crypto.timingSafeEqual(generatedHash, providedHmac);
// timingSafeEqual will return an error if the input buffers are not the same length.
} catch (e) {
LOG.error(e);
hashEquals = false;
}
if (!hashEquals) {
LOG.warn(`HMAC validation failed - provided hmac: ${providedHmacString} - calculated: ${generatedHash}\n
API secret: ${conf.api_secret}\n
Message: ${message}`);
}
return hashEquals;
}<file_sep>import Shopify = require("shopify-api-node");
import { Store } from "../models/Store";
import { constants } from "./constants";
//this way every client uses the same Shopify API version
export const getNewShopifyClient = function (mp: Store): Shopify {
if(mp.accessToken == null) {
throw new Error(`AccessToken for MP: ${mp.id} is null - aborting`);
}
return new Shopify({
shopName: mp.id,
accessToken: mp.accessToken,
apiVersion: constants.shopifyApiVersion
})
};
export const getNewShopifyClientWithToken = function (mp:string,token:string): Shopify {
return new Shopify({
shopName: mp,
accessToken: token,
apiVersion: constants.shopifyApiVersion
})
};
<file_sep>
import * as express from "express";
import { Store } from "../models/Store";
import { logger } from "../utilities/logger";
import * as functions from "firebase-functions";
import { verifyHmac } from './verifyHmac';
import * as uaParser from "useragent";
import { constants } from "../utilities/constants";
export const shopifyLoginRequiredMiddleware = async function (
req: express.Request,
resp: express.Response,
next: Function
) {
const LOG = logger('shopifyLoginRequiredMiddleware');
const { shop, hmac } = req.query as any
const shopDomain = req.signedCookies ? req.signedCookies.shopifyStore : null
if (shopDomain != null && shop != shopDomain && hmac != null) {
const hashEquals = verifyHmac(hmac, req.query);
if (hashEquals) {
const mp = await Store.getRepository().findById(shop);
if (mp != null) {
innerLogin(mp, req, resp);
return next();
} else {
LOG.warn(`shop and valid hmac found - but shop wasn't found locally - redirecting to connect`)
return redirectToConnect(shop, resp);
}
}
}
if (shopDomain == null) {
return redirectToConnect(shop, resp);
}
try {
innerLogin(await getStorePartner(shopDomain), req, resp);
} catch (e) {
LOG.error(`Couldn't log in to ${shopDomain} redirecting to connect`, e);
return redirectToConnect(shop, resp);
}
return next();
};
const innerLogin = function (mp: Store, req: express.Request, resp: express.Response) {
//TODO - take care of the sameSite flag based on
// the UA on the request
applyLoginCookie(mp.id, req, resp);
resp.locals.SHOPIFY_API_KEY = functions.config().shopify.api_key;
resp.locals.SHOP_DOMAIN = mp.id;
resp.locals.mp = mp;
}
const redirectToConnect = function (shop: string | null, resp: express.Response): void {
const redirectUri =
shop != null
? `${constants.basePath}/shopify/connect?shop=${encodeURIComponent(
shop
)}`
: `${constants.basePath}/shopify/connect`;
return resp.redirect(redirectUri);
}
export async function getStorePartner(
shopDomain: string
): Promise<Store> {
// const LOG = logger('shopifyController#getMerchantPartner');
const repo = Store.getRepository();
let mp = await repo.findById(shopDomain);
if (mp == null) {
throw new Error(`Couldn't find MP for shopDomain: ${shopDomain}`);
}
return mp;
}
export const applyLoginCookie = function (mpId: String, req: express.Request, resp: express.Response) {
resp.cookie("shopifyStore", mpId, secureCookieOptions(req, resp));
}
export const secureCookieOptions = function (req: express.Request, resp: express.Response): express.CookieOptions {
let options: express.CookieOptions = {
signed: true,
secure: true,
};
if (_shouldApplySameSiteNone(req.headers["user-agent"] || "")) {
options = {
signed: true,
secure: true,
sameSite: 'none'
};
}
return options;
}
const _shouldApplySameSiteNone = function (uaString: string) {
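    // Returns false for clients known to mishandle SameSite=None
    // (Chrome builds older than 67, the macOS 10.14 cookie stack and iOS 12),
    // so the cookie is issued without the attribute for them.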
const LOG = logger("_shouldApplySameSiteNone");
try {
const userAgent = uaParser.parse(uaString);
if (userAgent.family.toLowerCase().indexOf('chrom') != -1 && parseFloat(userAgent.major) < 67) {
return false;
}
const os = userAgent.os;
if (os.family.toLowerCase().indexOf("max os x") != -1 && os.major == "10" && os.minor == "14") {
return false;
}
if (os.family.toLowerCase().indexOf("ios") != -1 && os.major == "12") {
return false;
}
return true;
} catch (ex) {
LOG.error("caught error - returning should apply", ex)
return true;
}
}<file_sep>import { Collection , IEntity } from 'fireorm';
import {getRepository} from './../utilities/database';
@Collection()
export class Store implements IEntity {
id!: string;
shop?: string;
accessToken?: string | null;
shopifyScopes: string = "";
creationDate?: Date;
webhooksVerifiedAt?: Date | null;
disabled: boolean = false;
public static getRepository() {
return getRepository(Store);
}
public toString(): string {
return `MerchantPartner Id: ${this.id}`;
}
public constructor(init?: Partial<Store>) {
Object.assign(this, init);
}
}
<file_sep>import * as winston from "winston";
import {LoggingWinston} from "@google-cloud/logging-winston";
const format = winston.format;
const loggingWinston = new LoggingWinston({
level: 'debug'
});
export const logger = function (label: string): winston.Logger {
return winston.createLogger({
level: 'debug',
transports: [
new winston.transports.Console({
format: format.combine(
format.label({label: label, message:true}),
format.colorize(),
format.simple()
)
}),
loggingWinston
]
});
}
<file_sep>import { Store } from "../models/Store";
import { constants } from "./constants";
import { logger } from "./logger";
import { getNewShopifyClient } from "./shopifyClient";
export const addScriptTag = async function (store: Store) {
const LOG = logger('addScriptTag');
try {
const scriptUrl = `https://${constants.defaultRequestHost}${constants.basePath}/assets/script.js`
LOG.info(`ScriptUrl :${scriptUrl}`)
const shopify = getNewShopifyClient(store)
await shopify.scriptTag.create({
event: "onload",
src: scriptUrl
})
}
catch (e) {
LOG.error('Script tag not loaded')
LOG.error(JSON.stringify(e))
}
}<file_sep>import * as functions from 'firebase-functions';
import * as express from "express";
import { shopifyController } from './controllers/shopifyController';
import { testController } from './controllers/testController';
import { shopifyLoginRequiredMiddleware } from './middlewares/shopifyLoginMiddleware';
import exphbs = require("express-handlebars");
import { constants } from './utilities/constants';
const STATIC_PATH = "assets";
import cookieParser = require("cookie-parser");
const app = express();
app.use(cookieParser(constants.jwtSecret));
app.engine("handlebars", exphbs.create({
helpers:{
static: function (options: any) {
return `${constants.basePath}/${STATIC_PATH}/${options.fn()}`;
},
}
}).engine);
app.set("view engine", "handlebars");
app.use(express.json());
//serve static assets on dir assets
app.use(`/${STATIC_PATH}`, express.static("assets"));
export const indexGETRoute = app.get("/", shopifyLoginRequiredMiddleware, async (req, res) => {
res.render("home")
});
app.use("/test", testController);
app.use("/shopify", shopifyController);
exports.appv2 = functions.runWith({
memory: '256MB',
timeoutSeconds: 500,
}).https.onRequest(app);
<file_sep>import * as express from "express";
import { constants, getRandomArbitrary } from "../utilities/constants";
import * as functions from 'firebase-functions';
import {secureCookieOptions,applyLoginCookie} from "../middlewares/shopifyLoginMiddleware";
import {verifyHmac} from "../middlewares/verifyHmac";
import * as request from "request-promise-native";
import { Store } from "../models/Store";
import { addScriptTag } from "../utilities/scriptTag";
import { subscribeToWebhooks } from "../utilities/webhooks";
const router = express.Router();
export const shopifyController = router;
const SHOPIFY_SCOPES = "read_products,read_script_tags,write_script_tags";
//connect and install route
export const connectGETRoute = router.get("/connect", (req, res) => {
const shopDomain: string = req.query.shop as any
if (shopDomain) {
const state = getRandomArbitrary(8, 13);
const redirectUri = `${constants.schema}://${constants.defaultRequestHost}${constants.basePath}/shopify/callback`;
const shopUrl = shopDomain.endsWith("myshopify.com")
? shopDomain
: `${shopDomain}.myshopify.com`;
const installUri =
`${constants.schema}://${shopUrl}/admin/oauth/authorize?client_id=${
functions.config().shopify.api_key
}` +
`&scope=${SHOPIFY_SCOPES}` +
`&state=${state}` +
`&redirect_uri=${redirectUri}`;
res.cookie("state", state, secureCookieOptions(req, res));
return res.render('redirect', {
redirectUrl: installUri,
forceParentRedirect: true
});
}
return res.render("shopify/connect");
});
//verify connect and save details to db or update db
export const callbackGETRoute = router.get("/callback", (req, res) => {
const {shop, hmac, code, state} = req.query as any
const stateCookie = req.signedCookies.state;
const conf = functions.config().shopify;
if (state !== stateCookie) {
console.warn(`State: ${state} stateCookie: ${stateCookie}`);
return res.status(403).send("Request origin cannot be verified");
}
if (shop && hmac && code) {
const hashEquals = verifyHmac(hmac, req.query);
if (!hashEquals) {
return res.status(400).send("HMAC validation failed");
}
// DONE: Exchange temporary code for a permanent access token
const accessTokenRequestUrl =
"https://" + shop + "/admin/oauth/access_token";
const accessTokenPayload = {
client_id: conf.api_key,
client_secret: conf.api_secret,
code
};
return request
.post(accessTokenRequestUrl, {json: accessTokenPayload})
.then(accessTokenResponse => {
const mp = new Store({
id: shop,
accessToken: accessTokenResponse.access_token
});
const mpRepo = Store.getRepository();
return mpRepo.findById(shop).then(async existingMp => {
if (existingMp != null) {
existingMp.accessToken = accessTokenResponse.access_token;
await mpRepo.update(existingMp);
} else {
await mpRepo.create(mp);
await addScriptTag(mp)
await subscribeToWebhooks(mp)
}
applyLoginCookie(shop, req, res);
return res.redirect(`${constants.basePath}/`);
});
})
.catch(error => {
console.error(JSON.stringify(error));
return res.status(error.statusCode).send(error?.error?.error_description);
});
} else {
return res.status(400).send("Required parameters missing");
}
});
<file_sep>import * as express from "express";
import { constants } from "../utilities/constants";
import { getNewShopifyClientWithToken } from "../utilities/shopifyClient";
const router = express.Router();
export const scriptGetRoute = router.get(
"/script/:id",
async (req, res, next) => {
const id = req.params.id !== "1" ? req.params.id : null
const domain = ""
const token = ""
const shopify = getNewShopifyClientWithToken(domain, token)
id ? await shopify.scriptTag.delete(parseInt(id)) : ""
console.log(await shopify.scriptTag.list())
return res.sendStatus(200)
});
export const addScriptGetRoute = router.get(
"/add-script/",
async (req, res, next) => {
const domain = "aero-apps-sandbox.myshopify.com"
const token = "sh<PASSWORD> <KEY>"
const shopify = getNewShopifyClientWithToken(domain, token)
//create
const scriptUrl = `https://${constants.defaultRequestHost}${constants.basePath}/assets/script.js`
await shopify.scriptTag.create({
event: "onload",
src: scriptUrl
})
return res.sendStatus(200)
});
export const testController = router;
<file_sep>ngrok http -subdomain=suggestr-shopify 8080<file_sep>import { Store } from "../models/Store";
import { logger } from "./logger";
import { getNewShopifyClient } from "./shopifyClient";
export const subscribeToWebhooks = async function (store: Store) {
const LOG = logger('subscribeToWebhooks')
try {
        LOG.info('Subscribing to the products/update webhook')
const shopify = getNewShopifyClient(store)
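        // NOTE: the webhook address below is still the https://example.com placeholder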
await shopify.webhook.create({
address: "https://example.com",
topic: "products/update"
})
}
catch (e) {
LOG.error("Subsciption to webhooks unsucessfull")
LOG.error(JSON.stringify(e))
}
}<file_sep>
firebase functions:config:set shopify.api_key="" shopify.api_secret="" default.req_host=""
<file_sep>import * as express from "express";
import * as functions from "firebase-functions";
export const constants = {
basePath: process.env.NODEMON_DEBUG ? '' : '/appv2',
port: null,
shopifyApiVersion: '2019-07',
schema: process.env.FUNCTIONS_EMULATOR ? (process.env.MDEBUG_COMMAND ? "https" : "https") : "https",
project: process.env.GCLOUD_PROJECT || "",
allowedHosts: [
"localhost",
"suggestr-shopify.ngrok.io",
"us-central1-pohoda-sandbox-dev.cloudfunctions.net"
],
defaultRequestHost: process.env.RUNNING_SCRIPT ? "" : functions.config().default.req_host,
functionLocation: 'us-central1',
jwtSecret: process.env.JWT_SECRET || "different JWT asdl kjalkjklasd kjla123 123123 ",
};
export const requestHost = function (request: express.Request): string {
if (constants.allowedHosts.indexOf(request.hostname) == -1) {
throw new Error(`Host: ${request.hostname} not in allowedHosts`);
}
return request.hostname;
};
export const getRandomArbitrary = function (min: number, max: number) {
return Math.round(Math.random() * (max - min) + min);
};
| e17638cd0564a9c5060c954ed8a10c6347985361 | [
"TypeScript",
"Shell"
] | 14 | TypeScript | vimalgopal/sample-task | 81205fdaad7e4622e0ec0d40bf0de8922ecff623 | e7a7db08753bd3402902a02b7ee2bfd7766a67b0 | |
refs/heads/master | <repo_name>fdzr/test<file_sep>/sala_reuniones/widgets.py
from django import forms
from django.conf import settings
class DatePicker(forms.DateInput):
class Media:
css = {
'all': ('%scss/datepicker.min.css' % settings.STATIC_URL,)
}
js = ('%sjs/bootstrap-datepicker.min.js' % settings.STATIC_URL,
'%sjs/bootstrap-datepicker.es.js' % settings.STATIC_URL
)
class TimePicker(forms.TimeInput):
class Media:
css = {
'all': ('%scss/jquery-clockpicker.min.css' % settings.STATIC_URL,)
}
js = ('%sjs/jquery-clockpicker.min.js' % settings.STATIC_URL,)
<file_sep>/sala_reuniones/migrations/0001_initial.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2019-01-19 18:55
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='HorarioDisponibilidad',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('hora_inicio', models.TimeField(verbose_name='Hora de inicio')),
('hora_fin', models.TimeField(verbose_name='Hora de terminar')),
],
options={
'ordering': ['hora_inicio'],
'verbose_name_plural': 'Horario disponibilidades',
},
),
migrations.CreateModel(
name='Insumo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre', models.CharField(max_length=30)),
],
options={
'ordering': ['nombre'],
},
),
migrations.CreateModel(
name='Reserva',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('fecha', models.DateField()),
('hora_inicio', models.TimeField(verbose_name='Hora inicial')),
('hora_final', models.TimeField(verbose_name='Hora final')),
('capacidad', models.IntegerField(verbose_name='Capacidad de personas')),
('confirmada', models.BooleanField(default=False)),
],
options={
'ordering': ['fecha'],
},
),
migrations.CreateModel(
name='SalaReuniones',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre', models.CharField(max_length=50)),
('ubicacion', models.CharField(max_length=100, verbose_name='Ubicaci\xf3n')),
('capacidad', models.IntegerField()),
('estado', models.CharField(choices=[('nd', 'No disponible'), ('d', 'Disponible'), ('r', 'Reservada'), ('c', 'Confirmada')], default='d', max_length=20)),
('horario_disponibilidad', models.ManyToManyField(to='sala_reuniones.HorarioDisponibilidad')),
('insumos', models.ManyToManyField(blank=True, default=['Proyector', 'Pizarr\xf3n'], to='sala_reuniones.Insumo')),
],
options={
'ordering': ['nombre'],
'verbose_name_plural': 'Sala de reuniones',
},
),
migrations.AddField(
model_name='reserva',
name='sala_reuniones',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='sala_reuniones.SalaReuniones'),
),
migrations.AddField(
model_name='reserva',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
<file_sep>/sala_reuniones/templates/sala_reuniones/reserva_confirm_delete.html
{% extends "sala_reuniones/base.html" %}
{% block content %}
<div class="row">
<div class="panel panel-default">
<div class="panel-heading">Eliminar</div>
<div class="panel-body">
<form action="" method="POST">{% csrf_token %}
                    <p>¿Seguro que desea eliminar la reserva del salón
<span class="label label-warning">
<em>{{ object.sala_reuniones.nombre}}</em>
</span>
para el
<span class="label label-default">{{object.fecha}}</span>
de <span class="label label-default">{{object.hora_inicio}} - {{object.hora_final}}</span>?
</p>
<br>
<br>
<br>
<div class="form-group">
<button class="btn btn-block btn-danger" type="submit">Eliminar</button>
</div>
</form>
</div>
</div>
</div>
{% endblock content %}<file_sep>/sala_reuniones/tests/tests_views.py
# -*- encoding:utf-8 -*-
# -*- coding:utf-8 -*-
from datetime import datetime, date
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.urls import resolve
from django.contrib.auth.models import User
from sala_reuniones.views import HomeView, ListarReservasViews
from sala_reuniones.models import Insumo, HorarioDisponibilidad, SalaReuniones, Reserva
from sala_reuniones.forms import ReservaForm
class HomeViewTest(TestCase):
def test_home_view(self):
response = self.client.get(reverse('home'))
return self.assertEqual(200, response.status_code)
def test_home_view_with_class(self):
view = resolve('/')
return self.assertEqual(view.func.view_class, HomeView)
class RedirectViewTest(TestCase):
def setUp(self):
self.form_data = {'username': 'Frank', 'email': '<EMAIL>', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>'}
def test_no_users_yet(self):
self.assertQuerysetEqual([], User.objects.all())
def test_register_redirection_successful(self):
response = self.client.post(reverse('registrarse'), self.form_data)
self.assertEqual(302, response.status_code)
def test_register_new_user(self):
response = self.client.post(reverse('registrarse'), self.form_data)
self.assertTrue(User.objects.all())
def test_register_new_user_with_name(self):
response = self.client.post(reverse('registrarse'), self.form_data)
self.assertEqual('Frank', User.objects.filter(username='Frank').first().username)
class LoginViewTest(TestCase):
def setUp(self):
self.user = User.objects.create(username='test', is_superuser=True)
self.user.set_password('<PASSWORD>')
self.user.save()
self.form_data = {'username': 'test', 'password': '<PASSWORD>'}
def test_login_user_redirect_home(self):
response = self.client.post(reverse('entrar'), self.form_data)
self.assertRedirects(response, reverse('home'))
class LogoutViewTest(TestCase):
def setUp(self):
self.user = User.objects.create(username='test', is_superuser=True)
self.user.set_password('<PASSWORD>')
self.user.save()
self.client = Client()
self.client.login(username='test', password='<PASSWORD>')
def test_logout_with_status_code(self):
response = self.client.get(reverse('salir'))
self.assertEqual(302, response.status_code)
def test_logout_redirection_home(self):
response = self.client.get(reverse('salir'))
self.assertEqual(response.url, reverse('home'))
class AdicionarReservaViewTest(TestCase):
def setUp(self):
self.user = User.objects.create(username='test', is_superuser=True)
self.user.set_password('<PASSWORD>')
self.user.save()
self.insumo = Insumo.objects.create(nombre=u'Pizarrón')
self.insumo2 = Insumo.objects.create(nombre='Proyector')
self.horario_disponibilidad1 = HorarioDisponibilidad.objects.create(hora_inicio='10:00', hora_fin='12:00')
self.horario_disponibilidad2 = HorarioDisponibilidad.objects.create(hora_inicio='13:00', hora_fin='15:00')
self.sala_reunion = SalaReuniones.objects.create(nombre='Sala 1', ubicacion='Norte', capacidad=10)
self.sala_reunion.horario_disponibilidad.add(self.horario_disponibilidad1)
self.sala_reunion.insumos.add(self.insumo)
self.sala_reunion.save()
self.client = Client()
self.client.login(username='test', password='<PASSWORD>')
def test_add_reservation(self):
form_data = {'fecha': '2019-01-29',
'hora_inicio': '10:00',
'hora_final': '12:00',
'capacidad': 10,
'sala_reuniones': self.sala_reunion.pk,
'user': self.user.pk
}
form = ReservaForm(form_data, pk=self.sala_reunion.pk)
self.assertTrue(form.is_valid())
def test_add_reservation_check_reservation(self):
form_data = {'fecha': '2019-01-29',
'hora_inicio': '10:00',
'hora_final': '12:00',
'capacidad': 10,
'sala_reuniones': self.sala_reunion.pk,
'user': self.user.pk
}
form = ReservaForm(form_data, pk=self.sala_reunion.pk)
form.save()
self.assertTrue(Reserva.objects.all())
def test_check_errors_add_reservation_date_past(self):
form_data = {'fecha': '2019-01-20',
'hora_inicio': '10:00',
'hora_final': '12:00',
'capacidad': 10,
'sala_reuniones': self.sala_reunion.pk,
'user': self.user.pk
}
form = ReservaForm(form_data, pk=self.sala_reunion.pk)
self.assertEqual(form.errors['fecha'][0], u'Este campo no admite fechas en el pasado.')
def test_list_reservations(self):
response = self.client.get(reverse('sala_reuniones:listar_reservas'))
self.assertEqual(200, response.status_code)
def test_list_reservations_check_view(self):
view = resolve('/sala_reuniones/listar/reservas/')
self.assertEqual(view.func.view_class, ListarReservasViews)
class EditarReservaViewTest(TestCase):
def setUp(self):
self.user = User.objects.create(username='test', is_superuser=True)
self.user.set_password('<PASSWORD>')
self.user.save()
self.insumo = Insumo.objects.create(nombre=u'Pizarrón')
self.insumo2 = Insumo.objects.create(nombre='Proyector')
self.horario_disponibilidad1 = HorarioDisponibilidad.objects.create(hora_inicio='10:00', hora_fin='12:00')
self.horario_disponibilidad2 = HorarioDisponibilidad.objects.create(hora_inicio='13:00', hora_fin='15:00')
self.sala_reunion = SalaReuniones.objects.create(nombre='Sala 1', ubicacion='Norte', capacidad=10)
self.sala_reunion.horario_disponibilidad.add(self.horario_disponibilidad1)
self.sala_reunion.insumos.add(self.insumo)
self.sala_reunion.save()
self.reserva = Reserva.objects.create(fecha='2019-01-20', hora_inicio='10:00',
hora_final='12:00', capacidad=10, sala_reuniones=self.sala_reunion,
user=self.user)
self.form_data = {'fecha': '2019-01-28',
'hora_inicio': '10:00',
'hora_final': '12:00',
'capacidad': 50,
'sala_reuniones': self.sala_reunion.pk,
'user': self.user.pk
}
self.client = Client()
self.client.login(username='test', password='<PASSWORD>')
def test_edit_with_url(self):
response = self.client.post(reverse('sala_reuniones:editar_reserva',
kwargs={'pk': self.reserva.pk}), self.form_data)
self.assertEqual(302, response.status_code)
def test_edit_field_capacidad(self):
response = self.client.post(reverse('sala_reuniones:editar_reserva',
kwargs={'pk': self.reserva.pk}), self.form_data)
reserva = Reserva.objects.get(pk=self.reserva.pk).capacidad
self.assertEqual(50, reserva)
class EliminarReservasViewTest(TestCase):
def setUp(self):
self.user = User.objects.create(username='test', is_superuser=True)
self.user.set_password('<PASSWORD>')
self.user.save()
self.insumo = Insumo.objects.create(nombre=u'Pizarrón')
self.insumo2 = Insumo.objects.create(nombre='Proyector')
self.horario_disponibilidad1 = HorarioDisponibilidad.objects.create(hora_inicio='10:00', hora_fin='12:00')
self.horario_disponibilidad2 = HorarioDisponibilidad.objects.create(hora_inicio='13:00', hora_fin='15:00')
self.sala_reunion = SalaReuniones.objects.create(nombre='Sala 1', ubicacion='Norte', capacidad=10)
self.sala_reunion.horario_disponibilidad.add(self.horario_disponibilidad1)
self.sala_reunion.insumos.add(self.insumo)
self.sala_reunion.save()
self.reserva = Reserva.objects.create(fecha='2019-01-20', hora_inicio='10:00',
hora_final='12:00', capacidad=10, sala_reuniones=self.sala_reunion,
user=self.user)
self.client = Client()
self.client.login(username='test', password='<PASSWORD>')
def test_delete_status_code(self):
response = self.client.post(reverse('sala_reuniones:eliminar_reserva', kwargs={'pk': self.reserva.pk}))
self.assertEqual(response.status_code, 302)
def test_delete_redirection(self):
response = self.client.post(reverse('sala_reuniones:eliminar_reserva', kwargs={'pk': self.reserva.pk}))
self.assertRedirects(response, reverse('sala_reuniones:listar_reservas'))
def test_delete_reserva_query(self):
response = self.client.post(reverse('sala_reuniones:eliminar_reserva', kwargs={'pk': self.reserva.pk}))
self.assertQuerysetEqual([], Reserva.objects.all())
<file_sep>/sala_reuniones/urls.py
from django.conf.urls import url
from .views import (AdicionarReservaView,
ListarReservasViews,
ReservasFullcalendario,
EditarReservaView,
EliminarReservasView)
urlpatterns = [
url(r'^adicionar/reserva/(?P<pk>\d+)/$', AdicionarReservaView.as_view(), name='adicionar_reserva'),
url(r'^listar/reservas/$', ListarReservasViews.as_view(), name='listar_reservas'),
url(r'^listar/reservas/calendario/$', ReservasFullcalendario.as_view(), name='calendario_reservas'),
url(r'^editar/reserva/(?P<pk>\d+)/$', EditarReservaView.as_view(), name='editar_reserva'),
url(r'^eliminar/reserva/(?P<pk>\d+)/$', EliminarReservasView.as_view(), name='eliminar_reserva')
]
<file_sep>/sala_reuniones/models.py
# -*- encoding:utf-8 -*-
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.conf import settings
# Create your models here.
class Insumo(models.Model):
nombre = models.CharField(max_length=30)
class Meta:
ordering = ['nombre']
def __unicode__(self):
return unicode(self.nombre)
def __str__(self):
return self.nombre
class HorarioDisponibilidad(models.Model):
hora_inicio = models.TimeField(verbose_name='Hora de inicio')
hora_fin = models.TimeField(verbose_name='Hora de terminar')
class Meta:
ordering = ['hora_inicio']
verbose_name_plural = 'Horario disponibilidades'
def __unicode__(self):
return unicode(str(self.hora_inicio) + ' - ' + str(self.hora_fin))
def __str__(self):
return str(self.hora_inicio) + ' - ' + str(self.hora_fin)
class SalaReuniones(models.Model):
nombre = models.CharField(max_length=50)
ubicacion = models.CharField(max_length=100, verbose_name=u'Ubicación')
capacidad = models.IntegerField()
horario_disponibilidad = models.ManyToManyField(HorarioDisponibilidad)
insumos = models.ManyToManyField(Insumo, default=['Proyector', u'Pizarrón'], blank=True)
class Meta:
ordering = ['nombre']
verbose_name_plural = 'Sala de reuniones'
def __unicode__(self):
return unicode(self.nombre)
def __str__(self):
return self.nombre
class Reserva(models.Model):
NO_DISPONIBLE = 'nd'
DISPONIBLE = 'd'
RESERVADA = 'r'
CONFIRMADA = 'c'
ESTADO_CHOICES = (
(NO_DISPONIBLE, 'No disponible'),
(DISPONIBLE, 'Disponible'),
(RESERVADA, 'Reservada'),
(CONFIRMADA, 'Confirmada')
)
fecha = models.DateField()
hora_inicio = models.TimeField(verbose_name='Hora inicial')
hora_final = models.TimeField(verbose_name='Hora final')
capacidad = models.IntegerField(verbose_name='Capacidad de personas')
sala_reuniones = models.ForeignKey(SalaReuniones)
confirmada = models.BooleanField(default=False)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
estado = models.CharField(max_length=20, choices=ESTADO_CHOICES, default=DISPONIBLE)
class Meta:
ordering = ['fecha', 'sala_reuniones']
def __str__(self):
return (str(self.fecha) + ' ' + self.sala_reuniones.nombre + ' ' + str(self.hora_inicio.strftime('%H:%M')) + ' ' + str(self.hora_final))
# .strftime('%d-%b-%Y')
<file_sep>/sala_reuniones/forms.py
# -*- encoding:utf-8 -*-
# -*- coding:utf-8 -*-
from datetime import datetime
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from .models import Reserva, SalaReuniones
from .widgets import DatePicker, TimePicker
class UserRegisterForm(UserCreationForm):
email = forms.EmailField()
class Meta:
model = User
fields = ['username', 'email', '<PASSWORD>', '<PASSWORD>']
class ReservaForm(forms.ModelForm):
use_required_attribute = False
class Meta:
model = Reserva
fields = ['fecha', 'hora_inicio', 'hora_final', 'capacidad', 'sala_reuniones', 'confirmada', 'user']
widgets = {
'fecha': DatePicker,
'hora_inicio': TimePicker,
'hora_final': TimePicker
}
def __init__(self, *args, **kwargs):
if 'pk' in kwargs:
pk_sala = kwargs.pop('pk')
super(ReservaForm, self).__init__(*args, **kwargs)
if not self.instance.id:
sala_reunion = SalaReuniones.objects.get(pk=pk_sala)
self.fields['user'].required = False
self.fields['sala_reuniones'].initial = sala_reunion
def clean_fecha(self):
fecha = self.cleaned_data['fecha']
if fecha < datetime.now().date():
raise forms.ValidationError('Este campo no admite fechas en el pasado.')
return fecha
def clean_hora_final(self):
hora_final = self.cleaned_data['hora_final']
try:
hora_inicio = self.cleaned_data['hora_inicio']
except KeyError:
hora_inicio = ''
if hora_inicio:
if hora_final < hora_inicio:
raise forms.ValidationError('La hora final no puede ser menor que la hora inicial.')
return hora_final
def clean_sala_reuniones(self):
sala_reunion = self.cleaned_data['sala_reuniones']
try:
hora_inicio = self.cleaned_data['hora_inicio']
hora_final = self.cleaned_data['hora_final']
except KeyError:
return sala_reunion
        horarios = sala_reunion.horario_disponibilidad.all()
        # the requested times must fall inside at least one availability window
        if horarios and not any(h.hora_inicio <= hora_inicio <= h.hora_fin for h in horarios):
            self.add_error('hora_inicio', 'No hay disponibilidad a esta hora.')
        if horarios and not any(h.hora_inicio <= hora_final <= h.hora_fin for h in horarios):
            self.add_error('hora_final', 'No hay disponibilidad en este horario.')
return sala_reunion
def clean(self):
try:
fecha = self.cleaned_data['fecha']
hora_inicio = self.cleaned_data['hora_inicio']
hora_final = self.cleaned_data['hora_final']
except KeyError:
return self.cleaned_data
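        # reject the reservation if another one already occupies the same date and time slot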
reserva = Reserva.objects.filter(fecha=fecha, hora_inicio=hora_inicio, hora_final=hora_final)
if reserva:
raise forms.ValidationError('Ya existe una reserva en esa fecha y rango de horarios.')
return self.cleaned_data
<file_sep>/sala_reuniones/admin.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import HorarioDisponibilidad, SalaReuniones, Insumo, Reserva
# Register your models here.
admin.site.register(SalaReuniones)
admin.site.register(Insumo)
admin.site.register(HorarioDisponibilidad)
admin.site.register(Reserva)<file_sep>/sala_reuniones/views.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render, redirect
from django.views.generic import ListView, CreateView, View, TemplateView, UpdateView, DeleteView
from django.urls import reverse, reverse_lazy
from django.contrib import messages
from django.contrib.auth.models import User
from .models import SalaReuniones, Reserva
from .forms import UserRegisterForm, ReservaForm
from braces.views import LoginRequiredMixin, FormValidMessageMixin
# Create your views here.
class HomeView(ListView):
model = SalaReuniones
template_name = 'sala_reuniones/home.html'
context_object_name = 'salas_reuniones'
class RegisterView(View):
template_name = 'sala_reuniones/register.html'
def get(self, request, *args, **kwargs):
form = UserRegisterForm()
return render(request, self.template_name, {'form': form})
def post(self, request):
form = UserRegisterForm(request.POST)
if form.is_valid():
username = form.cleaned_data['username']
messages.success(request, 'Cuenta creada para %s !!!' % username)
form.save()
return redirect('home')
return render(request, self.template_name, {'form': form})
class AdicionarReservaView(LoginRequiredMixin, CreateView):
model = Reserva
form_class = ReservaForm
success_url = reverse_lazy('sala_reuniones:listar_reservas')
def form_valid(self, form):
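        # attach the logged-in user and derive the estado flag before saving the reservation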
formulario = form.save(commit=False)
formulario.user = self.request.user
if formulario.confirmada:
formulario.estado = Reserva.CONFIRMADA
else:
formulario.estado = Reserva.RESERVADA
formulario.save()
return super(AdicionarReservaView, self).form_valid(form)
def get_context_data(self, *args, **kwargs):
context_data = super(AdicionarReservaView, self).get_context_data(*args, **kwargs)
sala_reunion = SalaReuniones.objects.get(pk=self.kwargs['pk'])
context_data['sala_reunion'] = sala_reunion
context_data['horarios'] = sala_reunion.horario_disponibilidad.all()
return context_data
def get_form_kwargs(self):
kwargs = super(AdicionarReservaView, self).get_form_kwargs()
kwargs['pk'] = self.kwargs['pk']
return kwargs
class ListarReservasViews(ListView):
model = Reserva
context_object_name = 'reservas'
class ReservasFullcalendario(TemplateView):
template_name = 'sala_reuniones/fullcalendar.html'
def get_context_data(self, *args, **kwargs):
context_data = super(ReservasFullcalendario, self).get_context_data(*args, **kwargs)
context_data['reservas'] = Reserva.objects.all()
return context_data
class EditarReservaView(LoginRequiredMixin, FormValidMessageMixin, UpdateView):
model = Reserva
form_class = ReservaForm
form_valid_message = 'Se ha editado la reserva satisfactoriamente.'
success_url = reverse_lazy('sala_reuniones:listar_reservas')
def get_context_data(self, *args, **kwargs):
reserva = Reserva.objects.get(pk=self.kwargs['pk'])
context_data = super(EditarReservaView, self).get_context_data(*args, **kwargs)
context_data['sala_reunion'] = reserva.sala_reuniones
context_data['horarios'] = reserva.sala_reuniones.horario_disponibilidad.all()
context_data['editar'] = True
context_data['pk'] = self.object.pk
return context_data
class EliminarReservasView(LoginRequiredMixin, DeleteView):
model = Reserva
success_url = reverse_lazy('sala_reuniones:listar_reservas')
| 51ba30e3e284f2855ed6ef5b562bc61408e0221f | [
"Python",
"HTML"
] | 9 | Python | fdzr/test | f7e741778ff38fdf5eb618ee4b5cba6371910ed6 | 2c778fc5c87f76dd48a6e1903ee35966caea5bac | |
refs/heads/master | <repo_name>silviacloud/php-oop-prodotto<file_sep>/index.php
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=, initial-scale=1.0">
<title>Prodotto_OOP</title>
</head>
<body>
    <!-- Try to imagine a class like the one seen in class, defining the member variables needed to model a hypothetical warehouse product;
    also define a full constructor (all the variables you created) + printMe for logging, along the lines of what was covered in this morning's lesson -->
<?php
class Canvas{
private $width;
        private $height;
private $material;
private $price;
public function __construct ($width, $height, $material, $price){
$this -> width = $width;
$this -> height = $height;
$this -> material = $material;
$this -> price = $price;
}
public function printMe(){
echo 'Tela larga ' . $this -> width . ', alta ' . $this -> height . ' e realizzata in ' . $this -> material . '.' . '<br><br>' . 'Prezzo: ' . $this -> price . '<br><br>';
}
}
$canvas1 = new Canvas('25cm', '30cm', 'cotone', '€30');
$canvas1 -> printMe();
$canvas2 = new Canvas('30cm', '40cm', 'cotone', '€40');
$canvas2 -> printMe();
?>
</body>
</html> | 5402db2d83a969c993b5e7afcb20fd3fc69cb91f | [
"PHP"
] | 1 | PHP | silviacloud/php-oop-prodotto | effd835ae32392e19e579ec27d79f9c2b644148a | 615ea7fa853ce23c9f4456f68310b754f6e32342 | |
refs/heads/master | <repo_name>krati-1993/FRONTEND-WEB-DEVELOPMENT-FUNDAMENTALS-Assignment1-Session5<file_sep>/README.md
# FRONTEND-WEB-DEVELOPMENT-FUNDAMENTALS-Assignment1-Session5
FRONTEND-WEB-DEVELOPMENT-FUNDAMENTALS-Session5 Assignment1
<file_sep>/app.js
var name = "Name";
var age = "Age";
var dateofbirth = "Date of Birth";
var placeofbirth = "Place of Birth";
console.log(name +' , '+ age +' , '+ dateofbirth +' and '+ placeofbirth); | 5ed000e8697d18e54ee1ba1ca04db5a771e3f917 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | krati-1993/FRONTEND-WEB-DEVELOPMENT-FUNDAMENTALS-Assignment1-Session5 | 9e0de02119fabf208e89a6f13d5a161ccc75cbf3 | f71fb34111c8603c032cb26e6043010025e64d65 | |
refs/heads/master | <repo_name>sbakht/Text-Typing-Game<file_sep>/main.rb
def mistakes(original, inputted)
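  # compare the two strings word-by-word and return [correct word count, percent of words correct]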
original = original.split
inputted = inputted.split
correctWordCount = 0
original.each_with_index do |word,i|
correctWordCount += 1 if word == inputted[i]
end
percentWordsCorrect = (correctWordCount/original.length.to_f*100.0).round()
return correctWordCount, percentWordsCorrect
end
totalTime = 0
totalWordsCorrect = 0
totalWordCount = 0
totalCharCount = 0
# main game: carry over the running total from scores.txt if it exists, otherwise start at 0
points = File.exist?('scores.txt') ? File.read('scores.txt').to_i : 0
File.open(ARGV[0]).each_line do |line|
line = line.strip.gsub(/[.?!]/, '\0|').split('|')
line.each do |sentence|
sentence = sentence.strip
puts sentence
t1 = Time.now
input = STDIN.gets.chomp
correctWordCount, percentWordsCorrect = mistakes(sentence, input)
t2 = Time.now
wordCount = sentence.count(' ') + 1
totalWordCount += wordCount
totalWordsCorrect += correctWordCount
puts "Words Correct: #{correctWordCount}/#{wordCount} words"
puts "Words Correct: #{percentWordsCorrect}%"
charCount = sentence.length
totalCharCount += charCount
puts "Character Count: #{charCount}"
timeTaken = t2 - t1
totalTime += timeTaken
puts "Time Taken: #{timeTaken.round(2)}"
output = File.new('output.txt','a')
if input == sentence
puts "CORRECT"
elsif input.split == sentence.split
puts "CORRECT WITH EXTRA SPACE"
output.puts(sentence)
output.puts(input)
elsif input.gsub(/\s+/,'') == sentence.gsub(/\s+/,'')
puts "CORRECT WITH MISSING SPACE"
output.puts(sentence)
output.puts(input)
elsif input == ""
puts "NO ATTEMPT"
else
puts "WRONG"
output.puts(sentence)
output.puts(input)
end
output.close
puts "\n"
end
end
points += totalWordsCorrect
File.write('scores.txt', points) # persist the updated running total
puts "Total Words Correct: #{totalWordsCorrect}/#{totalWordCount}"
puts "Total Character Count: #{totalCharCount}"
puts "Total Time: #{totalTime.round(2)}"
puts "Total Points: #{points}" | 5220a67ea694f75e860cd79d42492c4ce48700fa | [
"Ruby"
] | 1 | Ruby | sbakht/Text-Typing-Game | 4b4b8d65cf0290bd8ae8fc1a1eabfea879e76dbc | ed9b2a63acd7ca0005aaae5a1519f5deb9242fa2 | |
refs/heads/main | <file_sep>const { Router } = require('express');
const { check } = require('express-validator');
const { usersDelete, usersPost, usersGet, usersGetById, userPut, usersPutError } = require('../controller/users');
const { validarCampos, validarRol, existeEmail, existeIdUsuario } = require('../custom-middleware/validator-campos');
const router = Router();
router.get('/', usersGet);
router.get('/:id', [
check('id', "No es un ID mongo Adecuado").isMongoId(),
check('id').custom(existeIdUsuario),
validarCampos
], usersGetById);
router.post('/', [
check('nombre', "El nombre es Obligatorio !!!").notEmpty(),
check('email', "El correo no es Valido!!!").isEmail(),
check('password', "<PASSWORD>!!!").not<PASSWORD>(),
check('rol').custom(validarRol),
check('email').custom(existeEmail),
//check('rol', "El rol es Obligatorio !!!").notEmpty(),
validarCampos
], usersPost);
router.put('/:usuarioId', [
check('usuarioId', "No es un ID mongo Adecuado").isMongoId(),
check('usuarioId').custom(existeIdUsuario),
// check('rol').custom(validarRol),
validarCampos
], userPut);
router.delete('/:id', [
check('id', "No es un ID mongo Adecuado").isMongoId(),
check('id').custom(existeIdUsuario),
validarCampos
], usersDelete);
router.put('*', usersPutError);
module.exports = router;<file_sep>const { response, request } = require('express');
const Usuario = require('../model/user');
const createCustomHash = require('../helpers/encrypt');
const usersGet = async(req = request, res = response) => {
const { from = 0, limit = 3 } = req.query;
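    // run the paginated query and the total count in parallel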
const [usuarios, count] = await Promise.all([Usuario
.find({ estado: true })
.skip(Number(from))
.limit(Number(limit)),
Usuario.countDocuments({ estado: true })
]);
/* const usuarios = await Usuario
.find({ estado: true })
.skip(Number(from))
.limit(Number(limit));
const count = await Usuario.countDocuments({ estado: true });*/
/* res.json({
VideoJuego: 'Contra',
id,
name,
page
});*/
res.json({
count,
usuarios,
});
};
const usersGetById = async(req = request, res = response) => {
const { id } = req.params;
const { password, nombre, rol, email, estado } = await Usuario.findById(id);
res.json({
usuario: {
nombre,
rol,
email,
estado,
password
},
});
};
const usersPost = async(req, res = response) => {
const { nombre, email, password, rol } = req.body;
const usuario = new Usuario({ nombre, email, password, rol });
// crypt the password
console.log('encriptando');
usuario.password = createCustomHash(password);
console.log('encriptado');
await usuario.save();
res.json({
msj: 'Hello World POST 5',
usuario
});
};
const userPut = async(req = request, res = response) => {
const { usuarioId } = req.params;
const { password, google, email, estado, ...resto } = req.body;
if (password) {
resto.password = createCustomHash(password);
}
const usuario = await Usuario.findByIdAndUpdate(usuarioId, resto);
res.json({
msj: 'Hello World PUT 2',
usuario
});
};
const usersPutError = (req, res = response) => {
const dirArray = __dirname.split("\\");
const dirRoot = dirArray.splice(0, dirArray.length - 1).join('\\');
res.sendFile(`${dirRoot}/public/error.html`);
//res.send('Nada que ver ' + dirRoot);
};
const usersDelete = async(req, res = response) => {
const { id } = req.params;
const usuario = await Usuario.findByIdAndUpdate(id, { estado: false });
const { nombre } = usuario;
res.json({
nombre
});
};
module.exports = {
usersGet,
usersGetById,
usersPost,
usersDelete,
userPut,
usersPutError
};<file_sep>const { Schema, model } = require('mongoose');
const schemaRole = Schema({
rol: {
type: String,
require: [true, "El rol "]
}
});
module.exports = model('rol', schemaRole);
<file_sep>const { validationResult } = require('express-validator');
const Rol = require('../model/rol');
const Usuario = require('../model/user');
const validarCampos = (req, res, next) => {
const errors = validationResult(req);
if (!errors.isEmpty()) {
return res.status(400).json(errors);
}
next();
};
const validarRol = async(rol = '') => {
const existe = await Rol.findOne({ rol });
if (!existe) {
throw new Error('No existe rol: ' + rol);
}
};
const existeEmail = async(email) => {
const existeemail = await Usuario.findOne({ email });
if (existeemail) {
throw new Error("El correo ya esta registrado: " + email);
//return res.status(400).json({ msg: "El correo ya esta registrado" });
}
};
const existeIdUsuario = async(usuarioId) => {
const usuarioBD = await Usuario.findById(usuarioId);
if (!usuarioBD) {
throw new Error("ID usuario no existe: " + usuarioId);
//res.status(400).json({ error: "ID usuario no existe" });
}
};
module.exports = {
validarCampos,
validarRol,
existeEmail,
existeIdUsuario
};<file_sep>## Key topics in this section
### Here we cover several topics, including:
- Defining the scope of our CRUD RESTServer
- Password encryption
- Custom validations
- Creating roles
- Connections to MLAB
- Deploying the database to the cloud
- Connecting Robo 3T to the cloud database
- Configuring environment variables
- Deleting files
- Physical (hard) deletes from the database
- Deletion by a status flag on a collection field (soft delete) | 71127b7794997a02104aebcdd5bb24867400b6dd | [
"JavaScript",
"Markdown"
] | 5 | JavaScript | inerhead/Rest-Server-ManageCollection | 0ce16d33629dbed8b6b2ebcf79d220b4fe6ae332 | 5bc4f06a4e37a0d0f16438c09494da953a516de2 | |
refs/heads/master | <repo_name>itma96/cpanel<file_sep>/connect.inc.php
<?php
if(!mysql_connect('localhost','root','') || !mysql_select_db('mydatabase')) {
die(mysql_error());
}
?><file_sep>/showusers.php
<?php
require 'connect.inc.php';
require 'core.inc.php';
?>
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">
<style>
body {
background: #0ca3d2;
}
table {
width: 100%
}
table, th, td {
border: 1px solid black;
border-collapse: collapse;
}
th, td {
padding: 5px;
text-align: center;
}
table#t01 tr:nth-child(even) {
background-color: #FFFFFF;
}
table#t01 tr:nth-child(odd) {
background-color: #A69F9F;
}
table#t01 th {
background-color: gray;
color: black;
}
a#DeleteButton {
background-color: red;
-moz-border-radius: 2px;
border-radius: 2px;
cursor: pointer;
border: 3px solid red;
font-weight: bold;
text-decoration: none;
}
a:link {
color: #000000 ;
}
/* visited link */
a:visited {
color: #000000 ;
}
/* mouse over link */
a:hover {
color: #000000 ;
}
/* selected link */
a:active {
color: #000000 ;
}
</style>
</head>
<body>
<div style=" margin:0 auto; width: 300px; font-size: 130%; background: white; border-radius: 5px;">
<h1><center>Lista Angajati</center></h1>
</div>
</body>
<form action="http://localhost/www/index.html" method="get">
<table id="t01" class="table-box">
<tr>
<th style="width: 120px; "><center>ID</center></th>
<th style="width: 120px; "><center>Nume</center></th>
<th style="width: 120px; "><center>Prenume</center></th>
<th style="width: 120px; "><center>Data nasterii</center></th>
<th style="width: 120px; "><center>Data angajarii</center></th>
<th style="width: 120px; "><center>Salariu</center></th>
<th style="width: 100px; "><center></center></th>
</tr>
<?php
$query = "SELECT `id`, `nume`, `prenume`, `data_nastere`, `data_angajare`, `salariu` FROM `employees`";
$query_result = mysql_query($query);
//$query_result = mysql_result($query_run);
while($row = mysql_fetch_array($query_result)) {
echo '<tr>';
echo '<td style = \'width: 120px;\'><center>'.$row['id'].'</center></th>';
echo '<td style = \'width: 120px;\'><center>'.$row['nume'].'</center></th>';
echo '<td style = \'width: 120px;\'><center>'.$row['prenume'].'</center></th>';
echo '<td style = \'width: 120px;\'><center>'.$row['data_nastere'].'</center></th>';
echo '<td style = \'width: 120px;\'><center>'.$row['data_angajare'].'</center></th>';
echo '<td style = \'width: 120px;\'><center>'.$row['salariu'].'</center></th>';
echo '<td><a id="DeleteButton" href=\'deleterow.php?row_id='.$row['id'].'\'>Delete</a></td>';
echo '</tr>';
}
?>
</table>
</form>
</body>
</html>
<file_sep>/adduser.php
<?php
require 'connect.inc.php';
require 'core.inc.php';
$lastname = $_SESSION['post_data']['nume'];
$firstname = $_SESSION['post_data']['prenume'];
$password = $_SESSION['post_data']['parola'];
$data_nasterii = $_SESSION['post_data']['data nasterii'];
$data_angajarii = $_SESSION['post_data']['data angajarii'];
$salariu = $_SESSION['post_data']['salariu'];
$username = $firstname .'.'. $lastname;
$query_run = mysql_query("SELECT `username` FROM `employees` WHERE `username`='$username'");
if(mysql_num_rows($query_run)>=1)
{
header("Location: index.php?message=User already exists!");
exit;
}
else
{
if(empty($firstname) || empty($lastname)) {
header("Location: index.php?message=Please provide a proper name!");
exit;
}
if(empty($password)) {
header("Location: index.php?message=Please provide a proper password!");
exit;
}
$query = "INSERT INTO `employees` VALUES ('','".mysql_real_escape_string($username)."',
'".mysql_real_escape_string($password)."','".mysql_real_escape_string($lastname)."',
'".mysql_real_escape_string($firstname)."','".mysql_real_escape_string($data_nasterii)."',
'".mysql_real_escape_string($data_angajarii)."','".mysql_real_escape_string($salariu)."')";
if($query_run = mysql_query($query)) {
header("Location: index.php?message=User has been added!");
}
}
?><file_sep>/core.inc.php
<?php
ob_start();
session_start();
$current_page = $_SERVER['SCRIPT_NAME'];
?><file_sep>/controller.php
<?php
require 'connect.inc.php';
require 'core.inc.php';
$_SESSION['post_data'] = $_POST;
if(!empty($_POST['AddButton'])) {
header('Location: adduser.php');
exit;
}
if(!empty($_POST['ShowButton'])) {
header('Location: showusers.php');
exit;
}
if(!empty($_POST['LogoutButton'])) {
header('Location: logout.php');
exit;
}
?><file_sep>/deleterow.php
<?php
require 'connect.inc.php';
require 'core.inc.php';
$id = (int) $_GET['row_id']; // cast to an integer so the id cannot inject SQL
$query = "DELETE FROM `employees` WHERE `id`='$id'";
if($query_run = mysql_query($query)) {
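        // reset the AUTO_INCREMENT counter after the delete (MySQL will not set it below MAX(id)+1)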
mysql_query("ALTER TABLE `employees` AUTO_INCREMENT=1");
header("Location: showusers.php");
}
?><file_sep>/index.php
<?php
require 'connect.inc.php';
require 'core.inc.php';
if(isset($_SESSION['user_id']) && !empty($_SESSION['user_id'])) {
$user_id = $_SESSION['user_id'];
$query_run = mysql_query("SELECT `username` FROM `employees` WHERE `id`='$user_id'");
$username = mysql_result($query_run,0);
} else {
include 'loginform.inc.php';
}
?>
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>index page</title>
<link href="index_menu.css" rel="stylesheet"/>
</head>
<body>
<form action="controller.php" method="POST">
<div class="form-box">
<br>
<?php
if(!empty($_GET['message'])) {
$message = $_GET['message'];
echo '<font color=\'red\'>'.$message.'</font>';
}
?>
<br>
<br>
Nume angajat:<br>
<input type="text" name="nume">
<br>
<br>
Prenume angajat:<br>
<input type="text" name="prenume">
<br>
<br>
Parola:<br>
<input type="password" name="<PASSWORD>">
<br>
<br>
Data nasterii:<br>
<input type="date" name="data nasterii">
<br>
<br>
Data angajarii:<br>
<input type="date" name="data angajarii">
<br>
<br>
Salariu:<br>
<input type="text" name="salariu">
<br>
<br>
</div>
<div class="info-panel">
<?php
if(isset($username) && !empty($username)) {
echo '<center><bold><font size=\'5\'>'.$username.'</font></bold></center>';
} else {
include 'logout.php';
}
?>
<img src="default_profile_pic.jpg" alt="Profile" style="width:150px;height:150px;border:2px solid #021a40;border-radius:5px">
<br>
<br>
<input type="submit" name="AddButton" value="Adauga angajat" style="width: 150px; height: 53px; ">
<br>
<br>
<input type="submit" name="ShowButton" value="Afiseaza angajati" style="width: 150px; height: 53px; ">
<br>
<br>
<input type="submit" name="LogoutButton" value="Logout" style="width: 150px; height: 53px; ">
</div>
</form>
</body>
</html>
<file_sep>/loginform.inc.php
<?php
if(isset($_POST['username']) && isset($_POST['password'])) {
$username = $_POST['username'];
$password = $_POST['<PASSWORD>'];
if(!empty($username) && !empty($password)) {
$query = "SELECT `id` FROM `employees` WHERE `username`='$username' AND `password`='$password'";
if($query_run = mysql_query($query)) {
if(mysql_num_rows($query_run)==0) {
echo 'Invalid username/password!';
} else if(mysql_num_rows($query_run)==1) {
$user_id = mysql_result($query_run,0,'id');
$_SESSION['user_id'] = $user_id;
header('Location: index.php');
}
}
} else {
echo 'You must provide a username and a password!';
}
}
?>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<title>Login Form</title>
<link rel="stylesheet" href="login_form.css">
<!--[if lt IE 9]><script src="//html5shim.googlecode.com/svn/trunk/html5.js"></script><![endif]-->
</head>
<body>
<div class="login">
<h1>Login</h1>
<form method="POST" action="<?php echo $current_file; ?>" >
<p><input type="text" name="username" value="" placeholder="Username"></p>
<p><input type="password" name="password" value="" placeholder="<PASSWORD>"></p>
<p class="submit"><input type="submit" name="commit" value="Login"></p>
</form>
</div>
</body>
</html> | 7045b4ffcd05fbb214779361e36b60ccf42fc9d3 | [
"PHP"
] | 8 | PHP | itma96/cpanel | 69a53cf346ff295ca8dc7946846ae775cab72080 | ea0e6ff61c817af853ec33db1bddef3e338afe58 | |
refs/heads/master | <repo_name>AntonArtomov/AngularMentoringHometask<file_sep>/src/app/course-list/course-list/course-list.component.ts
import { Component, OnInit } from '@angular/core';
import { CourseListItem } from 'src/app/course-list/course-list-item';
import { CourseService} from '../course.service'
@Component({
selector: 'app-course-list',
templateUrl: './course-list.component.html',
styleUrls: ['./course-list.component.css']
})
export class CourseListComponent implements OnInit {
public courses: CourseListItem[] = []
constructor(private courseService: CourseService) { }
ngOnInit() {
this.courses = this.courseService.getCourseItems();
}
onCourseDeleted(course){
console.log(course.Id + ' is deleted')
}
onLoadMoreClick() {
console.log("load more clicked");
}
}
<file_sep>/src/app/course-list/course-list-item/course-list-item.component.ts
import { Component, OnInit, Input, Output, EventEmitter} from '@angular/core';
import { CourseListItem } from 'src/app/course-list/course-list-item';
@Component({
selector: 'course-list-item',
templateUrl: './course-list-item.component.html',
styleUrls: ['./course-list-item.component.css']
})
export class CourseListItemComponent implements OnInit {
@Input() public courseItem: CourseListItem;
@Output() courseDeleted: EventEmitter<number> = new EventEmitter();
constructor() { }
ngOnInit() {
}
deleteCourse() {
    this.courseDeleted.emit(this.courseItem.Id); // emit the deleted course id to satisfy EventEmitter<number>
}
}
<file_sep>/src/app/course-list/course.service.ts
import { Injectable } from '@angular/core';
import { CourseListItem } from 'src/app/course-list/course-list-item';
@Injectable({
providedIn: 'root'
})
export class CourseService {
constructor() { }
public getCourseItems(): CourseListItem[] {
return [
{
Id:1,
Title: "Video Course 1",
CreationDate: new Date("Jan 1,2018"),
Duration: 88,
Description: "something very interesting, part 1. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla"
},
{
Id:2,
Title: "Video Course 2",
CreationDate: new Date("Jun 8,2018"),
Duration: 98,
Description: "something very interesting, part 1. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla"
},
{
Id:3,
Title: "Video Course 3",
CreationDate: new Date("Aug 22,2018"),
Duration: 58,
Description: "something very interesting, part 1. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla"
},
{
Id:4,
Title: "Video Course 4",
CreationDate: new Date("Sep 3,2018"),
Duration: 128,
Description: "something very interesting, part 1. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla. Bla-bla-bla"
}
]
}
}
| 9ccbec44a8648268c8b17d0b565c125ab6960048 | [
"TypeScript"
] | 3 | TypeScript | AntonArtomov/AngularMentoringHometask | cfe6059e411ef455fcd6dcfc344859c1f6d299b8 | db902dbf0b91b1a243d94f52bc00d70f97806826 | |
refs/heads/master | <repo_name>Szaharov/DMS2CloudIntegration<file_sep>/WSComponents/src/WSComponents.Tests/Utility.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace WSComponents.Tests
{
public static class Utility
{
public static int DEFAULT_TIMEOUT = 5000;
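        /// <summary>
        /// Blocks until <paramref name="canExit"/> becomes true, invoking <paramref name="callback"/> after each
        /// 1 ms sleep and throwing once <paramref name="TotalTime"/> iterations have elapsed without an exit.
        /// </summary>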
public static void WaitWithPeriodicalCallback(ref bool canExit, int TotalTime, Action callback)
{
int retryCount = TotalTime;
while (!canExit)
{
System.Threading.Thread.Sleep(1);
callback();
retryCount -= 1;
if (retryCount == 0)
{
throw new Exception("Waiting for exit timed out"); ;
}
}
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/CeaseCollaborationDialog/CeaseCollaborationDialog.xaml.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using Workshare.Components.Interfaces;
using Workshare.Components.Views.Common;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
namespace Workshare.Components.Views.CeaseCollaborationDialog
{
/// <summary>
/// Interaction logic for CeaseCollaborationDialog.xaml
/// </summary>
public partial class CeaseCollaborationDialog : OwnWindow
{
private IModuleView _view;
public CeaseCollaborationDialog()
{
InitializeComponent();
}
public CeaseCollaborationDialog(ModuleBase module, CommandInvoker commandInvoker, IEnumerable<IDMSItem> items, IModuleView view)
{
InitializeComponent();
_view = view;
DataContext = new CeaseCollaborationDialogViewModel(module, commandInvoker, items, this, _view);
this.Loaded += CeaseCollaborationDialog_Loaded;
this.Closed += CeaseCollaborationDialog_Closed;
}
void CeaseCollaborationDialog_Closed(object sender, EventArgs e)
{
_view.TopWindow = IntPtr.Zero;
_view.SuppressProgressDialog(false);
}
void CeaseCollaborationDialog_Loaded(object sender, RoutedEventArgs e)
{
var vm = (CeaseCollaborationDialogViewModel)DataContext;
_view.SuppressProgressDialog(true);
if (vm.ScanCommand.CanExecute(""))
{
vm.ScanCommand.Execute("");
}
}
private void OwnWindow_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
var vm = (CeaseCollaborationDialogViewModel)DataContext;
if (vm != null) vm.OnViewClosing();
}
        private void RadioButton_Checked(object sender, RoutedEventArgs e)
        {
            this.Tag = (sender as RadioButton).Tag;
        }
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/DmsItems/Visitors/ScanVisitor.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using Workshare.Components.WSLogger;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Strategies;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration.Processor.DmsItems.Visitors
{
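    /// <summary>
    /// Visitor that walks DMS files, folders and versions together with their Workshare counterparts and
    /// builds the FileMap/FolderMap/VersionMap tree (including per-item process state and errors) by
    /// delegating the actual comparison to the supplied DmsScanStrategyBase.
    /// </summary>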
public class ScanVisitor:DMSVisitor
{
public override PageIterator Iterator
{
set
{
Result.Iterator = value;
}
get { return Result.Iterator; }
}
public SummaryProcessResult Result { get; private set; }
public ScanOptions Options { set; get; }
private readonly Stack<ItemMap> _resultStack = new Stack<ItemMap>();
readonly DmsScanStrategyBase _dmsScanStrategy;
public ScanVisitor(DmsScanStrategyBase dmsScanStrategy)
{
_dmsScanStrategy = dmsScanStrategy;
Options = new ScanOptions();
Options.UseForceRequest = true;
}
public void Initialize(SummaryProcessResult result)
{
this.Result = result ?? new SummaryProcessResult();
this._resultStack.Clear();
this._resultStack.Push(Result);
}
void HandleMapError(ItemMap map, Exception ex)
{
            Logger.WriteError("Error during scanning item map.", ex);
map.ProcessState = ProcessState.Error;
map.Error = ex;
}
FileMap GetExistedAlreadyScannedFileMap(string localId, ItemMap parentMap)
{
return parentMap.Maps.OfType<FileMap>().FirstOrDefault(a =>string.Equals(a.GetLocalId(),localId));
}
FolderMap GetExistedAlreadyScannedFolderMap(string localId, ItemMap parentMap)
{
return parentMap.Maps.OfType<FolderMap>().FirstOrDefault(a => string.Equals(a.GetLocalId(), localId));
}
        FolderMap GetExistedAlreadyScannedFolderMapByWsId(int wsId, ItemMap parentMap)
        {
            return parentMap.Maps.OfType<FolderMap>().FirstOrDefault(a => Equals(a.GetWsId(), wsId));
        }
        FileMap GetExistedAlreadyScannedFileMapByWsId(int wsId, ItemMap parentMap)
        {
            return parentMap.Maps.OfType<FileMap>().FirstOrDefault(a => Equals(a.GetWsId(), wsId));
        }
bool IsInUnchangeableState(ItemMap map)
{
return map.ProcessState == ProcessState.Error || map.ProcessState == ProcessState.Processed;
}
public override bool VisitEnter(BaseDMSFile baseDMSFile)
{
Logger.WriteTrace(string.Format("VisitEnter file {0}", baseDMSFile));
var parentresult = _resultStack.Peek();
var localFile = (IDMSFile) baseDMSFile;
var existed = GetExistedAlreadyScannedFileMap(localFile.DMSId.ToString(CultureInfo.InvariantCulture), parentresult);
var alreadyScanned = existed != null;
var fileMap = existed??new FileMap();
fileMap.LocalFile = localFile;
if (!alreadyScanned)
{
parentresult.AddIf(fileMap);
}
_resultStack.Push(fileMap);
try
{
if (Options.Cancellation.IsCancelled) return false;
var options = new DmsFileScanOptions() { UseForceRequest = Options.UseForceRequest };
var res = _dmsScanStrategy.Scan(baseDMSFile, fileMap, options);
if (!IsInUnchangeableState(fileMap))
{
fileMap.ProcessState = res.Result;
}
fileMap.Error = res.Error;
return !options.SkipChildren;
}
catch (Exception ex)
{
HandleMapError(fileMap, ex);
return false;
}
}
public override void Visit(BaseDMSFile dmsfile)
{
Logger.WriteTrace(string.Format("Visit file {0}", dmsfile));
if (Options.Cancellation.IsCancelled) return;
var currentFileMap = (FileMap)_resultStack.Peek();
try
{
var options = new DmsFileScanOptions() { UseForceRequest = Options.UseForceRequest };
var res = _dmsScanStrategy.ScanForNewVersionsOnWs(dmsfile, currentFileMap, options);
if (!IsInUnchangeableState(currentFileMap))
{
currentFileMap.ProcessState = res.Result;
currentFileMap.Error = res.Error;
}
}
catch (Exception ex)
{
HandleMapError(currentFileMap, ex);
}
}
public override void VisitLeave(BaseDMSFile baseDMSFile)
{
Logger.WriteTrace(string.Format("VisitLeave file {0}", baseDMSFile));
_resultStack.Pop();
}
public override bool VisitEnter(BaseDMSFolder baseDMSFolder)
{
Logger.WriteTrace(string.Format("VisitEnter folder {0}", baseDMSFolder));
var dmsFolder = (IDMSFolder) baseDMSFolder;
var parentMap = _resultStack.Peek();
var existed = GetExistedAlreadyScannedFolderMap(dmsFolder.DMSId.ToString(CultureInfo.InvariantCulture), parentMap);
var folderMap = existed ?? new FolderMap();
parentMap.AddIf(folderMap);
_resultStack.Push(folderMap);
folderMap.LocalFolder = dmsFolder;
try
{
if (Options.Cancellation.IsCancelled) return false;
var param = new DmsFolderScanOptions() { UseForceRequest = Options.UseForceRequest };
var res=_dmsScanStrategy.Scan(baseDMSFolder, folderMap, param);
folderMap.ProcessState = res.Result;
folderMap.Error = res.Error;
return !param.SkipChildren;
}
catch (Exception ex)
{
HandleMapError(folderMap, ex);
return true;
}
}
public override void Visit(BaseDMSFolder dmsFolder)
{
Debug.WriteLine(string.Format("Visit folder {0}", dmsFolder));
}
public override void VisitLeave(BaseDMSFolder baseDMSFolder)
{
Debug.WriteLine(string.Format("VisitEnter _folder {0}", baseDMSFolder));
_resultStack.Pop();
}
public override void Visit(BaseDmsVersion baseDmsVersion)
{
Debug.WriteLine(string.Format("Visit version {0}", baseDmsVersion));
var parentMap = (FileMap) _resultStack.Peek();
if (Options.Cancellation.IsCancelled) return;
var versionMap = new VersionMap();
parentMap.AddIf(versionMap);
try
{
versionMap.LocalVersion = (IDmsVersion) baseDmsVersion;
var options = new DmsVersionScanOptions() { UseForceRequest = Options.UseForceRequest };
var res = _dmsScanStrategy.Scan(baseDmsVersion, versionMap, options);
versionMap.ProcessState = res.Result;
versionMap.Error = res.Error;
}
catch (Exception ex)
{
HandleMapError(versionMap, ex);
}
}
public override void GetNewEntries(out List<WsFolder> wsfolders, out List<WsFile> wsfiles)
{
var currentMap = (FolderMap)_resultStack.Peek();
_dmsScanStrategy.GetWsItemsToVisit(currentMap, out wsfolders, out wsfiles, Options.UseForceRequest);
}
public override bool VisitEnter(WsFolder wsFolder, out List<WsFolder> wsFolders, out List<WsFile> wsFiles)
{
wsFolders=new List<WsFolder>();
wsFiles=new List<WsFile>();
var parentMap = _resultStack.Peek();
var existed = GetExistedAlreadyScannedFolderMapByWsId(wsFolder.Id, parentMap);
if (existed == null)
{
existed = new FolderMap {WsFolder = wsFolder};
parentMap.AddIf(existed);
}
_resultStack.Push(existed);
try
{
wsFolders = existed.WsFolder.SubFolders.ToList();
wsFiles = existed.WsFolder.Files.ToList();
return true;
}
catch (Exception ex)
{
HandleMapError(existed, ex);
return false;
}
}
public override void VisitLeave(WsFolder wsFolder)
{
_resultStack.Pop();
}
public override void Visit(WsFolder wsFolder)
{
}
public override void Visit(WsFile file)
{
var parentMap = _resultStack.Peek();
var existed = GetExistedAlreadyScannedFileMapByWsId(file.Id, parentMap);
if (existed != null) return;
var map = new FileMap {WsFile = file};
try
{
parentMap.AddIf(map);
var wsVersions = map.WsFile.Versions.ToList();
foreach (var ver in wsVersions)
{
var verMap = new VersionMap {WsVersion = ver};
map.AddIf(verMap);
}
}
catch (Exception ex)
{
HandleMapError(map, ex);
}
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/DmsItems/BaseDMSFolder.cs
namespace Workshare.Integration.Processor.DmsItems
{
public abstract class BaseDMSFolder : BaseDMSItem
{
//TODO remove this class
}
public class SingleChildDMSFolder : BaseDMSFolder
{
internal readonly BaseDMSFolder _folder;
public SingleChildDMSFolder Next { get; set; }
public BaseDMSFile File { get; set; }
public SingleChildDMSFolder()
{
}
public SingleChildDMSFolder(BaseDMSFolder folder)
{
_folder = folder;
}
}
}
<file_sep>/iManageIntegration/Src/Workshate.HookEvents/ProcessIManageEvents.cs
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Net;
using System.Runtime.InteropServices;
using System.Threading;
using Com.Interwoven.Worksite.iManExt;
using Com.Interwoven.Worksite.iManExt2;
using Com.Interwoven.WorkSite.iManage;
using Workshare.Components.Interfaces;
using Workshare.Components.WSLogger;
using Workshare.IManage;
namespace Workshare.HookEvents
{
[ClassInterface(ClassInterfaceType.None)]
[Guid("125A2A8C-375E-4B95-ADC4-7EF1EAF8C247")]
[ComVisible(true)]
public class ProcessIManageEvents
{
public static readonly string GETSELECTEDITEMS = "SelectedIManObjects";
public static readonly string GETDESTINATIONOBJECT = "DestinationObject";
public static readonly string GETNRTDOCUMENTS = "SelectedNRTDocuments";
private const int MAXWAITTIME = 5000;
Thread _newFileChecker;
Thread _waitForFileTask;
readonly object _lockObject = new object();
private static bool _inProgress = false;
        private static readonly Queue<HookWorkUnit> _hooks = new Queue<HookWorkUnit>();
private readonly System.Web.Script.Serialization.JavaScriptSerializer jss = new System.Web.Script.Serialization.JavaScriptSerializer();
private readonly string tmpFilePart = Path.GetTempPath() + "WsCheckoutFiles.tmp";
private static bool _firstLaunch = true;
private static List<CheckoutFile> _CheckoutFiles;
public ProcessIManageEvents()
{
if (_firstLaunch && (IsRunDescSite && !HasRunnedFileSite || IsRunFileSite && !HasRunnedDescSite))
{
_firstLaunch = false;
File.WriteAllText(tmpFilePart, "[]");
}
if (_CheckoutFiles == null)
{
if (File.Exists(tmpFilePart)) _CheckoutFiles = jss.Deserialize<List<CheckoutFile>>(System.IO.File.ReadAllText(tmpFilePart));
if (_CheckoutFiles == null) _CheckoutFiles = new List<CheckoutFile>();
}
}
bool _continueWatching;
bool ContinueWatching
{
get
{
lock (_lockObject)
{
return _continueWatching;
}
}
set
{
lock (_lockObject)
{
_continueWatching = value;
}
}
}
private static void ExecutePendingWorkUnits()
{
_inProgress = false;
lock (_hooks)
{
if (_hooks.Any())
{
var hookUnit = _hooks.Dequeue();
hookUnit.Execute();
}
}
}
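        // Wraps a deferred hook action: while one unit is executing, later hooks are queued in _hooks and
        // run afterwards by ExecutePendingWorkUnits.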
public class HookWorkUnit
{
public void Execute()
{
_inProgress = true;
try
{
m_action();
}
catch (Exception ex)
{
Logger.WriteError(ex);
}
}
public HookWorkUnit(Action action)
{
m_action = action;
}
protected Action m_action;
}
public static bool IsRunFileSite
{
get
{
return Process.GetCurrentProcess().MainModule.ModuleName.ToLower().Contains("outlook");
}
}
public static bool IsRunDescSite
{
get
{
return Process.GetCurrentProcess().MainModule.ModuleName.ToLower().Contains("manage");
}
}
public static bool HasRunnedFileSite
{
get
{
foreach (var p in Process.GetProcesses())
{
try { if (p.MainModule.ModuleName.ToLower().Contains("outlook")) return true; }
catch { }
}
return false;
}
}
public static bool HasRunnedDescSite
{
get
{
foreach (var p in Process.GetProcesses())
{
try { if (p.MainModule.ModuleName.ToLower().Contains("manage")) return true; }
catch { }
}
return false;
}
}
private static int _lastImportedDocNumber = -1;
public static int LastImportedDocNumber
{
get { return _lastImportedDocNumber; }
set { _lastImportedDocNumber = value; }
}
public static List<object> GetImportedDocuments(ContextItems context)
{
try
{
var docs = new List<object>() { context.Item("ImportedDocument") };
if (docs != null && docs.Count == 1)
{
if (((IManDocument)docs[0]).Number != LastImportedDocNumber)
{
return docs;
}
}
}
            catch (ArgumentException)
{
Logger.Write("ImportedDocument list is empty", Severity.Information);
}
return new List<object>();
}
private static string GetServerIp(string hostname)
{
IPHostEntry entry = Dns.GetHostEntry(hostname);
return entry.AddressList.FirstOrDefault().ToString().Replace(".", "_");
}
private static string SetServerKey(IManDocument _file)
{
try
{
if (_file != null)
{
var session = _file.Database.Session;
var nrtdms = session.DMS.ObjectID;
IPAddress server_ip;
var ipAddres = "!server:";
if (IPAddress.TryParse(session.ServerName, out server_ip))
{
ipAddres += session.ServerName.Replace(".", "_");
}
else
{
ipAddres += GetServerIp(session.ServerName);
}
return nrtdms + ipAddres;
}
return "";
}
catch
{
return "";
}
}
public static void ProcessEvent(IEnumerable items, EventOperation operation, string command)
{
Logger.Write("Start ProcessEvent", Severity.Information);
if (items != null)
{
List<object> docs = items.Cast<object>().ToList();
if (docs.Count>0 && string.IsNullOrEmpty(Workshare.IManage.Application.iManInstance.ServerKey))
{
Workshare.IManage.Application.iManInstance.ServerKey = SetServerKey(docs[0] as IManDocument);
}
if (docs.Count == 1)
{
if (operation == EventOperation.AddNewDoc)
{
LastImportedDocNumber = ((IManDocument)docs[0]).Number;
}
}
foreach (var itm in items)
{
if (itm is IManDocument && (operation != EventOperation.MoveFile || operation == EventOperation.MoveFile && !(itm as IManDocument).CheckedOut))
{
string str = (itm as IManDocument).Number.ToString();
Logger.Write("ProcessEvent for doc with ID=" + str, Severity.Information);
Workshare.IManage.Application.iManInstance.View.ShowFileSyncDialog(itm, operation, command);
}
}
}
}
public static void ProcessEvent(ContextItems items, EventOperation operation, string command)
{
Logger.Write("------------------- ProcessEvent Start -------------------", Severity.Information);
Logger.WriteTrace("EventOperation : " + operation.ToString());
Logger.WriteTrace("Command : " + command);
Logger.WriteTrace("");
var docs = items.Item(ProcessIManageEvents.GETSELECTEDITEMS) as IEnumerable;
Logger.Write("------------------- ProcessEvent Stop -------------------", Severity.Information);
ProcessEvent(docs, operation, command);
}
public void HookFileSaveCmd(object cmd)
{
Logger.WriteTrace(string.Format("Raised save file iManage event"));
if (HooksDisabler.DisableHooks)
{
Logger.WriteTrace(string.Format("Hooks are disabled"));
return;
}
var view = Application.iManInstance.View;
ContinueWatching = false;
if (!IsRunFileSite && !IsRunDescSite)
{
Logger.WriteTrace(string.Format("Raised save file iManage event"));
ContinueWatching = false;
if (!IsRunFileSite && !IsRunDescSite)
{
var icmd = cmd as IManFileSaveDlg;
// ??? SetServerKey(icmd);
if (icmd != null && icmd.NewDocument != null)
{
var doc = icmd.NewDocument as IManDocument;
if (doc != null)
{
var path = doc.CheckoutPath;
var unit = new HookWorkUnit(() =>
{
if (!File.Exists(path))
{
_waitForFileTask = new Thread(() =>
{
try
{
Logger.WriteTrace(
string.Format(
"Document {0} was not found in check out path {1}. Start waiting thread.",
doc.Name, path));
var passed = 0;
while (!File.Exists(path) && passed < MAXWAITTIME)
{
passed += 100;
Thread.Sleep(100);
}
Logger.WriteTrace(
string.Format(
"Document {0} was found in check out path {1} after {2} seconds of waiting",
doc.Name, path, passed == 0 ? 0 : passed/100));
ProcessEvent(new List<IManDocument> {icmd.NewDocument as IManDocument},
EventOperation.AddNewDoc, "HookFileSaveCmd -> OfficeFileSaveCmd 1");
}
catch (Exception ex)
{
Logger.WriteError(ex);
}
ExecutePendingWorkUnits();
var newFile = new CheckoutFile();
newFile.Path = doc.CheckoutPath;
newFile.Number = doc.Number;
newFile.Version = doc.Version;
newFile.lastAction = CheckoutFile.LastAction.Default;
newFile.EditDateTicks = new FileInfo(doc.CheckoutPath).LastWriteTime.Ticks;
_CheckoutFiles.Add(newFile);
});
_waitForFileTask.TrySetApartmentState(ApartmentState.STA);
// as "ProcessEvent" will create the application entry point from the background thread
                                // we should create Application before in order to have the dispatcher associated with the main thread.
_waitForFileTask.Start();
}
else
{
ProcessEvent(new List<IManDocument> {icmd.NewDocument as IManDocument},EventOperation.AddNewDoc, "HookFileSaveCmd -> OfficeFileSaveCmd 2");
ExecutePendingWorkUnits();
}
});
if (!_inProgress)
{
unit.Execute();
}
else
{
_hooks.Enqueue(unit);
}
}
}
}
}
}
private CheckoutFile GetCheckoutFile(int number)
{
return _CheckoutFiles.FirstOrDefault(f => f.Number == number);
}
public void HookCheckoutCmd(object cmd, object dlg)
{
if (!IsRunDescSite && !IsRunFileSite) return;
var chin_cmd = cmd as CheckinCmd;
var docs = chin_cmd.Context.Item(GETNRTDOCUMENTS) as IEnumerable;
foreach (IManDocument doc in docs)
{
var cFile = GetCheckoutFile(doc.Number);
if (cFile == null)
{
cFile = new CheckoutFile();
cFile.Number = doc.Number;
_CheckoutFiles.Add(cFile);
}
cFile.Path = doc.CheckoutPath;
cFile.Version = doc.Version;
cFile.EditDateTicks = new FileInfo(doc.CheckoutPath).LastWriteTime.Ticks;
}
File.WriteAllText(tmpFilePart, jss.Serialize(_CheckoutFiles));
}
public void HookCheckinCmd_PostOnOK(object cmd)
{
Logger.WriteTrace(string.Format("HookCheckinCmd_PostOnOK"));
if (HooksDisabler.DisableHooks)
{
Logger.WriteTrace(string.Format("Hooks are disabled"));
return;
}
var chin_cmd = cmd as CheckinCmd;
if (chin_cmd != null)
{
var docs = chin_cmd.Context.Item(GETNRTDOCUMENTS) as IEnumerable;
if (!IsRunFileSite && !IsRunDescSite)
{
try
{
var _docs = new List<IManDocument>();
foreach (var doc in docs)
{
var file = GetCheckoutFile((doc as IManDocument).Number);
if (file == null) continue;
if (file.ToUpload)
{
_docs.Add(doc as IManDocument);
file.ToUpload = false;
}
else
{
((Workshare.IManage.Presenter.ModulePresenter)Application.iManInstance.Presenter).UpdateSyncInfo(doc);
}
if (file.ToRemove)
{
_CheckoutFiles.Remove(file);
File.WriteAllText(tmpFilePart, jss.Serialize(_CheckoutFiles));
}
else
{
if (file.lastAction != CheckoutFile.LastAction.Default)
{
file.EditDateTicks = new FileInfo(file.Path).LastWriteTime.Ticks;
file.lastAction = CheckoutFile.LastAction.Default;
}
}
}
if (_docs.Count > 0)
{
var unit = new HookWorkUnit(() =>
{
ProcessEvent(docs, EventOperation.UpdateCurrent, "HookCheckInCmd -> WordCheckinCmd");
ExecutePendingWorkUnits();
});
if (!_inProgress)
{
unit.Execute();
}
else
{
_hooks.Enqueue(unit);
}
}
}
catch { }
}
else
{
//when file was opened in iManage and changed
object calledFromIntegration = ProcessIManageEvents.GetCmdContextItemValueByName(chin_cmd.Context, "IManExt.CalledFromIntegration");
if (calledFromIntegration != null && Convert.ToBoolean(calledFromIntegration) == true && docs.OfType<object>().Count() == 1)
{
var doc = docs.OfType<IManDocument>().FirstOrDefault();
if (doc != null)
{
                            var file = GetCheckoutFile(doc.Number);
                            if (file != null && file.ToUpload)
                            {
                                ProcessEvent(new List<IManDocument>() { doc }, EventOperation.AddNewVersion, "HookCheckInCmd -> Update File Which Has Changed After Its Opening");
                            }
}
}
else
{
//-----------
var updateversion = (bool)chin_cmd.Context.Item("IManExt.CheckinAsNewDocOrNewVer");
if (!updateversion)
{
//user just update current version of the document
ProcessEvent(docs, ((updateversion) ? EventOperation.AddNewVersion : EventOperation.UpdateCurrent), "HookCheckInCmd -> Replace Original");
}
else
{
// try to get newly added document. if we can get this - then user select new document option
var importedDocs = GetImportedDocuments(chin_cmd.Context);
if (importedDocs.Count != 0)
{
// Hooks: 21543 - FileSite - "New docunemt"
ProcessEvent(importedDocs, EventOperation.AddNewDoc, "HookCheckInCmd -> New Document");
}
else
{
// new document wasn't created and user select new version opton
ProcessEvent(docs, ((updateversion) ? EventOperation.AddNewVersion : EventOperation.UpdateCurrent), "HookCheckInCmd -> New Version");
}
}
//-------------------
}
}
}
}
public void HookFileSaveCmd_PostOnOK(object dlg)
{
if (!IsRunFileSite && !IsRunDescSite)
{
var save_dlg = dlg as IIManFileSaveDlg;
var doc = save_dlg.NewDocument as IManDocument;
var file = GetCheckoutFile(doc.Number);
if (file != null && file.lastAction != CheckoutFile.LastAction.Default)
{
file.lastAction = CheckoutFile.LastAction.Default;
file.EditDateTicks = new FileInfo(file.Path).LastWriteTime.Ticks;
}
}
}
public void HookCheckinCmd(object cmd, object dlg)
{
Logger.WriteTrace(string.Format("HookCheckInCmd"));
if (HooksDisabler.DisableHooks)
{
Logger.WriteTrace(string.Format("Hooks are disabled"));
return;
}
var view = Application.iManInstance.View;
ContinueWatching = false;
var chin_cmd = cmd as CheckinCmd;
if (chin_cmd != null)
{
//chin_cmd.Context.OfType<object>().ToList().ForEach(p => Trace.TraceInformation((p.GetType().InvokeMember("Name", System.Reflection.BindingFlags.GetProperty, null, p, new object[0]) ?? "-").ToString()));
var docs = chin_cmd.Context.Item(GETNRTDOCUMENTS) as IEnumerable;
if (!IsRunFileSite && !IsRunDescSite)
{
var chin_dlg = dlg as ICheckinDlg;
try
{
foreach (IManDocument doc in docs)
{
var file = GetCheckoutFile(doc.Number);
Logger.Write("file is" + (file == null ? "null" : file.ToString()), Severity.Information);
if (file != null && doc != null)
{
if (chin_dlg != null && chin_dlg.Database != null && chin_dlg.CheckinOption == CheckinDisposition.nrReplaceOriginal)
{
Logger.Write("file replace original", Severity.Information);
file.lastAction = CheckoutFile.LastAction.ReplaceOriginal;
file.ToUpload = true;
}
else if (file != null && file.Version == doc.Version && file.lastAction == CheckoutFile.LastAction.Default && new FileInfo(file.Path).LastWriteTime.Ticks != file.EditDateTicks)
{
Logger.Write("ticks are not equal", Severity.Information);
file.ToUpload = true;
file.ToRemove = true;
}
}
}
}
catch (Exception ex)
{
Logger.WriteError("error happen... ", ex);
}
Logger.Write("checkin cmd finish", Severity.Information);
}
else
{
//when file was opened in iManage and changed
object calledFromIntegration = ProcessIManageEvents.GetCmdContextItemValueByName(chin_cmd.Context, "IManExt.CalledFromIntegration");
if (calledFromIntegration != null && Convert.ToBoolean(calledFromIntegration) == true && docs.OfType<object>().Count() == 1)
{
var doc = docs.OfType<IManDocument>().FirstOrDefault();
if (doc != null)
{
                        var file = GetCheckoutFile(doc.Number);
                        if (file != null)
                        {
                            file.ToUpload = new FileInfo(file.Path).LastWriteTime.Ticks != file.EditDateTicks;
                        }
}
}
}
}
}
public void HookNewVersionCmd(object cmd, object dlg)
{
Logger.WriteTrace(string.Format("HookCheckInCmd"));
if (HooksDisabler.DisableHooks)
{
Logger.WriteTrace(string.Format("Hooks are disabled"));
return;
}
var view = Application.iManInstance.View;
ContinueWatching = false;
var ver_cmd = cmd as NewVersionCmd;
var ver_dlg = dlg as INewVersionDlg;
if (ver_cmd != null && ver_dlg != null && ver_dlg.NRTDocument != null)
{
try
{
var curDoc = ver_dlg.NRTDocument as IManDocument;
if (!IsRunFileSite && !IsRunDescSite)
{
//chin_cmd.Context.OfType<object>().ToList().ForEach(p => Trace.TraceInformation((p.GetType().InvokeMember("Name", System.Reflection.BindingFlags.GetProperty, null, p, new object[0]) ?? "-").ToString()));
var lastDoc = curDoc.LatestVersion;
var file = GetCheckoutFile(curDoc.Number);
if (file != null)
{
file.Path = lastDoc.CheckoutPath;
file.Version = lastDoc.Version;
file.lastAction = CheckoutFile.LastAction.NewVersion;
file.ToUpload = true;
}
}
else
{
object NoCmdUI = ProcessIManageEvents.GetCmdContextItemValueByName(ver_cmd.Context, "IManExt.NewVersionCmd.NoCmdUI");
if (NoCmdUI == null)//'New Version Profile' dialog was called from "Copy"->"Copy As New Version" context's file menu
{
ProcessEvent(new List<IManDocument>() { curDoc }, EventOperation.AddNewVersion, "HookNewVersionCmd -> Copy As New Version");
}
}
}
catch (Exception)
{
}
}
}
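        // Polls the destination folder (up to MAXWAITTIME ms, in 500 ms steps) for a newly added document
        // and raises an AddNewDoc event for it once one is found.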
private void FolderChecker(IManFolder folder, int fileMaxID)
{
int waittime = 0;
Logger.Write("Start FolderChecker", Severity.Information);
while (ContinueWatching && waittime <= MAXWAITTIME)
{
Thread.Sleep(500);
if (ContinueWatching)
{
waittime += 500;
var docs = new List<IManDocument>();
Logger.Write("Scan folder contents", Severity.Information);
foreach (object content in folder.Contents)
{
if (content is IManDocument)
{
docs.Add((IManDocument) content);
}
}
                    if (docs.Count == 0) continue;
                    int cmax = docs.Max(x => x.Number);
                    Logger.Write("find max doc id = " + cmax, Severity.Information);
if (cmax >= fileMaxID)
{
Logger.Write("Get appropriate doc", Severity.Information);
IManDocument doc = docs.Where(x => x.Number == cmax).FirstOrDefault();
if (doc != null)
{
ContinueWatching = false;
// as "ProcessEvent" will create the application entry point from the background thread
                            // we should create Application before in order to have the dispatcher associated with the main thread.
ProcessEvent(new List<IManDocument>() {doc}, EventOperation.AddNewDoc, "FolderChecker -> OutlookAddDoc");
}
}
}
}
ExecutePendingWorkUnits();
}
public void HookNewProfileCmd_PostOnOK(object cmd)
{
Logger.WriteTrace(string.Format("HookNewProfileDlg"));
if (HooksDisabler.DisableHooks)
{
Logger.WriteTrace(string.Format("Hooks are disabled"));
return;
}
var profCmd = cmd as NewProfileCmd;
if (profCmd != null)
{
object callByFileSaveAs = ProcessIManageEvents.GetCmdContextItemValueByName(profCmd.Context, "CallByFileSaveAs");
if (IsRunFileSite && (callByFileSaveAs == null || Convert.ToBoolean(callByFileSaveAs) == false))
{
try
{
var fld = ProcessIManageEvents.GetCmdContextItemValueByName(profCmd.Context, "DestinationObject") as IManFolder;
List<IManDocument> docs = new List<IManDocument>();
foreach (object content in fld.Contents)
{
if (content is IManDocument)
{
docs.Add((IManDocument)content);
}
}
ContinueWatching = true;
                    //don't use a new Thread - in case of Outlook, when we drag&drop files the login dialog is displayed only once if we just closed it last time without logging in
//_newFileChecker = new Thread(new ThreadStart(() =>
//{
try
{
int maxid = docs.Count == 0 ? -1 : docs.Max(x => x.Number);
Logger.WriteWarning(maxid.ToString());
var unit = new HookWorkUnit(() => FolderChecker(fld, maxid));
if (!_inProgress)
{
unit.Execute();
}
else
{
_hooks.Enqueue(unit);
}
}
catch (Exception ex)
{
Logger.WriteError(ex);
}
//}));
//_newFileChecker.TrySetApartmentState(ApartmentState.STA);
//_newFileChecker.Start();
//System.Windows.Forms.MessageBox.Show(docs.Max(x=>x.Number).ToString());
}
catch (Exception ex)
{
}
}
}
}
public static object GetCmdContextItemValueByName(ContextItems context, string itemName)
{
object ret = null;
if (context != null)
{
var cmdContextEnum = context.GetEnumerator();
while (cmdContextEnum.MoveNext())
{
var currItem = cmdContextEnum.Current as IContextItem;
if (currItem.Name.Equals(itemName, StringComparison.OrdinalIgnoreCase))
return currItem.Value;
}
}
else
{
Logger.WriteTrace(string.Format("GetNewProfileCmdContextItemValueByName: Trying get value from empty context. ItemName: {0}", itemName));
}
return ret;
}
public class CheckoutFile
{
public enum LastAction { Default, ReplaceOriginal, NewVersion }
public string Path;
public long EditDateTicks;
public int Number;
public int Version;
public LastAction lastAction;
public bool ToUpload;
public bool ToRemove;
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/DmsItems/TreePathFinder.cs
using Workshare.Integration.Interfaces;
namespace Workshare.Integration.Processor.DmsItems
{
public class TreePathFinder
{
public TreePathFinder()
{
}
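        // Builds a chain of SingleChildDMSFolder nodes from the given folder up to the DMS root by following
        // ParentFolder links; Reverse then turns that singly linked chain around so it runs root-first.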
public void FindPath(BaseDMSFolder folder, SingleChildDMSFolder currentFolder)
{
var parent = (BaseDMSFolder)(((IDMSItem)folder).ParentFolder);
if (parent == null )
{
return;
}
currentFolder.Next = new SingleChildDMSFolder(parent);
FindPath(parent, currentFolder.Next);
}
public SingleChildDMSFolder Reverse(SingleChildDMSFolder item)
{
if (item == null || item.Next == null)
{
return item;
}
var firstItem = Reverse(item.Next);
var currentItem = item.Next;
item.Next = null;
currentItem.Next = item;
return firstItem;
}
}
}
<file_sep>/WSComponents/src/WSCloudService/JsonParser.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace WorksharePlatform
{
public static class JsonParser
{
#region Private Methods
private static FolderDetails ExtractFolder(string folder)
{
var found = new FolderDetails();
found.Ancestry = ExtractIntValue(folder, "ancestry");
found.Name = ExtractStringValue(folder, "name");
found.Id = ExtractIntValue(folder, "id");
found.Url = ExtractStringValue(folder, "url");
found.Description = ExtractStringValue(folder, "description");
found.ParentId = ExtractIntValue(folder, "parent_id");
found.FileCounter = ExtractIntValue(folder, "file_counter");
found.MemberCount = ExtractIntValue(folder, "members_counter");
found.NotifyOwner = bool.Parse(ExtractStringValue(folder, "notify_owner"));
var isdeleted = false;
if (bool.TryParse(ExtractStringValue(folder, "is_deleted"), out isdeleted))
{
found.IsDeleted = isdeleted;
}
string DateExpire = ExtractStringValue(folder, "expires_at");
if (DateExpire == "null")
{
found.ExpiresAt = null;
}
else
{
found.ExpiresAt = DateTime.Parse(DateExpire);
}
found.Permissions = ExtractPermissions(folder, found.Id);
found.Owner = ExtractUserData(ExtractNode(folder, "owner"));
found.Creator = ExtractUserData(ExtractNode(folder, "creator"));
found.ChildFoldersCounter = ExtractIntValue(folder, "folder_counter");
return found;
}
private static UserDetails ExtractUserData(string node)
{
UserDetails res = new UserDetails();
res.Email = ExtractStringValue(node, "email");
res.UserId = ExtractStringValue(node, "uuid");
res.UserName = ExtractStringValue(node, "name");
return res;
}
public static UserDetails ParseUserInfo(string userinfo)
{
UserDetails res = new UserDetails();
res.UserName = ExtractStringValue(userinfo, "name");
return res;
}
private static Permissions ExtractPermissions(string permissions, int folder_id)
{
var res = new Permissions();
res.FolderId = folder_id;
res.PermissionSet = new Dictionary<string, bool>();
res.PermissionSet.Add("can_download_original", ExtractStringValue(permissions, "can_download_original") == "true");
res.PermissionSet.Add("can_upload_changes", ExtractStringValue(permissions, "can_upload_changes") == "true");
res.PermissionSet.Add("can_view_tracking", ExtractStringValue(permissions, "can_view_tracking") == "true");
res.PermissionSet.Add("can_manipulate", ExtractStringValue(permissions, "can_manipulate") == "true");
res.PermissionSet.Add("can_comment", ExtractStringValue(permissions, "can_comment") == "true");
res.PermissionSet.Add("can_invite_with_link", ExtractStringValue(permissions, "can_invite_with_link") == "true");
res.PermissionSet.Add("can_access_anonymously", ExtractStringValue(permissions, "can_access_anonymously") == "true");
res.PermissionSet.Add("can_download_pdf", ExtractStringValue(permissions, "can_download_pdf") == "true");
res.PermissionSet.Add("can_delete", ExtractStringValue(permissions, "can_delete") == "true");
res.PermissionSet.Add("inherits_from_parent", ExtractStringValue(permissions, "inherits_from_parent") == "true");
return res;
}
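        // Scans the raw JSON text for the "name" key and returns the value that follows, tracking quotes and
        // stopping at an unquoted ',' or '}'. Assumes the key is present in the given node.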
private static string ExtractStringValue(string node, string name)
{
var startPos = node.IndexOf(string.Format("\"{0}\":", name)) + name.Length + 3;
bool isInQuotes = false;
var found = string.Empty;
for (int i = startPos; i < node.Length; i++)
{
if (node[i] == '\"'
&& isInQuotes == false)
{
isInQuotes = true;
continue;
}
else if (node[i] == '\"'
&& isInQuotes && node[i - 1] != '\\')
{
isInQuotes = false;
break;
}
if (node[i] == '}' &&
isInQuotes == false)
{
break;
}
if (node[i] == ',' &&
isInQuotes == false)
{
break;
}
found += node[i];
}
return found.Replace("\\\"", "\"");
}
private static string ExtractAuthorizationValue(string node, string name)
{
var startPos = node.IndexOf(string.Format("\\\"{0}\\\":", name)) + name.Length + 5;
bool isInQuotes = false;
var found = string.Empty;
for (int i = startPos; i < node.Length; i++)
{
if (node[i] == '\\'
&& isInQuotes == false)
{
isInQuotes = false;
continue;
}
else if (node[i] == '\\'
&& isInQuotes)
{
isInQuotes = false;
break;
}
if (node[i] == '\"'
&& isInQuotes == false)
{
isInQuotes = true;
continue;
}
else if (node[i] == '\"'
&& isInQuotes)
{
isInQuotes = false;
break;
}
if (node[i] == '}' &&
isInQuotes == false)
{
break;
}
if (node[i] == ',' &&
isInQuotes == false)
{
break;
}
found += node[i];
}
return found;
}
private static int ExtractIntValue(string node, string name)
{
var temp = ExtractStringValue(node, name);
int id;
if (int.TryParse(temp, out id))
{
return id;
}
return 0;
}
private static long ExtractLongValue(string node, string name)
{
var temp = ExtractStringValue(node, name);
long id;
if (long.TryParse(temp, out id))
{
return id;
}
return 0;
}
private static string ExtractNode(string node, string name)
{
var startPos = node.IndexOf(string.Format("\"{0}\":{{", name)) + name.Length + 5;
var endPos = node.IndexOf("},", startPos);
if (endPos < 0)
{
endPos = node.Length;
}
var subNode = node.Substring(startPos, endPos - startPos);
return subNode;
}
private static string ExtractNodeWithSubNodes(string node, string name)
{
var startPos = node.IndexOf(string.Format("\"{0}\":{{", name)) + name.Length + 5;
var endPos = node.IndexOf("}},", startPos);
if (endPos < 0)
{
endPos = node.Length;
return node.Substring(startPos, endPos - startPos);
}
endPos++;
var subNode = node.Substring(startPos, endPos - startPos);
return subNode + ",";
}
private static List<string> ExtractArrayNode(string node, string name)
{
var start = node.IndexOf(string.Format("\"{0}\":[{{", name));
if (start > -1)
{
var startPos = start + name.Length + 5;
var endPos = node.LastIndexOf("}]}");
if (endPos < 0)
{
endPos = node.Length;
}
var subNode = node.Substring(startPos, endPos - startPos);
var elements = subNode.Split(new[] { "},{" }, StringSplitOptions.RemoveEmptyEntries);
return elements.ToList();
}
else
{
return new List<string>();
}
}
private static FileComment ExtractFileComment(string result)
{
var found = new FileComment();
found.Id = ExtractIntValue(result, "id");
found.CreatedAt = ExtractDate(result, "created_at");
found.UserName = ExtractStringValue(result, "name");
found.FileId = ExtractIntValue(result, "file_id");
found.PageNumber = ExtractIntValue(result, "page_number");
found.FileVersion = ExtractIntValue(result, "file_version");
found.Body = ExtractStringValue(result, "body");
found.ReplyToId = ExtractIntValue(result, "reply_to_id");
found.Positional = ParsePositional(result);
return found;
}
internal static Positional ParsePositional(string data)
{
Positional positional;
positional.x = ExtractIntValue(data, "x_position");
positional.y = ExtractIntValue(data, "y_position");
positional.width = ExtractIntValue(data, "width");
positional.height = ExtractIntValue(data, "height");
return positional;
}
private static FileVersionDetails ExtractVersion(string result, bool isChunked)
{
var found = new FileVersionDetails();
found.Name = ExtractStringValue(result, "file_name");
found.Size = ExtractLongValue(result, "file_size");
found.Id = ExtractIntValue(result, "id");
found.FileId = ExtractIntValue(result, "file_id");
found.Version = ExtractIntValue(result, "version");
found.ImageUrl = ExtractStringValue(result, "page1_full");
found.ThumbUrl = ExtractStringValue(result, "page1_thumb");
found.Creator = ExtractUserData(ExtractNode(result, "creator"));
DateTime created_at;
DateTime.TryParse(ExtractStringValue(result, "created_at"), out created_at);
found.CreateDate = created_at;
if (isChunked == false)
{
var node = ExtractNode(result, "upload");
found.AclType = ExtractStringValue(node, "acl");
found.Action = ExtractStringValue(node, "action");
found.AwsAccessKey = ExtractStringValue(node, "AWSAccessKeyId");
found.Key = ExtractStringValue(node, "key");
found.Policy = ExtractStringValue(node, "policy");
found.Signature = ExtractStringValue(node, "signature");
found.SuccessRedirect = ExtractStringValue(node, "success_action_redirect");
found.ContentType = ExtractStringValue(node, "Content-Type");
found.Cache = ExtractStringValue(node, "Cache-Control");
found.Encryption = ExtractStringValue(node, "x-amz-server-side-encryption");
}
else
{
var node = ExtractNode(result, "complete_file");
found.CompleteId = ExtractIntValue(node, "id");
}
found.ApiVersion = ApiVersion.One;
return found;
}
private static FileVersionDetails ExtractVersion3(string result, bool isChunked)
{
var found = new FileVersionDetails();
found.Name = ExtractStringValue(result, "file_name");
found.Id = ExtractIntValue(result, "id");
found.FileId = ExtractIntValue(result, "file_id");
found.Version = ExtractIntValue(result, "version");
found.ImageUrl = ExtractStringValue(result, "page1_full");
found.ThumbUrl = ExtractStringValue(result, "page1_thumb");
found.Creator = ExtractUserData(ExtractNode(result, "creator"));
DateTime created_at;
DateTime.TryParse(ExtractStringValue(result, "created_at"), out created_at);
found.CreateDate = created_at;
if (isChunked == false)
{
var node = ExtractNode(result, "complete_file");
var uploadNode = ExtractNode(node, "upload");
found.Action = ExtractStringValue(uploadNode, "action");
found.HttpVerb = ExtractStringValue(uploadNode, "http_verb");
found.SuccessRedirect = ExtractStringValue(uploadNode, "success_action_redirect");
var authNode = ExtractNode(uploadNode, "auth_headers");
found.ContentLength = ExtractAuthorizationValue(authNode, "content-length");
found.Authorization = ExtractAuthorizationValue(authNode, "authorization");
found.ContentType = ExtractAuthorizationValue(authNode, "content-type");
found.Encryption = ExtractAuthorizationValue(authNode, "x-amz-server-side-encryption");
found.AuthDate = ExtractAuthorizationValue(authNode, "x-amz-date");
}
else
{
var node = ExtractNode(result, "complete_file");
found.CompleteId = ExtractIntValue(node, "id");
}
found.ApiVersion = ApiVersion.Three;
return found;
}
#endregion
public static UserDetails ParseCurrentUserData(string user)
{
var res = new UserDetails();
res.UserId = ExtractStringValue(user, "uuid");
res.Email = ExtractStringValue(user, "email");
res.UserName = string.Format("{0} {1}", ExtractStringValue(user, "first_name"), ExtractStringValue(user, "last_name"));
res.RootFolderId = ExtractIntValue(user, "root_folder_id");
var nodeAccount = ExtractNode(user, "account");
var domain = ExtractStringValue(nodeAccount, "name");
res.Domain = string.Equals(domain, "null") ? string.Empty : domain;
res.AccountId = ExtractStringValue(nodeAccount, "id");
res.AccountUuId = ExtractStringValue(nodeAccount, "uuid");
return res;
}
public static List<UserDetails> ParseAccountUsersData(string users)
{
var ret = new List<UserDetails>();
var usersNode = ExtractArrayNode(users, "users");
foreach (var user in usersNode)
{
ret.Add(new UserDetails()
{
UserId = ExtractStringValue(user, "uuid"),
Email = ExtractStringValue(user, "email"),
UserName = string.Format("{0} {1}", ExtractStringValue(user, "first_name"), ExtractStringValue(user, "last_name"))
});
}
return ret;
}
public static WSServer ParseWSServerData(string data)
{
WSServer res = new WSServer();
res.ServerUrl = ExtractStringValue(data, "server");
res.QuesryString = ExtractStringValue(data, "query_string");
var nd = ExtractNode(data, "query_params");
res.ctype = ExtractStringValue(nd, "ctype");
res.token = ExtractStringValue(nd, "token");
return res;
}
public static FileDetails ParseFileData(string file, int folderId)
{
var newFile = new FileDetails();
newFile.Id = ExtractIntValue(file, "id");
newFile.FriendlyName = ExtractStringValue(file, "name");
newFile.Name = ExtractStringValue(file, "file_name");
newFile.RemoteUrl = ExtractStringValue(file, "url");
//newFile.FolderId = folderId;
newFile.UpdateDate = ExtractDate(file, "updated_at");
newFile.CreatedAt = ExtractDate(file, "created_at");
newFile.FolderId = ExtractIntValue(file, "folder_id");
newFile.VersionNumber = ExtractIntValue(file, "version");
newFile.DownloadPassword = ExtractStringValue(file, "password");
newFile.Creator = ExtractUserData(ExtractNode(file, "creator"));
newFile.Updater = ExtractUserData(ExtractNode(file, "updater"));
string deleted = ExtractStringValue(file, "is_deactivated");
newFile.IsDeleted = (string.IsNullOrEmpty(deleted) || deleted == "false") ? false : true;
return newFile;
}
private static DateTime ExtractDate(string result, string name)
{
var str = ExtractStringValue(result, name);
DateTime d;
DateTime.TryParse(str, out d);
return d;
}
public static IEnumerable<FileDetails> ParseFilesResult(string result, int folderId)
{
var files = new List<FileDetails>();
var fileNodes = ExtractArrayNode(result, "files");
var pagintation = ExtractNode(result, "pagination");
foreach (var file in fileNodes)
{
files.Add(ParseFileData(file, folderId));
}
return files;
}
public static IEnumerable<UserDetails> ParseMembersResult(string result)
{
var members = new List<UserDetails>();
var ancestors = ExtractArrayNode(result, "members");
foreach (var ancestor in ancestors)
{
members.Add(ExtractUserData(ancestor));
}
return members;
}
public static IEnumerable<FolderDetails> ParseFoldersResult(string result)
{
var found = new List<FolderDetails>();
if (result.Contains("["))
{
var indexof = result.IndexOf("[");
if (indexof + 2 < result.Length)
{
var array = result.Substring(indexof + 2);
if (array.Length > 3)
{
array = array.Remove(array.Length - 3);
var folders = array.Split(new[] { "},{" }, StringSplitOptions.RemoveEmptyEntries);
foreach (var folder in folders)
{
found.Add(ExtractFolder(folder));
}
}
}
}
return found;
}
public static FolderDetails ParseFolderResult(string result)
{
return ExtractFolder(result);
}
public static FolderDetails ParseFolderResult(byte[] result)
{
var returnResponse = Encoding.UTF8.GetString(result);
return ExtractFolder(returnResponse);
}
public static FileVersionDetails ParseVersionResult(byte[] results, bool isChunked)
{
var returnResponse = Encoding.UTF8.GetString(results);
var node = ExtractNode(returnResponse, "upload");
var result = ExtractVersion(returnResponse, isChunked);
if (ExtractStringValue(node, "action") != @"https://skydox-manuel.s3.amazonaws.com/")
{
result.ApiVersion = ApiVersion.Three;
}
return result;
}
public static FileVersionDetails ParseVersionResult3(byte[] results, bool isChunked)
{
var returnResponse = Encoding.UTF8.GetString(results);
var node = ExtractNode(returnResponse, "upload");
return ExtractVersion3(returnResponse, isChunked);
}
public static List<FileVersionDetails> ParseVersionsResult(string result)
{
var versions = new List<FileVersionDetails>();
var versionNodes = ExtractArrayNode(result, "file_versions");
foreach (var node in versionNodes)
{
versions.Add(ExtractVersion(node, true));
}
return versions;
}
public static List<FileComment> ParseFileCommentsResult(string result)
{
var found = new List<FileComment>();
if (result.Contains("["))
{
var indexof = result.IndexOf("[");
if (indexof + 2 < result.Length)
{
var array = result.Substring(indexof + 2);
if (array.Length > 3)
{
array = array.Remove(array.Length - 3);
var comments = array.Split(new[] { "},{" }, StringSplitOptions.RemoveEmptyEntries);
foreach (var comment in comments)
{
found.Add(ExtractFileComment(comment));
}
}
}
}
return found;
}
public static string ParseMultiPartId(byte[] results)
{
var returnResponse = Encoding.UTF8.GetString(results);
var node = ExtractNode(returnResponse, "multipart_upload");
return ExtractStringValue(node, "id");
}
public static MultiPartDetails ParseMultiPart(string results)
{
var multiPart = new MultiPartDetails();
try
{
var node = ExtractNode(results, "multipart_upload");
var subNode = ExtractNode(node, "parameters");
multiPart.Action = ExtractStringValue(subNode, "action");
multiPart.ContentType = ExtractStringValue(subNode, "Content-Type");
multiPart.Authorisation = ExtractStringValue(subNode, "Authorization");
multiPart.AmsDate = ExtractStringValue(subNode, "x-amz-date");
}
catch (Exception)
{
}
return multiPart;
}
internal static DialogSettings ParseDialogSettings(string data)
{
var settings = new DialogSettings();
settings.dialog_create_folder_height = ExtractIntValue(data, "dialog_create_folder_height");
settings.dialog_create_folder_width = ExtractIntValue(data, "dialog_create_folder_width");
settings.dialog_login_height = ExtractIntValue(data, "dialog_login_height");
settings.dialog_login_width = ExtractIntValue(data, "dialog_login_width");
settings.dialog_select_folder_height = ExtractIntValue(data, "dialog_select_folder_height");
settings.dialog_select_folder_width = ExtractIntValue(data, "dialog_select_folder_width");
return settings;
}
internal static List<Activity> ParseActivities(string activities, int folderid, int fileid, int fileversion = 0)
{
var activitesNodes = ExtractArrayNode(activities, "activities");
var activityList = new List<Activity>();
if (activitesNodes == null)
return null;
foreach (var activity in activitesNodes)
{
if ((fileid == ExtractIntValue(activity, "file_id") && fileversion == ExtractIntValue(activity, "file_version")) && folderid == 0)
activityList.Add(ParseActivity(activity));
else if((fileid == ExtractIntValue(activity, "file_id") && fileversion == 0) && folderid == 0) // get all versions
activityList.Add(ParseActivity(activity));
else if(folderid != 0)
activityList.Add(ParseActivity(activity));
}
return activityList;
}
internal static Activity ParseActivity(string data)
{
var activity = new Activity();
activity.Noun = ExtractStringValue(data, "noun");
activity.Verb = ExtractStringValue(data, "verb");
activity.TimeCreated = ExtractStringValue(data, "created_at");
activity.CurrentVersion = ExtractIntValue(data, "file_version");
activity.ActyvityId = ExtractStringValue(data, "id");
var member = ExtractNode(data, "member");
if (!string.IsNullOrEmpty(member))
activity.MemberId = ExtractIntValue(member, "id");
var nodeCreator = ExtractNode(data, "creator");
if(!string.IsNullOrEmpty( nodeCreator))
activity.UserName = ExtractStringValue(nodeCreator, "name");
var nodeChanges = ExtractNodeWithSubNodes(data, "changes");
if (!string.IsNullOrEmpty(nodeChanges))
{
var nodeName = ExtractNode(nodeChanges, "name");
if (!string.IsNullOrEmpty(nodeName) && nodeChanges.Contains("name"))
{
activity.ChangesNameOld = ExtractStringValue(nodeName, "old");
activity.ChangesNameNew = ExtractStringValue(nodeName, "new");
}
var nodeVersion = ExtractNode(nodeChanges, "version");
if (!string.IsNullOrEmpty(nodeVersion) && nodeChanges.Contains("version"))
{
activity.ChangesVersionOld = ExtractIntValue(nodeVersion, "old");
activity.ChangesVersionNew = ExtractIntValue(nodeVersion, "new");
}
}
var nodeFolder = ExtractNode(data, "folder");
if (!string.IsNullOrEmpty(nodeFolder))
activity.FolderName = ExtractStringValue(nodeFolder, "name");
return activity;
}
internal static Dictionary<string, int> Parsepagination(string data)
{
var pagination = new Dictionary<string, int>();
pagination.Add("current_page", ExtractIntValue(data, "current_page"));
pagination.Add("total_entries", ExtractIntValue(data, "total_entries"));
pagination.Add("total_pages", ExtractIntValue(data, "total_pages"));
return pagination;
}
public class Pagination
{
public int CurrentPage { get; set; }
public int TotalEntries { get; set; }
public int TotalPages { get; set; }
}
internal static Pagination ParsePagination(string data)
{
var pagination = new Pagination();
pagination.CurrentPage = ExtractIntValue(data, "current_page");
pagination.TotalEntries = ExtractIntValue(data, "total_entries");
pagination.TotalPages = ExtractIntValue(data, "total_pages");
return pagination;
}
internal static List<Member> ParseMembers(string data)
{
var members = new List<Member>();
var membersNodes = ExtractArrayNode(data, "members");
foreach (var member in membersNodes)
{
members.Add(ParseMember(member));
}
return members;
}
internal static Member ParseMember(string data)
{
var memeber = new Member();
memeber.MemberName = ExtractStringValue(data, "name");
memeber.MemberId = ExtractIntValue(data, "id");
memeber.Email = ExtractStringValue(data, "email");
memeber.IsDeleted = ExtractStringValue(data, "is_deleted") == "true";
return memeber;
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/SyncDialog/SyncDialogWindow.xaml.cs
using System.Collections.Generic;
using System.Linq;
using System.Windows;
using System.Windows.Input;
using Workshare.Components.Views.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
namespace Workshare.Components.Views.SyncDialog
{
/// <summary>
/// Returns selected by user actions for given activities
/// </summary>
public partial class SyncDialogWindow2
{
ConflictOptions ConflictOption { get; set; }
ConflictVersionOptions VersionsOption { get; set; }
public bool IsClose { get; private set; }
public SyncDialogWindow2()
{
InitializeComponent();
}
private SyncDialogViewModel Vm
{
get
{
return (SyncDialogViewModel) DataContext;
}
}
private void UpdateViewModel()
{
Vm.SelectedVersionsOption = VersionsOption;
Vm.SelectedAction = ConflictOption;
}
public IEnumerable<ImportDocumentAction> GetSelectedActions()
{
return Vm.GetSelectedActions();
}
public SyncDialogWindow2(IEnumerable<ImportFileActivity> importActivities)
: this()
{
var firstActivity = importActivities.First();
var newCount = importActivities.Count(a => a.Changes.Any(c => c.Type == ChangeType.RemoteAdded));
var allCount = importActivities.Count();
var dmsName = firstActivity.DmsFile == null ? string.Empty : firstActivity.DmsFile.Name;
var wsName = firstActivity.Filename;
var onlyNewFiles = allCount == newCount;
var versionsCount =
firstActivity.Changes.Where(a => a.NewerVersions != null && a.NewerVersions.Any())
.Select(a => a.NewerVersions.Count)
.FirstOrDefault();
this.DataContext = new SyncDialogViewModel(importActivities, wsName, dmsName, versionsCount, allCount, newCount, onlyNewFiles);
cb_latestVersion.IsChecked = true;
cb_newDocument.IsChecked = true;
if (onlyNewFiles)
{
grSaveType.IsEnabled = false;
}
if (allCount == 1 && versionsCount == 1 && onlyNewFiles)
{
cb_allVersion.IsEnabled = false;
}
}
private void DockPanel_MouseDown_1(object sender, MouseButtonEventArgs e)
{
OnTitleMouseDown(sender, e);
}
private void cb_LatestVersion_Checked(object sender, RoutedEventArgs e)
{
VersionsOption = ConflictVersionOptions.Latest;
UpdateViewModel();
}
private void cb_AllVersions_Checked(object sender, RoutedEventArgs e)
{
VersionsOption = ConflictVersionOptions.All;
UpdateViewModel();
}
private void cb_newDocument_Checked_1(object sender, RoutedEventArgs e)
{
ConflictOption = ConflictOptions.KeepBoth;
UpdateViewModel();
}
private void cb_newVersion_Checked_1(object sender, RoutedEventArgs e)
{
ConflictOption = ConflictOptions.Replace;
UpdateViewModel();
}
private void OwnWindow_Closed(object sender, System.EventArgs e)
{
IsClose = true;
}
}
}
<file_sep>/WSComponents/src/WSIntegration/SettingsStorage/SettingsOptions.cs
namespace Workshare.Integration.SettingsStorage
{
public static class SettingsOptions
{
public static readonly string PagingSizeInTrueSync = "ActivitiesPerPage";
}
}
<file_sep>/WSComponents/src/Mixpanel.NET/Services/EventTrackerService.cs
using System;
using System.Collections.Generic;
using System.Text;
using Mixpanel.NET.Events;
using Workshare.Components.WSLogger;
namespace Mixpanel.NET.Services
{
// REDO IT IMM
// internal static class Tracker
public static class EventTrackerService
{
static bool _enableReporting = true;
static string _proxyUrl = "http://events.workshare.com";
static string _mixpanelProjectToken = "d3ee5e47158b4405beb1dcca3ae8e664";
public static bool IsReportingEnabled()
{
return _enableReporting;
}
static TrackerOptions GetOptions()
{
TrackerOptions options = new TrackerOptions();
options.SetEventTime = true;
if (!String.IsNullOrEmpty(_proxyUrl))
{
options.ProxyUrl = _proxyUrl;
}
return options;
}
        static MixpanelTracker GetTracker(string token)
        {
            return new MixpanelTracker(token, null, GetOptions());
        }
static string GetEventName(string eventName)
{
return "Integration_DMS_IMAN_" + eventName;
}
//This method is used to add events
public static bool Track(string eventName, IDictionary<string, object> properties)
{
try
{
if (!IsReportingEnabled())
{
return true;
}
eventName = GetEventName(eventName);
MixpanelTracker tracker = GetTracker(_mixpanelProjectToken);
bool result = tracker.Track(eventName, properties);
LogTrace(eventName, properties);
return result;
}
catch (Exception ex)
{
Logger.WriteError(ex);
return false;
}
}
static void LogTrace(string eventName, IDictionary<string, object> properties)
{
StringBuilder sb = new StringBuilder("Mixpanel ");
sb.Append("Event: ").Append(eventName).Append(" Properties: ");
if (properties.ContainsKey("Name"))
{
sb.Append("Name=[").Append(properties["Name"]).Append("] ");
}
foreach (var kvp in properties)
{
if (String.Compare(kvp.Key, "Name") != 0)
{
sb.Append(kvp.Key).Append("=[").Append(kvp.Value).Append("] ");
}
}
Logger.WriteTrace(sb.ToString());
}
}
}<file_sep>/WSComponents/src/WSComponents/Services/ErrorReporterService.cs
using System;
using System.Collections.Generic;
using System.Linq;
using Workshare.Components.Interfaces;
using Workshare.Integration.Exceptions;
namespace Workshare.Components.Services
{
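    /// <summary>
    /// Accumulates ItemExceptions keyed by a scope Guid; ReportIfNeeded shows the distinct errors collected
    /// for that scope through the module view and then clears them.
    /// </summary>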
public class ErrorReporterService
{
private readonly List<KeyValuePair<Guid, ItemException>> _errors;
readonly IModuleView _view;
public ErrorReporterService(IModuleView view)
{
_view = view;
_errors = new List<KeyValuePair<Guid, ItemException>>();
}
public void Add(Guid scopeId, ItemException error)
{
var item = new KeyValuePair<Guid, ItemException>(scopeId, error);
_errors.Add(item);
}
public void AddRange(Guid scopeId, List<ItemException> errors)
{
foreach (var itemException in errors)
{
var item = new KeyValuePair<Guid, ItemException>(scopeId, itemException);
_errors.Add(item);
}
}
public void Add(Guid scopeId, Exception error)
{
var item = new KeyValuePair<Guid, ItemException>(scopeId, new ItemException {Error = error});
_errors.Add(item);
}
public void AddRange(Guid scopeId, List<Exception> errors)
{
foreach (var exception in errors)
{
var item = new KeyValuePair<Guid, ItemException>(scopeId, new ItemException {Error = exception});
_errors.Add(item);
}
}
public bool IsEmpty
{
get { return !_errors.Any(); }
}
public void ReportIfNeeded(Guid scopeId)
{
if (IsEmpty) { return; }
var exceptions = _errors.Where(r => r.Key == scopeId).Select(k => k.Value).Distinct();
_view.ShowErrors(exceptions);
ClearService(scopeId);
}
private void ClearService(Guid scopeId)
{
_errors.RemoveAll(e => e.Key == scopeId);
}
}
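// Example usage (illustrative sketch; 'view' is an IModuleView supplied by the caller and the scope id is arbitrary):
//
// var reporter = new ErrorReporterService(view);
// var scopeId = Guid.NewGuid();
// reporter.Add(scopeId, new Exception("Upload failed"));
// reporter.ReportIfNeeded(scopeId); // shows the collected errors via the view, then clears this scope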
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Strategies/ActionStrategy/UploadStrategy.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using Workshare.Components.Helpers;
using Workshare.Components.WSLogger;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.Properties;
using Workshare.Integration.WsProxies;
using WorksharePlatform;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.Integration.Processor.Strategies.ActionStrategy
{
public class UploadStrategy : ActionStrategy
{
readonly WsProxy _wsProxy;
readonly IAuthProvider _auth;
readonly DmsWorkerBase _dmsWorker;
readonly SyncInfoService _syncInfoService;
public UploadStrategy(WsProxy wsProxy, IAuthProvider auth, DmsWorkerBase dmsWorker, SyncInfoService syncInfoService)
: base(syncInfoService)
{
this._wsProxy = wsProxy;
this._auth = auth;
this._dmsWorker = dmsWorker;
this._syncInfoService = syncInfoService;
}
void EnsureParentFolderExistsOnWs(WsUser user, FileMap fileMap)
{
// Walk up the local folder chain until an ancestor that already exists on Workshare is found,
// then create the missing folders top-down so the file has a remote parent to upload into.
var firstAncestorOnWs = fileMap.Parent as FolderMap;
var ancestors = new List<FolderMap>();
while (firstAncestorOnWs != null && !firstAncestorOnWs.HasRemote())
{
ancestors.Insert(0, firstAncestorOnWs);
firstAncestorOnWs = firstAncestorOnWs.Parent as FolderMap;
}
if (firstAncestorOnWs != null)
{
var currentParent = firstAncestorOnWs;
foreach (var toCreate in ancestors)
{
var createdFolder = _wsProxy.CreateFolder(user, currentParent.WsFolder.Id, toCreate.LocalFolder.DisplayName);
toCreate.WsFolder = createdFolder;
_syncInfoService.UpdateSyncInfo(toCreate.LocalFolder, new ActionContext(Services.ActionType.Upload)
{
WsFolder = createdFolder
});
currentParent = toCreate;
}
}
}
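// Uploads the selected local versions of a file: when the file is not yet linked to Workshare, the
// first selected version creates a new remote file (creating any missing parent folders first);
// otherwise each selected version is uploaded as a new version of the existing remote file.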
public override ProcessResult Process(FileMap fileMap, ItemMapActivityAction action)
{
try
{
var uploadAction = (UploadDocumentAction)action;
var user = _auth.GetCurrentWSUser2();
var allLocalVersionMaps = fileMap.Maps.OfType<VersionMap>().Where(a => a.HasLocal());
foreach (var versionMap in allLocalVersionMaps)
{
if (uploadAction.VersionIds.Contains(versionMap.LocalVersion.Id))
{
var filePath = _dmsWorker.GetFilePath(versionMap.LocalVersion);
var name = _dmsWorker.GetName(versionMap.LocalVersion);
var friendlyName = _dmsWorker.GetFriendlyName(versionMap.LocalVersion);
var friendlyNameOnWorkshare = !string.IsNullOrEmpty(friendlyName)
? friendlyName
: (!string.IsNullOrEmpty(name) ? FileUtils.GetFileNameWithoutExtension(name) : "unknown");
var nameOnWorkshare = !string.IsNullOrEmpty(name) ? name : "unknown";
if (!fileMap.HasRemote())
{
var parentFolderMap = fileMap.GetLocalParent();
_syncInfoService.CanUpdateSyncInfoOrThrow(fileMap.LocalFile, Operations.Send);
_syncInfoService.CanUpdateSyncInfoOrThrow(versionMap, Operations.Send);
_dmsWorker.CanUpdateVersionOrThrow(versionMap.LocalVersion, Operations.Send);
_syncInfoService.CanUpdateSendDataOrThrow(parentFolderMap);
EnsureParentFolderExistsOnWs(user, fileMap);
if (parentFolderMap.WsFolder != null)
{
var parentFolderId = parentFolderMap.WsFolder.Id;
var uploadedFile = _wsProxy.UploadFile(user,
WsUtils.RemoveInvalidSymbolsFromFileName(friendlyNameOnWorkshare),
WsUtils.RemoveInvalidSymbolsFromFileName(nameOnWorkshare), filePath, parentFolderId);
_dmsWorker.AddHistory(versionMap.LocalVersion, string.Empty, string.Format(RES.STR_HISTORY_ITEMEXPORT, DateTime.Now.ToString(Activity.TimeFormat)), Operations.Send);
_syncInfoService.UpdateSyncInfo(fileMap.LocalFile, new ActionContext(Services.ActionType.Upload)
{
WsFile = uploadedFile
});
fileMap.WsFile = uploadedFile;
versionMap.WsVersion = uploadedFile.Versions.FirstOrDefault();
_syncInfoService.UpdateSyncInfo(versionMap.LocalVersion, new ActionContext(Services.ActionType.Upload)
{
WsVersion = versionMap.WsVersion
});
if (parentFolderMap.HasLocal())
{
_syncInfoService.UpdateSendDataInfo(parentFolderMap.LocalFolder,
new List<int> { uploadedFile.Id });
}
}
else if (parentFolderMap.ProcessState == ProcessState.Error)
{
throw parentFolderMap.Error;
}
else
{
throw new Exception("No parent folder linked to Workshare");
}
}
else
{
_dmsWorker.CanUpdateVersionOrThrow(versionMap.LocalVersion, Operations.Send);
_syncInfoService.CanUpdateSyncInfoOrThrow(versionMap, Operations.Send);
var wsVersion = _wsProxy.UploadNewVersion(user, fileMap.WsFile, filePath, name);
_dmsWorker.AddHistory(versionMap.LocalVersion, string.Empty, string.Format(RES.STR_HISTORY_ITEMEXPORT, DateTime.Now.ToString(Activity.TimeFormat)), Operations.Send);
_syncInfoService.UpdateSyncInfo(versionMap.LocalVersion, new ActionContext(Services.ActionType.Upload)
{
WsVersion = wsVersion
});
versionMap.WsVersion = wsVersion;
}
}
}
return ProcessResult.Processed;
}
catch (WebException ex)
{
if (ex.IsStatusCode(HttpStatusCode.Forbidden) || ex.IsStatusCode(HttpStatusCode.Unauthorized))
{
throw new CloudFolderAccessDenied(Resources.STR_UNABLE_SYNC, Resources.STR_UNABLESYNCFILE_TEXT, ex);
}
if (ex.IsConnectionError())
{
throw new OfflineException();
}
Logger.WriteError("UploadStrategy Process catch WebException : ", ex);
throw;
}
catch (Exception ex)
{
if (ex.IsConnectionError())
{
throw new OfflineException();
}
Logger.WriteError("UploadStrategy Process catch Exception : ", ex);
throw;
}
}
public override ProcessResult Process(FolderMap folderMap, ItemMapActivityAction action)
{
return ProcessResult.Processed;
}
}
}
<file_sep>/iManageIntegration/Src/Workshate.HookEvents/HookMoveFolderCmd.cs
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
using Com.Interwoven.WorkSite.iManage;
using Com.Interwoven.Worksite.iManExt;
using Com.Interwoven.Worksite.iManExt2;
using Workshare.Components.Interfaces;
using Workshare.Components.WSLogger;
namespace Workshare.HookEvents
{
[ClassInterface(ClassInterfaceType.None)]
[Guid("F311F3A1-E00E-48F3-AA23-A4580A97B8B3")]
[ComVisible(true)]
public class HookMoveFolderCmd : ICommand, Com.Interwoven.Worksite.iManExt2._ICommandEvents_Event
{
IManMoveFolderCmd cmd;
bool isCancel = true;
public HookMoveFolderCmd()
{
cmd = new IManMoveFolderCmd();
cmd.OnCancel += cmd_OnCancel;
cmd.OnInitDialog += cmd_OnInitDialog;
cmd.PostOnOK += cmd_PostOnOK;
cmd.PreOnOK += cmd_PreOnOK;
}
public int Accelerator
{
get
{
return cmd.Accelerator;
}
set
{
cmd.Accelerator = value;
}
}
public object Bitmap
{
get
{
return cmd.Bitmap;
}
set
{
cmd.Bitmap = value;
}
}
public ContextItems Context
{
get
{
return cmd.Context;
}
}
private List<IManDocument> GetFiles(IManFolder folder)
{
List<IManDocument> ret = new List<IManDocument>();
foreach (object content in folder.Contents)
{
if (content is IManDocument)
ret.Add(content as IManDocument);
}
if (folder.SubFolders != null)
{
foreach (IManFolder subFolder in folder.SubFolders)
ret.AddRange(GetFiles(subFolder));
}
return ret;
}
public void Execute()
{
cmd.Execute();
if (isCancel) // NOTE: unfortunately 'isCancel' is always true here - 'targetFolder' is used to decide whether the sync process should be started
{
try
{
var targetFolder = ProcessIManageEvents.GetCmdContextItemValueByName(Context, "SelectedTargetIManFolderObject");
if (targetFolder != null)
{
IManFolder selectedFolder = ProcessIManageEvents.GetCmdContextItemValueByName(Context, "SelectedFolderObject") as IManFolder;
if (selectedFolder != null)
{
List<IManDocument> selectedObjectsList = new List<IManDocument>(GetFiles(selectedFolder));
if (selectedObjectsList.Count > 0)
ProcessIManageEvents.ProcessEvent(selectedObjectsList, EventOperation.MoveFolder, "HookMoveFolderCmd -> Execute");
}
else
{
Logger.Write("HookMoveFolderCmd: There is no data about selected folder", Severity.Information);
}
}
}
catch (Exception exc)
{
Logger.Write(exc, Severity.Error);
}
}
}
public string HelpFile
{
get
{
return cmd.HelpFile;
}
set
{
cmd.HelpFile = value;
}
}
public int HelpID
{
get
{
return cmd.HelpID;
}
set
{
cmd.HelpID = value;
}
}
public string HelpText
{
get
{
return cmd.HelpText;
}
set
{
cmd.HelpText = value;
}
}
public void Initialize(ContextItems Context)
{
cmd.Initialize(Context);
}
public string MenuText
{
get
{
return cmd.MenuText;
}
set
{
cmd.MenuText = value;
}
}
public string Name
{
get
{
return cmd.Name;
}
set
{
cmd.Name = value;
}
}
public int Options
{
get
{
return cmd.Options;
}
set
{
cmd.Options = value;
}
}
public int Status
{
get
{
return cmd.Status;
}
set
{
cmd.Status = value;
}
}
public Commands SubCommands
{
get
{
return cmd.SubCommands;
}
set
{
cmd.SubCommands = value;
}
}
public string Title
{
get
{
return cmd.Title;
}
set
{
cmd.Title = value;
}
}
public CommandType Type
{
get
{
return cmd.Type;
}
set
{
cmd.Type = value;
}
}
public void Update()
{
cmd.Update();
}
void cmd_PreOnOK(object pMyInterface)
{
isCancel = false;
if (PreOnOK != null)
{
PreOnOK(pMyInterface);
}
}
void cmd_PostOnOK(object pMyInterface)
{
if (PostOnOK != null)
{
PostOnOK(pMyInterface);
}
}
void cmd_OnInitDialog(object pMyInterface)
{
if (OnInitDialog != null)
{
OnInitDialog(pMyInterface);
}
}
void cmd_OnCancel(object pMyInterface)
{
isCancel = true;
if (OnCancel != null)
{
OnCancel(pMyInterface);
}
}
public event Com.Interwoven.Worksite.iManExt2._ICommandEvents_OnCancelEventHandler OnCancel;
public event Com.Interwoven.Worksite.iManExt2._ICommandEvents_OnInitDialogEventHandler OnInitDialog;
public event Com.Interwoven.Worksite.iManExt2._ICommandEvents_PostOnOKEventHandler PostOnOK;
public event Com.Interwoven.Worksite.iManExt2._ICommandEvents_PreOnOKEventHandler PreOnOK;
}
}<file_sep>/WSComponents/src/WSIntegration/WsProxies/WsProxy.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using Workshare.Components.Helpers;
using Workshare.Components.WSLogger;
using WorksharePlatform;
namespace Workshare.Integration.WsProxies
{
public class WsProxy
{
public WsFile TryGetFile(WsUser wsUser, int fileId, int parentFolderId)
{
var file = PlatformService.TryGetFile(wsUser._user, fileId, parentFolderId);
return file == null ? null : new WsFile(file, wsUser._user);
}
public WsFolder TryGetFolder(WsUser wsUser, int folderId)
{
var folder = PlatformService.TryGetFolder(wsUser._user, folderId);
return folder == null ? null : new WsFolder(folder, wsUser._user);
}
public static bool IsFolderExist(WsUser wsUser, int folderId)
{
return PlatformService.IsFolderExists(wsUser._user, folderId, -1);
}
public IEnumerable<Activity> GetFileActivities(WsUser user, WsFile wsFileInfo, string lastActivityId)
{
return PlatformService.GetFileActivitiesImanage(user._user, wsFileInfo.file, 0, lastActivityId);
}
public WsVersion UploadNewVersion(WsUser user, WsFile wsFile, string filePath, string name)
{
if (wsFile == null) throw new ArgumentNullException("wsFile");
wsFile.file.FilePath = filePath;
wsFile.file.FriendlyName = FileUtils.GetFileNameWithoutExtension(name);
wsFile.file.Name = name;
PlatformService.UploadNewVersionOfFile3(user._user, wsFile.file);
var versionNumber = wsFile.file.CurrentVersion.Version;
wsFile.Versions = null; // TODO: see whether refreshing the versions here can be avoided
return wsFile.Versions.First(a => a.Version == versionNumber);
}
public void DeleteFile(WsUser _user, int _fileId)
{
PlatformService.DeleteFile2(_user._user, _fileId);
}
public void DeleteFolder(WsUser _user, int _folderId)
{
PlatformService.DeleteFolder(_user._user, _folderId);
}
public void AddAccountDataToWDS(WsUser _user, string _key, string _data)
{
PlatformService.CreateOrUpdateWDSDictionary(_user._user, _key, _data, RoleOfDictionary.accounts);
}
public string GetAccountDataFromWDS(WsUser _user, string _key)
{
return PlatformService.GetWDSDictionary(_user._user, _key, RoleOfDictionary.accounts);
}
public void DeleteAccountDataOnWDS(WsUser _user, string _key)
{
PlatformService.DeleteWDSDictionary(_user._user, _key, RoleOfDictionary.accounts);
}
public void AddUserDataToWDS(WsUser _user, string _key, string _data)
{
PlatformService.CreateOrUpdateWDSDictionary(_user._user, _key, _data, RoleOfDictionary.users);
}
public string GetUserDataFromWDS(WsUser _user, string _key)
{
return PlatformService.GetWDSDictionary(_user._user, _key, RoleOfDictionary.users);
}
public void DeleteUserDataOnWDS(WsUser _user, string _key)
{
PlatformService.DeleteWDSDictionary(_user._user, _key, RoleOfDictionary.users);
}
public bool IsUserLoggedIn(UserDetails user)
{
if (user == null)
{
return false;
}
string hostUri = PlatformService.ServiceUrl4 + "/current_user.json";
try
{
var myReq = (HttpWebRequest)WebRequest.Create(hostUri);
if (user.IsProxyEnable)
{
myReq.Proxy = user.Proxy;
myReq.Proxy.Credentials = user.ProxyCredentials;
}
myReq.Method = "GET";
myReq.Timeout = 2000;
myReq.CookieContainer = user.SessionCookies;
using (var response = (HttpWebResponse)myReq.GetResponse())
{
using (var reader = new StreamReader(response.GetResponseStream()))
{
string responseString = reader.ReadToEnd();
if (!(responseString.Contains("Not logged in") && responseString.Contains("errors")))
return true;
}
}
}
catch (Exception ex)
{
Logger.Write(ex, Severity.Warning);
}
return false;
}
internal WsFile UploadFile(WsUser user, string friendlyName, string name, string filePath, int parentFolderId)
{
var file = new FileDetails()
{
Name = name,
FilePath = filePath,
FriendlyName = friendlyName,
FolderId = parentFolderId
};
try
{
PlatformService.UploadFile3(user._user, file);
var createdFile = PlatformService.GetFile(user._user, file.Id, parentFolderId);
var wscreatedFile = new WsFile(createdFile, user._user);
var currentVersion = wscreatedFile.CurrentVersion; // TODO: currently required because some code still uses PlatformService directly instead of the proxies
return wscreatedFile;
}
finally
{
// the temp file must be deleted from the temporary folder even if an exception occurred
file.DeleteFile();
}
}
internal WsFolder CreateFolder(WsUser user, int parentId, string name)
{
var res = PlatformService.CreateFolder(user._user, name, "", parentId);
return new WsFolder(res, user._user);
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Changes/ActivityFinder.cs
using System.Collections.Generic;
using System.Linq;
using Workshare.Integration.Modularity;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.Changes.Visitors;
using Workshare.Integration.Processor.Maps;
namespace Workshare.Integration.Processor.Changes
{
public class ActivityFinder
{
private readonly ModuleBase _module;
public ActivityFinder(ModuleBase module)
{
this._module=module;
}
public List<ImportFileActivity> GetImportActivities(ItemMap scanResult, IEnumerable<ImportFileActivity> existedActivities, bool useForceRequest = true)
{
var opt = new Options() { UseForceRequest = useForceRequest };
opt.DoNotUpdate = true;
var visitor = _module.Resolve<ImportActivityFinder>();
visitor.FoundedActivities = existedActivities == null ? new List<ImportFileActivity>() : existedActivities.ToList();
visitor.ActivitiesToUpdate = opt.ActivitiesToUpdateOnly;
visitor.Options = opt;
scanResult.Apply(visitor);
return visitor.FoundedActivities;
}
public List<UploadFileActivity> GetUploadActivities(ItemMap scanResult, IEnumerable<UploadFileActivity> existedActivities, bool useForceRequest = true)
{
var opt = new Options() {UseForceRequest = useForceRequest};
opt.DoNotUpdate = true;
var visitor = _module.Resolve<UploadActivityFinder>();
visitor.FoundedActivities = existedActivities == null ? new List<UploadFileActivity>() : existedActivities.ToList();
visitor.ActivitiesToUpdate = opt.ActivitiesToUpdateOnly;
visitor.Options = opt;
scanResult.Apply(visitor);
return visitor.FoundedActivities;
}
public void UpdateImportActivities(SummaryProcessResult scanResult, IEnumerable<ImportFileActivity> existedActivities, List<string> activityIdsToUpdate = null)
{
var opt = new Options()
{
ActivitiesToUpdateOnly = activityIdsToUpdate,
UseForceRequest = true
};
var visitor = _module.Resolve<ImportActivityFinder>();
visitor.FoundedActivities = existedActivities == null ? new List<ImportFileActivity>() : existedActivities.ToList();
visitor.ActivitiesToUpdate = opt.ActivitiesToUpdateOnly;
visitor.Options = opt;
scanResult.Apply(visitor);
}
public void UpdateUploadActivities(SummaryProcessResult scanResult, IEnumerable<UploadFileActivity> existedActivities, List<string> activityIdsToUpdate = null)
{
var opt = new Options()
{
UseForceRequest = true,
ActivitiesToUpdateOnly = activityIdsToUpdate
};
var visitor = _module.Resolve<UploadActivityFinder>();
visitor.FoundedActivities = existedActivities == null ? new List<UploadFileActivity>() : existedActivities.ToList();
visitor.ActivitiesToUpdate = opt.ActivitiesToUpdateOnly;
visitor.Options = opt;
scanResult.Apply(visitor);
}
public void UpdateActivities(SummaryProcessResult scanResult, IEnumerable<FileMapActivity> existedActivities, List<string> activityIdsToUpdate = null)
{
UpdateImportActivities(scanResult, existedActivities.OfType<ImportFileActivity>(), activityIdsToUpdate);
UpdateUploadActivities(scanResult, existedActivities.OfType<UploadFileActivity>(), activityIdsToUpdate);
}
public List<ItemMap> GetErrors(ItemMap scanResult)
{
return GetErrorsInternal(scanResult);
}
List<ItemMap> GetErrorsInternal(ItemMap map)
{
var res = new List<ItemMap>();
if (map.Error != null)
{
res.Add(map);
}
foreach (var childMap in map.Maps)
{
res.AddRange(GetErrorsInternal(childMap));
}
return res;
}
// TODO: provide separate get/update methods and remove the initialize step
public IEnumerable<FileMapActivity> GetAllActivities(ItemMap scanResult, IEnumerable<FileMapActivity> existedActivities, bool useForceRequest = true)
{
var res = new List<FileMapActivity>();
res.AddRange(this.GetImportActivities(scanResult, existedActivities.OfType<ImportFileActivity>(), useForceRequest).OfType<FileMapActivity>());
res.AddRange(this.GetUploadActivities(scanResult, existedActivities.OfType<UploadFileActivity>(), useForceRequest).OfType<FileMapActivity>());
return res;
}
public class Options
{
public IEnumerable<string> ActivitiesToUpdateOnly;
public bool DoNotUpdate { set; get; }
public bool UseForceRequest { get; set; }
}
}
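// Example usage (illustrative sketch; 'module' and 'scanResult' are assumed to be supplied by the caller):
//
// var finder = new ActivityFinder(module);
// var importActivities = finder.GetImportActivities(scanResult, null);
// var uploadActivities = finder.GetUploadActivities(scanResult, null);
// var errors = finder.GetErrors(scanResult); // item maps that carry an Error anywhere in the tree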
}
<file_sep>/OpenText/src/Workshare.OpenText/Registering.cs
using Microsoft.Win32;
namespace Workshare.OpenText
{
public static class Registering
{
static string APP_CLSID = "Workshare.OpenText.WSOTMenuWrapper";//"{08986AE5-D79F-4EE7-B876-D1C69F09C01B}";
static readonly string REGKEY = @"SOFTWARE\Hummingbird\PowerDOCS\OM\EventHandlers";
static readonly string KeyName = @"WorkshareIntegration";
public static void RegisterMenus()
{
var rkey = Registry.LocalMachine.OpenSubKey(REGKEY, true);
if (rkey != null)
{
var kExtMenu = rkey.CreateSubKey(KeyName);
if (kExtMenu != null)
{
object def_value = kExtMenu.GetValue("");
if (def_value == null || string.IsNullOrEmpty(def_value.ToString()))
{
kExtMenu.SetValue("", APP_CLSID);
}
}
}
}
}
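// Example usage (illustrative sketch; the call site is an assumption - typically an installer or add-in startup step):
//
// Workshare.OpenText.Registering.RegisterMenus();
// // Creates the WorkshareIntegration key under HKLM\SOFTWARE\Hummingbird\PowerDOCS\OM\EventHandlers
// // and sets its default value to the Workshare.OpenText.WSOTMenuWrapper ProgID if it is not already set.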
}
<file_sep>/WSComponents/src/WSIntegration/Processor/DmsItems/BaseDMSItem.cs
namespace Workshare.Integration.Processor
{
public abstract class BaseDMSItem
{
//TODO remove
}
}
<file_sep>/WSComponents/src/WSIntegration/Enums/ConflictOptions.cs
namespace Workshare.Integration.Enums
{
public enum ConflictOptions
{
None,
KeepBoth,
Replace,
UpdateOnTheWorkshare
}
public enum ConflictVersionOptions
{
None,
Latest,
All
}
public enum CheckOutOptions
{
None,
CheckOut,
DontCheckOut
}
}
<file_sep>/WSComponents/src/WSIntegration/Exceptions/CannotUpdateSyncInfoException.cs
using System;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Properties;
namespace Workshare.Integration.Exceptions
{
public class CannotUpdateDataOnWebDataStorageExeption:BaseException
{
public CannotUpdateDataOnWebDataStorageExeption(string message)
: base("",message)
{
}
public CannotUpdateDataOnWebDataStorageExeption(string message, Exception inner)
: base("",message, inner)
{
}
}
public class CannotGetDataFromWebDataStorageExeption : BaseException
{
public CannotGetDataFromWebDataStorageExeption(string message)
: base(Resources.STR_CONNECTION_ERROR_CAPTION, message)
{
}
public CannotGetDataFromWebDataStorageExeption(string message, Exception inner)
: base("",message, inner)
{
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/CeaseCollaborationDialog/WorkUnits/ScanWorkUnit.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Threading;
using System.Windows.Forms.VisualStyles;
using Workshare.Components.Common;
using Workshare.Components.Interfaces;
using Workshare.Components.Views.TrueSyncDialog.VMs;
using Workshare.Components.WSLogger;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.DmsItems;
using Workshare.Integration.Processor.DmsItems.Visitors;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.Processor.Strategies.ActionStrategy;
namespace Workshare.Components.Views.CeaseCollaborationDialog.WorkUnits
{
public class ScanWorkUnit : WorkUnit
{
private readonly List<IDMSItem> _items;
private readonly CeaseCollaborationDialogViewModel _vm;
private readonly Thread _scanThread;
public ScanWorkUnit(IModuleView view, IEnumerable<IDMSItem> items, CeaseCollaborationDialogViewModel vm, Thread scanThread, bool loadingMore = false)
: base(view)
{
if (view == null) throw new ArgumentNullException("view");
if (items == null) throw new ArgumentNullException("items");
if (vm == null) throw new ArgumentNullException("vm");
_items = items.ToList();
_vm = vm;
Name = "Scanning";
TargetItems = _items.Select(a => new TargetItem(a.ID.ToString())
{
Name = a.Name
}).ToArray();
StatusDescription = "Scanning...";
_scanThread = scanThread = new Thread(new ThreadStart(Scan)); // note: the supplied scanThread argument is replaced with a new thread that runs Scan
}
public override void OnAdded()
{
_vm.State = CeaseCollaborationDialogViewModel.StateEnum.Scanning;
}
public override void Execute()
{
try
{
_scanThread.Start();
}
catch (Exception ex)
{
_view.ShowError(ex);
}
}
private void Scan()
{
Logger.WriteTrace("ScanWorkUnit.Execute");
var processor = _vm.Module.Resolve<Processor>();
var adapter = _vm.Module.Resolve<ActivityFinder>();
List<ItemMap> errors = new List<ItemMap>();
SummaryProcessResult scanMaps = null;
if (_items.FirstOrDefault() is IDMSFolder)
{
var itemToScan = _items.OfType<BaseDMSFolder>().First();
scanMaps = processor.ScanFirstPage(itemToScan, null, null);
}
else
{
scanMaps = processor.DirectScanFile(_items.OfType<BaseDMSFile>());
}
errors = adapter.GetErrors(scanMaps);
_vm.OnCompleted(errors, scanMaps);
}
}
}
<file_sep>/WSComponents/src/WSComponents.Tests/TestWorkUnit.cs
using System.Collections.Generic;
using Moq;
using NUnit.Framework;
using Workshare.Components.Common;
using Workshare.Components.Concrete;
using Workshare.Components.Interfaces;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using WorksharePlatform;
using System.Linq;
using System;
using Workshare.Integration.Processor.Changes.Activities;
namespace WSComponents.Tests
{
[TestFixture]
public class TestWorkUnit
{
Mock<IDMSFile> file1;
Mock<IDMSFile> file2;
Mock<IDMSFolder> fld;
List<IDMSItem> lst;
List<FileMapActivity> lstToSkip;
Mock<IWSIntegration> integration;
Mock<IWSComponentsApp> app;
Mock<IModuleView> view;
[SetUp]
public void Setup()
{
integration = new Mock<IWSIntegration>();
view = new Mock<IModuleView>();
app = new Mock<IWSComponentsApp>();
app.Setup(x => x.Integration).Returns(integration.Object);
WSApplication.Instance = app.Object;
file1 = new Mock<IDMSFile>();
file2 = new Mock<IDMSFile>();
fld = new Mock<IDMSFolder>();
fld.Setup(x => x.Files).Returns(new List<IDMSFile>() { file2.Object });
lst = new List<IDMSItem>() { file1.Object, fld.Object };
lstToSkip = new List<FileMapActivity>();
}
// [Test]
public void TestSendItems_SendCalled()
{
int DestFolderID = 10;
integration.Setup(x => x.SendItems(lst, It.IsAny<FolderDetails>(), It.IsAny<OperationContext>())).Callback<IEnumerable<IDMSItem>, FolderDetails, OperationContext>((a, b, c) => { Assert.IsTrue(b.Id == DestFolderID); });
SendWorkUnit su = new SendWorkUnit(view.Object, lst, lstToSkip, DestFolderID, CheckOutOptions.CheckOut);
su.Execute();
Assert.AreEqual(su.TargetItems, lst);
integration.Verify(x=>x.SendItems(lst, It.IsAny<FolderDetails>(), It.IsAny<OperationContext>()), Times.Once());
}
// [Test]
public void TestSyncItems_SyncCalled()
{
List<SyncItemInformation> si = new List<SyncItemInformation>()
{
new SyncItemInformation(fld.Object, ConflictOptions.None, ConflictVersionOptions.None),
new SyncItemInformation(file1.Object, ConflictOptions.None, ConflictVersionOptions.None)
};
SynhWorkUnit su = new SynhWorkUnit(view.Object, si, lstToSkip, lst, OperationStage.First);
su.Execute();
Assert.IsTrue(su.TargetItems.Contains((TargetItem)fld.Object));
Assert.IsTrue(su.TargetItems.Contains((TargetItem)file1.Object));
Assert.IsFalse(su.TargetItems.Contains((TargetItem)file2.Object));
integration.Verify(x => x.SyncItems(si, It.IsAny<OperationContext>()), Times.Once());
}
// [Test]
public void TestSendItems_SendRaiseError()
{
int DestFolderID = 10;
ItemsErrorsCollection errs = new ItemsErrorsCollection(null);
errs.Add(file1.Object, new CloudFileNotFound("Test error"));
integration.Setup(x => x.SendItems(lst, It.IsAny<FolderDetails>(), It.IsAny<OperationContext>())).Throws(new MultiItemsException(errs, null));
SendWorkUnit su = new SendWorkUnit(view.Object, lst, lstToSkip, DestFolderID, CheckOutOptions.CheckOut);
su.OnError += (a, e) =>
{
Assert.IsTrue(e.Error is MultiItemsException);
e.Handled = true;
};
su.Execute();
integration.Verify(x => x.SendItems(lst, It.IsAny<FolderDetails>(), It.IsAny<OperationContext>()), Times.Once());
}
// [Test]
public void TestSyncItems_SyncRaiseError_CloudUnAuthorized()
{
var auth = new Mock<IAuthProvider>();
app.Setup(x => x.AuthProvider).Returns(auth.Object);
List<SyncItemInformation> si = new List<SyncItemInformation>()
{
new SyncItemInformation(fld.Object, ConflictOptions.None, ConflictVersionOptions.None),
new SyncItemInformation(file1.Object, ConflictOptions.None, ConflictVersionOptions.None)
};
integration.Setup(x => x.SyncItems(si, It.IsAny<OperationContext>())).Throws(new CloudUnAuthorized());
SynhWorkUnit su = new SynhWorkUnit(view.Object, si, lstToSkip, lst, OperationStage.First);
su.OnError += (a, e) =>
{
Assert.IsTrue(e.Error is MultiItemsException);
};
su.Execute();
integration.Verify(x => x.SyncItems(si, It.IsAny<OperationContext>()), Times.Once());
auth.Verify(x => x.SetCurrentUser(null), Times.Once());
view.Verify(x => x.ShowError(It.IsAny<string>()), Times.Once());
}
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/Strategies/ProcessStrategy.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Strategies;
using Workshare.Integration.WsProxies;
using Workshare.Integration.Enums;
using Workshare.Integration;
using WorksharePlatform;
using Workshare.IManage.Contrete;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.Modularity;
namespace Workshare.IManage.Strategies
{
class ProcessStrategy : Workshare.Integration.Processor.Strategies.ProcessStrategy
{
public ProcessStrategy(ModuleBase module, WsProxy wsProxy, IAuthProvider auth,DmsWorkerBase dmsWorker,SyncInfoService syncInfoService)
: base(module,wsProxy, auth, dmsWorker, syncInfoService)
{
}
}
}
<file_sep>/WSComponents/src/WSIntegration/WsProxies/WsUser.cs
using System;
using System.Net;
using WorksharePlatform;
namespace Workshare.Integration.WsProxies
{
public class WsUser
{
public UserDetails _user;
public WsUser(UserDetails user)
{
if (user == null) throw new ArgumentNullException("user");
this._user = user;
}
public string AuthToken { get { return _user.AuthToken; } set { _user.AuthToken = value; } }
public CookieContainer SessionCookies { get { return _user.SessionCookies; } set { _user.SessionCookies = value; } }
public string UserName { get { return _user.UserName; } }
public string Domain
{
get
{
var res = _user.Domain;
if (string.IsNullOrEmpty(res))
{
res = WsUtils.GetEmailDomain(Email);
}
return res;
}
}
public bool InPrivateDomain
{
get
{
return !string.IsNullOrEmpty(_user.Domain);
}
}
public string Email
{
get { return _user.Email; }
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Changes/Activities/ImportFileActivity.cs
namespace Workshare.Integration.Processor.Changes.Activities
{
public class ImportFileActivity : FileMapActivity
{
public ImportFileActivityState State { set; get; }
public ImportFileActivity()
{
Type = "ImportActivity";
}
public override bool InKindOfErrorState()
{
return State == ImportFileActivityState.Error
|| State == ImportFileActivityState.CheckedOutOnAnotherMachine
|| State == ImportFileActivityState.CheckedOutToAnother
|| State == ImportFileActivityState.NoAccessOnWorkshare
|| State == ImportFileActivityState.LockedByAnotherProcess;
}
public override bool InKindOfProceeedState()
{
return State == ImportFileActivityState.Imported;
}
public override bool InKindOfProcessingState()
{
return State == ImportFileActivityState.Importing;
}
public override string ToString()
{
return string.Format("ImportFileActivity State={0}; Id={1}, Changes={2}", State, MapId,Changes.AsString());
}
}
public enum ImportFileActivityState {
Scanned,
Imported,
Error,
Importing,
CheckedOutToAnother,
CheckedOutOnAnotherMachine,
NoAccessOnWorkshare,
LockedByAnotherProcess,
DeletingLink,
LinkIsDeleted,
NoAccessOnDMS
};
}
<file_sep>/SharePoint/src/WorkshareCloud.Common/WorkshareIntegration.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.SharePoint;
using WorksharePlatform;
using WorkshareCloud.Common;
using System.IO;
using System.Web;
using System.Xml;
using WorkshareCloud.Common.Enums;
using WorkshareCloud.Common.Receivers;
using System.Net;
namespace WorkshareCloud.Common
{
public class WorkshareIntegration
{
#region public
public static WorkshareIntegration Instance
{
get
{
return new WorkshareIntegration();
}
}
public static ICollection<SPListItem> GetListItem(ItemID[] items, Guid listGuid, SPWeb web)
{
var list = web.Lists.GetList(listGuid, false);
if (list != null)
{
List<SPListItem> itms = new List<SPListItem>();
foreach (var itm in items)
{
itms.Add(list.GetItemById(itm.id));
}
return itms;
}
return null;
}
public void ClearFolderSyncDataIfNeed(SPListItem item)
{
var current_folder = item;
while (current_folder != null && CheckFolderIsNeedToClearSyncData(current_folder))
{
ClearFolderCloudData(current_folder);
current_folder = current_folder.Folder.ParentFolder.Item;
}
}
void SyncItems(List<SyncItemInfo> infos, Guid listGuid, UserDetails user)
{
if (infos.Any())
{
using (var errorlist = new ItemsErrorsCollection())
{
CloudAuthenication.CheckIfAuth(user);
var items = GetListItem(infos.Select(x => x.item).ToArray(), listGuid, SPContext.Current.Web);
var parentList = items.First().ParentList;
parentList.CheckWSFieldOrThrow();
//bool idNotInitialize = !EnsureListInitialized(parentList);
foreach (SPListItem listitem in items)
{
SafeFuncCall(errorlist, listitem, new ExceptionListSignal() { IsAll = true, IsList = true }, () =>
{
//var item = (idNotInitialize) ? parentList.GetItemById(listitem.ID) : listitem;
var item = listitem;
if (item.IsFolder())
{
SyncFolder(item.Folder, user);
}
else
{
SyncFile(item, (ConflictOptions)infos.Where(x => x.item.id == item.ID).First().conflictOption, user);
}
});
}
}
}
}
public void SyncItems(List<SyncItemInformation> infos, UserDetails user)
{
using (var errorlist = new ItemsErrorsCollection())
{
foreach (var syncInfo in infos)
{
Guid ListGuid;
if (Extensions.GuidTryParse(syncInfo.listGuid, out ListGuid))
{
SafeFuncCall(errorlist, null, new ExceptionListSignal() { IsAll = true, IsList = true }, () =>
{
//var item = GetListItem(syncInfo.items, ListGuid, SPContext.Current.Web);
SyncItems(syncInfo.items, ListGuid, user);
});
}
else
{
throw new BadRequest(string.Format("Cannot parse ListGuid={0}", syncInfo.listGuid));
}
}
}
}
public void SendItems(List<SendItemInformation> infos, UserDetails user)
{
using (var errorlist = new ItemsErrorsCollection())
{
foreach (var sendInfo in infos)
{
Guid ListGuid;
int folderId;
if (Extensions.GuidTryParse(sendInfo.listGuid, out ListGuid) && int.TryParse(sendInfo.folderId, out folderId))
{
SafeFuncCall(errorlist, null, new ExceptionListSignal() { IsAll = true, IsList = true }, () =>
{
var items = GetListItem(sendInfo.items.ToArray(), ListGuid, SPContext.Current.Web);
SendItems(items, folderId, user);
});
}
else
{
throw new BadRequest(string.Format("ListGuid={0} FolderId={1}", sendInfo.listGuid, sendInfo.folderId));
}
}
}
}
void SendItems(ICollection<SPListItem> items, int folderId, UserDetails user)
{
if (items.Any())
{
using (var errorlist = new ItemsErrorsCollection())
{
CloudAuthenication.CheckIfAuth(user);
SafeFuncCall(errorlist, null, new ExceptionListSignal() { IsAll = true, IsList = true }, () =>
{
var parentList = items.First().ParentList;
parentList.CheckWSFieldOrThrow();
if (!CheckFolderExistsWhenSendOrThrow(user, folderId, -1))
{
throw new CloudFolderNotFound(folderId, false);
}
try // needed in case the destination folder on the cloud is deleted during the send operation
{
foreach (SPListItem listitem in items)
{
SafeFuncCall(errorlist, listitem, new ExceptionListSignal() { IsAll = true, IsList = true, FolderError = true }, () =>
{
var item = listitem;
if (item.IsFolder())
{
SendFolder(item.Folder, folderId, user);
}
else
{
SendFile(item, folderId, user);
}
});
}
}
catch (CloudFolderNotFound ex)
{
// if the destination folder was deleted, unlink the files that were already processed
foreach (SPListItem listitem in items)
{
SafeFuncCall(errorlist, listitem, new ExceptionListSignal() { IsAll = true, IsList = true }, () =>
{
var item = listitem;
if (item.IsFolder())
{
SyncFolder(item.Folder, user, true);
}
else
{
ClearFileCloudData(item);
}
});
}
errorlist.Clear();
errorlist.Add(null, ex);
}
});
}
}
}
const string CustomXSLT = "Cloud.xsl";
internal string GetCreateFieldAsXml(Guid oId)
{
XmlElement element = new XmlDocument().CreateElement("Field"); // Creating the “Field” element for the Inner XML schema
element.SetAttribute("ID", oId.ToString()); // Setting the GUID of the field from value passed
element.SetAttribute("Type", "CloudPathField"); // Setting the Type name registered in the “Fldtypes*.xml” file
element.SetAttribute("Name", CloudPathFieldValue.CloudField); // Setting the Field name registered in the “Fldtypes*.xml” file
element.SetAttribute("DisplayName", CloudPathFieldValue.CloudField); // Any unique Display Name
element.SetAttribute("Required", "FALSE");
element.SetAttribute("ShowInNewForm", "FALSE");
element.SetAttribute("ShowInEditForm", "FALSE");
element.SetAttribute("ShowInDisplayForm", "FALSE");
element.SetAttribute("ShowInViewForms", "FALSE");
element.SetAttribute("ShowInVersionHistory", "FALSE");
return element.OuterXml; // Returning the OuterXML to create the field as XML
}
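// Example of the XML this produces (illustrative; the GUID is made up and Name/DisplayName come from CloudPathFieldValue.CloudField):
// <Field ID="{oId}" Type="CloudPathField" Name="{CloudField}" DisplayName="{CloudField}"
//        Required="FALSE" ShowInNewForm="FALSE" ShowInEditForm="FALSE" ShowInDisplayForm="FALSE"
//        ShowInViewForms="FALSE" ShowInVersionHistory="FALSE" />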
public bool EnsureListInitialized(SPList list)
{
try
{
list.ParentWeb.AllowUnsafeUpdates = true;
var wasUpdated = false;
if (!list.Fields.ContainsFieldWithStaticName(CloudPathFieldValue.CloudField))
{
list.CheckPermissionkOrThrow(SPBasePermissions.ManageLists);
var field = GetCreateFieldAsXml(Guid.NewGuid());
//SPField field = list.Fields.CreateNewField(SPFieldType.Text.ToString(), CloudPathFieldValue.CloudField);
//field.ShowInNewForm = false;
//field.ShowInEditForm = false;
//field.ShowInDisplayForm = false;
//field.ShowInViewForms = false;
//field.ShowInVersionHistory = false;
//list.Fields.Add(field);
list.Fields.AddFieldAsXml(field);
list.Update();
wasUpdated = true;
}
for (int i = 0; i < list.Views.Count; i++)
{
var view = list.Views[i];
if (!view.ViewFields.Exists(CloudPathFieldValue.CloudField))
{
view.ViewFields.Add(CloudPathFieldValue.CloudField);
view.Update();
wasUpdated = true;
}
//if (!string.Equals(view.XslLink, CustomXSLT))
//{
// view.XslLink = CustomXSLT;
// view.Update();
// wasUpdated = true;
//}
}
UpdateReceiver(list);
if (wasUpdated) { list.ParentWeb.Audit.WriteItemUpdate(list); }
return !wasUpdated;
}
finally
{
list.ParentWeb.AllowUnsafeUpdates = false;
}
}
#endregion
#region privates
private static readonly char[] InvalidSharePointFileNameCharacters = { '~', '#', '%', '&', '*', ':', '<', '>', '?', '/', '\\', '{', '}', '|' };
readonly string MISSEDONCLOUD = "Item was deleted from Workshare";
void UpdateReceiver(SPList list)
{
list.EventReceivers.OfType<SPEventReceiverDefinition>().Where(p => string.Equals(p.Class, typeof(WorkshareReceiver).FullName)).ToList().ForEach(receiver => receiver.Delete());
list.EventReceivers.Add(SPEventReceiverType.ItemCheckingIn, typeof(WorkshareReceiver).Assembly.FullName, typeof(WorkshareReceiver).FullName);
//list.EventReceivers.Add(SPEventReceiverType.ItemCheckedIn, typeof(WorkshareReceiver).Assembly.FullName, typeof(WorkshareReceiver).FullName);
//list.EventReceivers.Add(SPEventReceiverType.ItemUncheckedOut, typeof(WorkshareReceiver).Assembly.FullName, typeof(WorkshareReceiver).FullName);
list.EventReceivers.Add(SPEventReceiverType.ItemAdding, typeof(WorkshareReceiver).Assembly.FullName, typeof(WorkshareReceiver).FullName);
}
private int SendFolder(SPFolder folder, int folderId, UserDetails user)
{
int newfolderId = -1;
if (!(new CloudPathFieldValue((string)folder.Item[CloudPathFieldValue.CloudField]).HasValue))
{
using (var errorlist = new ItemsErrorsCollection())
{
SafeFuncCall(errorlist, folder.Item, new ExceptionListSignal() { IsWeb = true, FolderError = true }, () =>
{
FolderDetails newFolder;
try
{
newFolder = PlatformService.CreateFolder(user, folder.Name, "desc", folderId);
}
catch (WebException ex)
{
if (IsStatusCode(ex, (HttpStatusCode)403))
{
throw new CloudFolderAccessDenied(Properties.Resources.STR_UNABLE_SEND, Properties.Resources.STR_UNANBESENDFOLDER_TEXT, ex);
}
else
{
throw;
}
}
RunUnsafeUpdate(folder.Item.Web, () =>
{
folder.Item.SetCloudValue(newFolder);
folder.Item.SystemUpdate(false);
});
string itemSendInfo = "";
foreach (SPFolder item in folder.SubFolders)
{
SafeFuncCall(errorlist, item.Item, new ExceptionListSignal() { IsAll = true, IsList = true, FolderError = true }, () =>
{
int folderID = SendFolder(item, newFolder.Id, user);
if (folderID > 0)
{
itemSendInfo += folderID.ToString() + ";";
}
});
}
foreach (SPFile file in folder.Files)
{
SafeFuncCall(errorlist, file.Item, new ExceptionListSignal() { IsAll = true, IsList = true, IsWeb = true, FolderError = true }, () =>
{
int fileID = SendFile(file.Item, newFolder.Id, user);
if (fileID > 0)
{
itemSendInfo += fileID.ToString() + ";";
}
});
}
RunUnsafeUpdate(folder.Item.Web, () =>
{
if (folder.Item.Properties.Contains("SyncedFileData"))
{
folder.Item.Properties.Remove("SyncedFileData");
}
folder.Item.Properties.Add("SyncedFileData", itemSendInfo);
folder.Item.SystemUpdate(false);
});
folder.ParentWeb.Audit.WriteItemSent(folder.Item, newFolder, PlatformService.GetAncestor(user, folderId));
newfolderId = newFolder.Id;
});
}
}
else
{
throw new AlreadySentException();
}
return newfolderId;
}
private void SyncFilesInFolder(SPFolder folder, List<FileDetails> filesOnCloud, UserDetails user)
{
using (var errorlist = new ItemsErrorsCollection())
{
// sync existing files
List<int> filesOnSP = new List<int>();
foreach (SPFile item in folder.Files)
{
var fileCloudData = new CloudPathFieldValue((string)(item.Item)[CloudPathFieldValue.CloudField]);
if (fileCloudData.ItemId > 0)
{
filesOnSP.Add(fileCloudData.ItemId);
}
SafeFuncCall(errorlist, item.Item, new ExceptionListSignal() { IsAll = true, IsWeb = true }, () =>
{
SyncFile(item.Item, ConflictOptions.None, user);
//item.Item.Update();
});
}
string processedFilesString = folder.Item.Properties.Contains("SyncedFileData") ? folder.Item.Properties["SyncedFileData"].ToString() : "";
var processedFiles = processedFilesString.Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
foreach (var fileID in filesOnCloud.Select(x => x.Id).Except(filesOnSP))
{
SafeFuncCall(errorlist, folder.Item, new ExceptionListSignal() { IsAll = true, IsWeb = true }, () =>
{
var cloudFile = filesOnCloud.Where(x => x.Id == fileID).First();
if (!processedFiles.Contains(cloudFile.Id.ToString()))
{
try
{
processedFilesString += cloudFile.Id + ";";
PlatformService.GetVersionDetails(user, cloudFile);
AddNewFileToSharePoint(folder, cloudFile, PlatformService.DownloadFile(user, fileID));
RunUnsafeUpdate(folder.Item.Web, () =>
{
if (folder.Item.Properties.Contains("SyncedFileData"))
{
folder.Item.Properties["SyncedFileData"] = processedFilesString;
}
else
{
folder.Item.Properties.Add("SyncedFileData", processedFilesString);
}
folder.Item.SystemUpdate(false);
});
}
catch (WebException e)
{
if (IsStatusCode(e, (HttpStatusCode)403))
{
throw new CloudFolderAccessDenied(Properties.Resources.STR_UNABLE_SYNC, Properties.Resources.STR_UNABLESYNCFILE_TEXT, e);
}
else
{
throw;
}
}
}
});
}
}
}
private void AddNewFileToSharePoint(SPFolder folder, FileDetails file, string filePath)
{
RunUnsafeUpdate(folder.ParentWeb, () =>
{
SPFile fl = folder.Files.AddWSFile(RemoveInvalidSymbols(file.Name), filePath);
//fl.Item.SetCloudValue(file);
//fl.Item.SystemUpdate(false);
RunUnsafeUpdate(fl.Item.Web, () => { fl.CheckOut(); });
folder.ParentWeb.Audit.WriteItemSynced(folder.Item, file);
});
}
private void SyncSubFolderInFolder(SPFolder folder, IEnumerable<FolderDetails> foldersOnCloud, UserDetails user)
{
using (var errorlist = new ItemsErrorsCollection())
{
// sync existing folders
Dictionary<int, string> folderOnSP = new Dictionary<int, string>();
foreach (SPFolder item in folder.SubFolders)
{
var folderCloudData = new CloudPathFieldValue((string)(item.Item)[CloudPathFieldValue.CloudField]);
if (folderCloudData.ItemId > 0)
{
folderOnSP.Add(folderCloudData.ItemId, RemoveInvalidSymbols(item.Name));
}
SafeFuncCall(errorlist, item.Item, new ExceptionListSignal() { IsAll = true, IsList = true, IsWeb = true }, () =>
{
SyncFolder(item, user);
//item.Item.Update();
});
}
// add new folders from cloud to SP and sync it
string processedFolderString = folder.Item.Properties.Contains("SyncedFileData") ? folder.Item.Properties["SyncedFileData"].ToString() : "";
var processedFolders = processedFolderString.Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
var newFolders = foldersOnCloud.Select(f => f.Id).Except(folderOnSP.Select(x => x.Key));
foreach (var folderID in newFolders)
{
SafeFuncCall(errorlist, folder.Item, new ExceptionListSignal() { IsAll = true, IsList = true, IsWeb = true }, () =>
{
var cloudFolder = foldersOnCloud.Where(x => x.Id == folderID).First();
if (!processedFolders.Contains(cloudFolder.Id.ToString()))
{
RunUnsafeUpdate(folder.Item.Web, () =>
{
processedFolderString += cloudFolder.Id + ";";
SPFolder fld = folder.SubFolders.Add(folder.ServerRelativeUrl + "/" + RemoveInvalidSymbols(cloudFolder.Name) + "/");
if (folder.Item.Properties.Contains("SyncedFileData"))
{
folder.Item.Properties["SyncedFileData"] = processedFolderString;
}
else
{
folder.Item.Properties.Add("SyncedFileData", processedFolderString);
}
folder.Update();
folder.Item.SystemUpdate(false);
fld.Item.SetCloudValue(cloudFolder);
fld.Item.SystemUpdate(false);
SyncFolder(fld, user);
});
}
});
}
}
}
private void ClearFolderCloudData(SPListItem folder)
{
RunUnsafeUpdate(folder.Web, () =>
{
folder.SetCloudValue((FolderDetails)null);
if (folder.Properties.Contains("SyncedFileData"))
{
folder.Properties.Remove("SyncedFileData");
}
folder.SystemUpdate(false);
});
}
private bool CheckFolderExistsWhenSendOrThrow(UserDetails user, int itemID, int parentFolderID)
{
try
{
return PlatformService.IsFolderExists(user, itemID, parentFolderID);
}
catch (WebException ex)
{
if (IsStatusCode(ex, (HttpStatusCode)403))
{
throw new CloudFolderAccessDenied(Properties.Resources.STR_UNABLE_SEND, Properties.Resources.STR_UNABLESENDITEM_TEXT, ex);
}
else
{
throw;
}
}
}
private bool CheckFolderExistsWhenSyncOrThrow(UserDetails user, int itemID, int parentFolderID)
{
try
{
return PlatformService.IsFolderExists(user, itemID, parentFolderID);
}
catch (WebException ex)
{
if (IsStatusCode(ex, (HttpStatusCode)403))
{
throw new CloudFolderAccessDenied(Properties.Resources.STR_UNABLE_SYNC, Properties.Resources.STR_UNABLESYNCITEM_TEXT, ex);
}
else
{
throw;
}
}
}
private void SyncFolder(SPFolder folder, UserDetails user, bool ClearFieldData = false)
{
using (var errorlist = new ItemsErrorsCollection())
{
SafeFuncCall(errorlist, folder.Item, new ExceptionListSignal() { IsWeb = true }, () =>
{
var CloudPathData = new CloudPathFieldValue((string)(folder.Item)[CloudPathFieldValue.CloudField]);
if (CloudPathData.HasValue)
{
//var cloudfolder = PlatformService.GetFolder(_user, CloudPathData.FolderId); // need for check if user can access to folder
if (!ClearFieldData && CheckFolderExistsWhenSyncOrThrow(user, CloudPathData.ItemId, CloudPathData.ParentFolderId))
{
SafeFuncCall(errorlist, null, new ExceptionListSignal() { IsList = true }, () =>
{
SyncFilesInFolder(folder, PlatformService.GetFiles(user, CloudPathData.ItemId), user);
});
SafeFuncCall(errorlist, null, new ExceptionListSignal() { IsList = true }, () =>
{
SyncSubFolderInFolder(folder, PlatformService.GetChildFolders(user, CloudPathData.ItemId), user);
});
if (CheckFolderIsNeedToClearSyncData(folder.Item)) // all files sync OK then clear folder sync data
{
ClearFolderCloudData(folder.Item);
}
}
else
{ // the folder was removed from the cloud, so clear the Cloud field for this folder and all of its sub-items
// clear cloud field for files in the folder
foreach (SPFile file in folder.Files)
{
SafeFuncCall(errorlist, file.Item, new ExceptionListSignal() { IsAll = true }, () =>
{
var fileclouddata = new CloudPathFieldValue((string)(file.Item)[CloudPathFieldValue.CloudField]);
FileDetails fd = fileclouddata.HasValue ? PlatformService.GetFile(user, fileclouddata.ItemId, fileclouddata.ParentFolderId) : null;
ClearFileCloudData(file.Item);
if (fileclouddata.HasValue)
{
file.Item.Web.Audit.WriteItemStopSync(file.Item, MISSEDONCLOUD, fd);
}
});
}
//clear cloud field for all subfolders
foreach (SPFolder item in folder.SubFolders)
{
SafeFuncCall(errorlist, item.Item, new ExceptionListSignal() { IsAll = true, IsList = true }, () =>
{
SyncFolder(item, user, true);
});
}
SafeFuncCall(errorlist, folder.Item, new ExceptionListSignal() { IsAll = true }, () =>
{
ClearFolderCloudData(folder.Item);
folder.ParentWeb.Audit.WriteItemStopSync(folder.Item, MISSEDONCLOUD);
});
if (!ClearFieldData)
{
throw new CloudFolderNotFound(CloudPathData.ParentFolderId, PlatformService.IsFolderExists(user, CloudPathData.ParentFolderId, -1));
}
}
}
});
}
}
private bool CheckFolderIsNeedToClearSyncData(SPListItem item)
{
if (item == null) { return false; }
SPList list = item.ParentList;
SPFolder folder = item.Folder;
if (folder == null)
{
SPFile file = item.File;
if (file == null || file.ParentFolder == null || file.ParentFolder.Item == null)
{
return false;
}
folder = file.ParentFolder;
}
if (folder.Item == null) { return false; } // the item is the root folder, which has no list item to clear
SPQuery qry = new SPQuery();
qry.Folder = folder;
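// Select items in this folder that are still linked to the cloud (Cloud field set) or are
// checked out to a user other than the current one; the folder's sync data may only be
// cleared when no such items remain (i.e. the query returns no results).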
qry.Query =
"<Where> "+
"<Or> "+
"<IsNotNull><FieldRef Name='" + CloudPathFieldValue.CloudField + "'/></IsNotNull> "+
"<And> "+
"<Neq><FieldRef Name='CheckoutUser' LookupId='TRUE'/><Value Type='Integer'><UserID/></Value></Neq> "+
"<IsNotNull><FieldRef Name='CheckoutUser'/></IsNotNull> "+
"</And> "+
"</Or> " +
"</Where>";
SPListItemCollection res = list.GetItems(qry);
return res.Count == 0;
}
private void ClearFileCloudData(SPListItem file)
{
RunUnsafeUpdate(file.Web, () =>
{
file.SetCloudValue((FileDetails)null);
file.SystemUpdate(false);
});
}
private void SyncFile(SPListItem item, ConflictOptions conflictOption, UserDetails _user)
{
SafeFuncCall(null, null, new ExceptionListSignal() { IsWeb = true }, () =>
{
//CloudAuthenication.CheckIfAuth();
Exception ex = null;
item.CheckPermissionkOrThrow(SPBasePermissions.EditListItems);
item.CheckNotCheckedOutToOtherUserOrThrow();
var CloudPathData = new CloudPathFieldValue((string)item[CloudPathFieldValue.CloudField]);
if (CloudPathData.HasValue)
{
try
{
var folder = PlatformService.GetFolder(_user, CloudPathData.ParentFolderId); // needed to check whether the user can access the folder
if (folder != null)
{
var file = PlatformService.GetFiles(_user, CloudPathData.ParentFolderId).Where(x => x.Id == CloudPathData.ItemId).FirstOrDefault();
// if the file is null it was deleted from the cloud, so clear the synchronization data from SharePoint
if (file != null)
{
var cloud_versions = PlatformService.GetVersionDetails(_user, file);
var currentFileVersion = cloud_versions.Where(x => x.Id == CloudPathData.VersionId).FirstOrDefault();
if (currentFileVersion != null)
{
var cloudIsNewer = cloud_versions.Where(x => x.Version > currentFileVersion.Version).Any();
var SPFileModified = (DateTime)item[SPBuiltInFieldId.Modified];
bool SPFileIsNewer = SPFileModified != null && SPFileModified.Ticks > CloudPathData.ItemModifiedBy;
// file on the cloud is newer
if (cloudIsNewer && (!SPFileIsNewer || conflictOption == ConflictOptions.Replace))
{
var filepath = PlatformService.DownloadFile(_user, CloudPathData.ItemId);
UpdateFileOnSharepoint(item, filepath, file);
item.Web.Audit.WriteItemSync(item, file);
}
else
if (cloudIsNewer && (SPFileIsNewer && conflictOption == ConflictOptions.KeepBoth))
{
AddNewFileToSharePoint(item.File.ParentFolder, file, PlatformService.DownloadFile(_user, file.Id));
//ClearFileCloudData(item);
}
else //file on the sharepoint is newer
{
if (SPFileIsNewer && conflictOption == ConflictOptions.None)
{
if (cloudIsNewer)
{
ex = new FileConflictException(item.Name, file.Name, item.ID, item.ParentList.ID, SPFileModified, file.CurrentVersion.CreateDate, item.File.ModifiedBy.Name, file.CurrentVersion.Creator.UserName);
}
else
{
//ex = new FileOnSharePointIsNewer(item.Name, item.ID);
UploadNewFileVersionToWorkshareCloud(_user, item, file);
item.Web.Audit.WriteItemSync(item, file);
}
}
}
}
else
{
// the linked version no longer exists on the cloud (versions are not expected to be deletable)
ex = new VersionDoesNotExist();
}
}
else
{
// file was removed from cloud
FileDetails fd = PlatformService.GetFile(_user, CloudPathData.ItemId, CloudPathData.ParentFolderId);
ClearFileCloudData(item);
item.Web.Audit.WriteItemStopSync(item, MISSEDONCLOUD, fd);
ex = new CloudFileNotFound(CloudPathData.ParentFolderId, PlatformService.IsFolderExists(_user, CloudPathData.ParentFolderId, -1));
}
}
if (ex != null)
{
Logging.Logger.WriteTrace(ex, Microsoft.SharePoint.Administration.TraceSeverity.High, Logging.Category.CloudService);
throw ex;
}
ClearFileCloudData(item.File.Item);
}
catch (WebException e)
{
if (IsStatusCode(e, (HttpStatusCode)403))
{
throw new CloudFolderAccessDenied(Properties.Resources.STR_UNABLE_SYNC, Properties.Resources.STR_UNABLESYNCFILE_TEXT, e);
}
else
{
throw;
}
}
}
});
}
private void ThrowIfFolderNotExists(UserDetails user, int folderID)
{
if (!PlatformService.IsFolderExists(user, folderID, -1))
throw new CloudFolderNotFound(folderID, false);
}
private int SendFile(SPListItem item, int folderId, UserDetails user)
{
int fileID = -1;
ThrowIfFolderNotExists(user, folderId);
SafeFuncCall(null, null, new ExceptionListSignal() { IsWeb = true }, () =>
{
if (item != null && item.File != null)
{
item.CheckPermissionkOrThrow(SPBasePermissions.EditListItems);
item.CheckNotCheckedOutToOtherUserOrThrow();
if (!item.IsCheckedOutToCurrentUser())
{
RunUnsafeUpdate(item.Web, () =>
{
item.File.CheckOut();
});
}
if (!(new CloudPathFieldValue((string)item[CloudPathFieldValue.CloudField]).HasValue))
{
fileID = UploadFileToWorkshareCloud(item, folderId, user);
item.Web.Audit.WriteItemSent(new[] { item }, new FolderDetails() { Id = folderId }, PlatformService.GetAncestor(user, folderId));
}
else
{
throw new AlreadySentException();
}
}
});
return fileID;
}
private bool UploadNewFileVersionToWorkshareCloud(UserDetails user, SPListItem item, FileDetails file)
{
var value = item.GetCloudValue();
if (value != null)
{
// TODO: make sure FilePath is set before uploading
//file.FilePath
file.Name = item.Name;
RunUnsafeUpdate(item.Web, () =>
{
PlatformService.UploadNewVersionOfFile(user, file);
//item.SetCloudValue(file);
//item.SystemUpdate(false);
});
return true;
}
return false;
}
private int UploadFileToWorkshareCloud(SPListItem item, int folderId, UserDetails user)
{
var value = item.GetCloudValue();
if (value != null)
{
var fileDetailes = new FileDetails()
{
Name = item.Name,
FriendlyName = ((string.IsNullOrEmpty(item.DisplayName)) ? Path.GetFileNameWithoutExtension(item.Name) : item.DisplayName),
// TODO: FilePath needs to be obtained and set here
//FilePath
FolderId = folderId
};
try
{
RunUnsafeUpdate(item.Web, () =>
{
PlatformService.UploadFile3(user, fileDetailes);
item.SetCloudValue(fileDetailes);
item.SystemUpdate(false);
});
return fileDetailes.CurrentVersion.FileId;
}
catch (WebException ex)
{
if (IsStatusCode(ex, (HttpStatusCode)403))
{
throw new CloudFolderAccessDenied(Properties.Resources.STR_UNABLE_SEND, Properties.Resources.STR_UNABLESENDFILE_TEXT, ex);
}
else
{
throw;
}
}
};
return -1;
}
private bool UpdateFileOnSharepoint(SPListItem item, string filePath, FileDetails file)
{
using (Stream stream = File.Open(filePath, FileMode.Open, FileAccess.Read, FileShare.None))
{
RunUnsafeUpdate(item.Web, () =>
{
item.File.SaveBinary(stream, new SPFileSaveBinaryParameters() { CreateVersion = false, CheckRequiredFields = false });
//item.File.Item["Name"] = file.Version.Name; // a new version can be uploaded to the Workshare cloud with a new name, but the new name is not shown in the Web UI
//item.File.Item.SetCloudValue(file);
item.File.Item.SystemUpdate(false);
});
return true;
}
}
private class ExceptionListSignal
{
public bool IsList = false;
public bool IsWeb = false;
public bool IsAll = false;
public bool FolderError = false;
}
public static bool IsStatusCode(WebException ex, HttpStatusCode code)
{
return ex.Response != null && ex.Response is HttpWebResponse && (((HttpWebResponse)ex.Response).StatusCode == code);
}
private void SafeFuncCall(ItemsErrorsCollection errorlist, SPListItem item, ExceptionListSignal flags, Action func)
{
try
{
try
{
func();
}
catch (Exception ex)
{
ReThrowException(ex);
}
}
catch (Exception ex)
{
Logging.Logger.WriteTrace(ex, Microsoft.SharePoint.Administration.TraceSeverity.High, Logging.Category.CloudService);
if (flags.FolderError && ex is CloudFolderNotFound)
{
throw;
}
if (flags.IsList && ex is ListProcessException)
{
if (errorlist != null)
{
errorlist.AddRange(((ListProcessException)ex).errorList);
}
return;
}
if (ex is CloudUnAuthorized)
{
throw;
}
if (flags.IsWeb && ex is System.Net.WebException)
{
if (errorlist != null && item != null)
{
errorlist.Add(item, ex);
return;
}
else
{
throw;
}
}
if (flags.IsAll)
{
if (errorlist != null && item != null)
{
errorlist.Add(item, ex);
return;
}
else
{
throw;
}
}
else
{
throw;
}
}
}
public static void ReThrowException(Exception ex)
{
if (ex is System.Net.WebException)
{
var webEx = (System.Net.WebException)ex;
if (webEx.Response is System.Net.HttpWebResponse)
{
var code = ((System.Net.HttpWebResponse)(webEx.Response)).StatusCode;
if (code == System.Net.HttpStatusCode.Unauthorized)
{
throw new CloudUnAuthorized(ex);
}
}
}
else if (ex is UnauthorizedAccessException)
{
throw new CloudUnAuthorized(ex);
}
else if ((ex.Message.Contains("no such file or folder") || (ex.Message.Contains("file not found") || ex is FileNotFoundException)))
{
throw new ItemNotFound(ex);
}
throw ex;
}
private void RunUnsafeUpdate(SPWeb web, Action func)
{
try
{
web.AllowUnsafeUpdates = true;
func();
}
finally
{
web.AllowUnsafeUpdates = false;
}
}
private string RemoveInvalidSymbols(string name)
{
StringBuilder _name = new StringBuilder(name);
for (int i = 0; i < InvalidSharePointFileNameCharacters.Length; i++)
{
_name.Replace(InvalidSharePointFileNameCharacters[i], '_');
}
return _name.ToString().Trim();
}
#endregion
}
public class SyncItemInformations
{
public SyncItemInformations()
{
infos = new List<SyncItemInformation>();
}
public List<SyncItemInformation> infos { set; get; }
}
public class SyncItemInformation
{
public List<SyncItemInfo> items { set; get; }
public string listGuid { set; get; }
}
public class SendItemInformations
{
public SendItemInformations()
{
infos = new List<SendItemInformation>();
}
public List<SendItemInformation> infos { set; get; }
}
public class SendItemInformation
{
public List<ItemID> items { set; get; }
public string listGuid { set; get; }
public string folderId { set; get; }
}
public class SyncItemInfo
{
public ItemID item;
public int conflictOption; //0 - show error when conflict, 1 - use file from cloud, 2 - create new file on the Sharepoint
}
public class ItemID
{
public int id;
public int FSObjType;
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncDialog/ChangeToTextConverter.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Windows.Data;
using Workshare.Components.WSLogger;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes;
namespace Workshare.Components.Views.TrueSyncDialog
{
class ChangeToTextConverter:IValueConverter
{
public string TimeFormat = "HH:mm dd/MM";
public string TimeFormatDateOnly = "dd/MM";
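// Builds a human-readable, one-line description of a FileActivityChange for display in the TrueSync dialog.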
public object Convert(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture)
{
try
{
var change = value as FileActivityChange;
var sb = new StringBuilder();
if (change != null)
{
var wsFile = change.Parent.WsFile;
switch (change.Type)
{
case ChangeType.Uploaded:
{
if (wsFile != null)
{
sb.AppendFormat("Shared in Workshare by {0} since {1}", wsFile.Creator.UserName,
wsFile.CreatedAt.ToString(TimeFormatDateOnly));
}
else
{
sb.Append("Shared");
}
break;
}
case ChangeType.RemoteDeleted:
{
sb.Append("Deleted or moved from Workshare");
break;
}
case ChangeType.RemoteAdded:
{
if (wsFile != null)
{
var lastVersion =
change.NewerVersions.OrderByDescending(p => p.Version).FirstOrDefault();
string versionUpdater = lastVersion == null ? "" : lastVersion.Creator.UserName;
string versionUploadDate = lastVersion == null
? ""
: lastVersion.CreateDate.ToString(TimeFormat);
sb.AppendFormat("New document uploaded by {0} at {1}", versionUpdater, versionUploadDate);
}
else
{
sb.Append("New document uploaded");
}
break;
}
case ChangeType.RemoteChanged:
{
if (wsFile != null)
{
var lastVersion =
change.NewerVersions.OrderByDescending(p => p.Version).FirstOrDefault();
string versionUpdater = lastVersion == null ? "" : lastVersion.Creator.UserName;
string versionUploadDate = lastVersion == null ? "" : lastVersion.CreateDate.ToString(TimeFormat);
sb.AppendFormat("New version uploaded by {0} at {1}", versionUpdater, versionUploadDate);
}
else
{
sb.Append("New version uploaded");
}
break;
}
case ChangeType.LocalChanged:
{
sb.AppendFormat("File changed locally after sharing in Workshare");
break;
}
case ChangeType.NamesDiffer:
{
if (wsFile != null)
{
sb.AppendFormat("Name differs. In Workshare : {0}. Locally : {1}", change.WsName,
change.LocalName);
}
else
{
sb.Append("Local and remote name are different");
}
break;
}
case ChangeType.BothChanged:
{
sb.AppendFormat("File was changed locally and remotely after sharing in Workshare");
break;
}
case ChangeType.DocTypeChanged:
{
sb.AppendFormat("Document type changed from {0} to {1}", change.LocalDocType,
change.WsDocType);
break;
}
case ChangeType.NotSentVersion:
{
sb.AppendFormat("Local version #{0} not sent. Saved at {1}", change.VersionNumber,
change.SavedTime);
break;
}
case ChangeType.VersionChangedAfterSend:
{
sb.AppendFormat("Version #{0} was changed after send. Saved at {1}", change.VersionNumber,
change.SavedTime);
break;
}
case ChangeType.FolderSharedWithOthers:
{
sb.AppendFormat("Folder shared by {0} with {1} other(s)", change.FolderSharedBy.UserName,
change.OtherMemebersCount);
break;
}
default:
{
sb.AppendFormat("{0}", change);
break;
}
}
}
return sb.ToString();
}
catch (Exception ex)
{
Logger.WriteError(ex);
return "Error during detecting of changes";
}
}
public object ConvertBack(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture)
{
throw new NotImplementedException();
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/Authentication/AuthentificationForm.cs
using System;
using System.Net;
using System.Runtime.InteropServices;
using System.Text;
using System.Windows.Forms;
using Workshare.Components.Common;
using Workshare.Components.Concrete;
using Workshare.Components.WSLogger;
using Workshare.Integration.Exceptions;
using WorkshareCloud.ServiceProxy.ProxyHelper;
using WorksharePlatform;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.Components.Views.Authentication
{
public partial class AuthentificationForm : Form
{
// public readonly static string LoginRelUri = string.Format("login?claim_token_url={1}&reuse_session=1", HttpUtility.UrlEncode(PlatformService.HostWithSchema.ToString()).Replace("%2520", "%20"));
public readonly static string LoginRelUri = string.Format("login?device[app_uid]={0}", WSApplication.Instance.APP_CODE);
Uri LoginUri
{
get
{
return new Uri(WSApplication.Instance.Server + LoginRelUri);
}
}
public Exception Error { get; set; }
public AuthentificationForm()
{
InitializeComponent();
var sh = new ScriptHelper();
sh.act_LaunchInBrowser = (a) => { this.webBrowserForm.Navigate(a); };
this.webBrowserForm.ObjectForScripting = sh;
this.Disposed += AuthentificationForm_Disposed;
this.Text = RES.STR_LOGIN_WINDOW_TITLE;
this.Width = 1020;
this.Height = 800;
//this.StartPosition = FormStartPosition.CenterParent;
this.Load += AuthentificationForm_Load;
//webBrowserForm.ScriptErrorsSuppressed = true; breaks authentication with domain credentials
webBrowserForm.Navigated += (object sender, WebBrowserNavigatedEventArgs args) =>
{
Action<HtmlDocument> blockAlerts = (HtmlDocument d) =>
{
HtmlElement h = d.GetElementsByTagName("head")[0];
HtmlElement s = d.CreateElement("script");
mshtml.IHTMLScriptElement e = (mshtml.IHTMLScriptElement)s.DomElement;
e.text = "window.alert=function(){};";
h.AppendChild(s);
};
var b = sender as WebBrowser;
blockAlerts(b.Document);
for (int i = 0; i < b.Document.Window.Frames.Count; i++)
try { blockAlerts(b.Document.Window.Frames[i].Document); }
catch (Exception) { };
};
webBrowserForm.NavigateError += webBrowserForm_NavigateError;
webBrowserForm.Navigated += webBrowserForm_Navigated;
webBrowserForm.DocumentCompleted += webBrowserForm_DocumentCompleted;
webBrowserForm.IsWebBrowserContextMenuEnabled = false;
webBrowserForm.AllowWebBrowserDrop = false;
}
void webBrowserForm_NavigateError(object sender, NewWebBrowser.WebBrowserNavigateErrorEventArgs e)
{
if (e.StatusCode == (int)HttpStatusCode.ProxyAuthenticationRequired)
{
Error = new OfflineException(RES.STR_CONNECTION_ERROR_TEXT_PROXYAUTHENTICATIONREQUIRED, null);
}
else
{
Error = new OfflineException();
}
webBrowserForm.Navigate("about:blank");
this.DialogResult = DialogResult.Abort;
this.Close();
}
void webBrowserForm_DocumentCompleted(object sender, WebBrowserDocumentCompletedEventArgs e)
{
if (webBrowserForm.Document != null)
{
var loginctrl = webBrowserForm.Document.GetElementById("user_session_email");
if (loginctrl != null)
{
loginctrl.Focus();
}
}
this.pb_Loader.Visible = false;
}
void AuthentificationForm_Disposed(object sender, EventArgs e)
{
webBrowserForm.Dispose();
}
void AuthentificationForm_Load(object sender, EventArgs e)
{
Logger.Write("Version of WebBrowser installed: " + webBrowserForm.Version, Severity.Information);
if (ProxyDetector.CheckForProxy(User, this.Handle))
{
if (webBrowserForm.Version.Major <= ModuleViewBase.IE8MAJORVERSION)
{
webBrowserForm.Navigate(LoginUri, null, null, @"User-Agent:Mozilla/4.0 (compatible; MSIE 8.0)");
}
else
{
webBrowserForm.Navigate(LoginUri);
}
}
else
{
this.DialogResult = DialogResult.Cancel;
this.Close();
}
}
public CookieContainer Cookies = new CookieContainer();
public UserDetails User = WSApplication.Instance.AuthProvider.GetCurrentWSUser();
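// Called on every navigation: harvests the device-credential cookie from WinINet once login succeeds,
// stores it on the current user and closes the dialog with DialogResult.OK.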
void webBrowserForm_Navigated(object sender, WebBrowserNavigatedEventArgs e)
{
Logger.Write("Redirected to a: " + e.Url, Severity.Information);
// This has been added to enable the Single Sign on with PingOne
if (e.Url.ToString().EndsWith("/sso/login"))
return;
if (e.Url.ToString().StartsWith("https://sso.connect.pingidentity.com/sso"))
return;
if (e.Url.ToString().EndsWith("startsso.aspx?sp=pingone"))
return;
var doc = webBrowserForm.Document;
if (doc != null)
{
var str = CookieReader.Reader.GetCookie(LoginUri.ToString());
if (!string.IsNullOrEmpty(str))
{
var splits = str.Split(';');
if (splits.Length > 1)
{
foreach (var cookie in splits)
{
var cookieSplit = cookie.Split('=');
if (cookieSplit.Length < 2) continue; // skip cookies without an '='-separated value
var cookieName = cookieSplit[0].Trim();
if (string.Equals(cookieName, DMSAuthProvider.DeviceCredential, StringComparison.InvariantCultureIgnoreCase))//(string.Equals(cookieName, ManAuthProvider._session_id, StringComparison.InvariantCultureIgnoreCase) || string.Equals(cookieName, ManAuthProvider.user_credentials, StringComparison.InvariantCultureIgnoreCase))
{
var cookieObj = new Cookie(cookieName, cookieSplit[1]);
Cookies.Add(WSApplication.Instance.Server, cookieObj);
}
}
}
}
if (!doc.Url.AbsolutePath.Contains("login") && Cookies.Count == 1 /*&& !doc.Url.AbsolutePath.IsDiscarded("user_sessions")*/)
{
if (User == null)
{
User = new UserDetails() { SessionCookies = Cookies };
}
else
{
User.SessionCookies = Cookies;
}
this.DialogResult = (Cookies.Count == 1) ? DialogResult.OK : DialogResult.Cancel;
this.Close();
}
}
else
{
Logger.Write("HTML DOCUMENT IS NULL: " + e.Url, Severity.CriticalError);
}
}
}
[ComVisible(true)]
public class ScriptHelper
{
public Action<string> act_LaunchInBrowser
{
get;
internal set;
}
public void launchUrlInBrowser(string url)
{
if (act_LaunchInBrowser != null)
{
act_LaunchInBrowser(url);
}
}
}
internal class CookieReader
{
/// <summary>
/// Enables the retrieval of cookies that are marked as "HTTPOnly".
/// Do not use this flag if you expose a scriptable interface,
/// because this has security implications. It is imperative that
/// you use this flag only if you can guarantee that you will never
/// expose the cookie to third-party code by way of an
/// extensibility mechanism you provide.
/// Version: Requires Internet Explorer 8.0 or later.
/// </summary>
private const int INTERNET_COOKIE_HTTPONLY = 0x00002000;
[DllImport("wininet.dll", SetLastError = true)]
private static extern bool InternetGetCookieEx(
string url,
string cookieName,
StringBuilder cookieData,
ref int size,
int flags,
IntPtr pReserved);
/// <summary>
/// Returns cookie contents as a string
/// </summary>
/// <param name="url"></param>
/// <returns></returns>
public string GetCookie(string url)
{
int size = 512;
var sb = new StringBuilder(size);
if (!InternetGetCookieEx(url, null, sb, ref size, INTERNET_COOKIE_HTTPONLY, IntPtr.Zero))
{
if (size < 0)
{
return null;
}
sb = new StringBuilder(size);
if (!InternetGetCookieEx(url, null, sb, ref size, INTERNET_COOKIE_HTTPONLY, IntPtr.Zero))
{
return null;
}
}
return sb.ToString();
}
public static CookieReader Reader = new CookieReader();
}
}
<file_sep>/SharePoint/src/WorksharePointCloud/Layouts/WorkshareCloud/FileIconHandler.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Web;
using System.Net;
using System.IO;
using System.Web.Script.Serialization;
using WorkshareCloud.Common;
using Microsoft.SharePoint.Utilities;
using Microsoft.SharePoint;
namespace WorkshareCloud.Layouts.WorkshareCloud
{
class FileIconHandler : IHttpHandler
{
public bool IsReusable
{
get { return false; }
}
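// Redirects the request to an icon matching the requested file name and size
// (16/32px via SharePoint's icon mapping, 40px via the bundled medium icons), falling back to a generic icon on error.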
public void ProcessRequest(HttpContext context)
{
try
{
string str_size = context.Request.QueryString["size"];
IconSize size = IconSize.Size16;
if (string.Equals(str_size, "32"))
{
size = IconSize.Size32;
}
var filename = context.Request.QueryString["filename"];
if (string.IsNullOrEmpty(filename))
{
filename = "a.txt";
}
if (new[] { "32", "16" }.Contains(str_size))
{
var iconUrl = SPUtility.ConcatUrls("/_layouts/images/", SPUtility.MapToIcon(SPContext.Current.Web, filename, "", size));
context.Response.Redirect(iconUrl, false);
}
else if (string.Equals(str_size, "40"))
{
var iconUrl = getMedium(Path.GetExtension(filename));
context.Response.Redirect(iconUrl, false);
}
}
catch (Exception)
{
context.Response.Redirect("/_layouts/images/ictxt.gif", false);
}
}
private static readonly string[] WordExt = { ".docx", ".docm", ".dotx", ".dotm", ".doc", ".dot", ".rtf" };
private static readonly string[] ExcelExt = {
".xl", ".xlsx", ".xlsm", ".xlsb", ".xlam", ".xltx", ".xltm", ".xls",
".xlt"
};
private static readonly string[] PptExt = {
".pptx", ".ppt", ".pptm", ".ppsx", ".pps", ".ppsm", ".potx", ".pot",
".potm", ".odp"
};
private static readonly string[] PDFExt = { ".pdf" };
private static readonly string[] AudioExt = { ".mp3", ".wav", ".flac", ".aac" };
private static readonly string[] VideoExt = { ".avi", ".mp4", ".mov", ".mkv" };
private static readonly string[] ImageExt = { ".bmp", ".jpg", ".jpeg", ".jpe", ".jfif", ".ico", ".png", ".tif", ".tiff", ".dib", ".raw", ".gif" };
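// Maps a file extension onto one of the bundled "medium" (40px) icon images.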
string getMedium(string ext)
{
var imageUrl = "/_layouts/WorkshareCloud/Images/docs/";
if (WordExt.Contains(ext))
{
imageUrl += "doc-medium.png";
}
else if (ExcelExt.Contains(ext))
{
imageUrl += "xls-medium.png";
}
else if (PptExt.Contains(ext))
{
imageUrl += "ppt-medium.png";
}
else if (PDFExt.Contains(ext))
{
imageUrl += "pdf-medium.png";
}
else if (ImageExt.Contains(ext))
{
imageUrl += "image-medium.png";
}
else if (AudioExt.Contains(ext))
{
imageUrl += "audio-medium.png";
}
else if (VideoExt.Contains(ext))
{
imageUrl += "video-medium.png";
}
else
{
imageUrl += "generic-medium.png";
}
return imageUrl;
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Maps/Visitors/BreakLinkVisitor.cs
using System;
using System.Collections.Generic;
using Workshare.Components.WSLogger;
using Workshare.Integration.Processor.Services;
namespace Workshare.Integration.Processor.Maps.Visitors
{
internal class BreakLinkVisitor : ItemMapVisitor
{
private readonly SyncInfoService _syncInfoService;
private readonly Stack<ItemMap> _currentParent = new Stack<ItemMap>();
public BreakLinkVisitor(SyncInfoService syncInfoService)
{
_syncInfoService = syncInfoService;
}
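// Breaks the sync link for a file only when both the file and its visited parent folder exist locally
// and the file's stored sync info points at that parent folder.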
public override bool VisitEnter(FileMap fileMap)
{
var parentFolderMap = _currentParent.Peek() as FolderMap;
if (parentFolderMap.HasLocal()&&fileMap.HasLocal())
{
var parentInfo=_syncInfoService.GetSyncInfo(parentFolderMap.LocalFolder);
var fileSyncInfo = _syncInfoService.GetSyncInfo(fileMap.LocalFile);
if (parentInfo != null && fileSyncInfo!=null && fileSyncInfo.ParentId==parentInfo.ItemId)
{
_syncInfoService.BreakLink(fileMap.LocalFile);
}
}
return false;
}
public override bool VisitEnter(FolderMap folderMap)
{
var parentMap = _currentParent.Count == 0 ? null : _currentParent.Peek();
_currentParent.Push(folderMap);
try
{
var parentFolderMap = parentMap as FolderMap;
if (parentMap == null || parentFolderMap.HasLocal())
{
var parentInfo = (parentFolderMap != null)
? _syncInfoService.GetSyncInfo(parentFolderMap.LocalFolder)
: null;
var folderSyncInfo = (folderMap.HasLocal()) ? _syncInfoService.GetSyncInfo(folderMap.LocalFolder) : null;
return folderSyncInfo != null &&
(parentInfo == null || folderSyncInfo.ParentId == parentInfo.ItemId);
}
return false;
}
catch (Exception ex)
{
Logger.WriteError(ex);
folderMap.Error = ex;
folderMap.ProcessState=ProcessState.Error;
return true;
}
}
public override void VisitLeave(FolderMap folderMap)
{
_currentParent.Pop();
}
public override void Visit(FolderMap foldermap)
{
try
{
_syncInfoService.BreakLink(foldermap.LocalFolder);
}
catch (Exception ex)
{
Logger.WriteError(ex);
foldermap.Error = ex;
foldermap.ProcessState = ProcessState.Error;
}
}
public override void Visit(FileMap fileMap)
{
try
{
_syncInfoService.BreakLink(fileMap.LocalFile);
}
catch (Exception ex)
{
Logger.WriteError(ex);
fileMap.Error = ex;
fileMap.ProcessState = ProcessState.Error;
}
}
}
}
<file_sep>/WSComponents/src/WSCloudService/UserDetails.cs
using System.Net;
using System.Globalization;
namespace WorksharePlatform
{
public class UserDetails
{
public UserDetails()
{
Host = PlatformService.Host;
Proxy = null;
}
public string UserId { get; set; }
public string AccountUuId { get; set; }
public string AccountId { get; set; }
public string Company { get; set; }
public string Email { get; set; }
public string Password { get; set; }
public string ConfirmPassword { get; set; }
public string UserName { get; set; }
public string Phone { get; set; }
public CookieContainer SessionCookies { get; set; }
public ICredentials ProxyCredentials { get; set; }
public WebProxy Proxy { get; set; }
public string AuthToken { get; set; }
public int CurrentFolderId { get; set; }
public string Host { get; set; }
public string ServiceUrl
{
get
{
return PlatformService.ServiceUrl;
}
}
public bool IsProxyEnable
{
get
{
return Proxy != null;
}
}
public int RootFolderId { get; set; }
public string Domain { get; set; }
}
}
<file_sep>/WSComponents/src/WSCloudService/FileVersionDetails.cs
using System;
namespace WorksharePlatform
{
public class FileVersionDetails
{
public int Id { get; set; }
public int FileId { get; set; }
public int CompleteId { get; set; }
public string MultiPartId { get; set; }
public bool IsChunkingRequired { get; set; }
public string Name { get; set; }
public long Size { get; set; }
public int Version { get; set; }
public string Action { get; set; }
public string Key { get; set; }
public string AwsAccessKey { get; set; }
public string AclType { get; set; }
public string Policy { get; set; }
public string Signature { get; set; }
public string SuccessRedirect { get; set; }
public string ContentType { get; set; }
public string Cache { get; set; }
public string Encryption { get; set; }
public string ImageUrl { get; set; }
public string ThumbUrl { get; set; }
public UserDetails Creator { get; set; }
public DateTime CreateDate { get; set; }
public ApiVersion ApiVersion { get; set; }
//parameters for api v1.3
public string ContentLength { get; set; }
public string UserAgent { get; set; }
public string Authorization { get; set; }
public string HttpVerb { get; set; }
public string AuthDate { get; set; }
}
public enum ApiVersion { One = 1, Three = 2 }
}
<file_sep>/WSComponents/src/WSIntegration/Common/OwnCancellationTokenSource.cs
using System;
using Workshare.Components.WSLogger;
namespace Workshare.Integration.Common
{
public class OwnCancellationTokenSource
{
private string id;
public OwnCancellationTokenSource()
{
id = Guid.NewGuid().ToString();
}
private bool _cancelled;
public bool IsCancelled {
get { return _cancelled; }
}
public void Cancel()
{
_cancelled = true;
Logger.WriteTrace(string.Format("cancellation token {0} was set to true", id));
}
}
}
<file_sep>/WSComponents/src/WSComponents/Common/ActivityCounter.cs
#define ENABLE_COUNTER
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using Workshare.Components.Concrete;
using Workshare.Components.Views.TrueSyncDialog.VMs;
using Workshare.Components.WSLogger;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.WsProxies;
using WorksharePlatform;
namespace Workshare.Components.Common
{
class ActivityInfo
{
public IDMSFolder Folder;
public DateTime UpdateTime;
public List<FileMapActivity> Changes;
public int FoldersActivity
{
get
{
return Changes.Count;
}
}
}
enum ActivityUpdateAction
{
Update,
Remove
}
public class ActivityCounter
{
static ActivityCounter _instance = null;
IDMSFolder _currentFolder;
Thread updateFolderThread = null;
Dictionary<int, ActivityInfo> _counterArr;
object lock_object;
const int UpdateFolderInterval = 30000;
BackgroundWorker _worker;
ModuleBase _module;
WsProxy _wsProxy;
ActivityCounter(ModuleBase module)
{
_module = module;
_counterArr = new Dictionary<int, ActivityInfo>();
lock_object = new object();
_currentFolder = null;
_worker = new BackgroundWorker();
_worker.DoWork+=_worker_DoWork;
_wsProxy = new WsProxy();
}
void AddFoldersActivityCount(IDMSFolder _folder, List<FileMapActivity> _changes)
{
lock (lock_object)
{
if (_counterArr.ContainsKey(_folder.DMSId))
{
_counterArr[_folder.DMSId].Changes = _changes;
_counterArr[_folder.DMSId].UpdateTime = DateTime.Now;
}
else
{
_counterArr.Add(_folder.DMSId, new ActivityInfo() { Changes = _changes, Folder = _folder, UpdateTime = DateTime.Now });
}
}
}
public void Remove(IDMSFolder folder)
{
lock (lock_object)
{
if (_counterArr.ContainsKey(folder.DMSId))
{
_counterArr.Remove(folder.DMSId);
}
if (_counterArr.Count > 0)
{
_currentFolder = _counterArr.First().Value.Folder;
}
}
}
public static ActivityCounter Instance
{
get
{
return _instance ?? (_instance = new ActivityCounter(WSApplication.Instance.Module));
}
}
public IDMSFolder CurrentFolder
{
get
{
return _currentFolder;
}
set
{
if (value != null)
{
try
{
if (_currentFolder == null || _currentFolder.DMSId != value.DMSId)
{
_currentFolder = value;
UpdateUserFolder(_currentFolder);
}
}
catch (Exception ex)
{
Logger.WriteError("Error in update user folder for the counter", ex);
}
}
}
}
public int ActivityCount(IDMSFolder _folder)
{
lock(lock_object)
{
if (_counterArr.ContainsKey(_folder.DMSId))
{
return _counterArr[_folder.DMSId].FoldersActivity;
}
else
{
return -1;
}
}
}
bool NeedToScanAll
{
get
{
lock (lock_object)
{
return _counterArr.Count == 0;
}
}
}
bool CheckIfNeedUpdateFolder(IDMSFolder _folder)
{
TimeSpan diff = TimeSpan.Zero;
lock (lock_object)
{
if (_counterArr.ContainsKey(_folder.DMSId))
{
diff = DateTime.Now - _counterArr[_folder.DMSId].UpdateTime;
}
}
if (diff == TimeSpan.Zero)
{
return true;
}
else
{
return diff.TotalMilliseconds >= UpdateFolderInterval;
}
}
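// Background worker loop: rescans the currently selected folder roughly every 10 seconds,
// logging and swallowing errors so the counter keeps running.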
void _worker_DoWork(object sender, DoWorkEventArgs e)
{
try
{
while (true)
{
try
{
ProcessFolder(_currentFolder);
Thread.Sleep(10000);
}
catch (ThreadAbortException)
{
}
catch (Exception ex)
{
Logger.WriteError(ex);
Thread.Sleep(10000);
}
}
}
catch (Exception ex)
{
Trace.TraceError(ex.ToString());
}
}
void UpdateFoldersActivityCount(IDMSFolder _folder, ActivityUpdateAction updateAction, List<FileMapActivity> _changes = null)
{
lock (lock_object)
{
switch (updateAction)
{
case ActivityUpdateAction.Remove:
Remove(_folder);
break;
case ActivityUpdateAction.Update:
AddFoldersActivityCount(_folder, _changes);
break;
}
}
}
private void ProcessFolder(IDMSFolder folder)
{
if (folder.IsDeleted)
{
Instance.Remove(folder);
return;
}
var processor = _module.Resolve<Processor>();
var scanmaps = processor.Scan(new List<BaseDMSItem> { (BaseDMSItem)folder }, new ScanOptions() { UseForceRequest = false });
var counterVisitor = new CounterVisitor(UpdateFoldersActivityCount, _module);
scanmaps.Apply(counterVisitor);
}
bool fullScanInProgress = false;
void FullScan(IDMSItem _item)
{
try
{
fullScanInProgress = true;
if (_item != null && _item.RootFolder() != null)
{
List<IDMSFolder> flds = _item.RootFolder().SubFolders.ToList();
foreach (var fld in flds)
{
ProcessFolder(fld);
}
}
}
catch(Exception ex)
{
Logger.Write("Full scan activity error",ex, Severity.Error);
}
finally
{
fullScanInProgress = false;
_worker.RunWorkerAsync();
}
}
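// Refreshes activity counters for the given folder on a background thread; on first use it kicks off
// a full scan of all root-level folders, otherwise it rescans the folder when its cached data is stale or force is set.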
public void UpdateUserFolder(IDMSFolder _folder, bool force = false)
{
var usr = WSApplication.Instance.AuthProvider.GetCurrentWSUser();
// if the user is logged in
if (_folder != null && !_folder.IsDeleted && _wsProxy.IsUserLoggedIn(usr))
{
// for the first time scan all folders
if (NeedToScanAll && !fullScanInProgress)
{
var fullscanthread = new Thread(() =>
{
try
{
FullScan(_folder);
}
catch (Exception ex)
{
Logger.WriteError(ex);
}
});
#if ENABLE_COUNTER
fullscanthread.Start();
#endif
}
if (CheckIfNeedUpdateFolder(_folder) || force)
{
if (updateFolderThread != null && updateFolderThread.ThreadState == System.Threading.ThreadState.Running)
{
updateFolderThread.Abort();
updateFolderThread.Join();
updateFolderThread = null;
}
updateFolderThread = new Thread(() =>
{
try
{
ProcessFolder(_folder);
}
catch (Exception ex)
{
Logger.WriteError(ex);
}
});
#if ENABLE_COUNTER
updateFolderThread.Start();
#endif
}
}
}
}
class CounterVisitor : ItemMapVisitor
{
internal delegate void UpdateFunctionDelegate(IDMSFolder _folder, ActivityUpdateAction _updateAction, List<FileMapActivity> _changes = null);
internal UpdateFunctionDelegate updateFunction;
ModuleBase _module;
public CounterVisitor(UpdateFunctionDelegate _updateFunction, ModuleBase module)
{
updateFunction = _updateFunction;
_module = module;
}
public override void Visit(FileMap result)
{
}
public override void Visit(FolderMap foldermap)
{
if (foldermap != null && foldermap.LocalFolder != null && foldermap.WsFolder != null)
{
var sis = _module.Resolve<SyncInfoService>();
if (!foldermap.HasRemote() && foldermap.ProcessState == ProcessState.Scanned && foldermap.LocalFolder != null && sis.GetSyncInfo(foldermap.LocalFolder, false) == null)
{
updateFunction(foldermap.LocalFolder, ActivityUpdateAction.Update, new List<FileMapActivity>());
}
else
{
var adapter = new ActivityFinder(WSApplication.Instance.Module);
updateFunction(foldermap.LocalFolder, ActivityUpdateAction.Update, adapter.GetAllActivities(foldermap, new List<FileMapActivity>(), false).Where(TrueSyncDialogViewModel.FilterActivities).ToList());
}
}
else if (foldermap != null && foldermap.LocalFolder != null && foldermap.WsFolder == null)
{
updateFunction(foldermap.LocalFolder, ActivityUpdateAction.Remove);
}
}
public override bool VisitEnter(FolderMap folderMap)
{
return true;
}
public override void VisitLeave(FolderMap folderProcessResult)
{
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Strategies/ProcessStrategy.cs
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.Processor.Strategies.ActionStrategy;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration.Processor.Strategies
{
public class ProcessStrategy : DmsProcessStrategyBase
{
WsProxy wsProxy;
IAuthProvider auth;
DmsWorkerBase dmsWorker;
SyncInfoService syncInfoService;
ModuleBase module;
public ProcessStrategy(ModuleBase module, WsProxy wsProxy, IAuthProvider auth, DmsWorkerBase dmsWorker, SyncInfoService syncInfoService)
{
this.wsProxy=wsProxy;
this.auth = auth;
this.dmsWorker = dmsWorker;
this.syncInfoService = syncInfoService;
this.module = module;
}
public override ProcessResult Process(FileMap fileMap, DmsProcessOptions dmsProcessOptions)
{
return ProcessInternal(fileMap, dmsProcessOptions);
}
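// Applies every distinct action targeted at this map node, resolving the matching strategy
// (import, upload or cease-collaboration) and stopping early if a step is cancelled or fails.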
public ProcessResult ProcessInternal(ItemMap fileMap, DmsProcessOptions dmsProcessOptions)
{
var forthisNode = dmsProcessOptions.Actions
.Where(a => (a.Activity == null ? a.ActivityId : a.Activity.MapId) == fileMap.Id)
.Distinct(new CompareByType()).ToList();
var res = new ProcessResult(fileMap.ProcessState);
foreach (var action in forthisNode)
{
ActionStrategy.ActionStrategy actionStrategy = null;
if (action.Type == ActionType.None)
{
continue;
}
else if (action.Type == ActionType.Import)
{
actionStrategy = module.Resolve<ImportStrategy>();
}
else if (action.Type == ActionType.Upload)
{
actionStrategy = module.Resolve<UploadStrategy>();
}
else if (action.Type == ActionType.CeaseCollaboration)
{
actionStrategy = module.Resolve<CeaseCollaborationStrategy>();
}
Debug.Assert(actionStrategy != null, "New action type?");
if (fileMap is FolderMap)
{
res = actionStrategy.Process((FolderMap) fileMap, action);
}
else if(fileMap is FileMap)
{
res = actionStrategy.Process((FileMap)fileMap, action);
}
if (res.Result == ProcessState.Cancelled || res.Result == ProcessState.Error)
{
return res;
}
}
return res;
}
public override ProcessResult Process(FolderMap foldermap, DmsProcessOptions dmsProcessOptions)
{
return ProcessInternal(foldermap, dmsProcessOptions);
}
public class CompareByType : IEqualityComparer<ItemMapActivityAction>
{
public bool Equals(ItemMapActivityAction x, ItemMapActivityAction y)
{
return x.Type == y.Type;
}
public int GetHashCode(ItemMapActivityAction obj)
{
return obj.Type.GetHashCode();
}
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Common/CollaborationItemInformation.cs
using Workshare.Integration.Enums;
using Workshare.Integration.Interfaces;
using WorksharePlatform;
namespace Workshare.Integration.Common
{
public class CollaborationItemInformation: SyncItemInformation
{
public IDMSItem item;
public FileDetails wsItem;
public ConflictOptions syncOption;
public ConflictVersionOptions verOptions;
public bool AddOnlyNewFiles; // when syncing a folder, only new files should be added
public CollaborationItemInformation(IDMSItem _item, ConflictOptions _option, ConflictVersionOptions _verOptions) : base(_item, _option, _verOptions)
{
item = _item;
syncOption = _option;
verOptions = _verOptions;
wsItem = null;
}
public CollaborationItemInformation(FileDetails _wsitem, IDMSItem _item, ConflictOptions _option, ConflictVersionOptions _verOptions): base(_wsitem, _item, _option, _verOptions)
{
wsItem = _wsitem;
syncOption = _option;
verOptions = _verOptions;
item = _item;
}
public CollaborationItemInformation CopyFor(IDMSItem item)
{
return new CollaborationItemInformation(item, syncOption, verOptions);
}
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/CeaseCollaborationCommand.cs
using System;
using System.Linq;
using System.Runtime.InteropServices;
using Workshare.Components;
using Workshare.Components.Helpers;
using Workshare.Components.Properties;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Extensions;
namespace Workshare.IManage
{
public class CeaseCollaborationCommand : Command
{
//public override object Bitmap
//{
// get
// {
// return Resources.workshare_collaboration.GetHbitmap();
// }
// set { }
//}
public override string MenuText
{
get
{
return "Close Collaboration";
}
set
{
}
}
public override string HelpText
{
get { return MenuText + "\nCollaboration item"; }
set { }
}
public override void ExecuteAction()
{
var contextItems = GetContextItems().ToList();
if (!contextItems.Any())
{
return;
}
if (!Application.iManInstance.Presenter.LoginIfNeeded())
{
return;
}
Application.iManInstance.ClientType = GetClientType();
var linkedItems = contextItems.Where(p => syncService.GetSyncInfo(p, false) != null);
var someItemsLinked = linkedItems.Any();
var hasPermissions = contextItems.All(p => p.DoesUserHavePermissions(Permissions.EditItem));
var anyTaskAlreadyInProgress = Application.iManInstance.Presenter.IsAnyItemInProgress();
var someCheckedOutToOther = contextItems.Any(a => a.IsCheckedOutToOtherUser());
var canCallob = someItemsLinked
&& hasPermissions
&& !anyTaskAlreadyInProgress
&& !someCheckedOutToOther;
if (canCallob)
{
Application.iManInstance.View.OnCollaborationItemsClicked(new CollaborationItemsClickedArgs(linkedItems.Select(p => new CollaborationItemInformation(p, ConflictOptions.None, ConflictVersionOptions.None))));
}
else if (!someItemsLinked)
{
if (contextItems.HasOnlyOneFile())
{
ShowError(
new BaseException(Resources.STR_CollaborationItemsCommand_ExecuteAction_Unable_to_cease,
Resources.STR_ERROR_TEXT_File_is_not_sent_to_Workshare_), contextItems.FirstOrDefault());
}
else if (contextItems.HasOnlyOneFolder())
{
ShowError(
new BaseException(Resources.STR_CollaborationItemsCommand_ExecuteAction_Unable_to_cease,
Resources.STR_ERROR_TEXT_Folder_is_not_sent_to_Workshare_), contextItems.FirstOrDefault());
}
else
{
ShowError(
new BaseException(Resources.STR_CollaborationItemsCommand_ExecuteAction_Unable_to_cease,
Resources.STR_ExecuteAction_All_items_are_not_sent_to_Workshare), contextItems.FirstOrDefault());
}
}
else if (anyTaskAlreadyInProgress)
{
ShowError(new BaseException(Resources.STR_CollaborationItemsCommand_ExecuteAction_Unable_to_cease,
Resources.STR_ExecuteAction_Some_another_task_is_already_in_progress), contextItems.FirstOrDefault());
}
else if (someCheckedOutToOther)
{
ShowError(new BaseException(Resources.STR_CollaborationItemsCommand_ExecuteAction_Unable_to_cease,
Resources.STR_ExecuteAction_File_is_checked_out_to_another_users), contextItems.FirstOrDefault());
}
else
{
ShowError(new DMSUnAuthorizedException(), contextItems.FirstOrDefault());
}
}
}
[ClassInterface(ClassInterfaceType.None)]
[Guid("9DC1D747-703C-43F5-9255-A3DFF7F177F6")]
[ComVisible(true)]
public class DocumentCeaseCollaborationCommand : CeaseCollaborationCommand
{
public override string HelpText
{
get
{
return "Close Collaboration for document on Workshare\nClose Collaboration";
}
set
{
base.HelpText = value;
}
}
}
[ClassInterface(ClassInterfaceType.None)]
[Guid("C7E01420-25DC-40D1-8736-FCE298657FBE")]
[ComVisible(true)]
public class FolderCeaseCollaborationCommand : CeaseCollaborationCommand
{
public override string HelpText
{
get
{
return "Close Collaboration for folder on Workshare\nClose Collaboration";
}
set
{
base.HelpText = value;
}
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/Progress/VMs/ProgressViewModel.cs
using System;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Linq;
using System.Windows;
using System.Windows.Threading;
using Workshare.Components.Common;
using Workshare.Components.Interfaces;
using Workshare.Components.Views.Common;
using Workshare.Components.Views.Progress.VMs;
using Workshare.Components.Views.TrueSyncDialog.WorkUnits;
using Workshare.Components.WSLogger;
namespace Workshare.Components.Views.Progress
{
class ProgressViewModel : BasePropertyChanged<ProgressViewModel>, IDisposable
{
readonly Dispatcher _dispatcher;
public ObservableCollection<ProgressItemVm> Items { set; get; }
public RelayCommand ClearCmd { set; get; }
public RelayCommand RemoveItem{ set; get; }
private CommandInvoker _invoker;
public Visibility Visible
{
get
{
return (Items.Count > 0) && (!_supress) ? Visibility.Visible : Visibility.Hidden;
}
set
{
}
}
public ProgressViewModel()
{
Items = new ObservableCollection<ProgressItemVm>()
{
new ProgressItemVm()
{
Name = "Processing something",
TargetItems = new ObservableCollection<TargetItemVm>()
{
new TargetItemVm(new TargetItem("as")
{
Name = "asd.docx"
})
}
}
};
}
public ProgressViewModel(CommandInvoker invoker)
{
_invoker = invoker;
Items = new ObservableCollection<ProgressItemVm>();
_dispatcher = Dispatcher.CurrentDispatcher;
_invoker.WorkUnitAdded += invoker_WorkUnitAdded;
_invoker.WorkCompleted += invoker_WorkCompleted;
}
bool _supress;
public void Suppress(bool tosuppress)
{
this._supress = tosuppress;
TriggerVisibilityChanged();
}
void TriggerVisibilityChanged()
{
// TODO: Review/investigate.
// We assume the view stays open for the whole lifetime of the process, but it can sometimes be closed
// (if a parent is set and the owner window is closed), in which case this call fails. For now we simply
// do not set an Owner for the progress window. The design of the progress window infrastructure needs review.
try
{
this.PropertyHasChanged(a => a.Visible);
}
catch (InvalidOperationException ex)
{
Logger.WriteError(ex);
}
}
void invoker_WorkUnitAdded(IWorkUnit obj)
{
if(obj==null)return;
Logger.WriteTrace(string.Format("ProgressViewModel invoker_WorkUnitAdded Type={0}",obj.GetType()));
if (obj is ScanWorkUnit || obj is CeaseCollaborationDialog.WorkUnits.ScanWorkUnit) return;
InvokeiNDispatcherThread(() =>
{
var existedProgressItem = Items.FirstOrDefault(a => a.Name == obj.Name);
if (existedProgressItem == null)
{
existedProgressItem = new ProgressItemVm()
{
Name = obj.Name
};
}
foreach (var item in obj.TargetItems)
{
var newItemVm = new TargetItemVm(item);
existedProgressItem.TargetItems.Add(newItemVm);
}
if (!Items.Contains(existedProgressItem))
{
Items.Add(existedProgressItem);
}
existedProgressItem.PropertyChangedAll();
TriggerVisibilityChanged();
});
}
void InvokeiNDispatcherThread(Action act)
{
if (_dispatcher.CheckAccess())
{
act();
}
else
{
_dispatcher.Invoke(act);
}
}
void invoker_WorkCompleted(IWorkUnit obj)
{
InvokeiNDispatcherThread(() =>
{
var existedProgressItem = Items.FirstOrDefault(a => a.Name == obj.Name);
if (existedProgressItem != null)
{
foreach (var targetItem in obj.TargetItems)
{
var toRemove = existedProgressItem.TargetItems.Where(a => a.IsFor(targetItem)).ToList();
toRemove.ForEach(a => existedProgressItem.TargetItems.Remove(a));
}
if (existedProgressItem.TargetItems.Count == 0)
{
Items.Remove(existedProgressItem);
}
existedProgressItem.PropertyChangedAll();
TriggerVisibilityChanged();
}
});
}
public void Dispose()
{
_invoker.WorkUnitAdded -= invoker_WorkUnitAdded;
_invoker.WorkCompleted -= invoker_WorkCompleted;
}
}
}
<file_sep>/OpenText/src/Workshare.OpenText/WSOTMenuInitializer.cs
using System;
using System.Runtime.InteropServices;
using Hummingbird.DM.Extensions.Interop.DOCSObjects;
using Hummingbird.DM.Extensions.Interop.VHELPER;
using Workshare.Components.WSLogger;
using Workshare.OpenText.Concrete;
namespace Workshare.OpenText
{
[ComVisible(true)]
[ClassInterface(ClassInterfaceType.None)]
[Guid("08986AE5-D79F-4EE7-B876-D1C69F09C01B")]
public class WSOTMenuWrapper : IOMEventHandler, IOMDocumentMenuSink, IOMDocumentEventSink
{
VBConnector connector;
public short ExecuteMenuItem(IDocProfiles pProfiles, string bstrCmdID)
{
try
{
CommandHelper.ExecuteMenu(pProfiles, bstrCmdID);
}
catch (Exception ex)
{
Logger.Write("Trying to EXECUTE menu item", ex, Severity.CriticalError);
}
return (short)HResultCodes.HS_NEXT_HANDLER;
}
public void GetMenuItems(IDocProfiles pProfiles, ControlMenu pMenu)
{
try
{
var mnu = CommandHelper.GetCommandName(pProfiles);
var OTMenu = pMenu.GetMenu();
if (mnu != null)
{
bool AlreadyExist = false;
int i = 1;
while (!AlreadyExist && i<=OTMenu.Count)
{
if (string.Equals(OTMenu[i].ID, mnu.ID, StringComparison.InvariantCultureIgnoreCase))
{
AlreadyExist = true;
}
i++;
}
if (!AlreadyExist)
{
pMenu.AddVerb(mnu.ID, mnu.Name, mnu.Parent, mnu.InsertBefore, 0);
}
//pMenu.SetVerbIcons(mnu.ID, false, (int)RES.Workshare_48x48.Handle, (int)RES.Workshare_48x48.Handle, (int)RES.Workshare_48x48.Handle);
}
}
catch (Exception ex)
{
Logger.Write("Trying to ADD menu item", ex, Severity.CriticalError);
}
}
public void SelectionChanged(IDocProfiles pProfiles, ControlMenu pMenu)
{
//throw new NotImplementedException();
}
public void Init(IApplication pApp)
{
//System.Diagnostics.Debugger.Launch();
try
{
connector = new VBConnector();
int interfacesConnected;
interfacesConnected = connector.Connect(pApp, this);
Application.OTInstance.ParentWindow = pApp.DefParentWindow;
Application.OTInstance.UserDST = pApp.DST;
Application.OTInstance.CurrentLibraryName = pApp.CurrentLibrary.Name;
Application.OTInstance.CurrentUser = pApp.CurrentLibrary.UserName;
}
catch { }
}
public void Terminate(IApplication pApp)
{
connector = null;
}
public short ProfileEvent(short sEventID, short sEventType, IProfile pProfile, Hummingbird.DM.Extensions.Interop.DOCSObjects.IBCCoreCtx pParams)
{
try
{
if (sEventType == 2 && (sEventID == (int)eDOCSDEventType.DE_UNLOCK || sEventID == (int)eDOCSDEventType.DE_RELEASE_DOC ))
{
if (pProfile.Columns.Count > 0 && pProfile.Columns[Application.OTInstance.SENDINFO_FIELD].Value.ToString() != "")
{
pProfile.Columns[Application.OTInstance.SENDINFO_FIELD].Value = "";
pProfile.Save(0);
}
}
}
catch
{ }
return (short)HResultCodes.HS_NEXT_HANDLER;
}
public short VersionEvent(short sEventID, short sEventType, IVersion pVersion, Hummingbird.DM.Extensions.Interop.DOCSObjects.IBCCoreCtx pParams)
{
try
{
if (sEventType == 2 && (sEventID == (int)eDOCSDEventType.DE_CHECK_OUT || sEventID == (int)eDOCSDEventType.DE_LOCK))
{
var pProfile = pVersion.Profile;
if (pProfile.Columns.Count > 0 && pProfile.Columns[Application.OTInstance.SENDINFO_FIELD].Value.ToString() != "")
{
pProfile.Columns[Application.OTInstance.SENDINFO_FIELD].Value = "";
pProfile.Save(0);
}
}
}
catch
{ }
return (short)HResultCodes.HS_NEXT_HANDLER;
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncUploadFilesDialog/VMs/FileUploadActivityVm.cs
using Workshare.Components.Views.TrueSyncDialog;
using Workshare.Integration.Processor.Changes.Activities;
namespace Workshare.Components.Views.TrueSyncUploadFilesDialog.VMs
{
public class FileUploadActivityVm : UploadFileActivityVm
{
public FileUploadActivityVm()
: base(null)
{
}
public FileUploadActivityVm(UploadFileActivity data)
: base(data)
{
_data = data;
}
public override bool DiscardAlwaysVisible
{
get { return true; }
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Interfaces/IWSIntegration.cs
using System.Collections.Generic;
using Workshare.Integration.Common;
using WorksharePlatform;
namespace Workshare.Integration.Interfaces
{
public interface IWSIntegration
{
void SendItems(IEnumerable<IDMSItem> localItems, FolderDetails folder, OperationContext args);
}
}
<file_sep>/SharePoint/src/WorksharePointCloud/ULSLogger.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.SharePoint.Administration;
namespace WorkshareCloud
{
internal class DiagnosticService : SPDiagnosticsServiceBase
{
private static string DiagnosticsAreaName = "SharePointEmails";
public DiagnosticService()
{
}
public DiagnosticService(string name, SPFarm farm)
: base(name, farm)
{
}
protected override IEnumerable<SPDiagnosticsArea> ProvideAreas()
{
List<SPDiagnosticsCategory> categories = new List<SPDiagnosticsCategory>();
foreach (string catName in Enum.GetNames(typeof(Category)))
{
uint catId = (uint)(int)Enum.Parse(typeof(Category), catName);
categories.Add(new SPDiagnosticsCategory(catName, TraceSeverity.Verbose, EventSeverity.Error, 0, catId));
}
yield return new SPDiagnosticsArea(DiagnosticsAreaName, categories);
}
public static DiagnosticService Local
{
get
{
return SPDiagnosticsServiceBase.GetLocal<DiagnosticService>();
}
}
public SPDiagnosticsCategory this[Category id]
{
get
{
return Areas[DiagnosticsAreaName].Categories[id.ToString()];
}
}
}
public enum Category
{
Default,
SelectFolderPage,
NewFolderPage,
CloudService
}
public enum SeverityEnum
{
Trace,
Warning,
Information,
Error,
CriticalError,
Verbose
}
public class Logger
{
private static DiagnosticService Local
{
get
{
if (_local == null)
{
_local = DiagnosticService.Local;
}
if (_local == null) throw new NullReferenceException("DiagnosticService is null");
return _local;
}
}
static DiagnosticService _local;
public static void WriteTrace(string text, TraceSeverity severity)
{
WriteTrace(text, severity, Category.Default);
}
public static void WriteTrace(string text, TraceSeverity severity, Category category)
{
Local.WriteTrace(1, Local[category], severity, text);
}
}
}
<file_sep>/iManageIntegration/Src/CustomFieldsChooser/ChooseCustomField.cs
using Microsoft.Win32;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using Workshare.IManage;
using RES = Workshare.Autonomy.Configuration.Properties.Resources;
namespace CustomFieldsChooser
{
public partial class ChooseCustomField : Form
{
//this is an alias for imProfileAttributeID.imProfileCustom13 enum
private readonly int imProfileCustom13 = 37;
private readonly int imProfileCustom25 = 49;
public ChooseCustomField()
{
InitializeComponent();
this.StartPosition = FormStartPosition.CenterScreen;
this.Shown += new EventHandler(dlg_shown);
}
private void dlg_shown(object sender, EventArgs e)
{
this.TopMost = true;
this.checkedListBox1.SelectionMode = this.checkedListBox2.SelectionMode = SelectionMode.One;
this.checkedListBox1.CheckOnClick = this.checkedListBox2.CheckOnClick = true;
var checkedcustom = Registering.GetCustomFieldValue();
if (checkedcustom != null && checkedcustom >= imProfileCustom13 && checkedcustom <= imProfileCustom13 + 3)
this.checkedListBox1.SetItemChecked(checkedcustom - imProfileCustom13 ?? 0, true);
var checkedcustomicon = Registering.GetCustomIconFieldValue();
if (checkedcustomicon != null && checkedcustomicon >= imProfileCustom25 && checkedcustomicon <= imProfileCustom25 + 2)
this.checkedListBox2.SetItemChecked(checkedcustomicon - imProfileCustom25 ?? 0, true);
}
private void button1_Click(object sender, EventArgs e)
{
SafeRegistryOperation(() =>
{
int checkedCustom = -1, checkedCustomIcon = -1;
for (int ix = 0; ix < checkedListBox1.Items.Count; ++ix)
{
if (checkedListBox1.GetItemChecked(ix))
{
checkedCustom = ix + imProfileCustom13;
break;
}
}
if (checkedCustom != -1)
Registering.SetCustomFieldValue(checkedCustom);
else
Registering.RemoveCustomFieldValues();
for (int ix = 0; ix < checkedListBox2.Items.Count; ++ix)
{
if (checkedListBox2.GetItemChecked(ix))
{
checkedCustomIcon = ix + imProfileCustom25;
break;
}
}
if (checkedCustomIcon != -1)
Registering.SetCustomIconFieldValue(checkedCustomIcon);
else
Registering.RemoveCustomIconFieldValues();
this.Close();
});
}
private void checkedListBox1_ItemCheck(object sender, ItemCheckEventArgs e)
{
if (e.NewValue == CheckState.Checked)
{
for (int ix = 0; ix < checkedListBox1.Items.Count; ++ix)
{
if (e.Index != ix)
{
checkedListBox1.SetItemChecked(ix, false);
}
}
}
}
private void checkedListBox2_ItemCheck(object sender, ItemCheckEventArgs e)
{
if (e.NewValue == CheckState.Checked)
{
for (int ix = 0; ix < checkedListBox2.Items.Count; ++ix)
{
if (e.Index != ix)
{
checkedListBox2.SetItemChecked(ix, false);
}
}
}
}
private void button2_Click(object sender, EventArgs e)
{
this.Close();
}
void SafeRegistryOperation(Action action)
{
try
{
action();
}
catch (Exception ex)
{
if (ex is System.UnauthorizedAccessException || ex is System.Security.SecurityException)
{
MessageBox.Show(RES.STR_CANNOT_RIGHT_WRITE_REGISTRY,RES.STR_ERROR_DLG_TITLE);
}
else
{
throw;
}
}
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Changes/DocumentAction.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Workshare.Integration.Enums;
using Workshare.Integration.Processor.Changes;
namespace Workshare.Integration.Processor
{
public abstract class ItemMapActivityAction
{
public ItemMapActivityAction(string id)
{
ActivityId = id;
Actions = new List<ItemMapActivityAction>();
this.Title = GetType().Name;
}
public ItemMapActivityAction(ItemMapActivity activity)
{
Activity = activity;
ActivityId = activity.Id;
Actions = new List<ItemMapActivityAction>();
this.Title = GetType().Name;
}
public ItemMapActivity Activity { get; set; }
//for now must be same as ItemMapActivity.Id
public string ActivityId { set; get; }
public abstract ActionType Type { get; }
public virtual string Title { get; set; }
public List<ItemMapActivityAction> Actions { set; get; }
public void AddAction(ItemMapActivityAction action)
{
if (!this.Actions.Contains(action))
{
this.Actions.Add(action);
}
}
public void AddAction(IEnumerable<ItemMapActivityAction> actions)
{
foreach (var action in actions)
{
this.Actions.Add(action);
}
}
}
public class ItemMapActivityActionGroup : ItemMapActivityAction
{
public ItemMapActivityActionGroup(string title)
: base("")
{
this.Title = title;
}
public override ActionType Type
{
get { return ActionType.None; }
}
}
public class ImportDocumentAction : ItemMapActivityAction
{
public ImportDocumentAction(ItemMapActivity activity, string title, ConflictVersionOptions a, ImportType b)
: base(activity)
{
this.Title = title;
this.ImportType = b;
this.ImportVersionsAction = a;
}
public override ActionType Type { get { return ActionType.Import; } }
public ImportType ImportType { set; get; }
public ConflictVersionOptions ImportVersionsAction { set; get; }
public bool ImportBreakLink { set; get; }
}
public class UploadDocumentAction : ItemMapActivityAction
{
public UploadDocumentAction(ItemMapActivity activity, string title)
: base(activity)
{
this.Title = title;
this.VersionIds = new List<string>();
}
public override ActionType Type { get { return ActionType.Upload; } }
public UploadType UploadType { set; get; }
public List<string> VersionIds { get; private set; }
}
public class CeaseCollaborationAction : ItemMapActivityAction
{
public CeaseCollaborationAction(string documentChangeId, CeaseCollaborationType fileAction, CeaseCollaborationImportType importAction)
: base(documentChangeId)
{
FileAction = fileAction;
ImportAction = importAction;
}
public override ActionType Type { get { return ActionType.CeaseCollaboration; } }
public CeaseCollaborationImportType ImportAction { get; private set; }
public CeaseCollaborationType FileAction { get; private set; }
}
public enum ActionType { None, Import, Upload, CeaseCollaboration }
public enum ImportType { None, AsNewVersion, AsNewDocument, AsRelatedDocument }
public enum UploadType { None, SpecificVersions }
public enum CeaseCollaborationImportType { None, WithComments, WithoutComments }
public enum CeaseCollaborationType { Delete, Skip }
public enum CeaseCollaborationCloseSpaceError { None, RemoteDeleted, CheckOutToOtherUser, CheckOutFileNotFound, NoAccessToDeleteOnWS, NoAccessToDownloadFileFromWS }
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Processor.cs
using System;
using System.Collections.Generic;
using System.Linq;
using Workshare.Components.WSLogger;
using Workshare.Integration.Common;
using Workshare.Integration.Modularity;
using Workshare.Integration.Processor.DmsItems;
using Workshare.Integration.Processor.DmsItems.Visitors;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Maps.Visitors;
using Workshare.Integration.Processor.Strategies;
namespace Workshare.Integration.Processor
{
public class Processor
{
readonly ModuleBase _module;
private readonly DmsWorkerBase _dmsWorker;
private readonly TreePathFinder _treePathFinder;
public Processor(ModuleBase module, DmsWorkerBase dmsWorker, TreePathFinder treePathFinder)
{
_module = module;
_dmsWorker = dmsWorker;
_treePathFinder = treePathFinder;
}
public SummaryProcessResult Scan(List<BaseDMSItem> items, ScanOptions options)
{
return ScanFirstPage((BaseDMSFolder)items.First(), options, null);
}
public SummaryProcessResult ScanFirstPage(BaseDMSFolder folder, ScanOptions options, IItemsDetector detector)
{
return InternalScanNextPage(folder, options, new SummaryProcessResult(), detector);
}
public SummaryProcessResult ScanNextPage(BaseDMSFolder folder, ScanOptions options,
SummaryProcessResult previousScan)
{
if (previousScan == null) throw new ArgumentNullException("previousScan");
return InternalScanNextPage(folder, options, previousScan, null);
}
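// Scans one page of DMS items using the iterator carried in the previous scan result (or a new one),
// marking the result as cancelled or failed as appropriate.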
private SummaryProcessResult InternalScanNextPage(BaseDMSFolder folder, ScanOptions options, SummaryProcessResult scanResultToProceed, IItemsDetector detector)
{
Logger.WriteTrace(string.Format("ScanNextPage"));
scanResultToProceed = scanResultToProceed ?? new SummaryProcessResult();
options = options ?? new ScanOptions() { UseForceRequest = true };
var visitor = _module.Resolve<ScanVisitor>();
visitor.Initialize(scanResultToProceed);
visitor.Options = options;
try
{
var iterator = scanResultToProceed.Iterator;
iterator = iterator ?? new PageIterator(options.ItemsPerPage, visitor, folder, detector);
scanResultToProceed.Iterator = iterator;
iterator.VisitNextPage();
if (options.Cancellation.IsCancelled)
{
visitor.Result.ProcessState = ProcessState.Cancelled;
}
}
catch (Exception ex)
{
Logger.WriteError(ex);
visitor.Result.ProcessState = ProcessState.Error;
visitor.Result.Error = ex;
}
return visitor.Result;
}
public SummaryProcessResult DirectScanFile(IEnumerable<BaseDMSFile> files)
{
Logger.WriteTrace("DirectScanFile");
SummaryProcessResult scanResultToProceed = new SummaryProcessResult();
ScanOptions options = new ScanOptions() { UseForceRequest = true };
var visitor = _module.Resolve<ScanVisitor>();
visitor.Initialize(scanResultToProceed);
visitor.Options = options;
try
{
foreach (var file in files)
{
var iterator = new PageIterator(int.MaxValue, visitor, file, null);
iterator.VisitNextPage();
}
if (options.Cancellation.IsCancelled)
{
visitor.Result.ProcessState = ProcessState.Cancelled;
}
}
catch (Exception ex)
{
Logger.WriteError(ex);
visitor.Result.ProcessState = ProcessState.Error;
visitor.Result.Error = ex;
}
return visitor.Result;
}
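// Scans a single file in the context of each of its parent folder paths so that folder-level sync info
// is taken into account; files without parents are scanned directly.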
public IEnumerable<SummaryProcessResult> ScanFile(BaseDMSFile item, ScanOptions options)
{
Logger.WriteTrace(string.Format("ScanFile"));
var results = new List<SummaryProcessResult>();
var parents = _dmsWorker.GetParents(item).ToList();
if (parents.Any())
{
foreach (var parent in parents)
{
var bottomLevelFolder = new SingleChildDMSFolder((BaseDMSFolder)parent) { File = item };
_treePathFinder.FindPath((BaseDMSFolder)parent, bottomLevelFolder);
var topLevelFolder = _treePathFinder.Reverse(bottomLevelFolder);
var result = Scan(new List<BaseDMSItem> { topLevelFolder }, options);
results.Add(result);
}
}
else
{
var result = Scan(new List<BaseDMSItem> { item }, options);
results.Add(result);
}
return results;
}
public SummaryProcessResult Process(SummaryProcessResult scanResult, ProcessOptions options)
{
Logger.WriteTrace(string.Format("Process"));
var processVisitor = _module.Resolve<ProcessVisitor>();
processVisitor.Initialize(options);
try
{
scanResult.Apply(processVisitor);
}
catch (Exception ex)
{
scanResult.ProcessState = ProcessState.Error;
scanResult.Error = ex;
}
return scanResult;
}
public void BreakLink(ItemMap root)
{
Logger.WriteTrace(string.Format("BreakLink"));
var visitor = _module.Resolve<BreakLinkVisitor>();
root.Apply(visitor);
}
}
public class ProcessOptions
{
public ProcessOptions()
: this(new OwnCancellationTokenSource())
{
}
public ProcessOptions(OwnCancellationTokenSource cancellation)
{
this.ActionsToApply = new List<ItemMapActivityAction>();
this.Cancellation = cancellation;
}
public OwnCancellationTokenSource Cancellation
{
private set;
get;
}
public List<ItemMapActivityAction> ActionsToApply { private set; get; }
}
public class ScanOptions
{
public ScanOptions()
: this(new OwnCancellationTokenSource())
{
ItemsPerPage = int.MaxValue;
}
public ScanOptions(OwnCancellationTokenSource cancellation)
{
this.Cancellation = cancellation;
this.ItemsPerPage = int.MaxValue;
UseForceRequest = true;
}
public OwnCancellationTokenSource Cancellation
{
private set;
get;
}
public int ItemsPerPage { get; set; }
public bool UseForceRequest { get; set; }
}
}
<file_sep>/WSComponents/src/WSComponents/Views/ErrorsWindow/ErrorsViewModel.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Windows.Input;
using Workshare.Components.Views.Common;
using Workshare.Integration.Common;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using WorksharePlatform;
using Permissions = Workshare.Integration.Enums.Permissions;
namespace Workshare.Components.Views.ErrorsWindow
{
class ErrorsViewModel : OwnViewModel
{
readonly BindingList<ItemErrorViewModel> _errors = new BindingList<ItemErrorViewModel>();
public BindingList<ItemErrorViewModel> Errors { get { return _errors; } }
public ErrorsViewModel()
{
Errors.Add(new ItemErrorViewModel(new FakeFile
{
DisplayName = "asdasdasd.docx"
},
new BaseException("Caption1","TestError1")));
Errors.Add(new ItemErrorViewModel(new FakeFile
{
DisplayName = "asdasdasd.docx"
},
new BaseException("Caption1", "TestError1")));
Errors.Add(new ItemErrorViewModel(new FakeFile
{
DisplayName = "asdasdasd.docx"
},
new BaseException("Caption1", "long error long error long error long error long error long error long error long error long error long error long error")));
}
public ErrorsViewModel(IEnumerable<ItemException> errors)
{
if (errors != null)
{
errors.ToList().ForEach(p => Errors.Add(new ItemErrorViewModel(p.Item, p.Error)));
}
CloseCommand = new RelayCommand(p => RaiseClose(false));
}
public ICommand CloseCommand { set; get; }
}
public class ItemErrorViewModel
{
public IDMSItem Item { private set; get; }
public Exception Error { private set; get; }
public ItemErrorViewModel(IDMSItem item, Exception error)
{
Item = item;
Error = error;
}
}
#region Fakes
class FakeFile : IDMSFile
{
public string GetFilePath()
{
throw new NotImplementedException();
}
public DateTime Modified
{
get { throw new NotImplementedException(); }
}
public bool IsCheckedOutFileExists()
{
throw new NotImplementedException();
}
public List<IDMSFolder> ParentFolders
{
get { throw new NotImplementedException(); }
}
public IDMSFile GetLatest()
{
throw new NotImplementedException();
}
public void DiscardCheckout(bool deleteCheckOutFile = false)
{
throw new NotImplementedException();
}
public IEnumerable<IDmsVersion> GetVersions()
{
throw new NotImplementedException();
}
public IDmsVersion AddVersion(string filePath, FileDetails file, List<Activity> activities, string versionFriendlyName = null, bool checkInVersion = false, bool keepLocalState = false)
{
throw new NotImplementedException();
}
public IDMSItemID ID
{
get { throw new NotImplementedException(); }
}
public int DMSId
{
get { throw new NotImplementedException(); }
}
public string Name
{
get;
set;
}
public bool DoesUserHavePermissions(Permissions permissions)
{
throw new NotImplementedException();
}
public bool CheckedOutToUser
{
get { throw new NotImplementedException(); }
}
public bool CheckedOut
{
get { throw new NotImplementedException(); }
}
public string DisplayName
{
get;
set;
}
public IDMSFolder ParentFolder
{
get { throw new NotImplementedException(); }
}
public bool WasUpdatedAfterSend2(bool useForceRequest = true)
{
throw new NotImplementedException();
}
public void OnBeforeSending(OperationContext context)
{
throw new NotImplementedException();
}
public void OnAfterSending(OperationContext context)
{
throw new NotImplementedException();
}
public void OnSendError(object args, Exception e)
{
throw new NotImplementedException();
}
public void AddHistory(string eventName, string eventComment, Integration.Operations operation)
{
throw new NotImplementedException();
}
public void AddHistories(List<Activity> activities)
{
throw new NotImplementedException();
}
public IDMSFolder RootFolder()
{
throw new NotImplementedException();
}
public string DMSItemKey
{
get { throw new NotImplementedException(); }
}
}
class FakeFolder : IDMSFolder
{
public IDMSFolder AddSubFolder(FolderDetails cloudFolder)
{
throw new NotImplementedException();
}
public IEnumerable<IDMSFolder> SubFolders
{
get;
set;
}
public IEnumerable<IDMSFile> Files
{
get;
set;
}
public bool IsDeleted
{
get { throw new NotImplementedException(); }
}
public IDMSItemID ID
{
get { throw new NotImplementedException(); }
}
public int DMSId
{
get { throw new NotImplementedException(); }
}
public string Name
{
get;
set;
}
public bool DoesUserHavePermissions(Permissions permissions)
{
throw new NotImplementedException();
}
public bool CheckedOutToUser
{
get { throw new NotImplementedException(); }
}
public bool CheckedOut
{
get { throw new NotImplementedException(); }
}
public string DisplayName
{
get;
set;
}
public IDMSFolder ParentFolder
{
get { throw new NotImplementedException(); }
}
public bool WasUpdatedAfterSend2(bool useForceRequest = true)
{
throw new NotImplementedException();
}
public void OnBeforeSending(OperationContext context)
{
throw new NotImplementedException();
}
public void OnAfterSending(OperationContext context)
{
throw new NotImplementedException();
}
public void OnSendError(object args, Exception e)
{
throw new NotImplementedException();
}
public void AddHistory(string eventName, string eventComment, Integration.Operations operation)
{
throw new NotImplementedException();
}
public void AddHistories(List<Activity> activities)
{
throw new NotImplementedException();
}
public IDMSFolder RootFolder()
{
throw new NotImplementedException();
}
public string DMSItemKey
{
get { throw new NotImplementedException(); }
}
}
#endregion
}
<file_sep>/WSComponents/src/WSComponents/Presenter/ModulePresenterBase.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Threading;
using Workshare.Components.Common;
using Workshare.Components.Concrete;
using Workshare.Components.Exceptions;
using Workshare.Components.Interfaces;
using Workshare.Components.Views.Common;
using Workshare.Components.WSLogger;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.WsProxies;
using WorksharePlatform;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.Components.Presenter
{
public abstract class ModulePresenterBase : IModulePresenter
{
protected readonly CommandInvoker invoker;
private readonly IModuleView _view;
readonly WsProxy _wsProxy;
protected ModulePresenterBase(IModuleView view)
{
_view = view;
invoker = new CommandInvoker();
_wsProxy = new WsProxy();
ShowProgress();
}
public void ShowProgress()
{
var threadApartment = Thread.CurrentThread.GetApartmentState();
if (threadApartment != ApartmentState.STA)
{
Logger.WriteWarning("Startin ProgressWindow is separate thread");
var newWindowThread = new Thread(() =>
{
_view.ShowProgressWindow(invoker);
System.Windows.Threading.Dispatcher.Run();
});
newWindowThread.SetApartmentState(ApartmentState.STA);
newWindowThread.IsBackground = true;
newWindowThread.Start();
}
else
{
Logger.WriteWarning("Startin ProgressWindow is current thread");
_view.ShowProgressWindow(invoker);
}
}
private DialogSettings _currentDialogSettings;
public DialogSettings GetCurrentDialogSettings(UserDetails user)
{
if (_currentDialogSettings == null)
{
_currentDialogSettings = GetDialogSettingsFromWorkshare(user);
}
return _currentDialogSettings;
}
public virtual bool IsAnyItemInProgress()
{
return invoker.GetAllWorks().Any();
}
public virtual void OnCollaborationItemsClicked(CollaborationItemsClickedArgs args)
{
InvokeInSTAThreadIfNeed(() =>
{
if (LoginIfNeeded())
{
CollaborationItems(args.Items);
}
});
}
private void CollaborationItems(IEnumerable<CollaborationItemInformation> items)
{
var dmsItems = items.Select(p => p.item);
var dlg = new Views.CeaseCollaborationDialog.CeaseCollaborationDialog(WSApplication.Instance.Module, invoker, dmsItems, _view);
new System.Windows.Interop.WindowInteropHelper(dlg).Owner = _view.ActivieWindowHandle;
dlg.ShowDialog();
}
public virtual WorkUnit GetSendCommand(IEnumerable<IDMSItem> items, IEnumerable<FileMapActivity> itemsToSkip, int destFolderId, CheckOutOptions option)
{
var res = new SendWorkUnit(_view, items, destFolderId, option);
res.OnError += res_OnError;
return res;
}
protected CheckOutOptions ShowCheckOutDialogIfNeed(SendItemsClickedArgs args)
{
return _view.ShowCheckOutDialogIfNeed(args);
}
public virtual void OnSendItemsClicked(SendItemsClickedArgs args)
{
InvokeInSTAThreadIfNeed(() =>
{
bool repeat = true;
while (repeat)
{
if (LoginIfNeeded())
{
var res = _view.ShowSelectFolder(GetCurrentDialogSettings(WSApplication.Instance.AuthProvider.GetCurrentWSUser()));
if (res > -1) // a non-negative result is the id of the selected destination folder
{
invoker.AddToQueue(GetSendCommand(args.Items, new List<FileMapActivity>(), res, CheckOutOptions.CheckOut));
repeat = false;
}
else if (res == -1)
{
repeat = false;
}
else if (res == -10) // we lost auth credentials
{
var user = WSApplication.Instance.AuthProvider.GetCurrentWSUser();
if (user != null)
{
user.AuthToken = string.Empty;
user.SessionCookies = null;
}
}
}
else
{
repeat = false;
}
}
});
}
public abstract void OnSyncItemsClicked(SyncItemsClickedArgs args);
private void res_OnError(object sender, WorkUnitErrorEventArgs e)
{
var error = e.Error as MultiItemsException;
if (error != null)
{
e.Handled = true;
var multi = error;
var errors = multi.errorList.AsQueryble().ToList();
if (errors.Any())
{
_view.ShowErrors(errors);
}
}
}
public virtual bool LoginIfNeeded()
{
try
{
var user = WSApplication.Instance.AuthProvider.GetCurrentWSUser();
if (!WorkshareCloud.ServiceProxy.ProxyHelper.ProxyDetector.CheckForProxy(user, _view.ActivieWindowHandle))
{
return false;
}
if (user != null)
{
if (!string.IsNullOrEmpty(user.AccountUuId))
{
return true;
}
if (_wsProxy.IsUserLoggedIn(user))
{
return true;
}
}
var res = _view.ShowLogin(out user, GetCurrentDialogSettings(user));
if (res)
{
WSApplication.Instance.AuthProvider.SetCurrentUser(user);
}
return res;
}
catch (WebException ex)
{
if (ex.Status == WebExceptionStatus.Timeout)
{
var exceptions = new List<ItemException>()
{
new ItemException() { Error = new OfflineException(RES.STR_CONNECTION_ERROR_TEXT_TIMEOUT, ex.InnerException) }
};
WSApplication.Instance.View.ShowErrors(exceptions);
}
return false;
}
}
#region public common methods
public static void InvokeInSTAThreadIfNeed(Action act)
{
if (Thread.CurrentThread.GetApartmentState() == ApartmentState.STA)
{
act();
}
else
{
Exception threadEx = null;
var thread = new Thread((a) =>
{
try
{
act();
}
catch (Exception ex)
{
threadEx = ex;
}
});
thread.SetApartmentState(ApartmentState.STA);
thread.Start();
while (thread.IsAlive)
{
Thread.Sleep(1);
System.Windows.Forms.Application.DoEvents();
}
if (threadEx != null)
{
throw threadEx;
}
}
}
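// Illustrative usage sketch (not from the original source): wrapping a UI call so that WPF dialogs
// created inside it run on an STA thread even when the caller is not STA (for example a COM callback).
// Exceptions thrown inside the delegate are rethrown on the calling thread.
//
//   ModulePresenterBase.InvokeInSTAThreadIfNeed(() =>
//   {
//       // show a WPF window or dialog here
//   });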
#endregion
#region internal region
protected readonly DialogSettings DefaultDialogSettings = new DialogSettings()
{
dialog_create_folder_height = 514,
dialog_create_folder_width = 596,
dialog_select_folder_height = 514,
dialog_select_folder_width = 596,
dialog_login_height = 728,
dialog_login_width = 1026
};
DialogSettings GetDialogSettingsFromWorkshare(UserDetails user)
{
try
{
return PlatformService.GetDialogSettings(user);
}
catch (Exception ex)
{
Logger.Write("Cannot load setting for dilogs. Will be used default.", ex, Severity.Error);
return DefaultDialogSettings;
}
}
#endregion
}
}
<file_sep>/SharePoint/src/WorkshareCloud.Common/Enums/ConflictOptions.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace WorkshareCloud.Common.Enums
{
public enum ConflictOptions
{
None = 0,
Replace = 1,
KeepBoth = 2,
UpdateOnTheWorkshare = 3
}
}
<file_sep>/WSComponents/src/WSComponents/Views/CeaseCollaborationDialog/CeaseCollaborationDialogViewModel.cs
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Threading;
using Workshare.Components.Concrete;
using Workshare.Components.Helpers;
using Workshare.Components.Interfaces;
using Workshare.Components.Views.CeaseCollaborationDialog.WorkUnits;
using Workshare.Components.Views.Common;
using Workshare.Components.Views;
using Workshare.Components.Views.Progress.VMs;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Extensions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using RES_INT = Workshare.Integration.Properties.Resources;
using RES_CMP = Workshare.Components.Properties.Resources;
namespace Workshare.Components.Views.CeaseCollaborationDialog
{
public class CeaseCollaborationDialogViewModel : OwnViewModel<CeaseCollaborationDialogViewModel>
{
const string LastPart = "been uploaded and shared on Workshare.\nPlease select your options from the menus and confirm your choice.";
readonly Dispatcher _dispatcher;
public ModuleBase Module { get; set; }
private readonly SyncInfoService _syncInfoService;
internal SummaryProcessResult ScanResult;
internal Dictionary<CeaseCollaborationCloseSpaceError, List<FileMap>> CeaseCollaborationCloseSpaceErrorFiles = new Dictionary<CeaseCollaborationCloseSpaceError, List<FileMap>>();
private CeaseCollaborationDialog _dialog;
private System.Threading.Thread _scanThread;
private List<IDMSItem> _items;
private StateEnum _state;
public StateEnum State
{
get { return _state; }
set
{
if (_state != value)
{
_state = value;
PropertyHasChanged(p => p.State);
}
}
}
public CeaseCollaborationImportType ImportAction { get; private set; }
public CeaseCollaborationType FileAction { get; private set; }
public CeaseCollaborationDialogViewModel()
{
ConfirmCommand = CancelCommand = new RelayCommand((p) => { RaiseClose(false); });
_dialogDesc = "The document {DocumentName} has " + LastPart;
}
public CeaseCollaborationDialogViewModel(ModuleBase module, CommandInvoker invoker, IEnumerable<IDMSItem> items, CeaseCollaborationDialog dialog, IModuleView view)
{
if (items == null || items.Count() == 0) throw new ArgumentException("The parameter cannot be null or empty", "items");
_dispatcher = Dispatcher.CurrentDispatcher;
Module = module;
_syncInfoService = module.Resolve<SyncInfoService>();
_items = items.ToList();
_dialog = dialog;
ConfirmCommand = new RelayCommand((p) =>
{
ImportAction = (CeaseCollaborationImportType)_dialog.Tag;
FileAction = _dialog.chkFileActionBox.IsChecked == true ? CeaseCollaborationType.Delete : CeaseCollaborationType.Skip;
List<ItemException> itemExceptions = new List<ItemException>();
CeaseCollaborationCloseSpaceErrorFiles[CeaseCollaborationCloseSpaceError.CheckOutToOtherUser].ForEach(i =>
{
itemExceptions.Add(new ItemException() { Item = i.LocalFile, Error = new BaseException(RES_CMP.STR_UNABLE_TO_CLOSE_COLLABORATION_SPACE, RES_CMP.STR_ExecuteAction_File_is_checked_out_to_another_users) });
});
CeaseCollaborationCloseSpaceErrorFiles[CeaseCollaborationCloseSpaceError.CheckOutFileNotFound].ForEach(i =>
{
itemExceptions.Add(new ItemException() { Item = i.LocalFile, Error = new BaseException(RES_CMP.STR_UNABLE_TO_CLOSE_COLLABORATION_SPACE, RES_INT.STR_LOCALFILENOTFOUND_WITHFILENAME_TEXT) });
});
if (FileAction == CeaseCollaborationType.Delete && CeaseCollaborationCloseSpaceErrorFiles[CeaseCollaborationCloseSpaceError.NoAccessToDeleteOnWS].Count > 0)
{
int countFilesOnWS = CeaseCollaborationCloseSpaceErrorFiles[CeaseCollaborationCloseSpaceError.None].Count;
int NoAccessToDeleteOnWSCount = countFilesOnWS - CeaseCollaborationCloseSpaceErrorFiles[CeaseCollaborationCloseSpaceError.NoAccessToDeleteOnWS].Count;
itemExceptions.Add(new ItemException() { Item = null, Error = new BaseException(RES_CMP.STR_UNABLE_TO_CLOSE_COLLABORATION_SPACE, string.Format(RES_CMP.STR_NO_PRIVILEGE_TO_DELETE_FILES_FROM_WS, NoAccessToDeleteOnWSCount, countFilesOnWS)) });
}
if (ImportAction != CeaseCollaborationImportType.None && CeaseCollaborationCloseSpaceErrorFiles[CeaseCollaborationCloseSpaceError.NoAccessToDownloadFileFromWS].Count > 0)
{
int countFilesOnWS = CeaseCollaborationCloseSpaceErrorFiles[CeaseCollaborationCloseSpaceError.None].Count;
int NoAccessToDownloadFileFromWSCount = countFilesOnWS - CeaseCollaborationCloseSpaceErrorFiles[CeaseCollaborationCloseSpaceError.NoAccessToDownloadFileFromWS].Count;
itemExceptions.Add(new ItemException() { Item = null, Error = new BaseException(RES_CMP.STR_UNABLE_TO_CLOSE_COLLABORATION_SPACE, string.Format(RES_CMP.STR_NO_PRIVILEGE_TO_DOWNLOAD_FILES_FROM_WS, NoAccessToDownloadFileFromWSCount, countFilesOnWS)) });
}
if (itemExceptions.Count != 0)
view.ShowErrors(itemExceptions);
bool canConfirm = itemExceptions.Count == 0;
if (canConfirm)
{
var unit = new ProcessWorkUnit(view, _items, this);
invoker.AddToQueue(unit);
}
RaiseClose(true);
});
ScanCommand = new RelayCommand((p) =>
{
var unit = new ScanWorkUnit(view, _items, this, _scanThread);
invoker.AddToQueue(unit);
});
BreakLinkCommand = new RelayCommand((p) =>
{
var breakWorkUnit = new BreakWorkUnit(this, view);
breakWorkUnit.Execute();
RaiseClose(false);
});
CancelCommand = new RelayCommand((p) => { RaiseClose(false); });
if (_items.Count() > 1)
{
_dialogDesc = string.Format("The {0} selected documents have " + LastPart, _items.Count());
}
else if (_items.Count() == 1)
{
IDMSItem item = _items.First();
if (item is IDMSFile) _dialogDesc = string.Format("The document \"{0}\" has " + LastPart, item.Name);
else if (item is IDMSFolder) _dialogDesc = string.Format("The folder \"{0}\" has " + LastPart, item.Name);
}
}
private List<FileMap> GetCeaseCollaborationCloseSpaceErrorFiles(IEnumerable<ItemMap> maps, CeaseCollaborationCloseSpaceError error)
{
List<FileMap> ret = new List<FileMap>();
if (maps != null)
{
if (maps.OfType<FileMap>().Count() > 0)
{
if (error == CeaseCollaborationCloseSpaceError.RemoteDeleted)
{
maps.OfType<FileMap>().Where(i => !i.HasRemote() && _syncInfoService.IsLinked(i.LocalFile, false)).ToList().ForEach(j => ret.Add(j));
}
else if (error == CeaseCollaborationCloseSpaceError.CheckOutToOtherUser)
{
maps.OfType<FileMap>().Where(i => i.LocalFile != null && i.LocalFile.IsCheckedOutToOtherUser()).ToList().ForEach(j => ret.Add(j));
}
else if (error == CeaseCollaborationCloseSpaceError.CheckOutFileNotFound)
{
maps.OfType<FileMap>().Where(i => i.LocalFile != null && i.LocalFile.CheckedOut && !i.LocalFile.IsCheckedOutFileExists()).ToList().ForEach(j => ret.Add(j));
}
else if (error == CeaseCollaborationCloseSpaceError.None)
{
maps.OfType<FileMap>().Where(i => i.HasRemote()).ToList().ForEach(j => ret.Add(j));
}
}
if (maps.OfType<FolderMap>().Count() > 0)
{
if (error == CeaseCollaborationCloseSpaceError.NoAccessToDeleteOnWS || error == CeaseCollaborationCloseSpaceError.NoAccessToDownloadFileFromWS)
{
maps.OfType<FolderMap>().ToList().ForEach(i =>
{
if (error == CeaseCollaborationCloseSpaceError.NoAccessToDeleteOnWS || error == CeaseCollaborationCloseSpaceError.NoAccessToDownloadFileFromWS)
{
bool canAddMap = false;
if (error == CeaseCollaborationCloseSpaceError.NoAccessToDeleteOnWS && i.WsFolder != null && i.WsFolder._folder != null && !i.WsFolder._folder.Permissions.CanAddOrRemoveFiles)
canAddMap = true;
else if (error == CeaseCollaborationCloseSpaceError.NoAccessToDownloadFileFromWS && i.WsFolder != null && i.WsFolder._folder != null && !i.WsFolder._folder.Permissions.DownloadFiles)
canAddMap = true;
if (canAddMap)
{
var currUser = Module.Resolve<IAuthProvider>().GetCurrentWSUser2();
if (!i.WsFolder.Owner._user.UserId.Equals(currUser._user.UserId, StringComparison.OrdinalIgnoreCase))
i.Maps.OfType<FileMap>().Where(j => j.HasRemote()).ToList().ForEach(k => ret.Add(k));
}
}
if (i.Maps.Any(j => j is FolderMap))
ret.AddRange(GetCeaseCollaborationCloseSpaceErrorFiles(i.Maps, error));
});
}
else
{
maps.OfType<FolderMap>().ToList().ForEach(i => ret.AddRange(GetCeaseCollaborationCloseSpaceErrorFiles(i.Maps, error)));
}
}
}
return ret;
}
private void CeaseCollaborationDialogViewModel_OnCompleted_Init(IEnumerable<ItemMap> maps)
{
CeaseCollaborationCloseSpaceErrorFiles.Add(CeaseCollaborationCloseSpaceError.None, GetCeaseCollaborationCloseSpaceErrorFiles(maps, CeaseCollaborationCloseSpaceError.None));
CeaseCollaborationCloseSpaceErrorFiles.Add(CeaseCollaborationCloseSpaceError.RemoteDeleted, GetCeaseCollaborationCloseSpaceErrorFiles(maps, CeaseCollaborationCloseSpaceError.RemoteDeleted));
CeaseCollaborationCloseSpaceErrorFiles.Add(CeaseCollaborationCloseSpaceError.CheckOutToOtherUser, GetCeaseCollaborationCloseSpaceErrorFiles(maps, CeaseCollaborationCloseSpaceError.CheckOutToOtherUser));
CeaseCollaborationCloseSpaceErrorFiles.Add(CeaseCollaborationCloseSpaceError.CheckOutFileNotFound, GetCeaseCollaborationCloseSpaceErrorFiles(maps, CeaseCollaborationCloseSpaceError.CheckOutFileNotFound));
CeaseCollaborationCloseSpaceErrorFiles.Add(CeaseCollaborationCloseSpaceError.NoAccessToDeleteOnWS, GetCeaseCollaborationCloseSpaceErrorFiles(maps, CeaseCollaborationCloseSpaceError.NoAccessToDeleteOnWS));
CeaseCollaborationCloseSpaceErrorFiles.Add(CeaseCollaborationCloseSpaceError.NoAccessToDownloadFileFromWS, GetCeaseCollaborationCloseSpaceErrorFiles(maps, CeaseCollaborationCloseSpaceError.NoAccessToDownloadFileFromWS));
}
public void OnCompleted(List<ItemMap> errors, SummaryProcessResult scanResult)
{
ScanResult = scanResult;
this._dispatcher.Invoke(new Action(() =>
{
if (errors.Any(p => p.Error.IsConnectionError()))
{
this.State = StateEnum.NetError;
}
else if (ScanResult.Maps.OfType<FolderMap>().Any() && FolderDeletedOnWs())
{
this.State = StateEnum.RootFolderDeleted;
}
else if (errors.Any(p => p.Error.IsCloudFolderAccessDenied()))
{
this.State = StateEnum.Forbidden;
}
else
{
CeaseCollaborationDialogViewModel_OnCompleted_Init(scanResult.Maps);
this.State = StateEnum.Default;
}
}));
}
public void OnViewClosing()
{
if (_scanThread != null) _scanThread.Abort();
}
private bool FolderDeletedOnWs()
{
var folder = ScanResult.Maps.OfType<FolderMap>();
return folder.All(f => !f.HasRemote() && f.ProcessState == ProcessState.Scanned);
}
public RelayCommand ConfirmCommand { get; set; }
public RelayCommand CancelCommand { get; set; }
public RelayCommand ScanCommand { get; set; }
public RelayCommand BreakLinkCommand { get; set; }
public string DialogDescription { get { return _dialogDesc; } }
private string _dialogDesc = "";
public enum StateEnum { Default, Scanning, NetError, RootFolderDeleted, Forbidden }
}
}
<file_sep>/WSComponents/src/WSCloudService/Permissions.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace WorksharePlatform
{
public class Permissions
{
public int FolderId { get; set; }
internal Dictionary<string, bool> PermissionSet { get; set; }
public bool User_must_login
{
get
{
if (PermissionSet.ContainsKey("can_access_anonymously"))
{
return !PermissionSet["can_access_anonymously"];
}
else
{
return false;
}
}
set
{
if (PermissionSet.ContainsKey("can_access_anonymously"))
{
PermissionSet["can_access_anonymously"] = !value;
}
else
{
PermissionSet.Add("can_access_anonymously", !value);
}
}
}
public bool Member_may_invite
{
get
{
if (PermissionSet.ContainsKey("can_invite_with_link"))
{
return PermissionSet["can_invite_with_link"];
}
else
{
return false;
}
}
set
{
if (PermissionSet.ContainsKey("can_invite_with_link"))
{
PermissionSet["can_invite_with_link"] = value;
}
else
{
PermissionSet.Add("can_invite_with_link", value);
}
}
}
public bool CanAddOrRemoveFiles
{
get
{
if (PermissionSet.ContainsKey("can_manipulate"))
{
return PermissionSet["can_manipulate"];
}
else
{
return false;
}
}
set
{
if (PermissionSet.ContainsKey("can_manipulate"))
{
PermissionSet["can_manipulate"] = value;
}
else
{
PermissionSet.Add("can_manipulate", value);
}
}
}
public bool CanAddVersions
{
get
{
if (PermissionSet.ContainsKey("can_upload_changes"))
{
return PermissionSet["can_upload_changes"];
}
else
{
return false;
}
}
set
{
if (PermissionSet.ContainsKey("can_upload_changes"))
{
PermissionSet["can_upload_changes"] = value;
}
else
{
PermissionSet.Add("can_upload_changes", value);
}
}
}
public bool DownloadFiles
{
get
{
if (PermissionSet.ContainsKey("can_download_original"))
{
return PermissionSet["can_download_original"];
}
else
{
return false;
}
}
set
{
if (PermissionSet.ContainsKey("can_download_original"))
{
PermissionSet["can_download_original"] = value;
}
else
{
PermissionSet.Add("can_download_original", value);
}
}
}
}
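// Illustrative usage sketch (not from the original source): the named properties above are thin wrappers
// over keys in the underlying PermissionSet dictionary, so reading a flag the server never returned
// simply yields false, and setting a flag adds the key if it is missing.
//
//   bool canDelete = permissions.CanAddOrRemoveFiles;   // false when the "can_manipulate" key is absent
//   permissions.DownloadFiles = true;                   // adds or updates "can_download_original"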
}
<file_sep>/WSComponents/src/WSComponents/Views/CheckOutDialog/CheckOutDialog.xaml.cs
using Workshare.Components.Views.Common;
using Workshare.Integration.Enums;
namespace Workshare.Components.Views.CheckOutDialog
{
/// <summary>
/// Interaction logic for CheckOutDialog.xaml
/// </summary>
public partial class CheckOutDialog : OwnWindow
{
public CheckOutDialog()
{
InitializeComponent();
this.DataContext = new CheckOutDilalogModel();
btnCheckOut.Focus();
}
public string FileName
{
set
{
var model = this.DataContext as CheckOutDilalogModel;
if (null != model)
{
model.FileName= value;
}
}
}
public CheckOutOptions Result
{
get
{
var model = DataContext as CheckOutDilalogModel;
if (model != null)
{
return model.Result;
}
else
{
return CheckOutOptions.None;
}
}
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/Progress/ProgressWindow.xaml.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
using System.Windows;
using System.Windows.Data;
using System.Windows.Input;
using System.Windows.Interop;
using System.Windows.Media;
using Workshare.Components.Common;
using Workshare.Components.Concrete;
using Workshare.Components.Views.Common;
using Workshare.Components.WSLogger;
using RES = Workshare.Components.Properties.Resources;
using RES_INT = Workshare.Integration.Properties.Resources;
namespace Workshare.Components.Views.Progress
{
/// <summary>
/// Interaction logic for ProgressWindow.xaml
/// </summary>
partial class ProgressWindow : Window
{
private readonly CommandInvoker _invoker;
public ProgressWindow()
{
InitializeComponent();
}
public ProgressWindow(CommandInvoker invoker):this()
{
_invoker = invoker;
this.LbItems.MouseDoubleClick += lbItems_MouseDoubleClick;
this.Loaded += ProgressWindow_Loaded;
this.Closed += ProgressWindow_Closed;
}
void ProgressWindow_Closed(object sender, EventArgs e)
{
Logger.WriteTrace("ProgressWindow_Closed");
var viewModel = this.DataContext as ProgressViewModel;
if (viewModel != null) viewModel.Dispose();
WSApplication.Instance.View.ShowProgressWindow(_invoker);
}
void ProgressWindow_Loaded(object sender, RoutedEventArgs e)
{
Logger.WriteTrace("ProgressWindow_Loaded");
this.DataContext = new ProgressViewModel(_invoker);
}
public void Suppress(bool suppress)
{
this.Dispatcher.BeginInvoke(new Action(() =>
{
var viewModel = this.DataContext as ProgressViewModel;
if (viewModel != null)
{
viewModel.Suppress(suppress);
}
}));
}
void lbItems_MouseDoubleClick(object sender, MouseButtonEventArgs e)
{
var viewModel = this.DataContext as ProgressViewModel;
if (viewModel != null && viewModel.RemoveItem!= null)
{
var a = e.OriginalSource as FrameworkElement;
viewModel.RemoveItem.Execute(a.DataContext);
}
}
private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
Logger.WriteTrace("ProgressWindow_Closed");
var viewModel = this.DataContext as ProgressViewModel;
if (viewModel != null)
{
e.Cancel = true;
viewModel.ClearCmd.Execute(null);
}
}
int OwnerBorder = 35;
int OwnerBottomHeight = 45;
[DllImport("user32.dll")]
static extern IntPtr GetActiveWindow();
private void SetPosition()
{
var parentWindow = Process.GetCurrentProcess().MainWindowHandle;
if (parentWindow != IntPtr.Zero)
{
RECT rect = new RECT();
if (GetWindowRect(parentWindow, ref rect))
{
this.Top = rect.Top + OwnerBottomHeight;
this.Left = rect.Right - this.ActualWidth - OwnerBorder;
return;
}
}
this.Top = OwnerBorder;
this.Left = SystemParameters.VirtualScreenWidth - this.ActualWidth - OwnerBorder;
}
private void Window_SizeChanged_1(object sender, SizeChangedEventArgs e)
{
SetPosition();
}
[DllImport("user32.dll", SetLastError = true)]
[return: MarshalAs(UnmanagedType.Bool)]
static extern bool GetWindowRect(IntPtr hWnd, ref RECT lpRect);
[StructLayout(LayoutKind.Sequential)]
private struct RECT
{
public int Left;
public int Top;
public int Right;
public int Bottom;
}
}
public class StatusToDMSConverter : IValueConverter
{
Dictionary<WorkStatus, ImageSource> cache = new Dictionary<WorkStatus, ImageSource>();
public object Convert(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture)
{
if (value is WorkStatus)
{
var val = (WorkStatus)value;
if (!cache.ContainsKey(val))
{
System.Drawing.Bitmap img;
switch (val)
{
case WorkStatus.Processing:
case WorkStatus.Pending:
{
img = RES.Synchronize;
break;
}
case WorkStatus.Success:
{
img = RES.Apply;
break;
}
default:
{
img = RES.Error;
break;
}
}
var bitmap = Utils.Convert(img,ImageFormat.Png);
cache[val] = bitmap;
}
return cache[val];
}
return null;
}
public object ConvertBack(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture)
{
throw new NotImplementedException();
}
}
public class StatusToTextConverter : IValueConverter
{
public object Convert(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture)
{
if (value is WorkStatus)
{
switch ((WorkStatus)value)
{
case WorkStatus.Processing:
case WorkStatus.Pending:
{
if (parameter is TrueSynhWorkUnit)
{
return RES_INT.STR_WORK_STATUS_SYNCHRONIZATION_TEXT;
}
//else if (parameter is SynhWorkUnit)
//{
// return RES_INT.STR_WORK_STATUS_SYNCHRONIZATION_TEXT;
//}
else if (parameter is SendWorkUnit)
{
return RES_INT.STR_WORK_STATUS_SENDING_TEXT;
}
else
{
return "Processing";
}
}
case WorkStatus.Success:
{
return RES_INT.STR_WORK_STATUS_FINISHED_TEXT;
}
case WorkStatus.Error:
{
return RES_INT.STR_WORK_STATUS_ERROR_TEXT;
}
default:
{
break;
}
}
}
return null;
}
public object ConvertBack(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture)
{
throw new NotImplementedException();
}
}
}
<file_sep>/OpenText/src/Workshare.OpenText/Concrete/eDOCSEventType.cs
namespace Workshare.OpenText.Concrete
{
enum eDOCSDEventType
{
DE_OPEN = 1,
DE_OPEN_COMPATIBLE = 2,
DE_LOCK = 3,
DE_UNLOCK = 4,
DE_DELETE = 5,
DE_PRINT = 6,
DE_PREVIEW = 7,
DE_SELECT_FOR_EDIT = 8,
DE_RELEASE_DOC = 9,
DE_SAVE = 10,
DE_NEW_VERSION = 11,
DE_NEW_DOC = 12,
DE_CHECK_OUT = 13,
DE_UPLOAD = 14,
DE_DOWNLOAD = 15,
DE_LOCK_FOR_EDIT = 16,
DE_UNLOCK_VERSION = 17,
DE_ADD_LINK = 18,
DE_REMOVE_LINK = 19,
DE_UPDATE_LINK = 20
}
}
<file_sep>/WSComponents/src/WSIntegration/WsProxies/WsFolder.cs
using System.Collections.Generic;
using System.Linq;
using WorksharePlatform;
namespace Workshare.Integration.WsProxies
{
public class WsFolder
{
//TODO make non-public
public FolderDetails _folder;
readonly UserDetails _user;
public WsFolder(FolderDetails folder, UserDetails user)
{
this._folder = folder;
this._user = user;
}
public int Id { get { return _folder.Id; } }
IEnumerable<WsFile> _files;
public IEnumerable<WsFile> Files
{
get
{
if (_files == null)
{
_files = PlatformService.GetFiles(_user, _folder.Id).Select(a => new WsFile(a, _user));
}
return _files;
}
}
IEnumerable<WsFolder> _subFolders;
public IEnumerable<WsFolder> SubFolders
{
get
{
if (_subFolders == null)
{
_subFolders = PlatformService.GetChildFolders(_user, _folder.Id).Select(a => new WsFolder(a, _user));
}
return _subFolders;
}
}
public string Url { get { return _folder.Url; } }
public WsUser Owner { get { return new WsUser(_folder.Owner); } }
/// <summary>
/// Returns members directly assigned to the current folder only - parent folder members are ignored.
/// </summary>
public int MemberCount { get { return _folder.MemberCount; } }
private IEnumerable<WsMember> _members;
/// <summary>
/// Returns members including those inherited from parent folders.
/// </summary>
public IEnumerable<WsMember> Members
{
get
{
if (_members == null)
{
_members = PlatformService.GetMembers(_user, this.Id).Select(a => new WsMember(_user, a));
}
return _members;
}
}
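// Illustrative note (not from the original source): MemberCount reflects only members assigned directly
// to this folder, while the Members enumeration is fetched lazily from the platform and also includes
// members inherited from parent folders, so the two values may differ.
//
//   int directMembers = wsFolder.MemberCount;     // from the folder details already loaded
//   int allMembers = wsFolder.Members.Count();    // triggers PlatformService.GetMembers on first access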
public string Name {
get { return _folder.Name; }
}
public bool IsDeleted {
get { return _folder.IsDeleted; }
}
public int ParentId {
get { return _folder.ParentId; }
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncUploadFilesDialog/WorkUnits/ScanWorkUnit.cs
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using Workshare.Components.Common;
using Workshare.Components.Views.TrueSyncDialog;
using Workshare.Components.Views.TrueSyncUploadFilesDialog.VMs;
using Workshare.Components.WSLogger;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Strategies;
using Workshare.Integration.Exceptions;
namespace Workshare.Components.Views.TrueSyncUploadFilesDialog.WorkUnits
{
public class ScanWorkUnit : WorkUnit
{
private readonly TrueSyncFilesScanDialogVm _vm;
private readonly DmsWorkerBase _dmsWorker;
private readonly MapController _mapController;
public ScanWorkUnit(TrueSyncFilesScanDialogVm vm, DmsWorkerBase dmsWorker) : base(vm.View)
{
_vm = vm;
_mapController = vm.Module.Resolve<MapController>();
_dmsWorker = dmsWorker;
}
public override void OnAdded()
{
_vm.State = TrueSyncFilesScanDialogVm.StateEnum.Loading;
}
public override void Execute()
{
try
{
var processor = _vm.Module.Resolve<Processor>();
var scanResults = processor.ScanFile((BaseDMSFile)_vm.File, new ScanOptions() { UseForceRequest = true });
bool vmShouldRaiseClose = false;
foreach (var result in scanResults)
{
var activities = new List<UploadFileActivity>();
var activityFinder = _vm.Module.Resolve<ActivityFinder>();
var uploadActivities = activityFinder.GetUploadActivities(result, null);
var errors = activityFinder.GetErrors(result);
if (errors.Any())
{
if (errors.First().Error.IsCloudFolderAccessDenied())
{
_vm.State = TrueSyncFilesScanDialogVm.StateEnum.NoAccessOnWorkshare;
}
else
{
_vm.Error = errors.First().Error;
_vm.State = TrueSyncFilesScanDialogVm.StateEnum.Error;
}
}
else
{
var uploadActivity = uploadActivities
.FirstOrDefault(a => a.DmsId == _vm.File.ID.ToString()
&& _vm.ActivityToDisplay(a));
if (uploadActivity != null)
{
var folderMap = _mapController.GetFirstParentLinkedWithExistedOnWs(result, uploadActivity.MapId);
if (folderMap != null)
{
var mapAnylizer = _vm.Module.Resolve<MapAnylizer>();
var isFolderSharedWithExternal = mapAnylizer.IsFolderSharedWithExternalUsers(folderMap);
activities.Add(uploadActivity);
var result2 = result;
_vm.Dispatcher.Invoke(new Action(() =>
{
_vm.ParentScans.Add(new ParentScan
{
Activities =
new ObservableCollection<FileUploadActivityVm>(
activities.Select(a => new FileUploadActivityVm(a))),
Scan = result2
});
_vm.WorkspaceName = mapAnylizer.GetName(folderMap);
_vm.PropertyHasChanged(a => a.WorkspaceName);
_vm.IsSharedExternally = isFolderSharedWithExternal;
if (!isFolderSharedWithExternal)
{
StartAutoUploadingIfNeed();
}
if(vmShouldRaiseClose)
vmShouldRaiseClose = false;
_vm.State = TrueSyncFilesScanDialogVm.StateEnum.Default;
}));
}
else
{
Logger.WriteWarning("File not in the synced _folder");
_vm.State = TrueSyncFilesScanDialogVm.StateEnum.NoInSyncedFolder;
vmShouldRaiseClose = true;
}
}
else
{
Logger.WriteWarning("No items found for uploading of file");
_vm.State = TrueSyncFilesScanDialogVm.StateEnum.NoChangesDetected;
vmShouldRaiseClose = true;
}
}
}
if (vmShouldRaiseClose)
_vm.Dispatcher.Invoke(new Action(() => _vm.RaiseClose(false)));
}
catch (Exception ex)
{
_vm.View.ShowError(ex);
}
}
void StartAutoUploadingIfNeed()
{
if (_vm.ParentScans.Count == 1) // guard against an empty collection; First() below assumes exactly one scan
{
var parentScan = _vm.ParentScans.First();
if (parentScan.Activities.Any() && parentScan.Activities.Count == 1)
{
var activity = parentScan.Activities.Single();
var action = activity.Actions.FirstOrDefault();
if (action != null)
{
_vm.ExecuteDocumentAction(new[]{action});
}
}
}
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Exceptions/Exceptions.cs
using System;
using System.Collections.Generic;
using System.Linq;
using Workshare.Integration.Common;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Properties;
using WorksharePlatform;
namespace Workshare.Integration.Exceptions
{
/// <summary>
/// Pairs a DMS or Workshare item with the exception raised while processing it.
/// </summary>
public class ItemException
{
public IDMSItem Item
{
set;
get;
}
public Exception Error
{
set;
get;
}
public FileDetails WSItem
{
get;
set;
}
}
public class ItemsErrorsCollection : IDisposable
{
readonly List<ItemException> _mErrors = new List<ItemException>();
readonly OperationContext _context;
public ItemsErrorsCollection(OperationContext context)
{
_context=context;
}
public void Add(IDMSItem item, Exception ex)
{
_mErrors.Add(new ItemException
{
Item = item,
Error = ex,
WSItem = null
});
}
public void Add(FileDetails item, Exception ex)
{
_mErrors.Add(new ItemException
{
WSItem = item,
Error = ex,
Item = null
});
}
public void Add(FileDetails wsitem, IDMSItem item, Exception ex)
{
_mErrors.Add(new ItemException
{
WSItem = wsitem,
Error = ex,
Item = item
});
}
public void Clear()
{
_mErrors.Clear();
}
void Add(ItemException ex)
{
_mErrors.Add(ex);
}
public void AddRange(ItemsErrorsCollection errors)
{
errors.AsQueryble().ToList().ForEach(Add);
}
public IQueryable<ItemException> AsQueryble()
{
return _mErrors.AsQueryable();
}
public int Count
{
get
{
return _mErrors.Count;
}
}
public void Check()
{
if (Count > 0)
{
throw new MultiItemsException(this, _context);
}
}
public void Dispose()
{
Check();
}
}
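// Illustrative usage sketch (not from the original source): because Dispose() calls Check(), wrapping the
// collection in a using block throws a single MultiItemsException at the end of the scope if any per-item
// errors were recorded. The context variable is assumed to be an existing OperationContext, and Send is a
// hypothetical per-item operation.
//
//   using (var errors = new ItemsErrorsCollection(context))
//   {
//       foreach (var item in items)
//       {
//           try { Send(item); }
//           catch (Exception ex) { errors.Add(item, ex); }
//       }
//   } // MultiItemsException is thrown here when errors.Count > 0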
public class BaseException : Exception
{
protected BaseException()
{ }
public BaseException(string caption, string msg)
: base(msg)
{
Caption = caption;
}
public BaseException(string caption, string msg, Exception ex)
: base(msg, ex)
{
Caption = caption;
}
public string Caption { get; private set; }
}
public class OfflineException : BaseException
{
public OfflineException()
: base(Resources.STR_CONNECTION_ERROR_CAPTION, Resources.STR_CONNECTION_ERROR_TEXT)
{
}
public OfflineException(Exception inner)
: base(Resources.STR_CONNECTION_ERROR_CAPTION, Resources.STR_CONNECTION_ERROR_TEXT, inner)
{
}
public OfflineException(string message, Exception inner)
: base(Resources.STR_CONNECTION_ERROR_CAPTION, message, inner)
{
}
}
public class CannotGetDataFromServer : BaseException
{
public CannotGetDataFromServer()
: base(Resources.STR_CONNECTION_ERROR_CAPTION, Resources.STR_CONNECTION_ERROR_TEXT)
{
}
public CannotGetDataFromServer(Exception inner)
: base(Resources.STR_CONNECTION_ERROR_CAPTION, Resources.STR_CONNECTION_ERROR_TEXT, inner)
{
}
}
/// <summary>
/// Exceptions raised during processing of items.
/// </summary>
public class MultiItemsException : BaseException
{
public ItemsErrorsCollection errorList;
public OperationContext OperationContext { get; private set; }
public MultiItemsException(ItemsErrorsCollection errList, OperationContext operationContext)
: base(Resources.STR_ERROR_HAPPEN, "")
{
errorList = errList;
OperationContext = operationContext;
}
public override string ToString()
{
var text = string.Empty;
errorList.AsQueryble().Where(a => a != null).ToList().ForEach(p => text += ((p.Item == null) ? "no item" : p.Item.Name) + " : " + ((p.Error != null) ? p.Error.Message : p.GetType().Name) + Environment.NewLine + ((p.Error != null) ? p.Error.StackTrace : string.Empty) + Environment.NewLine);
return text;
}
}
public class LocalFileNotFound : BaseException
{
public LocalFileNotFound()
: base(Resources.STR_LOCALFILENOTFOUND_CAPTION, Resources.STR_LOCALFILENOTFOUND_TEXT)
{
}
}
public class CheckedOutOnAnotherMachine : Exception
{
public string MachineName { set; get; }
public CheckedOutOnAnotherMachine(string machineName)
{
this.MachineName = machineName;
}
}
public class LockedByAnotherProcessException : BaseException
{
public LockedByAnotherProcessException(string filename)
: base(
string.IsNullOrEmpty(filename) ? Resources.STR_ERROR_CAPTION_File_is_locked : string.Format(Resources.STR_ERROR_CAPTION_File___0___is_locked_, filename),
Resources.STR_ERROR_TEXT_File_is_currently_opened_in_the_another_application)
{
}
}
public class CloudFileNotFound : CloudItemNotFound
{
public CloudFileNotFound(string operation_message)
: base(operation_message, Resources.STR_CLOUDFILENOTFOUND)
{ }
}
public class CloudItemNotFound : BaseException
{
protected CloudItemNotFound(string operationMessage, string message)
: base(operationMessage, message)
{
}
}
public class CloudFolderNotFound : CloudItemNotFound
{
public CloudFolderNotFound(string operationMessage)
: base(operationMessage, Resources.STR_CLOUDFOLDERNOTFOUND)
{ }
}
public class DMSItemNotFound : BaseException
{
public DMSItemNotFound(string caption, string name)
: base(caption, name)
{ }
}
/// <summary>
/// not enough permissions
/// </summary>
public class CloudFolderAccessDenied : BaseException
{
public CloudFolderAccessDenied(string caption, string text, Exception ex)
: base(caption, text, ex)
{ }
}
public class FileAlreadySentException : BaseException
{
public FileAlreadySentException()
: base(Resources.STR_UNABLE_SEND, Resources.STR_FILE_ALREADY_SENT)
{
}
}
public class FolderAlreadySentException : BaseException
{
public FolderAlreadySentException()
: base(Resources.STR_UNABLE_SEND, Resources.STR_FOLDER_ALREADY_SENT)
{
}
}
public class FileCheckoutedToAnotherUser : BaseException
{
public FileCheckoutedToAnotherUser(Operations oper, string userName = "")
: base((oper == Operations.Send) ? Resources.STR_UNABLE_SEND : Resources.STR_UNABLE_SYNC,
(oper == Operations.Send) ? Resources.STR_FILEISCHECKEDOUTBYANOTHERUSER_TEXT : Resources.STR_FILEISCHECKEDOUTBYANOTHERUSER_TEXT_SYNC)
{
this.UserName = userName;
}
public string UserName { private set; get; }
public FileCheckoutedToAnotherUser(string userName="")
: base(Resources.STR_UNABLE_SEND, Resources.STR_FILEISCHECKEDOUTBYANOTHERUSER_TEXT)
{
this.UserName = userName;
}
}
//not authenticated on workshare
public class CloudUnAuthorized : BaseException
{
public CloudUnAuthorized()
: base(Resources.STR_WRONGAUTH_CAPTION, Resources.STR_WRONGAUTH_TEXT)
{ }
public CloudUnAuthorized(Exception ex)
: base(Resources.STR_WRONGAUTH_CAPTION, Resources.STR_WRONGAUTH_TEXT, ex)
{ }
}
public class CannotProcessItemException : BaseException
{
public CannotProcessItemException()
: base(Resources.STR_UNABLE_SEND, Resources.STR_LONG_FILE_NAME_TEXT)
{
}
}
public class DMSUnAuthorizedException: BaseException
{
public DMSUnAuthorizedException()
: base(Resources.STR_DO_NOT_HAVE_PERMISSION_CAPTION, Resources.STR_DO_NOT_HAVE_PERMISSION_TEXT)
{
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Interfaces/IAuthProvider.cs
using Workshare.Integration.WsProxies;
using WorksharePlatform;
namespace Workshare.Integration.Interfaces
{
public interface IAuthProvider
{
UserDetails GetCurrentWSUser();
WsUser GetCurrentWSUser2();
void SetCurrentUser(UserDetails user);
UserDetails CheckIfAuth();
}
}
<file_sep>/WSComponents/src/WSComponents/Helpers/Helpers.cs
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using Workshare.Integration.Interfaces;
namespace Workshare.Components.Helpers
{
public class WinFormHelper : System.Windows.Forms.IWin32Window
{
IntPtr m_Handle;
public WinFormHelper(IntPtr pParent)
{
m_Handle = pParent;
}
public IntPtr Handle
{
get { return m_Handle; }
}
}
static class WinInetHelper
{
[System.Runtime.InteropServices.DllImport("wininet.dll", CharSet = System.Runtime.InteropServices.CharSet.Auto, SetLastError = true)]
static extern bool InternetSetOption(int hInternet, int dwOption, IntPtr lpBuffer, int dwBufferLength);
public static void ResetSession()
{
return; // intentionally disabled - the cookie-suppression code below is retained for reference only and never executes
int option = (int)3/* INTERNET_SUPPRESS_COOKIE_PERSIST*/;
var ptr = Marshal.AllocHGlobal(Marshal.SizeOf(option));
try
{
Marshal.StructureToPtr(option, ptr, false);
InternetSetOption(0, 81/*INTERNET_OPTION_SUPPRESS_BEHAVIOR*/, ptr, Marshal.SizeOf(option));
}
finally
{
Marshal.FreeHGlobal(ptr); // memory from AllocHGlobal must be released with FreeHGlobal, not Marshal.Release
}
}
}
public static class CollectionExt
{
public static bool HasOnlyOneFile(this IEnumerable<IDMSItem> collection)
{
if (collection == null) return false;
return collection.All(a => a is IDMSFile) && collection.Count() == 1;
}
public static bool HasOnlyOneFolder(this IEnumerable<IDMSItem> collection)
{
if (collection == null) return false;
return collection.All(a => a is IDMSFolder) && collection.Count() == 1;
}
}
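// Illustrative usage sketch (not from the original source): these helpers let command classes tailor
// error messages to the current selection.
//
//   if (contextItems.HasOnlyOneFile())   { /* exactly one document selected */ }
//   if (contextItems.HasOnlyOneFolder()) { /* exactly one folder selected   */ }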
}
<file_sep>/WSComponents/src/WSComponents/Exceptions/CannotAddHistoryException.cs
using System;
using Workshare.Integration.Exceptions;
namespace Workshare.Components.Exceptions
{
public class CannotAddHistoryException : BaseException
{
public CannotAddHistoryException(string message)
: base("",message)
{
}
public CannotAddHistoryException(string message, Exception inner)
: base("",message, inner)
{
}
}
}
<file_sep>/README.md
dmssync
=======<file_sep>/WSComponents/src/WSIntegration/Processor/Changes/Visitors/ChangesDetector.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using Workshare.Components.Helpers;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.Processor.Strategies;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration.Processor.Changes.Visitors
{
public class ChangesDetector
{
private readonly DmsWorkerBase _dmsWorker;
private readonly IAuthProvider _auth;
private readonly SyncInfoService _syncInfoService;
public DetectorOptions opt;
public ChangesDetector(DmsWorkerBase dmsWorker, IAuthProvider auth, SyncInfoService syncInfoService)
{
this._dmsWorker = dmsWorker;
this._auth = auth;
_syncInfoService = syncInfoService;
opt = new DetectorOptions() { UseForceRequest = true };
}
public void AddSharedChangeIfDetected(FileMap filemap, FileMapActivity fileActivity)
{
if (filemap.WsFile != null)
{
fileActivity.AddChange(new FileActivityChange
{
Type = ChangeType.Uploaded,
});
}
}
string GetWsFileName(FileMap filemap)
{
return (filemap.WsFile != null) ? (FileUtils.ChangeExtension(filemap.WsFile.FriendlyName, FileUtils.GetExtension(filemap.WsFile.Name))) : null;
}
string GetLocalFileName(FileMap filemap)
{
return (filemap.LocalFile != null) ? filemap.LocalFile.Name : null;
}
public void AddRenameChangeIfDetected(FileMap filemap, FileMapActivity fileActivity)
{
var wsFilename = GetWsFileName(filemap);
var localFilename = GetLocalFileName(filemap);
if (!string.IsNullOrEmpty(wsFilename) && !string.IsNullOrEmpty(localFilename)
&& !string.Equals(FileUtils.GetFileNameWithoutExtension(wsFilename), FileUtils.GetFileNameWithoutExtension(localFilename), StringComparison.InvariantCultureIgnoreCase))
{
fileActivity.AddChange(new FileActivityChange
{
Type = ChangeType.NamesDiffer,
LocalName = localFilename,
WsName = wsFilename
});
}
}
public void AddTypeChangedIfDetected(FileMap filemap, FileMapActivity fileActivity)
{
var wsFilename = GetWsFileName(filemap);
var localFilename = GetLocalFileName(filemap);
if (!string.Equals(FileUtils.GetExtension(wsFilename), FileUtils.GetExtension(localFilename),
StringComparison.InvariantCultureIgnoreCase))
{
var oldType = FileUtils.GetExtension(localFilename);
var newType = FileUtils.GetExtension(wsFilename);
if (oldType != null && newType != null)
{
oldType = oldType.Replace(".", "").ToUpper();
newType = newType.Replace(".", "").ToUpper();
}
if (!string.IsNullOrEmpty(oldType) && !string.IsNullOrEmpty(newType))
{
fileActivity.AddChange(new FileActivityChange
{
LocalDocType = oldType,
WsDocType = newType,
Type = ChangeType.DocTypeChanged
});
}
}
}
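// Illustrative example (not from the original source) of how the two detectors above classify a file pair:
//   local "Contract draft.docx" vs Workshare "Contract final.docx" -> ChangeType.NamesDiffer
//   local "Report.docx"         vs Workshare "Report.pdf"          -> ChangeType.DocTypeChanged (DOCX -> PDF)
// Base-name and extension comparisons are both case-insensitive, so "report.DOCX" vs "Report.docx"
// produces no change entry.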
private FolderMap GetFirstFolderSentToWs(FileMap fileMap)
{
//TODO use GetFirstParentLinkedWithExistedOnWs
var current = fileMap.IndirectParent ?? fileMap.Parent as FolderMap;
while (current != null && current.WsFolder == null)
{
current = fileMap.IndirectParent ?? current.Parent as FolderMap;
}
return current;
}
internal void AddFolderSharedIfDetected(FileMapActivity fileActivity, FileMap fileMap)
{
var folderMap = GetFirstFolderSentToWs(fileMap);
if (folderMap == null || folderMap.WsFolder == null) return;
var wsFolder = folderMap.WsFolder;
if (fileMap.LocalFile == null) return;
WsUser currentUser = _auth.GetCurrentWSUser2();
var otherMembersCount = wsFolder.Members.GetWithoutCurrentUser(currentUser).Count();
fileActivity.AddChange(new FileActivityChange
{
Type = ChangeType.FolderSharedWithOthers,
FolderSharedBy = wsFolder.Owner,
OtherMemebersCount = otherMembersCount,
OtherMemberViewLink = wsFolder.Url
});
}
internal void AddLastUnSentOrChangedVersionIfDetected(FileMapActivity fileActivity, FileMap fileMap)
{
var allLinkedVerionMaps =
fileMap.Maps.OfType<VersionMap>()
.Where(a => a.HasLocal() && a.HasRemote())
.OrderByDescending(a => a.LocalVersion.Number);
var lastLinkedVersionMap = allLinkedVerionMaps.FirstOrDefault();
var allNotLinkedVersionMaps = fileMap.Maps.OfType<VersionMap>().Where(a => a.HasLocal() && !a.HasRemote());
var lastChangedNotLinkedVersionMap = allNotLinkedVersionMaps
//.OrderByDescending(a => a.LocalVersion.Number)
.FirstOrDefault(a => a.LocalVersion.EditTime == allNotLinkedVersionMaps.Max(x => x.LocalVersion.EditTime));
var syncInfo = _syncInfoService.GetSyncInfo(fileMap, opt.UseForceRequest);
bool addLastNotLinked = lastChangedNotLinkedVersionMap != null && lastLinkedVersionMap == null
||
lastChangedNotLinkedVersionMap != null
&& lastLinkedVersionMap.LocalVersion.Number < lastChangedNotLinkedVersionMap.LocalVersion.Number
||
lastChangedNotLinkedVersionMap != null && syncInfo != null
&& lastChangedNotLinkedVersionMap.LocalVersion.Number > syncInfo.LastUploadedDmsVerNum
||
lastChangedNotLinkedVersionMap != null
&& lastLinkedVersionMap.LocalVersion.Number > lastChangedNotLinkedVersionMap.LocalVersion.Number
&& lastChangedNotLinkedVersionMap.LocalVersion.EditTime > lastLinkedVersionMap.LocalVersion.EditTime;
if (addLastNotLinked)
{
fileActivity.AddChange(new FileActivityChange
{
Type = ChangeType.NotSentVersion,
SavedTime = _dmsWorker.GetEditTime(lastChangedNotLinkedVersionMap.LocalVersion),
VersionNumber = lastChangedNotLinkedVersionMap.LocalVersion.Number,
LocalVersionId = lastChangedNotLinkedVersionMap.LocalVersion.Id
});
}
else
{
foreach (var sentversion in allLinkedVerionMaps.Reverse())
{
if (sentversion.LocalVersion.WasChangedAfterSend2(opt.UseForceRequest))
{
fileActivity.AddChange(new FileActivityChange()
{
Type = ChangeType.VersionChangedAfterSend,
SavedTime = _dmsWorker.GetEditTime(sentversion.LocalVersion),
VersionNumber = sentversion.LocalVersion.Number,
LocalVersionId = sentversion.LocalVersion.Id
});
break;
}
}
}
}
internal void AddChangesIfDetected(FileMap fileMap, FileMapActivity documentChanges)
{
if (fileMap.HasLocal() && fileMap.HasRemote())
{
var syncInfo = _syncInfoService.GetSyncInfo(fileMap, opt.UseForceRequest);
var versionMaps = fileMap.Maps.OfType<VersionMap>().ToList();
var onlyRemoteVersionMaps = versionMaps.Where(p => p.LocalVersion == null && p.WsVersion != null).ToList();
var lastWsVersionNumImportedToDms =
fileMap.WsFile.Versions.Where(p => syncInfo != null && p.Id == syncInfo.LastImportedWsVerId)
.Select(a => a.Version).FirstOrDefault();
var newVersions =
onlyRemoteVersionMaps.Where(
a => a.HasRemote() && a.WsVersion.Version > lastWsVersionNumImportedToDms)
.Select(a => a.WsVersion)
.ToList();
var newversionsExist = newVersions.Any();
var localWasUpdated = fileMap.LocalFile != null && fileMap.LocalFile.WasUpdatedAfterSend2(opt.UseForceRequest);
if (newversionsExist && localWasUpdated)
{
documentChanges.AddChange(new FileActivityChange
{
Type = ChangeType.BothChanged,
NewerVersions = newVersions,
});
}
else if (newversionsExist)
{
documentChanges.AddChange(new FileActivityChange
{
Type = ChangeType.RemoteChanged,
NewerVersions = newVersions,
});
}
else if (localWasUpdated)
{
documentChanges.AddChange(new FileActivityChange
{
Type = ChangeType.LocalChanged
});
}
}
}
internal void AddNotModifiedIfdetected(FileMap fileMap, ImportFileActivity documentChanges)
{
if (documentChanges.Changes.Any())
{
return;
}
if (fileMap.LocalFile != null && fileMap.WsFile != null)
{
documentChanges.AddChange(new FileActivityChange
{
Type = ChangeType.NotModified
});
}
}
internal void AddDeleted(FileMap fileMap, ImportFileActivity documentChanges)
{
if (fileMap.ProcessState == ProcessState.Error) return;
if (fileMap.HasLocal() && !fileMap.HasRemote() && _syncInfoService.GetSyncInfo(fileMap.LocalFile, opt.UseForceRequest) != null)
{
documentChanges.AddChange(new FileActivityChange
{
Type = ChangeType.RemoteDeleted
});
}
}
internal void AddAddedOnWs(FileMap fileMap, ImportFileActivity documentChanges)
{
if (fileMap.WsFile != null && fileMap.LocalFile == null)
{
documentChanges.AddChange(new FileActivityChange
{
Type = ChangeType.RemoteAdded,
NewerVersions = fileMap.WsFile.Versions.ToList()
});
}
}
internal void AddNotSent(FileMap fileMap, ImportFileActivity documentChanges)
{
if (fileMap.LocalFile != null && _syncInfoService.GetSyncInfo(fileMap.LocalFile, opt.UseForceRequest) == null)
{
documentChanges.AddChange(new FileActivityChange
{
Type = ChangeType.NotSent
});
}
}
}
public class DetectorOptions
{
public bool UseForceRequest { get; set; }
}
}
<file_sep>/WSComponents/src/WSComponents.Tests/TestAuthProvider.cs
using NUnit.Framework;
using Workshare.Components.Concrete;
namespace WSComponents.Tests
{
[TestFixture]
public class TestAuthProvider
{
[Test]
public void TestGetUserDefault_NotNull()
{
var auth_provider = new DMSAuthProvider();
Assert.IsNotNull(auth_provider.GetCurrentWSUser(), "Auth provider must not return NULL user");
}
}
}
<file_sep>/WSComponents/src/WSComponents/Exceptions/FileCannotBeCheckedInException.cs
using System;
using Workshare.Integration.Exceptions;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.Components.Exceptions
{
public class FileCannotBeCheckedInException : BaseException
{
public FileCannotBeCheckedInException(string reasonMessage)
: base(RES.STR_UNABLE_SYNC, string.Format(RES.STR_FILE_CANNOT_BE_CHECKEDIN, reasonMessage))
{
}
}
public class FileUnlockFailedException : BaseException
{
public FileUnlockFailedException()
: base(RES.STR_UNABLE_SYNC, RES.STR_FILE_CANNOT_BE_UNLOCKED)
{
}
}
public class FileCannotBeDeletedException : BaseException
{
public FileCannotBeDeletedException(Exception inner)
: base("","Cannot remove the file. "+inner.Message,inner)
{
}
}
}
<file_sep>/WSComponents/src/WSComponents/Interfaces/ICloseRequester.cs
using System;
namespace Workshare.Components.Interfaces
{
public interface ICloseRequester
{
event Action<bool?> CloseRequested;
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Maps/ItemMap.cs
using System;
using System.Collections.Generic;
using System.Linq;
using Workshare.Integration.Processor.DmsItems.Visitors;
namespace Workshare.Integration.Processor.Maps
{
public enum ProcessState{
Scanned,
Processed,
Error,
Cancelled}
public abstract class ItemMap
{
string _id;
public string Id
{
get
{
if (string.IsNullOrEmpty(_id))
{
_id=GetId();
}
return _id;
}
protected set
{
_id = value;
}
}
readonly List<ItemMap> _maps;
public IEnumerable<ItemMap> Maps { get { return _maps; } }
public ItemMap Parent { private set; get; }
public ItemMap IndirectParent { set; get; }
public Exception Error;
public ProcessState ProcessState { set; get; }
protected ItemMap()
{
_maps = new List<ItemMap>();
}
public void AddIf(ItemMap map)
{
map.Parent = this;
if (!_maps.Contains(map))
{
_maps.Add(map);
}
}
public abstract void Apply(ItemMapVisitor visitor);
public abstract string GetId();
public override string ToString()
{
return string.Format("{0} : State={1}, Id={2}, ChildrenCount={3}",this.GetType().Name,ProcessState,Id,_maps.Count);
}
}
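// Illustrative usage sketch (not from the original source): maps form a tree via AddIf, which sets the
// child's Parent and ignores duplicates, and SummaryProcessResult forwards a visitor to all its children.
// FolderMap and FileMap are the concrete map types used elsewhere in this codebase; visitor is assumed
// to be an ItemMapVisitor implementation.
//
//   var summary = new SummaryProcessResult();
//   summary.AddIf(folderMap);        // folderMap.Parent == summary after this call
//   folderMap.AddIf(fileMap);
//   summary.Apply(visitor);          // visits folderMap, which in turn can visit fileMap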
public class SummaryProcessResult : ItemMap
{
public PageIterator Iterator { get; set; }
public override void Apply(ItemMapVisitor visitor)
{
foreach (var item in this.Maps.ToList())
{
item.Apply(visitor);
}
}
public override string GetId()
{
return "SummaryProcessResult";
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Extensions/LocalFileExtensions.cs
using System;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
namespace Workshare.Integration.Extensions
{
public static class LocalFileExtentions
{
public static void CheckPermissionkOrThrow(this IDMSItem obj, Permissions permissions)
{
if (!obj.DoesUserHavePermissions(permissions))
{
throw new UnauthorizedAccessException();
}
}
public static void CheckNotCheckeOutToOtherUserOrThrow(this IDMSFile item, Operations oper)
{
if (item != null)
{
if (item.IsCheckedOutToOtherUser()) throw new FileCheckoutedToAnotherUser(oper);
if (!item.IsCheckedOutFileExists()) throw new LocalFileNotFound();
}
}
public static bool IsCheckedOutToOtherUser(this IDMSItem item)
{
if (item != null)
{
return (item.CheckedOut && !item.CheckedOutToUser);
}
return true;
}
}
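// Illustrative usage sketch (not from the original source): a typical guard sequence before syncing a file.
//
//   file.CheckPermissionkOrThrow(Permissions.EditItem);          // throws UnauthorizedAccessException if missing
//   file.CheckNotCheckeOutToOtherUserOrThrow(Operations.Send);   // throws FileCheckoutedToAnotherUser / LocalFileNotFound
//   if (!file.IsCheckedOutToOtherUser()) { /* safe to proceed */ }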
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/SyncItemsCommand.cs
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Globalization;
using System.Linq;
using System.Runtime.InteropServices;
using Com.Interwoven.Worksite.iManExt;
using Workshare.Components;
using Workshare.Components.Helpers;
using Workshare.Components.Properties;
using Workshare.IManage.Views;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Extensions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.DmsItems;
namespace Workshare.IManage
{
public class SyncItemsCommand : Command
{
public override object Bitmap {
get
{
return Resources.workshare_sync_from.GetHbitmap();
}
set { }
}
public override string MenuText
{
get
{
return "Sync from Workshare";
}
set
{
}
}
public override string HelpText {
get { return MenuText + "\nSync item"; }
set { }
}
public override void ExecuteAction()
{
var contextItems = GetContextItems().ToList();
if (!contextItems.Any())
{
return;
}
if (!Application.iManInstance.Presenter.LoginIfNeeded())
{
return;
}
var linkedItemsCount = contextItems.Count(p => syncService.GetSyncInfo(p) != null);
var allItemsAreLinked = linkedItemsCount == contextItems.Count;
var someItemsLinked = linkedItemsCount > 0;
var hasPermissions = contextItems.All(p => p.DoesUserHavePermissions(Permissions.EditItem));
var anyTaskAlreadyInProgress = Application.iManInstance.Presenter.IsAnyItemInProgress();
var someCheckedOutToOther = contextItems.Any(a => a.IsCheckedOutToOtherUser());
var canSync = allItemsAreLinked
&& hasPermissions
&& !anyTaskAlreadyInProgress
&& !someCheckedOutToOther;
if (canSync)
{
Application.iManInstance.View.OnSynchItemsClicked(new SyncItemsClickedArgs(contextItems.Select(p => new SyncItemInformation(p, ConflictOptions.None, ConflictVersionOptions.None))));
}
else if (!someItemsLinked)
{
if (contextItems.HasOnlyOneFile())
{
ShowError(
new BaseException(Resources.STR_SyncItemsCommand_ExecuteAction_Unable_to_sync,
Resources.STR_ERROR_TEXT_File_is_not_sent_to_Workshare_), contextItems.FirstOrDefault());
}
else if (contextItems.HasOnlyOneFolder())
{
ShowError(
new BaseException(Resources.STR_SyncItemsCommand_ExecuteAction_Unable_to_sync,
Resources.STR_ERROR_TEXT_Folder_is_not_sent_to_Workshare_), contextItems.FirstOrDefault());
}
else
{
ShowError(
new BaseException(Resources.STR_SyncItemsCommand_ExecuteAction_Unable_to_sync,
Resources.STR_ExecuteAction_All_items_are_not_sent_to_Workshare),
contextItems.FirstOrDefault());
}
}
else if (!allItemsAreLinked)
{
ShowError(new BaseException(Resources.STR_SyncItemsCommand_ExecuteAction_Unable_to_sync, Resources.STR_ExecuteAction_Some_items_are_not_sent_to_Workshare), contextItems.FirstOrDefault());
}
else if (anyTaskAlreadyInProgress)
{
ShowError(new BaseException(Resources.STR_SyncItemsCommand_ExecuteAction_Unable_to_sync, Resources.STR_ExecuteAction_Some_another_task_is_already_in_progress), contextItems.FirstOrDefault());
}
else if (someCheckedOutToOther)
{
ShowError(new BaseException(Resources.STR_SyncItemsCommand_ExecuteAction_Unable_to_sync, Resources.STR_ExecuteAction_File_is_checked_out_to_another_users), contextItems.FirstOrDefault());
}
else
{
ShowError(new DMSUnAuthorizedException(),contextItems.FirstOrDefault());
}
}
}
[ClassInterface(ClassInterfaceType.None)]
[Guid("41CF85DC-221B-48F5-8159-4083A4A18786")]
[ComVisible(true)]
public class DocumentSyncItemsCommand : SyncItemsCommand
{
public override string HelpText
{
get
{
return "Sync document from Workshare\nSync from Workshare";
}
set
{
base.HelpText = value;
}
}
public override int Status
{
get
{
//Context.OfType<object>().ToList().ForEach(p => System.Diagnostics.Trace.TraceInformation((p.GetType().InvokeMember("Name", System.Reflection.BindingFlags.GetProperty, null, p, new object[0]) ?? "-").ToString()));
bool IsAnyDocumentSelected = false;
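// Enable the command only when at least one document is effectively selected:
// either documents are selected directly, or a single selected folder contains files.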
try
{
var items = GetContextItems();
if (items.Count == 1 && (items[0] is Workshare.Integration.Interfaces.IDMSFolder))
{
var fld = items[0] as Workshare.Integration.Interfaces.IDMSFolder;
IsAnyDocumentSelected = fld.Files.Any();
}
else
{
IsAnyDocumentSelected = items.OfType<Workshare.Integration.Interfaces.IDMSFile>().Any();
}
}
catch
{ }
return (int)((IsAnyDocumentSelected && !Application.iManInstance.Presenter.IsAnyItemInProgress() && IsAnyServerAvailable()) ? CommandStatus.nrActiveCommand : CommandStatus.nrGrayedCommand);
}
set
{
}
}
}
[ClassInterface(ClassInterfaceType.None)]
[Guid("0BDF6E98-73AC-4B5C-9D08-247D1F7E475F")]
[ComVisible(true)]
public class FolderSyncItemsCommand : SyncItemsCommand
{
public override string HelpText
{
get
{
return "Sync folder from Workshare\nSync from Workshare";
}
set
{
base.HelpText = value;
}
}
}
[ClassInterface(ClassInterfaceType.None)]
[Guid("EB73626D-1145-491B-9866-F88FC890662D")]
[ComVisible(true)]
public class FolderTrueSyncItemsCommand : Command
{
public Bitmap ResizeBitmap(Bitmap b, int nWidth, int nHeight)
{
var result = new Bitmap(nWidth, nHeight);
using (Graphics g = Graphics.FromImage(result))
g.DrawImage(b, 0, 0, nWidth, nHeight);
return result;
}
public override object Bitmap
{
get
{
if (GetClientType() == iManType.DeskSite)
{
return Resources.workshare_sync.GetHbitmap();
}
if (Application.iManInstance.ClientVersion == "9")
{
return ResizeBitmap(GetBitmap(GetSelectedFolders()), 32, 32).GetHbitmap();
}
return ResizeBitmap(GetBitmap(GetSelectedFolders()), 16, 16).GetHbitmap();
}
set
{
}
}
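// Draws the orange "files to sync" counter badge over the ribbon icon; the deltas
// widen the ellipse and shift the text to fit two- and three-digit counts.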
private void DrawFilesToSyncCountEllipse(string cnt, Graphics gr, int ellipseDeltaX, int stringDeltaX)
{
int initialStringX = 19;
int initialEllipseXData = 16;
gr.FillEllipse(new SolidBrush(Color.FromArgb(243, 129, 48)), new Rectangle(initialEllipseXData - ellipseDeltaX, 0, initialEllipseXData + ellipseDeltaX, 16));
gr.DrawEllipse(new Pen(Color.White), new Rectangle(initialEllipseXData - ellipseDeltaX, 0, initialEllipseXData + ellipseDeltaX, 16));
gr.DrawString(cnt, new Font("Arial", 8, FontStyle.Bold),
new SolidBrush(Color.White), initialStringX - stringDeltaX, 1);
}
private Bitmap GetBitmap(List<IDMSFolder> contextItems)
{
var bm = new Bitmap(Properties.Resources.sync_ribbon_icon32);
if (contextItems.Count() == 1)
{
int cnt =
Components.Common.ActivityCounter.Instance.ActivityCount(
contextItems.ToList()[0]);
if (cnt > 0 && Status == (int)CommandStatus.nrActiveCommand)
{
// Dispose the Graphics object so GDI handles are not leaked each time the icon is redrawn.
using (Graphics gr = Graphics.FromImage(bm))
{
if (Application.iManInstance.ClientVersion == "9")
{
if (cnt < 10)
{
DrawFilesToSyncCountEllipse(cnt.ToString(CultureInfo.InvariantCulture), gr, 0, 0);
}
else
{
if (cnt < 100)
{
DrawFilesToSyncCountEllipse(cnt.ToString(CultureInfo.InvariantCulture), gr, 2, 3);
}
else
{
DrawFilesToSyncCountEllipse("99+", gr, 6, 9);
}
}
}
else
{
gr.FillEllipse(new SolidBrush(Color.FromArgb(243, 129, 48)), new Rectangle(16, 0, 16, 16));
}
}
}
}
return bm;
}
public override object LargeBitmap
{
get
{
var bm = GetBitmap(GetSelectedFolders());
return bm.GetHbitmap();
}
set
{
}
}
public override string Name
{
get
{
return this.GetType().FullName;
}
set
{
}
}
public override string MenuText
{
get
{
return "Sync";
}
set
{
base.MenuText = value;
}
}
public override CommandType Type
{
get
{
return CommandType.nrStandardCommand;
}
set
{
base.Type = value;
}
}
public override int Status
{
get
{
if (IsAnyServerAvailable() || base.Status == (int)CommandStatus.nrActiveCommand)
{
var contextItems = GetSelectedFolders().ToList();
var fld = contextItems.FirstOrDefault();
Components.Common.ActivityCounter.Instance.CurrentFolder = fld;
return (int)((fld != null && fld.DoesUserHavePermissions(Permissions.EditItem)) ? CommandStatus.nrActiveCommand : CommandStatus.nrGrayedCommand);
}
else
{
return (int)CommandStatus.nrGrayedCommand;
}
}
set
{
}
}
public override string HelpText
{
get
{
return "Sync folder from Workshare\nSync folder";
}
set
{
}
}
public override void ExecuteAction()
{
if (Application.iManInstance.Presenter.IsAnyItemInProgress()) return;
if (!Application.iManInstance.Presenter.LoginIfNeeded())
{
return;
}
Application.iManInstance.ClientType = GetClientType();
var contextItems = GetSelectedFolders().ToList();
var items = GetContextItems().ToList();
var hasPermissions = contextItems.All(p => p.DoesUserHavePermissions(Permissions.EditItem));
var anyTaskAlreadyInProgress = Application.iManInstance.Presenter.IsAnyItemInProgress();
var someCheckedOutToOther = GetContextItems().Any(a => a.IsCheckedOutToOtherUser());
var foldersIsSentToWS = contextItems.All(p => syncService.GetSyncInfo(p) != null);
bool canTrueSync = contextItems.OfType<BaseDMSFolder>().Count() == 1
&& foldersIsSentToWS
&& hasPermissions
&& !anyTaskAlreadyInProgress;
if (canTrueSync)
{
((ModuleView)Application.iManInstance.View).OnTrueSyncItemsClicked(new SyncItemsClickedArgs(contextItems.Select(p => new SyncItemInformation(p, ConflictOptions.None, ConflictVersionOptions.None))), (GetClientType() == iManType.FileSite));
}
else if (!hasPermissions)
{
ShowError(new DMSUnAuthorizedException(), items.FirstOrDefault());
}
else if (!foldersIsSentToWS)
{
ShowError(new BaseException(Resources.STR_SyncItemsCommand_ExecuteAction_Unable_to_sync, Resources.STR_ERROR_TEXT_Folder_is_not_sent_to_Workshare_), items.FirstOrDefault());
}
else if (anyTaskAlreadyInProgress)
{
ShowError(new BaseException(Resources.STR_SyncItemsCommand_ExecuteAction_Unable_to_sync, Resources.STR_ExecuteAction_Some_another_task_is_already_in_progress), items.FirstOrDefault());
}
else if (someCheckedOutToOther)
{
ShowError(new BaseException(Resources.STR_SyncItemsCommand_ExecuteAction_Unable_to_sync, Resources.STR_ExecuteAction_File_is_checked_out_to_another_users), items.FirstOrDefault());
}
else
{
ShowError(new DMSUnAuthorizedException(), items.FirstOrDefault());
}
}
}
}
<file_sep>/WSComponents/src/WSComponents/Interfaces/IWorkUnit.cs
using System.Collections.Generic;
using Workshare.Components.Common;
using Workshare.Integration.Interfaces;
namespace Workshare.Components.Interfaces
{
public interface IWorkUnit
{
void Execute();
WorkStatus Status { get; set; }
string StatusDescription { get; set; }
string Name { get; }
TargetItem[] TargetItems { get; }
string ItemName { set; get; }
void OnAdded();
}
public class TargetItem
{
public string Id { private set; get; }
public string Name { set; get; }
public TargetItem(string id)
{
this.Id = id;
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncDialog/VMs/FileActivityBaseVm.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
namespace Workshare.Components.Views.TrueSyncDialog.VMs
{
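/// <summary>
/// Base view model for a single file activity row in the TrueSync dialog; exposes the
/// underlying FileMapActivity data (name, check-out info, discard state, actions and errors).
/// </summary>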
public abstract class FileActivityBaseVm<T> : ItemMapActivityVm<T>, IFileActivityBaseVm
{
private FileMapActivity _data;
public FileActivityBaseVm(FileMapActivity activity)
{
if (activity == null) throw new ArgumentNullException("activity");
this._data = activity;
}
public override string Id
{
get { return _data.Id; }
}
public override string MapId
{
get { return _data.MapId; }
}
public override string Name
{
get { return _data.Filename; }
}
public string ViewOnlineUrl
{
get { return _data.ViewOnlineUrl; }
}
public override bool IsDiscarded
{
get
{
return _data.IsDiscarded;
}
set
{
_data.IsDiscarded = value;
}
}
public override object data
{
get { return _data; }
}
public string CheckOutTo
{
get { return _data.CheckedOutTo; }
}
public string CheckOutMachine
{
get { return _data.CheckOutMachine; }
}
public DateTime LastUpdateTime
{
get { return _data.WsLastUpdateDate; }
}
public virtual bool DiscardAlwaysVisible
{
get { return false; }
}
public Visibility DiscardVisibility
{
get
{
if (DiscardAlwaysVisible)
{
return Visibility.Visible;
}
return (_data.IsDiscarded ? Visibility.Hidden : Visibility.Visible);
}
}
public string Error
{
get
{
if (_data.Error != null)
{
#if DEBUG
return _data.Error.ToString();
#else
return _data.Error.Message;
#endif
}
return string.Empty;
}
}
public List<DocumentActionVM> Actions
{
get { return _data.Actions.Select(a => new DocumentActionVM(a, this)).ToList(); }
}
public virtual bool IsInProgress
{
get { return false; }
}
internal bool CanExecuteAction(DocumentActionVM action)
{
return _data.CanExecute(action.action);
}
}
public interface IFileActivityBaseVm
{
string ViewOnlineUrl { get; }
bool IsInProgress { get; }
List<DocumentActionVM> Actions { get; }
}
}
<file_sep>/WSComponents/src/WSComponents/Views/Progress/VMs/TargetItemVm.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Workshare.Components.Interfaces;
namespace Workshare.Components.Views.Progress.VMs
{
public class TargetItemVm
{
private readonly TargetItem _item;
public bool IsFor(TargetItem item)
{
return item == this._item;
}
public TargetItemVm(Interfaces.TargetItem item)
{
this._item = item;
}
public string Id
{
get
{
return _item.Id;
}
}
public string Name
{
get { return _item.Name; }
}
}
}
<file_sep>/WSComponents/src/WSCloudService/CookieAwareClient.cs
using System;
using System.Collections.Generic;
using System.Text;
using System.Net;
using System.IO;
using System.Collections.Specialized;
using System.Globalization;
namespace WorksharePlatform
{
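/// <summary>
/// WebClient that keeps a user's Workshare session cookies across requests, appends the
/// temporary auth token to the request URL until a device-credential cookie has been
/// captured, and applies the user's proxy settings when enabled.
/// </summary>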
public class CookieAwareClient
: WebClient
{
public const string TmpAuthToken = "<PASSWORD>";
public CookieAwareClient()
: this(new UserDetails())
{
}
public CookieAwareClient(UserDetails user)
{
User = user;
if (User.SessionCookies == null)
{
CookieContainer = new CookieContainer();
}
Timeout = 5 * 60 * 1000; // 5 Minutes
Encoding = Encoding.UTF8;
}
#region Private Methods
private int WriteEncoded(Stream formDataStream, string data)
{
byte[] bytes = Encoding.UTF8.GetBytes(data);
if (formDataStream != null)
{
formDataStream.Write(bytes, 0, bytes.Length);
}
return bytes.Length;
}
private int WriteData(Stream formDataStream, byte[] data, int bufferLength)
{
if (formDataStream != null)
{
formDataStream.Write(data, 0, bufferLength);
}
return bufferLength;
}
private void WriteMultipartFormData(Stream requestStream, Dictionary<string, object> parameters, string boundary)
{
foreach (var param in parameters)
{
if (param.Value is FileDetails)
{
// The parameter is file data. Add header info and file data bytes.
var fileToUpload = (FileDetails)param.Value;
string fileName = fileToUpload.Name;
string header = string.Format("--{0}\r\nContent-Disposition: form-data; name=\"{1}\"; filename=\"{2}\";\r\nContent-Type: {3}\r\n\r\n",
boundary,
param.Key,
fileName,
fileToUpload.CurrentVersion.ContentType ?? "application/octet-stream");
WriteEncoded(requestStream, header);
using (Stream fileStream = File.Open(fileToUpload.FilePath, FileMode.Open, FileAccess.Read, FileShare.None))
{
using (BinaryReader br = new BinaryReader(fileStream))
{
WriteData(requestStream, br.ReadBytes((int)fileStream.Length), (int)fileStream.Length);
}
}
WriteEncoded(requestStream, "\r\n");
}
else
{
string data = param.Value.ToString();
// The parameter is a value type. Add header info, convert the value data to a string and add that value string.
string postData = string.Format("--{0}\r\nContent-Disposition: form-data; name=\"{1}\"\r\n\r\n{2}\r\n",
boundary,
param.Key,
data);
WriteEncoded(requestStream, postData);
}
}
// Write the footer info.
string footer = string.Format("--{0}--\r\n", boundary);
WriteEncoded(requestStream, footer);
}
private void WriteMultipartFormData3(Stream requestStream, FileDetails file)
{
using (Stream fileStream = File.Open(file.FilePath, FileMode.Open, FileAccess.Read, FileShare.None))
{
using (BinaryReader br = new BinaryReader(fileStream))
{
WriteData(requestStream, br.ReadBytes((int)fileStream.Length), (int)fileStream.Length);
}
}
}
private void WritePutUserWDSValue(Stream requestStream, byte[] data)
{
WriteData(requestStream, data, data.Length);
}
private HttpWebResponse FollowRedirection(string location)
{
var request = (HttpWebRequest)WebRequest.Create(location);
request.CookieContainer = CookieContainer;
request.ReadWriteTimeout = Timeout;
request.AllowAutoRedirect = true;
if (User.IsProxyEnable)
{
request.Proxy = User.Proxy;
request.Proxy.Credentials = User.ProxyCredentials;
}
return (HttpWebResponse)request.GetResponse();
}
#endregion
#region Protected Methods
protected override WebRequest GetWebRequest(Uri address)
{
var urlWithToken = address.ToString();
if (!string.IsNullOrEmpty(User.AuthToken))
{
if (!urlWithToken.Contains("?"))
{
urlWithToken += "?";
}
else
{
if (!urlWithToken.EndsWith("&"))
{
urlWithToken += "&";
}
}
if (User.SessionCookies.Count < 2)
{
urlWithToken += TmpAuthToken + "=" + User.AuthToken;
}
User.AuthToken = null;
}
var request = base.GetWebRequest(new Uri(urlWithToken));
var httpRequest = request as HttpWebRequest;
if (httpRequest!=null)
{
httpRequest.CookieContainer = CookieContainer;
httpRequest.AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate;
if (User.IsProxyEnable)
{
httpRequest.Proxy = User.Proxy;
httpRequest.Proxy.Credentials = User.ProxyCredentials;
}
}
// Return the request itself so non-HTTP requests are not lost (httpRequest may be null here).
return request;
}
public const string user_credentials = "<PASSWORD>";
public const string _session_id = "_session_id";
public const string device_credential = "device_credentials";
public const string cloudemail = "cloudemail";
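// Captures cookies from the response; the cookie container is replaced only when exactly
// one device-credential cookie is present, i.e. when a session has just been established.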
protected override WebResponse GetWebResponse(WebRequest request)
{
var response = base.GetWebResponse(request);
var cookies = ((HttpWebResponse)response).Cookies;
var temp = new CookieContainer();
var authCookiesCount = 0;
foreach (Cookie cookie in cookies)
{
if (string.Equals(cookie.Name, device_credential, StringComparison.InvariantCultureIgnoreCase))
//|| string.Equals(cookie.Name, _session_id, StringComparison.InvariantCultureIgnoreCase))
{
authCookiesCount++;
}
temp.Add(cookie);
}
if (authCookiesCount == 1)
{
CookieContainer = temp;
}
return response;
}
#endregion
public string DownloadFile()
{
string filePath = Path.GetTempFileName();
DownloadFile(Uri, filePath);
return filePath;
}
public byte[] PostValues(string action, NameValueCollection queries)
{
Uri = new Uri(string.Format(CultureInfo.CurrentCulture, "{0}/{1}", PlatformService.ServiceUrl, action));
return UploadValues(Uri, "POST", queries);
}
public byte[] PostValuesVersion3(string action, NameValueCollection queries)
{
Uri = new Uri(string.Format(CultureInfo.CurrentCulture, "{0}/{1}", PlatformService.ServiceUrl3, action));
return UploadValues(Uri, "POST", queries);
}
public byte[] PostValues4(string action, NameValueCollection queries)
{
Uri = new Uri(string.Format(CultureInfo.CurrentCulture, "{0}/{1}", PlatformService.ServiceUrl4, action));
return UploadValues(Uri, "POST", queries);
}
public byte[] PutValues(string action, NameValueCollection queries)
{
Uri = new Uri(string.Format(CultureInfo.CurrentCulture, "{0}/{1}", PlatformService.ServiceUrl, action));
return UploadValues(Uri, "PUT", queries);
}
public string GetString(string action)
{
Uri = new Uri(string.Format(CultureInfo.CurrentCulture, "{0}/{1}", PlatformService.ServiceUrl, action));
string res = DownloadString(Uri);
if (res.StartsWith("<!DOCTYPE html", StringComparison.InvariantCultureIgnoreCase))
{
throw new UnauthorizedAccessException();
}
return res;
}
public string GetString2(string action)
{
Uri = new Uri(string.Format(CultureInfo.CurrentCulture, "{0}/{1}", PlatformService.ServiceUrl2, action));
string res = DownloadString(Uri);
if (res.StartsWith("<!DOCTYPE html", StringComparison.InvariantCultureIgnoreCase))
{
throw new UnauthorizedAccessException();
}
return res;
}
public string GetString3(string action)
{
Uri = new Uri(string.Format(CultureInfo.CurrentCulture, "{0}/{1}", PlatformService.ServiceUrl3, action));
string res = DownloadString(Uri);
if (res.StartsWith("<!DOCTYPE html", StringComparison.InvariantCultureIgnoreCase))
{
throw new UnauthorizedAccessException();
}
return res;
}
public string GetString4(string action)
{
Uri = new Uri(string.Format(CultureInfo.CurrentCulture, "{0}/{1}", PlatformService.ServiceUrl4, action));
string res = DownloadString(Uri);
if (res.StartsWith("<!DOCTYPE html", StringComparison.InvariantCultureIgnoreCase))
{
throw new UnauthorizedAccessException();
}
return res;
}
public string GetDictionaryString(string action)
{
Uri = new Uri(string.Format(CultureInfo.CurrentCulture, "{0}/{1}", PlatformService.ServiceDictionaryUrl, action));
string res = DownloadString(Uri);
if (res.StartsWith("<!DOCTYPE html", StringComparison.InvariantCultureIgnoreCase))
{
throw new UnauthorizedAccessException();
}
return res;
}
public byte[] DeleteValues(string action, NameValueCollection queries)
{
Uri = new Uri(string.Format(CultureInfo.CurrentCulture, "{0}/{1}", PlatformService.ServiceUrl, action));
return UploadValues(Uri, "DELETE", queries);
}
public byte[] DeleteValues2(string action, NameValueCollection queries)
{
Uri = new Uri(string.Format(CultureInfo.CurrentCulture, "{0}/{1}", PlatformService.ServiceUrl2, action));
return UploadValues(Uri, "DELETE", queries);
}
public byte[] DeleteValues3(string action, NameValueCollection queries)
{
Uri = new Uri(string.Format(CultureInfo.CurrentCulture, "{0}/{1}", PlatformService.ServiceUrl3, action));
return UploadValues(Uri, "DELETE", queries);
}
public void DeleteDictionary(string action)
{
Uri =
new Uri(string.Format(CultureInfo.CurrentCulture, "{0}/{1}", PlatformService.ServiceDictionaryUrl,
action));
var request = (HttpWebRequest) WebRequest.Create(Uri);
request.Method = "DELETE";
request.Timeout = Timeout;
request.CookieContainer = CookieContainer;
if (User.IsProxyEnable)
{
request.Proxy = User.Proxy;
request.Proxy.Credentials = User.ProxyCredentials;
}
using (var response = (HttpWebResponse)request.GetResponse())
{
// Get the URL of the new page.
//string location = response.GetResponseHeader("Location");
// Close the previous response.
response.Close();
// Get the HTTP web response of the new page.
/*using (FollowRedirection(location))
{
}*/
}
}
public byte[] GetData(string action)
{
Uri = new Uri(string.Format(CultureInfo.CurrentCulture, "{0}/{1}", PlatformService.ServiceUrl, action));
var data = DownloadData(Uri);
return RemoveTrailingNulls(data);
}
public static byte[] RemoveTrailingNulls(byte[] data)
{
int i = data.Length - 1;
// Guard against empty or all-zero buffers so the index never goes negative.
while (i >= 0 && data[i] == 0)
{
--i;
}
var newData = new byte[i + 1];
Array.Copy(data, newData, i + 1);
return newData;
}
public void MultipartFormData(string url, Dictionary<string, object> parameters)
{
const string formDataBoundary = "-----------------------------28947758029299";
const string contentType = "multipart/form-data; boundary=" + formDataBoundary;
// Creating HTTP web request.
var request = (HttpWebRequest) WebRequest.Create(url);
request.Method = "POST";
request.Timeout = Timeout;
request.ContentType = contentType;
request.AllowAutoRedirect = false;
request.AllowWriteStreamBuffering = true;
request.CookieContainer = CookieContainer;
if (User.IsProxyEnable)
{
request.Proxy = User.Proxy;
request.Proxy.Credentials = User.ProxyCredentials;
}
// Get request stream.
using (Stream requestStream = request.GetRequestStream())
{
// Writing to request stream.
WriteMultipartFormData(requestStream, parameters, formDataBoundary);
}
using (var response = (HttpWebResponse) request.GetResponse())
{
var statusCode = (int) response.StatusCode;
if (statusCode >= 300 && statusCode < 400)
{
// Get the URL of the new page.
string location = response.GetResponseHeader("Location");
// Close the previous response.
response.Close();
// Get the HTTP web response of the new page.
using (FollowRedirection(location))
{
}
}
}
}
public void MultipartFormData3(string url, FileDetails file, string method = "POST")
{
// Creating HTTP web request.
var request = (HttpWebRequest)WebRequest.Create(url);
request.Method = method;
request.Timeout = Timeout;
request.AllowAutoRedirect = false;
request.AllowWriteStreamBuffering = true;
request.CookieContainer = CookieContainer;
//Headers
request.ContentType = file.CurrentVersion.ContentType;
request.UserAgent = "WorkshareProtect";
request.Headers.Add(HttpRequestHeader.Authorization, file.CurrentVersion.Authorization);
request.Headers.Add("x-amz-date", file.CurrentVersion.AuthDate);
request.Headers.Add("x-amz-server-side-encryption", file.CurrentVersion.Encryption);
if (User.IsProxyEnable)
{
request.Proxy = User.Proxy;
request.Proxy.Credentials = User.ProxyCredentials;
}
// Get request stream.
using (Stream requestStream = request.GetRequestStream())
{
// Writing to request stream.
WriteMultipartFormData3(requestStream, file);
}
using (var response = (HttpWebResponse) request.GetResponse())
{
var statusCode = (int) response.StatusCode;
if (statusCode >= 300 && statusCode < 400)
{
// Get the URL of the new page.
string location = response.GetResponseHeader("Location");
// Close the previous response.
response.Close();
// Get the HTTP web response of the new page.
using (FollowRedirection(location))
{
}
}
}
}
public void PutUserWDSValue(string action, string dicData)
{
Uri = new Uri(string.Format(CultureInfo.CurrentCulture, "{0}/{1}", PlatformService.ServiceDictionaryUrl, action));
var request = (HttpWebRequest)WebRequest.Create(Uri);
request.Method = "PUT";
request.Timeout = Timeout;
request.AllowAutoRedirect = false;
request.AllowWriteStreamBuffering = true;
request.CookieContainer = CookieContainer;
request.ContentType = "application/json";
if (User.IsProxyEnable)
{
request.Proxy = User.Proxy;
request.Proxy.Credentials = User.ProxyCredentials;
}
var encoding = new ASCIIEncoding();
byte[] data = encoding.GetBytes(dicData);
// Get request stream.
using (Stream requestStream = request.GetRequestStream())
{
// Writing to request stream.
WritePutUserWDSValue(requestStream, data);
}
using (var response = (HttpWebResponse) request.GetResponse())
{
var statusCode = (int) response.StatusCode;
if (statusCode >= 300 && statusCode < 400)
{
// Get the URL of the new page.
string location = response.GetResponseHeader("Location");
// Close the previous response.
response.Close();
// Get the HTTP web response of the new page.
using (FollowRedirection(location))
{
}
}
}
}
public CookieContainer CookieContainer
{
get
{
return User.SessionCookies;
}
set
{
User.SessionCookies = value;
}
}
public Uri Uri { get; set; }
public UserDetails User { get; private set; }
public int Timeout { get; set; }
}
}
<file_sep>/WSComponents/src/WSComponents/Views/Common/WorkspaceNameConverter.cs
using System;
using System.Windows.Data;
namespace Workshare.Components.Views.Common
{
public class WorkspaceNameConverter : IValueConverter
{
public object Convert(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture)
{
if (value is string)
{
return string.Format("Activity for the {0} on Workshare", value);
}
return "Activity for the workspace on Workshare";
}
public object ConvertBack(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture)
{
throw new NotImplementedException();
}
}
}
<file_sep>/SharePoint/Common/VersionInfoBlock/AssemblyVersionInfoBlock.cs
using System;
using System.Reflection;
using System.Runtime.InteropServices;
// This common assembly info file is used by all assemblies to define version information, etc.
// If you need an assembly-specific value, add a separate AssemblyInfo file to your project.
// When you add this file to a new project, use 'Add Existing Item' and pick 'Add As Link'
// from the dropdown arrow. Otherwise the file is copied into the local directory and will
// no longer pick up updates.
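// A linked reference in a .csproj typically looks roughly like this (illustrative sketch
// only; the relative path depends on where the consuming project lives):
// <Compile Include="..\..\Common\VersionInfoBlock\AssemblyVersionInfoBlock.cs">
// <Link>Properties\AssemblyVersionInfoBlock.cs</Link>
// </Compile>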
[assembly: AssemblyInformationalVersion("1.0.0.0")]
[assembly: AssemblyVersion("1.0.0.0")]
#if !THIRD_PARTY_LIBRARY
[assembly: AssemblyProduct("Workshare SharePoint Integration")]
[assembly: AssemblyCompany("Workshare")]
[assembly: AssemblyCopyright("Copyright ©1998-2013 Workshare")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
#endif
[assembly: AssemblyFileVersion("0.0.0.0")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyDelaySign(false)]
[assembly: AssemblyKeyName("")]
#if !_COMVISIBLE_DEFINED
[assembly: ComVisible(false)]
#endif
#if !_CLSCOMPLIANT_DEFINED
[assembly: CLSCompliant(true)]
#endif
<file_sep>/OpenText/src/Workshare.OpenText/CommandHelper.cs
using System;
using System.Collections.Generic;
using System.Linq;
using Hummingbird.DM.Extensions.Interop.DOCSObjects;
using Workshare.Components;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Interfaces;
using Workshare.OpenText.Concrete;
using Workshare.OpenText.Contrete;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.OpenText
{
public static class CommandHelper
{
static readonly string SendMenuID = "WorkshareSend";
static readonly string SyncMenuID = "WorkshareSync";
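// Works out which Workshare menu entry (if any) applies to the current selection:
// returns a "Send" item when the selected profiles have not been sent to Workshare,
// a "Sync" item when they are already linked, and null when neither applies (e.g. a
// task is in progress, an item is locked by another user, or the selection mixes states).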
public static WSOTMenuItem GetCommandName(IDocProfiles items)
{
if (Application.OTInstance.Presenter.IsAnyItemInProgress())
{
return null;
}
if (items != null && items.Count > 0)
{
bool IsSend = true;
bool IsSync = true;
foreach (IProfile prof in items)
{
prof.Fetch();
var SendInfo = (OTSyncInfo)OTSyncInfo.Parse(prof.Columns[Application.OTInstance.SENDINFO_FIELD].Value.ToString());
// send/sync operate allow
if (prof.Columns["STATUS"].Value.ToString() == "0" // for any items if it not check out and not locked
|| ((prof.Columns["STATUS"].Value.ToString() != "0" && string.Compare(Application.OTInstance.CurrentUser, HistoryHelper.GetLastCheckedOutUser(prof as Hummingbird.DM.Extensions.Interop.DECore.BCObject), StringComparison.InvariantCultureIgnoreCase) == 0) // if item is checked out/locked for current user
&& string.Compare("FOLDER", prof.Columns["TYPE_ID"].Value.ToString(), StringComparison.InvariantCultureIgnoreCase) != 0) // and item is not a locked folder
)
{
if ((prof.Columns["STATUS"].Value.ToString() == "0" && string.Compare("FOLDER", prof.Columns["TYPE_ID"].Value.ToString(), StringComparison.InvariantCultureIgnoreCase) != 0) || SendInfo == null || SendInfo.DMSItemId != prof.DocNumber)
{
IsSync = false;
}
else
{
IsSend = false;
}
}
else
{
IsSync = IsSend = false;
}
if (!IsSync && !IsSend)
{
return null;
}
}
var mnu = new WSOTMenuItem() { Parent = "DOCSOPEN.DOCUMENT", InsertBefore = "DOCSOPEN.CheckOut" };
if (IsSend)
{
mnu.ID = SendMenuID;
mnu.Name = RES.STR_MENU_SEND;
}
else
if (IsSync)
{
mnu.ID = SyncMenuID;
mnu.Name = RES.STR_MENU_SYNC;
}
return mnu;
}
else
{
return null;
}
}
public static IEnumerable<IDMSItem> GetSelectedItems(IDocProfiles items)
{
var selected = new List<IDMSItem>();
foreach (IProfile prof in items)
{
if (prof.CurrentVersion == null)
{
selected.Add(new ManFolder(prof as IProject));
}
else
{
selected.Add(new OTFile(prof));
}
}
return selected;
}
public static void ExecuteMenu(IDocProfiles items, string mnuID)
{
if (!Application.OTInstance.Presenter.IsAnyItemInProgress())
{
// This context must be refreshed every time before executing the menu because, as of
// version 5.3.1, we cannot create the ApplicationClass manually and cannot cache it once for all work.
if (items != null && items.Count > 0 && (string.Equals(mnuID, SendMenuID, StringComparison.InvariantCultureIgnoreCase) || string.Equals(mnuID, SyncMenuID, StringComparison.InvariantCultureIgnoreCase)))
{
if (items[1].Library != null && items[1].Library.Application != null)
{
var pApp = items[1].Library.Application;
Application.OTInstance.ParentWindow = pApp.DefParentWindow;
Application.OTInstance.UserDST = pApp.DST;
Application.OTInstance.CurrentLibraryName = pApp.CurrentLibrary.Name;
Application.OTInstance.CurrentLibrary = pApp.CurrentLibrary;
Application.OTInstance.CurrentUser = pApp.CurrentLibrary.UserName;
}
if (string.Equals(mnuID, SendMenuID, StringComparison.InvariantCultureIgnoreCase))
{
Application.OTInstance.View.OnSendItemsClicked(new SendItemsClickedArgs(GetSelectedItems(items)));
}
if (string.Equals(mnuID, SyncMenuID, StringComparison.InvariantCultureIgnoreCase))
{
Application.OTInstance.View.OnSynchItemsClicked(new SyncItemsClickedArgs(GetSelectedItems(items).Select(p => new SyncItemInformation(p, ConflictOptions.None, ConflictVersionOptions.None))));
}
}
}
}
}
public class WSOTMenuItem
{
public string Name { get; set; }
public string ID { get; set; }
public string InsertBefore { get; set; }
public string Parent { get; set; }
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncDialog/UserControls/ActivityToViewOnlineVisibilityConverter.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Data;
using Workshare.Components.Views.TrueSyncDialog.VMs;
namespace Workshare.Components.Views.TrueSyncDialog.UserControls
{
class ActivityToViewOnlineVisibilityConverter:IValueConverter
{
public object Convert(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture)
{
var activity = value as IFileActivityBaseVm;
if (activity != null)
{
return string.IsNullOrEmpty(activity.ViewOnlineUrl) ? Visibility.Collapsed : Visibility.Visible;
}
return Visibility.Visible;
}
public object ConvertBack(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture)
{
throw new NotImplementedException();
}
}
}
<file_sep>/OpenText/src/Workshare.OpenText/Views/ModuleView.cs
using System;
using System.Runtime.InteropServices;
using System.Windows.Interop;
using Workshare.Components.Presenter;
using Workshare.Components.Views;
using Workshare.Components.Views.ConflictResolver;
using Workshare.Components.WSLogger;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Interfaces;
using Workshare.OpenText.Presenter;
namespace Workshare.OpenText.Views
{
public class ModuleView: ModuleViewBase
{
public ModuleView(IWSIntegration integration)
: base(integration)
{
Application.OTInstance.ParentChanged += OTInstance_ParentChanged;
}
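// When OpenText reports a new parent window, re-parent the progress window (if it is
// still loaded) on its own dispatcher thread so it stays attached to the DMS UI.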
void OTInstance_ParentChanged(object sender, EventArgs e)
{
if (progressWindow != null)
{
progressWindow.Dispatcher.BeginInvoke(new Action(() =>
{
if (progressWindow.IsLoaded)
{
new WindowInteropHelper(progressWindow).Owner = MainWindowHandle;
}
}));
}
}
[DllImport("user32.dll", ExactSpelling = true, CharSet = CharSet.Auto)]
public static extern IntPtr GetParent(IntPtr hWnd);
public override ModulePresenterBase CreatePresenter(IWSIntegration integration)
{
return new ModulePresenter(this);
}
public override IntPtr ActivieWindowHandle
{
get
{
if (Application.OTInstance.ParentWindow != 0)
{
var fromOT = new IntPtr(Application.OTInstance.ParentWindow);
var parent = GetParent(fromOT);
_MainWindowHandle = parent != IntPtr.Zero ? parent : fromOT;
}
return _MainWindowHandle;
}
set
{
_MainWindowHandle = value;
}
}
internal ConflictOptions ShowConflictDialog(ConflictItemData wsFile, ConflictItemData localFile)
{
Logger.Write("ModuleView::ShowConflictDialog() START", Severity.Information);
return (ConflictOptions)dispatcher.Invoke(new Func<ConflictOptions>(() =>
{
var dlg = new ConflictResolverWindow(wsFile, localFile, Properties.Resources.STR_CONFLICT_DLG_OPENTEXT_TITLE);
SetMainAsParent(dlg);
dlg.DisableOwner = true;
dlg.ShowDialog();
return dlg.Result;
}));
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Maps/Visitors/GetCeaseCollaborationActionsVisitor.cs
using System.Collections.Generic;
using Workshare.Integration.Processor.Maps;
namespace Workshare.Integration.Processor
{
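/// <summary>
/// Visitor that collects a CeaseCollaborationAction for every file and folder map it
/// visits, using the import and file actions supplied to Initialize.
/// </summary>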
public class GetCeaseCollaborationActionsVisitor : ItemMapVisitor
{
private CeaseCollaborationImportType _importAction;
private CeaseCollaborationType _fileAction;
public void Initialize(CeaseCollaborationImportType importAction, CeaseCollaborationType fileAction)
{
Result = new List<ItemMapActivityAction>();
_importAction = importAction;
_fileAction = fileAction;
}
public override void Visit(FileMap fileMap)
{
Result.Add(new CeaseCollaborationAction(fileMap.Id, _fileAction, _importAction));
}
public override void Visit(FolderMap folderMap)
{
Result.Add(new CeaseCollaborationAction(folderMap.Id, _fileAction, _importAction));
}
public List<ItemMapActivityAction> Result { get; private set; }
}
}
<file_sep>/WSComponents/src/WSComponents/Views/CheckOutDialog/CheckOutDilalogModel.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using Workshare.Components.Concrete;
using Workshare.Components.Views.Common;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Interfaces;
using System.Reflection;
namespace Workshare.Components.Views.CheckOutDialog
{
public class CheckOutDilalogModel : OwnViewModel
{
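// View model for the check-out prompt: the three commands set Result to CheckOut,
// DontCheckOut or None and then close the dialog with the corresponding dialog result.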
public CheckOutDilalogModel()
{
this.FileName = "a.doc";
CheckOutCommand = new RelayCommand((p) =>
{
Result = CheckOutOptions.CheckOut;
RaiseClose(true);
});
DontCheckOutCommand = new RelayCommand((p) =>
{
Result = CheckOutOptions.DontCheckOut;
RaiseClose(true);
});
CancelCommand = new RelayCommand((p) =>
{
Result = CheckOutOptions.None;
RaiseClose(false);
});
}
public RelayCommand CheckOutCommand { get; set; }
public RelayCommand DontCheckOutCommand { get; set; }
public RelayCommand CancelCommand { get; set; }
public IDMSFile File { get; set; }
public string FileName
{
set
{
if (m_Filename != value)
{
m_Filename = value;
this.PropertyHasChanged("FileName");
}
}
get
{
return m_Filename;
}
}
public string DialogTitle { get { return m_Filename; } }
private string m_Filename = "";
internal CheckOutOptions Result { get; set; }
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/SendToWSCommand.cs
using System;
using System.Linq;
using System.Runtime.InteropServices;
using Com.Interwoven.Worksite.iManExt;
using Workshare.Components;
using Workshare.Components.Helpers;
using Workshare.Components.Properties;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Extensions;
namespace Workshare.IManage
{
public class SendToWSCommand : Command
{
public override string MenuText
{
get
{
return "Send to Workshare";
}
set
{
base.MenuText = value;
}
}
public override object Bitmap
{
get
{
return Resources.workshare_sync_to.GetHbitmap();
}
set { }
}
public override string HelpText
{
get { return MenuText + "\nSend item"; }
set { }
}
public override void ExecuteAction()
{
var contextItems = GetContextItems();
if (!contextItems.Any())
{
return;
}
if (!Application.iManInstance.Presenter.LoginIfNeeded())
{
return;
}
Application.iManInstance.ClientType = GetClientType();
var notLinkedItemsCount = contextItems.Count(p => syncService.GetSyncInfo(p, false) == null);
var allItemsAreNotLinked = notLinkedItemsCount == contextItems.Count;
var someItemsNotLinked = notLinkedItemsCount > 0;
var someCheckedOutToOther = contextItems.Any(a => a.IsCheckedOutToOtherUser());
var hasPermissions = contextItems.All(p => p.DoesUserHavePermissions(Permissions.EditItem));
var anyTaskAlreadyInProgress = Application.iManInstance.Presenter.IsAnyItemInProgress();
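// Send is allowed only when none of the selected items has been sent before, the user
// can edit all of them, no other task is running, and nothing is checked out to another user.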
var canSend = !someCheckedOutToOther
&& allItemsAreNotLinked
&& hasPermissions
&& !anyTaskAlreadyInProgress;
if (canSend)
{
Application.iManInstance.View.OnSendItemsClicked(new SendItemsClickedArgs(contextItems));
}
else if (!someItemsNotLinked)
{
if (contextItems.HasOnlyOneFile())
{
ShowError(
new BaseException(Resources.SendToWSCommand_ExecuteAction_Unable_to_send,
Resources.STR_ERROR_TEXT_File_is_sent_to_Workshare), contextItems.FirstOrDefault());
}
else if(contextItems.HasOnlyOneFolder())
{
ShowError(
new BaseException(Resources.SendToWSCommand_ExecuteAction_Unable_to_send,
Resources.STR_ERROR_TEXT_Folder_is_sent_to_Workshare), contextItems.FirstOrDefault());
}
else
{
ShowError(
new BaseException(Resources.SendToWSCommand_ExecuteAction_Unable_to_send,
Resources.STR_ExecuteAction_All_items_are_sent_to_Workshare), contextItems.FirstOrDefault());
}
}
else if (!allItemsAreNotLinked)
{
ShowError(new BaseException(Resources.SendToWSCommand_ExecuteAction_Unable_to_send, Resources.STR_ExecuteAction_Some_items_are_already_sent_to_Workshare), contextItems.FirstOrDefault());
}
else if (anyTaskAlreadyInProgress)
{
ShowError(new BaseException(Resources.SendToWSCommand_ExecuteAction_Unable_to_send, Resources.STR_ExecuteAction_Some_another_task_is_already_in_progress), contextItems.FirstOrDefault());
}
else if (someCheckedOutToOther)
{
ShowErrors(new BaseException(Resources.SendToWSCommand_ExecuteAction_Unable_to_send, Resources.STR_ExecuteAction_File_is_checked_out_to_another_users), contextItems.Where(i => i.IsCheckedOutToOtherUser()).ToList());
}
else
{
ShowError(new DMSUnAuthorizedException(),contextItems.FirstOrDefault());
}
}
}
[ClassInterface(ClassInterfaceType.None)]
[Guid("AFDBB083-A23B-43F1-9005-0E4080668928")]
[ComVisible(true)]
public class DocumentSendToWSCommand : SendToWSCommand
{
public override string HelpText
{
get
{
return "Send document to Workshare\nSend to Workshare";
}
set
{
base.HelpText = value;
}
}
public override int Status
{
get
{
//Context.OfType<object>().ToList().ForEach(p => System.Diagnostics.Trace.TraceInformation((p.GetType().InvokeMember("Name", System.Reflection.BindingFlags.GetProperty, null, p, new object[0]) ?? "-").ToString()));
bool IsAnyDocumentSelected = false;
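// Enable the command only when at least one document is effectively selected:
// either documents are selected directly, or a single selected folder contains files.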
try
{
var items = GetContextItems();
if (items.Count == 1 && (items[0] is Workshare.Integration.Interfaces.IDMSFolder))
{
var fld = items[0] as Workshare.Integration.Interfaces.IDMSFolder;
IsAnyDocumentSelected = fld.Files.Any();
}
else
{
IsAnyDocumentSelected = items.OfType<Workshare.Integration.Interfaces.IDMSFile>().Any();
}
}
catch
{ }
return (int)((IsAnyDocumentSelected && !Application.iManInstance.Presenter.IsAnyItemInProgress() && IsAnyServerAvailable()) ? CommandStatus.nrActiveCommand : CommandStatus.nrGrayedCommand);
}
set
{
}
}
}
[ClassInterface(ClassInterfaceType.None)]
[Guid("C990BEBD-D0F4-4363-B869-6EA4E129462D")]
[ComVisible(true)]
public class FolderSendToWSCommand : SendToWSCommand
{
public override string HelpText
{
get
{
return "Send folder to Workshare\nSend to Workshare";
}
set
{
base.HelpText = value;
}
}
}
}
<file_sep>/SharePoint/src/WorkshareCloud.Common/Exceptions.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Web.Script.Serialization;
using Microsoft.SharePoint;
namespace WorkshareCloud.Common
{
public class ItemException
{
public string Name
{
get
{
return SPItem != null ? SPItem.Name : "";
}
}
public string Url
{
get
{
return SPItem != null ? SPItem.Url : "";
}
}
public int ID
{
get
{
return SPItem != null ? SPItem.ID : -1;
}
}
public Guid ParentListID
{
get
{
return SPItem != null ? SPItem.ParentList.ID : Guid.Empty;
}
}
public object Item
{
set;
get;
}
public Exception Error
{
set;
get;
}
public SPListItem SPItem
{
get
{
return Item as SPListItem;
}
}
}
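/// <summary>
/// Accumulates per-item errors; Check (also invoked from Dispose, so a using block can
/// act as a guard) throws ListProcessException when any errors were recorded.
/// </summary>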
public class ItemsErrorsCollection : IDisposable
{
List<ItemException> m_errors = new List<ItemException>();
public void Add(object item, Exception ex)
{
m_errors.Add(new ItemException()
{
Item = item,
Error = ex
});
}
void Add(ItemException ex)
{
m_errors.Add(ex);
}
public void Clear()
{
m_errors.Clear();
}
public void AddRange(ItemsErrorsCollection errors)
{
errors.AsQueryble().ToList().ForEach(p => this.Add(p));
}
public IQueryable<ItemException> AsQueryble()
{
return m_errors.AsQueryable();
}
public int Count
{
get
{
return m_errors.Count;
}
}
public void Check()
{
if (this.Count > 0)
{
throw new ListProcessException(this);
}
}
public void Dispose()
{
Check();
}
}
interface IData
{
string ToJSON();
int Code { get; }
}
public class ListProcessException : BaseException
{
const int FOLDER_NOT_FOUND_ID = -1;
public ItemsErrorsCollection errorList;
public ListProcessException(ItemsErrorsCollection errList)
: base(Properties.Resources.STR_ERROR_HAPPEN, "")
{
errorList = errList;
}
public override string ToJSON()
{
var res = (errorList ?? new ItemsErrorsCollection()).AsQueryble()
//.Where(i => i.SPItem != null)
.Select(p =>
new
{
name = p.Name,
url = p.Url,
error = p.Error.Message,
caption = (p.Error is BaseException)?((BaseException)p.Error).Caption:"",
code = (p.Error is IData) ? ((IData)p.Error).Code : 0,
itemId = p.ID,
listGuid = "{" + p.ParentListID + "}",
folderId = (p.Error is CloudItemNotFound) ? ((((CloudItemNotFound)p.Error).IsParentFolderExists) ? ((CloudItemNotFound)p.Error).DestinationFolderID : FOLDER_NOT_FOUND_ID) : FOLDER_NOT_FOUND_ID,
data = (p.Error is IData) ? ((IData)p.Error).ToJSON() : ""
}
);
return new JavaScriptSerializer().Serialize(res);
}
}
public class BaseException : Exception, IData
{
public BaseException()
: base()
{ }
public BaseException(string caption, string msg)
: base(msg)
{
Caption = caption;
}
public BaseException(string caption, string msg, Exception ex)
: base(msg, ex)
{
Caption = caption;
}
public string Caption { get; private set; }
public virtual string ToJSON()
{
return "";
}
public virtual int Code { get { return 0; } }
}
public class CloudFieldNotFound : BaseException
{
public CloudFieldNotFound()
: base(Properties.Resources.STR_ERROR, Properties.Resources.STR_VIEWNOTINITIALIZED_TEXT)
{
}
public CloudFieldNotFound(string message)
: base(Properties.Resources.STR_ERROR, message)
{
}
}
public class OfflineException : BaseException
{
public OfflineException(string message, Exception inner)
: base(Properties.Resources.STR_CONNECTION_ERROR_CAPTION, message, inner)
{
}
}
public class FileConflictException : BaseException
{
public DateTime fileSP;
public DateTime fileCloud;
public string filenameSP, filenameCloud;
public int fileid;
public Guid listguid;
string userCloud, userSP;
public FileConflictException(string f_name_sp, string f_name_cloud, int f_id, Guid listID, DateTime fileOnSP, DateTime fileOnCloud, string user_sp, string user_cloud)
: base(Properties.Resources.STR_FILECONFLICT_CAPTION, Properties.Resources.STR_FILECONFLICT_TEXT)
{
filenameSP = f_name_sp;
filenameCloud = f_name_cloud;
listguid = listID;
userCloud = user_cloud;
userSP = user_sp;
fileid = f_id;
fileSP = fileOnSP;
fileCloud = fileOnCloud;
}
public override string ToJSON()
{
return new JavaScriptSerializer().Serialize(new
{
spModified=fileSP.ToString(),
wsModified=fileCloud.ToString(),
spName = filenameSP,
wsName = filenameCloud,
id=fileid,
listGUID=listguid.ToString(),
spUser=userSP,
wsUser=userCloud
});
}
public override int Code
{
get { return 503; }
}
}
public class BadRequest : BaseException
{
public BadRequest(string inputRequest)
: base(Properties.Resources.STR_ERROR, string.Format("BadRequest. Additional Data:\n{0}", inputRequest))
{
}
}
public class ItemNotFound : BaseException
{
public ItemNotFound()
: base(Properties.Resources.STR_LOCALFILENOTFOUND_CAPTION, Properties.Resources.STR_LOCALFILENOTFOUND_TEXT)
{
}
public ItemNotFound(Exception inner)
: base(Properties.Resources.STR_LOCALFILENOTFOUND_CAPTION, Properties.Resources.STR_LOCALFILENOTFOUND_TEXT, inner)
{
}
public override string ToJSON()
{
return new JavaScriptSerializer().Serialize(new
{
Description = Properties.Resources.STR_LOCALFILENOTFOUND_TEXT
});
}
public override int Code
{
get { return 310; }
}
}
public class CloudItemNotFound : BaseException
{
public bool IsParentFolderExists;
public int DestinationFolderID;
public CloudItemNotFound(string msg, int destFolderId, bool folderExists)
: base(Properties.Resources.STR_UNABLE_SYNC, msg)
{
DestinationFolderID = destFolderId;
IsParentFolderExists = folderExists;
}
public override string ToJSON()
{
return new JavaScriptSerializer().Serialize(new
{
Message
});
}
public override int Code
{
get { return 406; }
}
}
public class CloudFileNotFound : CloudItemNotFound
{
public CloudFileNotFound(int destFolderId, bool folderExists)
: base(Properties.Resources.STR_CLOUDFILENOTFOUND, destFolderId, folderExists)
{ }
}
public class CloudFolderNotFound : CloudItemNotFound
{
public CloudFolderNotFound(int destFolderId, bool folderExists)
: base(Properties.Resources.STR_CLOUDFOLDERNOTFOUND, destFolderId, folderExists)
{ }
}
public class CloudFolderAccessDenied : BaseException
{
public CloudFolderAccessDenied(string Caption, string Text)
: base(Caption, Text)
{ }
public CloudFolderAccessDenied(string Caption, string Text, Exception ex)
: base(Caption, Text, ex)
{ }
}
public class VersionDoesNotExist : BaseException
{ }
public class FileCheckoutedToAnotherUser : BaseException
{
public FileCheckoutedToAnotherUser()
: base(Properties.Resources.STR_UNABLE_SEND, Properties.Resources.STR_FILEISCHECKEDOUTBYANOTHERUSER_TEXT)
{ }
}
public class AlreadySentException : BaseException
{
public AlreadySentException()
: base(Properties.Resources.STR_FILE_ALREADY_SENT, "")
{ }
}
public class CloudUnAuthorized : BaseException
{
public CloudUnAuthorized()
: base(Properties.Resources.STR_WRONGAUTH_CAPTION, Properties.Resources.STR_WRONGAUTH_TEXT)
{ }
public CloudUnAuthorized(Exception ex)
: base(Properties.Resources.STR_WRONGAUTH_CAPTION, Properties.Resources.STR_WRONGAUTH_TEXT, ex)
{ }
}
}
<file_sep>/WSComponents/src/WSIntegration/WsIntegrationModule.cs
using Microsoft.Practices.Unity;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.DmsItems;
using Workshare.Integration.Processor.DmsItems.Visitors;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Maps.Visitors;
using Workshare.Integration.SyncInfo;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration
{
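/// <summary>Registers the integration-layer services (processor, scan/process visitors, map helpers and the sync-info factory) with the Unity container.</summary>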
public class WsIntegrationModule : ModuleBase
{
protected override void Initialize()
{
base.Initialize();
this.Container.RegisterType<WsProxy>();
this.Container.RegisterType<Processor.Processor>();
this.Container.RegisterType<ActivityFinder>();
this.Container.RegisterType<ScanVisitor>();
this.Container.RegisterType<ProcessVisitor>();
this.Container.RegisterType<MapController>();
this.Container.RegisterType<MapAnylizer>();
this.Container.RegisterType<TreePathFinder>();
this.Container.RegisterType<Processor.Changes.Visitors.ChangesDetector>();
this.Container.RegisterType<ISyncInfoFactory, Factory>();
}
}
}
<file_sep>/OpenText/src/Workshare.OpenText/Concrete/OTItemID.cs
using Hummingbird.DM.Extensions.Interop.DOCSObjects;
using System;
using Workshare.Integration.Interfaces;
namespace Workshare.OpenText.Contrete
{
class ManItemID : IDMSItemID
{
int obj_ID;
public ManItemID(IProfile doc)
{
if (doc!=null)
{
obj_ID = doc.DocNumber;
}
else
{
throw new ArgumentException("doc");
}
}
public ManItemID(IProject fld)
{
if (fld != null)
{
obj_ID = fld.ProjectID;
}
else
{
throw new ArgumentException("folder");
}
}
public bool EqualTo(IDMSItemID obj)
{
if (obj is ManItemID)
{
return ((ManItemID)obj).obj_ID == this.obj_ID;
}
return false;
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncUploadFilesDialog/VMs/FileResultsVm.cs
using System.Collections.ObjectModel;
using Workshare.Integration.Processor.Maps;
namespace Workshare.Components.Views.TrueSyncUploadFilesDialog.VMs
{
public class ParentScan
{
public SummaryProcessResult Scan { set; get; }
public ObservableCollection<FileUploadActivityVm> Activities { set; get; }
}
}
<file_sep>/WSComponents/src/WSIntegration/SyncInfo/FolderSyncInfo.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Web.Script.Serialization;
using Workshare.Components.WSLogger;
using Workshare.Integration.Interfaces;
namespace Workshare.Integration.SyncInfo
{
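// JSON-serialisable payload stored on a sent folder; Parse returns an empty instance
// for a blank value and null when deserialisation fails (the error is only logged).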
class SendDataInfo : ISendDataInfo
{
List<int> _items = new List<int>();
public string DataType
{
get { return "FolderSendDataInfo"; }
set { }
}
public static ISendDataInfo Parse(string value)
{
try
{
if (string.IsNullOrEmpty(value)) return Create();
var ser = new JavaScriptSerializer();
return ser.Deserialize<SendDataInfo>(value);
}
catch (Exception ex)
{
Logger.WriteError(ex);
return null;
}
}
public override string ToString()
{
var ser = new JavaScriptSerializer();
return ser.Serialize(this);
}
public static ISendDataInfo Create()
{
return new SendDataInfo();
}
public string FolderID
{
get;
set;
}
public List<int> Items
{
get
{
return _items;
}
set
{
_items = value;
}
}
}
class FolderSyncInfo:ISyncInfo
{
public FolderSyncInfo()
{
}
public string DataType
{
get { return "FolderSyncInfo"; }
set { }
}
public int ParentId
{
get;
set;
}
public int LastUploadedDmsVerNum
{
get;
set;
}
public int LastImportedWsVerId
{
get;
set;
}
public int ItemId
{
get;
set;
}
public string DMSItemId
{
get;
set;
}
public string Modified
{
get;
set;
}
public string ActivityId
{
get;
set;
}
public List<IVersionSyncInfo> vInfos
{
get { return new List<IVersionSyncInfo>(); }
set { }
}
public List<IVersionSyncInfo> GetvInfos()
{
return new List<IVersionSyncInfo>();
}
public void AddVInfo(IVersionSyncInfo info)
{
}
public override string ToString()
{
var ser = new JavaScriptSerializer();
return ser.Serialize(this);
}
public static FolderSyncInfo Parse(string value)
{
try
{
if (string.IsNullOrEmpty(value)) return null;
var ser = new JavaScriptSerializer();
return ser.Deserialize<FolderSyncInfo>(value);
}
catch (Exception ex)
{
Logger.WriteError(ex);
return null;
}
}
}
}
<file_sep>/SharePoint/src/WorkshareCloud.Common/Extensions.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.SharePoint;
using WorksharePlatform;
using System.Xml.Linq;
namespace WorkshareCloud.Common
{
public static class Extensions
{
public static bool GuidTryParse(string stringGuid, out Guid trueGuid)
{
try
{
trueGuid = new Guid(stringGuid);
return true;
}
catch (Exception)
{
trueGuid = Guid.Empty; return false;
}
}
}
public static class AuditExtensions
{
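// Helpers that write Workshare send/sync/stop-sync events to the SharePoint audit log;
// failures are swallowed and only traced so auditing never breaks the main operation.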
const string SOURCE_NAME = "WorkshareCloud";
static string getAuditString(string _event, string _message)
{
return string.Format("Date: {0} || Event: {1} || Message: {2}", DateTime.Now.ToString("MMM/d/yyyy"), _event, _message);
}
static string getSendData(FolderDetails folder, FolderMembers members, string filename, string fileurl)
{
string mails = "";
members.Members.Select(x => x.Email + "; ").ToList().ForEach(x => mails += x);
return getAuditString("Sending", string.Format("Folder: ID - {0}, Name - {1}, Members - {2}, FileName - {3}, File URL - {4}", folder.Id.ToString(), folder.Name, mails, filename, fileurl));
}
static string getUpdateData(SPList list)
{
return getAuditString("Updating",string.Format("List title - {0}", list.Title));
}
static string getSyncData(SPListItem item, FileDetails file)
{
return getAuditString("Synchronization", string.Format("SharePoint file URL - {0}, Workshare file ID - {1}, Workshare file name - {2}, Workshare folder ID - {3}", item.Url, file.Id.ToString(), file.Name, file.FolderId.ToString()));
}
static string getSyncedData(SPListItem item, FileDetails file)
{
return getAuditString("File was added to SharePoint", string.Format(" Sharepoint file URL - {0}, Workshare file ID - {1}, Workshare file name - {2}, Workshare folder ID - {3}", item.Url, file.Id.ToString(), file.Name, file.FolderId.ToString()));
}
static string getUnSyncData(SPListItem item, string reason, FileDetails file)
{
return getAuditString("Stop Synchronization", string.Format("SharePoint file URL - {0}, file ID - {1}, Reason: {2}, User who deleted - {3}", item.Url, file.Id.ToString(), reason, file.Updater.Email));
}
static string getUnSyncData(SPListItem item, string reason)
{
return getAuditString("Stop Synchronization", string.Format("SharePoint file URL - {0}, Reason: {1}, ", item.Url, reason));
}
public static void WriteItemSent(this SPAudit audit, ICollection<SPListItem> items, FolderDetails folder, FolderMembers members)
{
try
{
if (audit != null)
{
foreach (var item in items)
{
audit.WriteAuditEvent(SPAuditEventType.Custom, SOURCE_NAME, getSendData(folder, members, item.Name, item.Url));
}
}
}
catch (Exception ex)
{ Logging.Logger.WriteTrace(ex, Microsoft.SharePoint.Administration.TraceSeverity.High, Logging.Category.CloudService); }
}
public static void WriteItemSent(this SPAudit audit, SPListItem item, FolderDetails folder, FolderMembers members)
{
try
{
if (audit != null)
{
audit.WriteAuditEvent(SPAuditEventType.Custom, SOURCE_NAME, getSendData(folder, members, item.Name, item.Url));
}
}
catch (Exception ex)
{ Logging.Logger.WriteTrace(ex, Microsoft.SharePoint.Administration.TraceSeverity.High, Logging.Category.CloudService); }
}
public static void WriteItemSync(this SPAudit audit, SPListItem item, FileDetails file)
{
try
{
if (audit != null)
{
audit.WriteAuditEvent(SPAuditEventType.Custom, SOURCE_NAME, getSyncData(item, file));
}
}
catch (Exception ex)
{ Logging.Logger.WriteTrace(ex, Microsoft.SharePoint.Administration.TraceSeverity.High, Logging.Category.CloudService); }
}
public static void WriteItemSynced(this SPAudit audit, SPListItem item, FileDetails file)
{
try
{
if (audit != null)
{
audit.WriteAuditEvent(SPAuditEventType.Custom, SOURCE_NAME, getSyncedData(item, file));
}
}
catch (Exception ex)
{ Logging.Logger.WriteTrace(ex, Microsoft.SharePoint.Administration.TraceSeverity.High, Logging.Category.CloudService); }
}
public static void WriteItemUpdate(this SPAudit audit, SPList list)
{
try
{
if (audit != null)
{
audit.WriteAuditEvent(SPAuditEventType.Custom, SOURCE_NAME, getUpdateData(list));
}
}
catch (Exception ex)
{ Logging.Logger.WriteTrace(ex, Microsoft.SharePoint.Administration.TraceSeverity.High, Logging.Category.CloudService); }
}
public static void WriteItemStopSync(this SPAudit audit, SPListItem item, string reason, FileDetails file)
{
try
{
if (audit != null)
{
audit.WriteAuditEvent(SPAuditEventType.Custom, SOURCE_NAME, getUnSyncData(item, reason, file));
}
}
catch (Exception ex)
{ Logging.Logger.WriteTrace(ex, Microsoft.SharePoint.Administration.TraceSeverity.High, Logging.Category.CloudService); }
}
public static void WriteItemStopSync(this SPAudit audit, SPListItem item, string reason)
{
try
{
if (audit != null)
{
audit.WriteAuditEvent(SPAuditEventType.Custom, SOURCE_NAME, getUnSyncData(item, reason));
}
}
catch (Exception ex)
{ Logging.Logger.WriteTrace(ex, Microsoft.SharePoint.Administration.TraceSeverity.High, Logging.Category.CloudService); }
}
}
}
<file_sep>/OpenText/src/Workshare.OpenText/Application.cs
using System;
using Hummingbird.DM.Extensions.Interop.DOCSObjects;
using Hummingbird.DM.Server.Interop.PCDClient;
using Workshare.Components.Common;
using Workshare.Components.Concrete;
using Workshare.Components.Interfaces;
using Workshare.Integration.Interfaces;
using Workshare.OpenText.Integration;
using Workshare.OpenText.Views;
namespace Workshare.OpenText
{
class Application : WSApplication
{
public override string APP_CODE { get { return "Workshare-OpenText"; } }
public readonly string DATASTORE_FIELD = "WS_DATAINFO";
public readonly string SENDINFO_FIELD = "WS_SENDINFO";
//public static IApplication _app;
Application():base()
{
WorksharePlatform.PlatformService.Host = WorksharePlatform.PlatformService.MY_SERVER;
WorksharePlatform.PlatformService.UseHostFromRegistry = true;
}
public static Application OTInstance
{
get
{
if (Instance == null)
{
Instance = new Application();
}
return Instance as Application;
}
}
public string UserDST
{
get;
set;
}
public string CurrentUser
{
get;
set;
}
public string CurrentLibraryName
{
get;
set;
}
ILibrary _currentLib = null;
public ILibrary CurrentLibrary
{
get
{
return _currentLib;
}
set
{
_currentLib = value;
}
}
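// Returns a document name that is not already in use in the current library:
// while a document with the candidate name exists, appends "_1", "_2", ... and searches again.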
public string GetItemName(string itemName)
{
int ver = 1;
string newName = itemName;
PCDSearch s = new PCDSearch();
s.SetDST(UserDST);
s.SetSearchObject("def_qbe");
s.AddSearchLib(CurrentLibraryName);
s.AddReturnProperty("docname");
s.AddReturnProperty("docnum");
int res = s.AddSearchCriteria("DOCNAME", newName);
res = s.Execute();
while (s.GetRowsFound() > 0)
{
s.ReleaseResults();
newName = itemName+"_"+ver.ToString();
ver++;
s.AddSearchCriteria("DOCNAME", newName);
s.Execute();
}
return newName;
}
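// Normalises a file extension for DM lookups: upper-case, without the dot,
// and with OOXML-style extensions (DOCX, XLSX, PPTX, VSDX, MPPX) trimmed to their three-letter form.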
private string GetCompatibleFileExtension(string fext)
{
string res = fext.ToUpper().Replace(".", "");
if (res == "DOCX" || res == "XLSX" || res == "PPTX" || res == "VSDX" || res =="MPPX")
{
res = res.Substring(0, 3);
}
return res;
}
private readonly string[] imageFormats = { "BMP", "ICO", "JPG", "JPEG", "GIF", "TIFF", "PNG", "TIF", "WMF", "EMF" };
public string GetDocClassByFileExtension(string fext)
{
string ext = GetCompatibleFileExtension(fext);
/*if (imageFormats.Contains(ext))
{
return "IMG";
}
else
{
return "DOC";
}*/
return "DEFAULT";
}
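// Looks up the DM application whose default extension matches the given file extension (case-insensitive);
// returns an empty string when no matching application is registered in the library.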
public string GetAppIDByFileExtension(string fext, ILibrary lib)
{
string ext = GetCompatibleFileExtension(fext);
IApplications apps = lib.AppInformation;
for (int i = 1; i<= apps.Count; i++)
{
IAppInfo info = apps[i];
if (string.Compare(info.DefaultExtension, ext, StringComparison.InvariantCultureIgnoreCase) == 0)
{
return info.ID;
}
}
return "";
}
int _ParentWindow = 0;
public event EventHandler ParentChanged;
public int ParentWindow
{
get { return _ParentWindow; }
set
{
if(_ParentWindow!=value)
{
_ParentWindow = value;
if (ParentChanged != null)
{
ParentChanged(null, EventArgs.Empty);
}
}
}
}
private IModuleView _view;
public override IModuleView View
{
get
{
if (_view == null)
{
_view = new ModuleView(Application.OTInstance.Integration);
}
return _view;
}
set { _view = value; }
}
public override IWSIntegration Integration
{
get
{
if (_integration == null)
{
_integration = new WorkshareIntegration(this.AuthProvider);
}
return _integration;
}
}
public override Workshare.Integration.Modularity.ModuleBase Module
{
get { throw new NotImplementedException(); }
}
}
}
<file_sep>/iManageIntegration/Src/Workshare.Integration/WorkshareIntegration.cs
//#define SLEEP
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Net;
using System.Runtime.InteropServices;
using System.Text;
using Workshare.Components;
using Workshare.Components.Exceptions;
using Workshare.Components.Extensions;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using WorksharePlatform;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.Integration
{
public class WorkshareIntegration : WSIntegrationBase
{
#region public
public event EventHandler<ItemAddedFromWSArgs> ItemAddedFromWS;
public event EventHandler<ItemSentArgs> ItemSentToWS;
public event EventHandler<ItemUpdatedLocallyArgs> ItemUpdatedLocally;
public event EventHandler<ItemUpdatedOnWSArgs> ItemUpdatedOnWS;
public WorkshareIntegration(IAuthProvider authProvider)
{
ServicePointManager.DefaultConnectionLimit = int.MaxValue;//TODO remove this line; fix the problem with multiple file uploads properly
m_authProvider = authProvider;
}
public override void SyncItems(IEnumerable<SyncItemInformation> localItems, OperationContext context)
{
if (localItems.Any())
{
using (var errorlist = new ItemsErrorsCollection(context))
{
var user = m_authProvider.CheckIfAuth();
foreach (SyncItemInformation item in localItems)
{
SafeFuncCall(errorlist, item.item, new ExceptionListSignal() { IsAll = true, IsList = true }, Operations.Sync, () =>
{
if (item.item is IDMSFolder)
{
SyncFolder((item.item as IDMSFolder), user, context);
}
else if (item.item is IDMSFile)
{
SyncFile((item.item as IDMSFile), item.syncOption, user, context);
}
});
}
}
}
}
public override void SendItems(IEnumerable<IDMSItem> localItems, FolderDetails folder, OperationContext args)
{
#if SLEEP
Thread.Sleep(5000);
#endif
SendItems(localItems, folder.Id, args);
}
public override void SendItems(IEnumerable<IDMSItem> localItems, int folderId, OperationContext context)
{
#if SLEEP
Thread.Sleep(5000);
#endif
using (var errorlist = new ItemsErrorsCollection(context))
{
try
{
var user = m_authProvider.CheckIfAuth();
SafeFuncCall(errorlist, null, new ExceptionListSignal() { IsAll = true, IsList = true, FolderError = true }, Operations.Send, () =>
{
if (!CheckFolderExistsWhenSendOrThrow(user, folderId, -1))
{
throw new CloudFolderNotFound(RES.STR_UNABLE_SEND);
}
if (localItems.Any())
{
foreach (IDMSItem item in localItems)
{
SafeFuncCall(errorlist, item, new ExceptionListSignal() { IsAll = true, IsList = true, FolderError = true }, Operations.Send, () =>
{
if (item is IDMSFolder)
{
SendFolder((IDMSFolder)item, folderId, user, context, null);
}
else if (item is IDMSFile)
{
SendFile((IDMSFile)item, folderId, user, context);
}
});
}
errorlist.Check();
}
});
}
catch (CloudFolderNotFound ex)
{
SafeFuncCall(errorlist, null, new ExceptionListSignal() { IsAll = true, IsList = true }, Operations.Send, () =>
{
if (localItems.Any())
{
var user = m_authProvider.CheckIfAuth();
foreach (IDMSItem item in localItems)
{
SafeFuncCall(errorlist, item, new ExceptionListSignal() { IsAll = true, IsList = true }, Operations.Send, () =>
{
if (item is IDMSFolder)
{
SyncFolder((IDMSFolder)item, user, context, true);
}
else if (item is IDMSFile)
{
((IDMSFile)item).UpdateSyncInfo(null, true);
}
});
}
}
});
errorlist.Clear();
errorlist.Add(null, ex);
errorlist.Check();
}
}
}
#endregion
#region privates
void RaiseItemSent(ItemSentArgs args)
{
if (ItemSentToWS != null)
{
ItemSentToWS(this, args);
}
}
void RaiseItemUpdatedLocally(ItemUpdatedLocallyArgs args)
{
if (ItemUpdatedLocally != null)
{
ItemUpdatedLocally(this, args);
}
}
void RaiseItemUpdatedOnWS(ItemUpdatedOnWSArgs args)
{
if (ItemUpdatedOnWS != null)
{
ItemUpdatedOnWS(this, args);
}
}
void RaiseItemAddedFromWS(ItemAddedFromWSArgs args)
{
if (ItemAddedFromWS != null)
{
ItemAddedFromWS(this, args);
}
}
#region Sync
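// Synchronises the files of an already linked folder: first syncs the files that exist locally,
// then downloads and adds any cloud files that are neither linked locally nor recorded in the folder's send-data info.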
private void SyncFilesInFolder(IDMSFolder folder, List<FileDetails> filesOnCloud, UserDetails user, OperationContext context)
{
using (var errorlist = new ItemsErrorsCollection(context))
{
// sync existing files
List<int> localFiles = new List<int>();
foreach (IDMSFile item in folder.Files)
{
var synhInfo = item.GetSyncInfo();
if (synhInfo != null)
{
localFiles.Add(synhInfo.ItemId);
}
SafeFuncCall(errorlist, item, new ExceptionListSignal() { IsAll = true, IsWeb = true }, Operations.Sync, () =>
{
SyncFile(item, ConflictOptions.None, user, context);
});
}
List<int> addedFiles = new List<int>();
var processedFiles = folder.GetSendDataInfo();
foreach (var fileID in filesOnCloud.Select(x => x.Id).Except(localFiles))
{
if (!processedFiles.Contains(fileID))
{
SafeFuncCall(errorlist, folder, new ExceptionListSignal() { IsAll = true, IsWeb = true, FileError = true}, Operations.Sync, () =>
{
FileDetails cloudFile = filesOnCloud.Where(x => x.Id == fileID).First();
PlatformService.GetVersionDetails(user, cloudFile);
cloudFile.Data = PlatformService.DownloadFile(user, fileID);
IDMSFile newFile = folder.AddFile(cloudFile);
newFile.Description = cloudFile.FriendlyName;
newFile.OnAfterSync(context);
RaiseItemAddedFromWS(new ItemAddedFromWSArgs(newFile));
addedFiles.Add(cloudFile.Id);
});
}
}
folder.UpdateSendDataInfo(addedFiles);
}
}
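// Synchronises the subfolders of an already linked folder: existing subfolders are synced recursively,
// and cloud folders that are neither linked locally nor recorded in the send-data info are added and then synced.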
private void SyncSubFolderInFolder(IDMSFolder folder, IEnumerable<FolderDetails> foldersOnCloud, UserDetails user, OperationContext context)
{
using (var errorlist = new ItemsErrorsCollection(context))
{
// sync existing folders
Dictionary<int, string> folderOnSP = new Dictionary<int, string>();
foreach (IDMSFolder subFolder in folder.SubFolders)
{
SafeFuncCall(errorlist, subFolder, new ExceptionListSignal() { IsAll = true, IsList = true, IsWeb = true }, Operations.Sync, () =>
{
SyncFolder(subFolder, user, context);
});
var synhInfo = subFolder.GetSyncInfo();
if (synhInfo != null)
{
folderOnSP.Add(synhInfo.ItemId, subFolder.Name);
}
}
// add new folders from the cloud to SharePoint and sync them
List<int> addedFolders = new List<int>();
var processedItems = folder.GetSendDataInfo();
var newFolders = foldersOnCloud.Select(f => f.Id).Except(folderOnSP.Select(x => x.Key));
foreach (var folderID in newFolders)
{
if (!processedItems.Contains(folderID))
{
SafeFuncCall(errorlist, folder, new ExceptionListSignal() { IsAll = true, IsList = true, IsWeb = true }, Operations.Sync, () =>
{
var cloudFolder = foldersOnCloud.Where(x => x.Id == folderID).First();
var localsubFolder = folder.AddSubFolder(cloudFolder);
RaiseItemAddedFromWS(new ItemAddedFromWSArgs(folder));
addedFolders.Add(cloudFolder.Id);
SyncFolder(localsubFolder, user, context);
});
}
}
folder.UpdateSendDataInfo(addedFolders);
}
}
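// Synchronises a linked folder. If the folder still exists on Workshare its files and subfolders are synced;
// otherwise the sync info of the folder and of all its descendants is cleared (and, unless ClearFieldData is set,
// a CloudFolderNotFound is thrown).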
private void SyncFolder(IDMSFolder folder, UserDetails user, OperationContext context, bool ClearFieldData = false)
{
using (var errorlist = new ItemsErrorsCollection(context))
{
SafeFuncCall(errorlist, folder, new ExceptionListSignal() { IsWeb = true }, Operations.Sync, () =>
{
var synhInfo = folder.GetSyncInfo();
if (synhInfo != null)
{
if (!ClearFieldData && CheckFolderExistsWhenSyncOrThrow(user, synhInfo.ItemId, synhInfo.ParentFolderId))
{
SafeFuncCall(errorlist, null, new ExceptionListSignal() { IsList = true }, Operations.Sync, () =>
{
SyncFilesInFolder(folder, PlatformService.GetFiles(user, synhInfo.ItemId), user, context);
});
SafeFuncCall(errorlist, null, new ExceptionListSignal() { IsList = true }, Operations.Sync, () =>
{
SyncSubFolderInFolder(folder, PlatformService.GetChildFolders(user, synhInfo.ItemId), user, context);
});
folder.OnAfterSync(context);
}
else
{// folder was removed from the cloud, so we should clear the Cloud field for this folder and all of its subitems
// clear cloud field for files in the folder
foreach (IDMSFile file in folder.Files)
{
SafeFuncCall(errorlist, file, new ExceptionListSignal() { IsAll = true }, Operations.Sync, () =>
{
var sInfo = file.GetSyncInfo();
if (sInfo != null && sInfo.ParentFolderId == synhInfo.ItemId)
{
file.CheckNotCheckeOutToOtherUserOrThrow();
file.UpdateSyncInfo(null, true);
RaiseItemUpdatedLocally(new ItemUpdatedLocallyArgs(file));
}
});
// file.Item.Web.Audit.WriteItemStopSync(file.Item, MISSEDONCLOUD);
}
//clear cloud field for all subfolders
foreach (IDMSFolder item in folder.SubFolders)
{
SafeFuncCall(errorlist, item, new ExceptionListSignal() { IsAll = true, IsList = true }, Operations.Sync, () =>
{
SyncFolder(item, user, context, true);
});
}
if (errorlist.Count == 0)
{
SafeFuncCall(errorlist, folder, new ExceptionListSignal() { IsAll = true }, Operations.Sync, () =>
{
folder.UpdateSyncInfo(null, true);
RaiseItemUpdatedLocally(new ItemUpdatedLocallyArgs(folder));
});
}
if (!ClearFieldData)
{
throw new CloudFolderNotFound(RES.STR_UNABLE_SYNC);
}
}
}
});
}
}
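// Synchronises a single linked file. Depending on which side is newer and on the selected ConflictOptions this
// replaces the local copy, adds the cloud copy as a new document, uploads a new version to Workshare, or simply
// refreshes the link; conflict exceptions are thrown when user input is required, and the link is cleared when
// the file no longer exists on the cloud.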
private void SyncFile(IDMSFile localFile, ConflictOptions SyncOption, UserDetails user, OperationContext context)
{
localFile.CheckPermissionkOrThrow(Workshare.Integration.Enums.Permissions.EditItem);
var syncInfo = localFile.GetSyncInfo();
if (syncInfo != null)//already synced
{
localFile.CheckNotCheckeOutToOtherUserOrThrow();
try
{
var wsFile = GetFileDetailsFromWorkshare(user, syncInfo.ItemId, syncInfo.ParentFolderId);
// if the file is null it was deleted from the cloud, so we should clear the synchronization data from SharePoint
if (wsFile != null)
{
var wsPermissions = PlatformService.GetFolder(user, wsFile.FolderId).Permissions;
var wsFileVersions = PlatformService.GetVersionDetails(user, wsFile);
var wsVersionForLocalFile = wsFileVersions.Where(x => x.Id == syncInfo.VersionId).FirstOrDefault();
if (wsVersionForLocalFile != null)
{
var wsFileIsNewer = wsFileVersions.Where(x => x.Version > wsVersionForLocalFile.Version).Any();
// file on the cloud is newer
if (wsFileIsNewer && (SyncOption == ConflictOptions.Replace || (!localFile.WasUpdatedAfterSend(syncInfo) && SyncOption == ConflictOptions.None)))
{
// if the cloud file was updated and we selected "Replace file" in the file conflict dialog or "Save as new version" in the file synchronize dialog
if (SyncOption == ConflictOptions.Replace)
{
var wsFileData = PlatformService.DownloadFile(user, syncInfo.ItemId);
UpdateLocalFileName(localFile, wsFile);
localFile.Update(wsFileData, wsFile);
RaiseItemUpdatedLocally(new ItemUpdatedLocallyArgs(localFile));
localFile.OnAfterSync(context);
}
else
{
if (!localFile.WasUpdatedAfterSend(syncInfo) && SyncOption == ConflictOptions.None)
{
throw new FileOnCloudIsNewerException(localFile, wsFile.FriendlyName, wsFile.Version.CreateDate, wsFile.Version.Creator.UserName);
}
}
}
// if file on the cloud is newer and we select "Keep both files" on the file conflict dialog or select Save as new document in file synchronize dialog
else if (wsFileIsNewer /*&& localFile.ItemIsNewer(syncInfo)*/ && SyncOption == ConflictOptions.KeepBoth)
{
// add a new copy of the file to iManage
wsFile.Data = PlatformService.DownloadFile(user, wsFile.Id);
bool newFileIsAdded = false;
IDMSFile newFile = null;
if (localFile.ParentFolder != null)
{
newFile = ((IDMSFolder)localFile.ParentFolder).AddFile(wsFile, out newFileIsAdded, true);
}
else
{
newFile = localFile.AddFile(wsFile, out newFileIsAdded, true);
}
if (newFileIsAdded)
{
UpdateLocalFileName(localFile, wsFile);
localFile.UpdateSyncInfo(null, true);
if (newFile != null)
{
RaiseItemAddedFromWS(new ItemAddedFromWSArgs(newFile));
}
// if the local file was changed we check in the changes, otherwise we discard the checkout
if (localFile.WasUpdatedAfterSend(syncInfo))
{
localFile.OnAfterSync(context);
}
else
{
localFile.UpdateSyncInfo(null, true);
}
}
}
else if (localFile.WasUpdatedAfterSend(syncInfo) && SyncOption == ConflictOptions.None)//file on local DMS is newer
{
if (wsFileIsNewer)
{
throw new FileConflictException(localFile, wsFile.FriendlyName, wsFile.Version.CreateDate, wsFile.Version.Creator.UserName);
}
else
{
UploadNewFileVersionToWorkshareCloud(user, localFile, wsFile);
RaiseItemUpdatedOnWS(new ItemUpdatedOnWSArgs(localFile));
localFile.UpdateSyncInfo(null, true);
//localFile.OnAfterSync(args);
}
}
else if (!wsFileIsNewer && !localFile.WasUpdatedAfterSend(syncInfo))
{
// the file was changed neither on Workshare nor locally, but it could have been renamed
if (!wsPermissions.DownloadFiles)
{
throw new CloudFolderAccessDenied(RES.STR_UNABLE_SYNC, RES.STR_UNABLESYNCFILE_TEXT);
}
UpdateLocalFileName(localFile, wsFile);
localFile.UpdateSyncInfo(null, true);
if (!syncInfo.WasAlreadyCheckedOut)
{
localFile.DiscardCheckout();
}
}
}
else
{
// the linked version no longer exists on Workshare, which should not normally happen
throw new VersionDoesNotExist();
}
}
else
{
// file was removed from cloud
localFile.UpdateSyncInfo(null, true);
RaiseItemUpdatedLocally(new ItemUpdatedLocallyArgs(localFile));
// check whether the parent folder was also deleted from Workshare
if (localFile.ParentFolder != null)
{
var flddata = localFile.ParentFolder.GetSyncInfo();
if (flddata != null)
{
var cloudfolder = PlatformService.GetFolder(user, flddata.ItemId);
if (cloudfolder == null || cloudfolder.IsDeleted || ItemWasMoved(user, cloudfolder.ParentId, flddata.ParentFolderId))
{
SyncFolder(localFile.ParentFolder, user, context, true);
throw new CloudFolderNotFound(RES.STR_UNABLE_SYNC);
}
}
}
throw new CloudFileNotFound(RES.STR_UNABLE_SYNC);
}
}
catch (CloudFolderAccessDenied)
{
throw;
}
catch (WebException ex)
{
if (IsStatusCode(ex, (HttpStatusCode)403))
{
throw new CloudFolderAccessDenied(RES.STR_UNABLE_SYNC, RES.STR_UNABLESYNCFILE_TEXT, ex);
}
else
{
throw;
}
}
}
if (context.Stage == OperationStage.Second)
{
if (!context.IsRootElementForOperation(localFile))
{
localFile.ParentFolder.OnAfterSync(context);
}
}
localFile.RemoveTempFileIfNeed();
}
#endregion
#region Sending
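// Creates the folder on Workshare, links it locally and then recursively sends its subfolders and files.
// Returns the id of the newly created Workshare folder, or -1 if the folder could not be created.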
private int SendFolder(IDMSFolder folder, int parentFolderId, UserDetails user, OperationContext context, List<int> parentChildren)
{
int newFolderID = -1;
using (var errorlist = new ItemsErrorsCollection(context))
{
List<int> sentItems = new List<int>();
SafeFuncCall(errorlist, folder, new ExceptionListSignal() { IsWeb = true, FolderError = true }, Operations.Send, () =>
{
folder.OnBeforeSending(context);
FolderDetails newFolder = null;
try
{
newFolder = PlatformService.CreateFolder(user, folder.Name, folder.Name, parentFolderId);
}
catch(WebException ex)
{
if (IsStatusCode(ex, (HttpStatusCode)403))
{
throw new CloudFolderAccessDenied(RES.STR_UNABLE_SEND, RES.STR_UNANBESENDFOLDER_TEXT, ex);
}
else
{
throw;
}
}
folder.UpdateSyncInfo(newFolder, true);
if (parentChildren != null)
{
parentChildren.Add(newFolder.Id);
}
RaiseItemSent(new ItemSentArgs(folder));
foreach (IDMSFolder item in folder.SubFolders)
{
SafeFuncCall(errorlist, item, new ExceptionListSignal() { IsAll = true, IsList = true, FolderError = true }, Operations.Send, () =>
{
var si = item.GetSyncInfo();
if (si == null)
{
SendFolder(item, newFolder.Id, user, context, sentItems);
}
else
{
sentItems.Add(si.ItemId);
throw new FolderAlreadySentException();
}
});
}
foreach (IDMSFile file in folder.Files)
{
SafeFuncCall(errorlist, file, new ExceptionListSignal() { IsAll = true, IsList = true, IsWeb = true, FolderError = true }, Operations.Send, () =>
{
if (file.GetSyncInfo() == null)
{
sentItems.Add(SendFile(file, newFolder.Id, user, context));
}
else
{
throw new FileAlreadySentException();
}
});
}
folder.UpdateSendDataInfo(sentItems);
newFolderID = newFolder.Id;
});
}
return newFolderID;
}
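// Uploads a single file to the given Workshare folder; raises ItemSentToWS on success and
// reports failures through OnSendError before rethrowing.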
private int SendFile(IDMSFile localfile, int folderId, UserDetails user, object args)
{
localfile.CheckPermissionkOrThrow(Workshare.Integration.Enums.Permissions.EditItem);
localfile.OnBeforeSending(args);
localfile.CheckNotCheckeOutToOtherUserOrThrow();
try
{
var fileId = UploadFileToWorkshareCloud(localfile, folderId, user);
RaiseItemSent(new ItemSentArgs(localfile));
return fileId;
}
catch (Exception ex)
{
localfile.OnSendError(args, ex);
throw;
}
}
#endregion
#region utility
#endregion
#endregion
}
}
<file_sep>/WSComponents/src/WSIntegration/WsUtils.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Workshare.Integration.WsProxies;
using WorksharePlatform;
namespace Workshare.Integration
{
public static class WsUtils
{
static readonly char[] InvalidFileNameCharacters = { '~', '#', '%', '&', '*', ':', '<', '>', '?', '/', '\\', '{', '}', '|' };
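// Replaces each invalid character with '_' and trims surrounding whitespace,
// e.g. RemoveInvalidSymbolsFromFileName("report: v1?.docx") returns "report_ v1_.docx".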
public static string RemoveInvalidSymbolsFromFileName(string name)
{
var _name = new StringBuilder(name);
for (int i = 0; i < InvalidFileNameCharacters.Length; i++)
{
_name.Replace(InvalidFileNameCharacters[i], '_');
}
return _name.ToString().Trim();
}
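// Returns the part after the last '@', e.g. GetEmailDomain("jane@example.com") returns "example.com";
// returns an empty string when the input is empty or contains no '@'.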
public static string GetEmailDomain(string email)
{
if (string.IsNullOrEmpty(email)) return string.Empty;
var indexAt = email.LastIndexOf('@');
if (indexAt > -1)
{
return email.Substring(indexAt+1, email.Length - indexAt-1);
}
else
{
return string.Empty;
}
}
public static IEnumerable<WsMember> GetWithoutCurrentUser(this IEnumerable<WsMember> members, WsUser user)
{
var currentUserEmail = user == null ? string.Empty : user.Email;
if (string.IsNullOrEmpty(currentUserEmail))
{
return members;
}
return members.Where(a => !string.Equals(a.Email, currentUserEmail, StringComparison.InvariantCultureIgnoreCase));
}
public static string IdOrNull(this Activity a)
{
return a == null ? null : a.ActyvityId;
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/SyncDialog/SyncDialogViewModel.cs
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using Workshare.Components.Concrete;
using Workshare.Components.Views.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
namespace Workshare.Components.Views.SyncDialog
{
public class SyncDialogViewModel : OwnViewModel
{
private readonly IEnumerable<ImportFileActivity> _activities;
public RelayCommand OKCommand { get; set; }
public RelayCommand CancelCommand { get; set; }
public bool? ApplyToAll { get; set; }
public Visibility ShowApplyToAll { get; set; }
public Visibility SubTitleVisibility { get; set; }
public int VersionCount { get; set; }
public int FilesCount { get; set; }
public int NewFilesCount { get; set; }
public bool AddFiles { get; set; }
public string LocalFileName { get; set; }
public string WsFileName { get; set; }
internal CheckinDlgResult Result { get; set; }
ConflictOptions? _selected;
ConflictVersionOptions? _versions;
public ConflictOptions? SelectedAction
{
set
{
_selected = value;
if (OKCommand != null)
{
OKCommand.RaiseCanExecutechanged();
}
}
}
private ImportType Convert(ConflictOptions conflictOptions)
{
switch (conflictOptions)
{
case ConflictOptions.Replace:
return ImportType.AsNewVersion;
case ConflictOptions.KeepBoth:
return ImportType.AsNewDocument;
default:
return ImportType.None;
}
}
public IEnumerable<ImportDocumentAction> GetSelectedActions()
{
var res = new List<ImportDocumentAction>();
foreach (var activity in _activities)
{
var neededImportType = activity.HasChange(ChangeType.RemoteAdded)
? ImportType.AsNewDocument
: Convert(Result.option);
var matchedAction = activity.GetAllAvailableActions()
.OfType<ImportDocumentAction>()
.Where(
a =>
a.ImportVersionsAction == Result.versions
&& a.ImportType == neededImportType)
.ToList();
if (matchedAction.Any())
{
res.Add(matchedAction.First());
}
}
return res;
}
public ConflictVersionOptions? SelectedVersionsOption
{
set
{
_versions = value;
if (OKCommand != null)
{
OKCommand.RaiseCanExecutechanged();
}
}
}
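// Parameterless constructor with sample data, presumably intended for design-time/preview use only.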
public SyncDialogViewModel()
{
LocalFileName = "testDocx.docx";
VersionCount = 2;
}
CheckinDlgResult GetResult(ConflictOptions opt, ConflictVersionOptions ver)
{
return new CheckinDlgResult() { ApplyToAll = true /*this.ApplyToAll.HasValue ? this.ApplyToAll.Value : false*/, option = opt, versions = ver };
}
public SyncDialogViewModel(IEnumerable<ImportFileActivity> activities, string wsFileName, string localFileName, int versionsCount, int filesCount, int newFilesCount, bool onlyNewFiles)
{
_activities = activities;
this.LocalFileName = localFileName ?? string.Empty;
this.WsFileName = wsFileName ?? string.Empty;
this.AddFiles = onlyNewFiles;
this.NewFilesCount = newFilesCount;
VersionCount = versionsCount;
FilesCount = filesCount;
ShowApplyToAll = Visibility.Collapsed;
SubTitleVisibility = Visibility.Visible;
OKCommand = new RelayCommand((p) => (_selected.HasValue), (p) =>
{
Result = GetResult(GetAction(), GetVersionOption());
RaiseClose(true);
});
CancelCommand = new RelayCommand((p) =>
{
Result = GetResult(ConflictOptions.None, ConflictVersionOptions.None);
RaiseClose(false);
});
}
private ConflictOptions GetAction()
{
return _selected ?? ConflictOptions.None;
}
private ConflictVersionOptions GetVersionOption()
{
return _versions ?? ConflictVersionOptions.None;
}
public string DialogTitle
{
get
{
if (FilesCount == 1)
{
if (NewFilesCount > 0)
{
return string.Format("You have selected to add the file \"{0}\"", (WsFileName.Length > 40) ? WsFileName.Substring(0, 37) + "..." : WsFileName);
}
else
{
return string.Format("You have selected to sync the file \"{0}\"", (LocalFileName.Length > 40) ? LocalFileName.Substring(0, 37) + "..." : LocalFileName);
}
}
else
{
var sb = new StringBuilder();
if (NewFilesCount == FilesCount)
{
sb.Append(string.Format("You have selected to add {0} files", NewFilesCount));
}
else
{
sb.Append(string.Format("You have selected to sync {0} files", FilesCount));
if (NewFilesCount > 0)
{
sb.Append(string.Format(" ({0} existed, {1} new on Workshare)", FilesCount - NewFilesCount,
NewFilesCount));
}
}
return sb.ToString();
}
}
}
public string DialogSubTitle
{
get
{
var sb = new StringBuilder();
if (FilesCount == 1)
{
sb.Append("This file possibly has more than one new version on Workshare.");
}
else
{
sb.Append("Some of this files may have multiple new versions on Workshare");
}
return sb.ToString();
}
}
public string AllVersionCaption
{
get
{
var sb = new StringBuilder();
if (FilesCount == 1)
{
sb.Append(string.Format("All versions ({0} version(s))", VersionCount));
}
else
{
sb.Append("All versions of all files");
}
return sb.ToString();
}
}
}
}
<file_sep>/WSComponents/src/WSComponents.Tests/TestCommandInvoker.cs
using Moq;
using NUnit.Framework;
using Workshare.Components.Interfaces;
using Workshare.Components.Views.Common;
using System.Windows.Forms;
namespace WSComponents.Tests
{
[TestFixture]
public class TestCommandInvoker
{
[Test]
public void TestExecuteCommand()
{
bool cmd1Called = false;
bool cmd2Called = false;
bool checkActionAddedEvent = false;
bool checkActionCompleteEvent = false;
bool checkIsBusyChangedEvent = false;
CommandInvoker inv = new CommandInvoker();
inv.WorkCompleted += (a) => { checkActionCompleteEvent = true; };
inv.WorkUnitAdded += (a) => { checkActionAddedEvent = true; };
inv.IsBusyChanged += (a) => { if (a) checkIsBusyChangedEvent = true; };
var cmd = new Mock<IWorkUnit>();
cmd.Setup(x => x.Execute()).Callback(() => { cmd1Called = true; Assert.IsTrue(inv.IsBusy); });
var cmd2 = new Mock<IWorkUnit>();
cmd2.Setup(x => x.Execute()).Callback(() => { cmd2Called = true; Assert.IsTrue(inv.IsBusy); });
Assert.IsFalse(inv.IsBusy, "Invoker is empty and should be free");
inv.AddToQueue(cmd.Object);
inv.AddToQueue(cmd2.Object);
Utility.WaitWithPeriodicalCallback(ref cmd1Called, Utility.DEFAULT_TIMEOUT, Application.DoEvents);
Utility.WaitWithPeriodicalCallback(ref cmd2Called, Utility.DEFAULT_TIMEOUT, Application.DoEvents);
Assert.IsTrue(cmd1Called, "command 1 was not called");
Assert.IsTrue(cmd2Called, "command 2 was not called");
Assert.IsFalse(inv.IsBusy, "Invoker is empty and should be free");
Assert.IsTrue(checkActionAddedEvent, "Action added event was not fired");
Assert.IsTrue(checkActionCompleteEvent, "Action complete event was not fired");
Assert.IsTrue(checkIsBusyChangedEvent, "Changed busy state event was not fired");
}
}
}
<file_sep>/WSComponents/src/WSCloudService/ProxyHelper/ProxyDetector.cs
using System;
using System.Net;
using System.Threading;
using System.Windows.Forms;
using WorksharePlatform;
using WorksharePlatform.ServiceProxy.ProxyHelper;
namespace WorkshareCloud.ServiceProxy.ProxyHelper
{
public static class ProxyDetector
{
static CredentialCache GetCredentialsCache(Uri address, NetworkCredential creds)
{
if (creds == null) return null;
var cache = new CredentialCache();
cache.Add(address, "Basic", creds);
if (!string.IsNullOrEmpty(creds.Domain))//NTLM requires the domain to be set separately
{
cache.Add(address, "NTLM", creds);
}
else
{
cache.Add(address, "NTLM", new NetworkCredential(GetLogin(creds.UserName), creds.Password, GetDomain(creds.UserName)));
}
return cache;
}
static string GetDomain(string s)
{
int stop = s.IndexOf("\\");
if (stop > -1)
{
return s.Substring(0, stop);
}
stop = s.IndexOf("@");
if (stop > -1 && stop < s.Length - 1)
{
return s.Substring(stop + 1, s.Length - stop - 1);
}
return string.Empty;
}
static string GetLogin(string s)
{
int stop = s.IndexOf("\\");
if (stop > -1 && stop < s.Length - 1)
{
return s.Substring(stop + 1, s.Length - stop - 1);
}
stop = s.IndexOf("@");
if (stop > -1)
{
return s.Substring(0, stop);
}
return s;
}
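// Detects the system proxy for the Workshare host. If the proxy requires authentication (HTTP 407),
// the user is prompted for credentials, which can optionally be saved to the registry.
// Returns false when the user cancels the credential prompt.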
public static bool CheckForProxy(UserDetails user, IntPtr owner)
{
if (user.IsProxyEnable)
{
return true;
}
Uri _host = PlatformService.HostWithSchema;
Uri proxyURI = null;
var t = new Thread(() =>
{
proxyURI = WebRequest.GetSystemWebProxy().GetProxy(_host);
});
t.Start();
while (t.IsAlive)
{
Thread.Sleep(10);
}
ICredentials resultCredentials = new CredentialCache();
NetworkCredential proxyCred = null;
WebProxy proxy = new WebProxy(proxyURI, true)
{
Credentials = CredentialCache.DefaultNetworkCredentials
};
if (proxyURI != null && !string.IsNullOrEmpty(proxyURI.AbsoluteUri) && !proxy.Address.Equals(_host))
{
user.Proxy = proxy;
var dlg = new CredentialDialog();
dlg.Target = proxyURI.AbsoluteUri;
dlg.MainInstruction = "The server " + proxyURI.Host + " at proxy requires a username and password";
dlg.ShowSaveCheckBox = true;
dlg.IsSaveChecked = true;
dlg.ShowUIForSavedCredentials = false;
dlg.UseApplicationInstanceCredentialCache = false;
dlg.WindowTitle = "Windows Security";
string usrname = PlatformRegistryHelper.getFromRegisrty(RegData.Username);
string usrpass = PlatformRegistryHelper.getFromRegisrty(RegData.Userpass);
if (!string.IsNullOrEmpty(usrname))
{
proxyCred = new NetworkCredential(usrname, usrpass);
dlg.UserName = usrname;
dlg.Password = usrpass;
}
bool ProxySettingsDetected = false;
while (!ProxySettingsDetected)
{
HttpWebRequest myRequest = (HttpWebRequest)HttpWebRequest.Create(_host);
HttpWebResponse myResponse;
try
{
if (proxyCred != null)
{
resultCredentials = GetCredentialsCache(proxy.Address, proxyCred);
}
else
{
resultCredentials = CredentialCache.DefaultNetworkCredentials;
}
myRequest.Proxy = proxy;
myRequest.Proxy.Credentials = resultCredentials;
myResponse = (HttpWebResponse)myRequest.GetResponse();
ProxySettingsDetected = true;
}
catch(Exception e)
{
if (e is WebException && e.Message.Contains("407"))
{
if (dlg.ShowDialog(owner) == DialogResult.OK)
{
proxyCred = dlg.Credentials;
if (dlg.IsSaveChecked && dlg.ShowSaveCheckBox)
{
PlatformRegistryHelper.setToRegisrty(RegData.Username, dlg.UserName);
PlatformRegistryHelper.setToRegisrty(RegData.Userpass, dlg.Password);
}
}
else
{
user.Proxy = null;
user.ProxyCredentials = null;
return false;
}
}
else
{
ProxySettingsDetected = true;
}
}
}
}
user.ProxyCredentials = resultCredentials;
return true;
}
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/Presenter/ModulePresenter.cs
using System.Collections.Generic;
using System.Linq;
using System.Windows.Forms;
using Com.Interwoven.WorkSite.iManage;
using Workshare.Components;
using Workshare.Components.Common;
using Workshare.Components.Interfaces;
using Workshare.Components.Presenter;
using Workshare.Components.WSLogger;
using Workshare.IManage.Views;
using Workshare.Integration.Enums;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.Services;
namespace Workshare.IManage.Presenter
{
public class ModulePresenter :ModulePresenterBase
{
readonly ModuleView _view;
public ModulePresenter(IModuleView view)
: base(view)
{
_view = view as ModuleView;
}
public void OnSyncItemsClicked(SyncItemsClickedArgs args, bool fromRibbon)
{
var folders = args.Items.Select(p => p.item).OfType<IDMSFolder>().ToList();
if (folders.Count() == 1)
{
if (LoginIfNeeded())
{
if (Application.iManInstance.ClientType == iManType.FileSite)
{
ActivityCounter.Instance.UpdateUserFolder(folders.FirstOrDefault());
}
_view.ShowTrueSyncDialog(invoker, folders.OfType<IDMSItem>(), (res, itemsToSkip) =>
{
if (Application.iManInstance.ClientType == iManType.FileSite)
{
ActivityCounter.Instance.UpdateUserFolder(ActivityCounter.Instance.CurrentFolder, true);
}
}, fromRibbon);
}
}
else
{
if (LoginIfNeeded())
{
var syncWU = new TrueSynhWorkUnit(_view, args.Items);
invoker.AddToQueue(syncWU);
}
// base.OnSyncItemsClicked(args);
}
}
public override void OnSyncItemsClicked(SyncItemsClickedArgs args)
{
this.OnSyncItemsClicked(args, false);
}
public void UpdateSyncInfo(object doc)
{
if (doc is IManDocument)
{
var manfile = new Workshare.IManage.Contrete.ManFile((IManDocument)doc);
Logger.Write("Try get sync info", Severity.Information);
var syncinfo = Application.iManInstance.Module.Resolve<SyncInfoService>().GetSyncInfo(manfile);
if (syncinfo != null)
{
Logger.Write("try update sync info", Severity.Information);
Application.iManInstance.Module.Resolve<SyncInfoService>().UpdateSyncInfo(
manfile.GetVersions().Where(a => a.Number == ((IManDocument)doc).Version).FirstOrDefault(),
new ActionContext(Workshare.Integration.Processor.Services.ActionType.Upload));
}
}
}
public override void OnSendItemsClicked(SendItemsClickedArgs args)
{
//TODO move to base class
InvokeInSTAThreadIfNeed(() =>
{
bool repeat = true;
while (repeat)
{
if (LoginIfNeeded())
{
if (Application.iManInstance.ClientType == iManType.FileSite)
{
ActivityCounter.Instance.UpdateUserFolder(ActivityCounter.Instance.CurrentFolder);
}
var checkOutDialogResult = ShowCheckOutDialogIfNeed(args);
if (checkOutDialogResult == Workshare.Integration.Enums.CheckOutOptions.None)
{
repeat = false;
}
else
{
var res = _view.ShowSelectFolder(GetCurrentDialogSettings(Application.iManInstance.AuthProvider.GetCurrentWSUser()));
if (res > -1)
{
invoker.AddToQueue(GetSendCommand(args.Items, new List<FileMapActivity>(), res, checkOutDialogResult));
repeat = false;
}
else if (res == -1)
{
repeat = false;
}
else if (res == -10) // we lost auth credentials
{
var user = Application.iManInstance.AuthProvider.GetCurrentWSUser();
if (user != null)
{
user.AuthToken = string.Empty;
user.SessionCookies = null;
}
}
}
}
else
{
repeat = false;
}
}
});
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Strategies/ScanStrategy.cs
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using Workshare.Components.WSLogger;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.DmsItems;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration.Processor.Strategies
{
public class ScanStrategy : DmsScanStrategyBase
{
readonly WsProxy _wsProxy;
readonly IAuthProvider _auth;
readonly SyncInfoService _syncInfoService;
public ScanStrategy(WsProxy wsProxy, IAuthProvider auth, SyncInfoService syncInfoService)
{
_wsProxy = wsProxy;
_auth = auth;
_syncInfoService = syncInfoService;
}
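// When the parent folder recorded in the item's sync info differs from the Workshare id of the map's parent,
// resolves the actual Workshare parent folder and stores it as the map's IndirectParent.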
public void EnsureIndirectParent(ItemMap map, ISyncInfo syncInfo, bool useForceRequest)
{
var parent = map.Parent as FolderMap;
var parentWsId = -1;
var wsParent = (parent != null) ? parent.WsFolder : null;
if (wsParent != null)
{
parentWsId = wsParent.Id;
}
else
{
if (parent!=null && parent.LocalFolder != null)
{
var parentSyncInfo = _syncInfoService.GetSyncInfo(parent.LocalFolder, useForceRequest);
if (parentSyncInfo!=null)
{
parentWsId = parentSyncInfo.ItemId;
}
}
}
if (parentWsId != -1)
{
if (syncInfo != null && parentWsId != syncInfo.ParentId)
{
var actualParentFoldermap = new FolderMap();
try
{
actualParentFoldermap.WsFolder = _wsProxy.TryGetFolder(_auth.GetCurrentWSUser2(), syncInfo.ParentId);
}
catch (Exception ex)
{
Logger.WriteError(ex);
actualParentFoldermap.ProcessState = ProcessState.Error;
actualParentFoldermap.Error = ex;
}
map.IndirectParent = actualParentFoldermap;
}
}
}
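// Returns the error of the indirect parent if one is set; otherwise walks up the Parent chain
// and returns the first ancestor error found, or null when no ancestor is in an error state.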
public Exception GetAncestorParentsError(ItemMap item)
{
if (item.IndirectParent != null)
{
return item.IndirectParent.ProcessState == ProcessState.Error ? item.IndirectParent.Error??new Exception("Parent in error") : null;
}
if (item.IndirectParent == null)
{
if (item.Parent != null)
{
if (item.Parent.ProcessState==ProcessState.Error)
{
return item.Parent.Error ?? new Exception("Parent in error");
}
return GetAncestorParentsError(item.Parent);
}
}
return null;
}
public override ScanResult Scan(BaseDMSFile dmsfile, FileMap fileMap, DmsFileScanOptions dmsScanOptions)
{
var manFile = (IDMSFile) dmsfile;
var parentFolderMap = fileMap.Parent as FolderMap;
WsFile file = null;
if (fileMap.WsFile != null)
{
file = fileMap.WsFile;
}
else
{
var syncInfo = _syncInfoService.GetSyncInfo(manFile, dmsScanOptions.UseForceRequest);
if (syncInfo != null)
{
var wsParentFolder = (parentFolderMap != null) ? parentFolderMap.WsFolder : null;
var parentFolderFiles = ((wsParentFolder != null) ? parentFolderMap.WsFolder.Files : null) ??
new List<WsFile>();
file =
parentFolderFiles.FirstOrDefault(
f => f.Id == syncInfo.ItemId && f.FolderId == syncInfo.ParentId);
if (file == null && (wsParentFolder == null || wsParentFolder.Id != syncInfo.ParentId))
{
var user = _auth.GetCurrentWSUser2();
file = _wsProxy.TryGetFile(user, syncInfo.ItemId, syncInfo.ParentId);
}
EnsureIndirectParent(fileMap, syncInfo, dmsScanOptions.UseForceRequest);
}
}
fileMap.WsFile = file;
var fileMapError = GetAncestorParentsError(fileMap);
if (fileMapError != null)
{
return ScanResult.ErrorResult(fileMapError);
}
return ScanResult.Scanned;
}
public override ScanResult Scan(BaseDMSFolder baseDMSFolder, FolderMap folderMap, DmsFolderScanOptions param)
{
var folder = (IDMSFolder) baseDMSFolder;
var parentFolderMap = folderMap.Parent as FolderMap;
WsFolder wsFolder = null;
if (folderMap.WsFolder != null)
{
wsFolder = folderMap.WsFolder;
}
else
{
var syncInfo = _syncInfoService.GetSyncInfo(folder, param.UseForceRequest);
if (syncInfo != null)
{
var wsParentFolder = (parentFolderMap != null) ? parentFolderMap.WsFolder : null;
if (parentFolderMap != null && parentFolderMap.WsFolder != null)
{
wsFolder = parentFolderMap.WsFolder.SubFolders.FirstOrDefault(p => p.Id == syncInfo.ItemId);
}
if (wsFolder == null)
{
wsFolder = _wsProxy.TryGetFolder(_auth.GetCurrentWSUser2(), syncInfo.ItemId);
}
}
EnsureIndirectParent(folderMap, syncInfo, param.UseForceRequest);
}
folderMap.WsFolder = wsFolder;
var folderMapError = GetAncestorParentsError(folderMap);
if (folderMapError != null)
{
return ScanResult.ErrorResult(folderMapError);
}
return ScanResult.Scanned;
}
public override ScanResult Scan(BaseDmsVersion dmsVersion, VersionMap versionMap, DmsVersionScanOptions args)
{
var info = _syncInfoService.GetSyncInfo((IDmsVersion)dmsVersion, args.UseForceRequest);
var fileMap = (FileMap)versionMap.Parent;
var wsFile = fileMap.WsFile;
if (wsFile != null && info!=null)
{
versionMap.WsVersion = wsFile.Versions.FirstOrDefault(p => p.Id.ToString(CultureInfo.InvariantCulture) == info.CurWsVerId);
}
return ScanResult.Scanned;
}
//TODO this method should not know anything about new vs. existing versions; it should just build maps. But in that case the changes detector would need to be revisited, as it depends on the current behaviour.
public override ScanResult ScanForNewVersionsOnWs(BaseDMSFile dmsfile, FileMap fileMap, DmsFileScanOptions options)
{
var syncInf = _syncInfoService.GetSyncInfo((IDMSFile)dmsfile, options.UseForceRequest);
var wsFile = fileMap.WsFile;
if (syncInf != null && wsFile != null)
{
var wsVersions = wsFile.Versions.ToList();
var lastImportedWsVersion = wsVersions.FirstOrDefault(a => a.Id == syncInf.LastImportedWsVerId);
foreach (var version in wsVersions)
{
if (lastImportedWsVersion != null && version.Version <= lastImportedWsVersion.Version) continue;
if (fileMap.ContainsChildWsVersion(version.Id)) continue;
if (syncInf.AnyVersionIsLinkedWithWsVersion(version.Id)) continue;
var versionMap = new VersionMap {WsVersion = version};
fileMap.AddIf(versionMap);
versionMap.ProcessState = ProcessState.Scanned;
}
}
return ScanResult.Scanned;
}
public override ScanResult GetWsItemsToVisit(FolderMap parentFolderMap, out List<WsFolder> wsFoldersToVisit, out List<WsFile> wsFilesToVisit, bool useForceRequest = true)
{
wsFoldersToVisit=new List<WsFolder>();
wsFilesToVisit=new List<WsFile>();
var wsIdsOfSentFiles = parentFolderMap.Maps.OfType<FileMap>()
.Select(a => _syncInfoService.GetSyncInfo(a, useForceRequest))
.Where(p => p != null)
.Select(p => p.ItemId);
var wsIdsOfSentFolders = parentFolderMap.Maps.OfType<FolderMap>()
.Select(a => _syncInfoService.GetSyncInfo(a, useForceRequest))
.Where(p => p != null)
.Select(p => p.ItemId);
var filesOnWorkshareThatIsNotNew = wsIdsOfSentFiles.Concat(_syncInfoService.GetSendData(parentFolderMap, useForceRequest));
var foldersOnWorkshareThatIsNotNew = wsIdsOfSentFolders.Concat(_syncInfoService.GetSendData(parentFolderMap, useForceRequest));
if (parentFolderMap.WsFolder == null)
{
try
{
parentFolderMap.WsFolder = _wsProxy.TryGetFolder(_auth.GetCurrentWSUser2(), _syncInfoService.GetSyncInfo(parentFolderMap, useForceRequest).ItemId);
}
catch {}
}
if (parentFolderMap.WsFolder != null)
{
wsFilesToVisit =
parentFolderMap.WsFolder.Files.Where(f => !filesOnWorkshareThatIsNotNew.Contains(f.Id)).ToList();
wsFoldersToVisit =
parentFolderMap.WsFolder.SubFolders.Where(f => !foldersOnWorkshareThatIsNotNew.Contains(f.Id))
.ToList();
}
return ScanResult.Scanned;
}
}
}
<file_sep>/WSComponents/src/WSIntegration.Tests/TestOperationContext.cs
using System.Collections.Generic;
using Moq;
using NUnit.Framework;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Changes.Activities;
namespace WSIntegration.Tests
{
class TestItemID : IDMSItemID
{
int obj_ID;
public TestItemID(int obj_id)
{
obj_ID = obj_id;
}
public bool EqualTo(IDMSItemID obj)
{
if (obj is TestItemID)
{
return ((TestItemID)obj).obj_ID == this.obj_ID;
}
return false;
}
}
[TestFixture]
public class TestOperationContext
{
[Test]
public void TestOperationContext_IsRootElement()
{
var item1 = new Mock<IDMSItem>();
var item2 = new Mock<IDMSItem>();
var item3 = new Mock<IDMSItem>();
var erroritem = new Mock<IDMSItem>();
var itemID1 = new TestItemID(1);
var itemID2 = new TestItemID(2);
var itemID3 = new TestItemID(3);
var errorItemID = new TestItemID(4);
item1.Setup(x => x.ID).Returns(itemID1);
item2.Setup(x => x.ID).Returns(itemID2);
item3.Setup(x => x.ID).Returns(itemID3);
erroritem.Setup(x => x.ID).Returns(errorItemID);
List<IDMSItem> lst = new List<IDMSItem>();
lst.Add(item1.Object);
lst.Add(item2.Object);
lst.Add(item3.Object);
List<FileMapActivity> lstToSkip = new List<FileMapActivity>();
var ctx = new OperationContext(lst, lstToSkip);
Assert.IsTrue(ctx.IsRootElementForOperation(item2.Object), "Item should be triggered as root, but shouldn't");
Assert.IsTrue(ctx.IsRootElementForOperation(item3.Object), "Item should be triggered as root, but shouldn't");
Assert.IsFalse(ctx.IsRootElementForOperation(erroritem.Object), "Error item triggered as root");
Assert.IsTrue(ctx.Stage == OperationStage.First,"Step did not set up to First");
}
}
}
<file_sep>/WSComponents/src/WSCloudService/Utils/FileUtils.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
//TODO uncomment
//using Workshare.Components.WSLogger;
namespace Workshare.Components.Helpers
{
public static class FileUtils
{
public static string GetExtension(string name)
{
if (string.IsNullOrEmpty(name)) return name;
var lastIndex = name.LastIndexOf('.');
if (lastIndex > 0)
{
return name.Substring(lastIndex);
}
return string.Empty;
}
public static string GetFileNameWithoutExtension(string name)
{
if (string.IsNullOrEmpty(name)) return name;
var lastIndex = name.LastIndexOf('.');
if (lastIndex > 0)
{
return name.Remove(lastIndex);
}
return name;
}
public static string ChangeExtension(string name, string extension)
{
return GetFileNameWithoutExtension(name) + extension;
}
public static void SafeDelete(string filePath)
{
try
{
if (string.IsNullOrEmpty(filePath)) return;
File.Delete(filePath);
}
catch (Exception ex)
{
//TODO uncomment
//Logger.WriteError(ex);
}
}
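// Returns true only when the file exists and cannot be opened for exclusive write access;
// missing files or directories are reported as not locked.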
public static bool IsFileLocked(string filePath)
{
try
{
if (string.IsNullOrEmpty(filePath) || !File.Exists(filePath)) return false;
using (var stream = File.OpenWrite(filePath))
{
return false;
}
}
catch (DirectoryNotFoundException)
{
return false;
}
catch (FileNotFoundException)
{
return false;
}
catch
{
return true;
}
}
}
}<file_sep>/WSComponents/src/WSIntegration/Processor/DmsItems/Visitors/DMSVisitor.cs
using System.Collections.Generic;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration.Processor.DmsItems.Visitors
{
public abstract class DMSVisitor
{
public abstract PageIterator Iterator { set; get; }
public abstract bool VisitEnter(BaseDMSFolder baseDMSFolder);
public abstract void Visit(BaseDMSFolder dmsFolder);
public abstract void VisitLeave(BaseDMSFolder baseDMSFolder);
public abstract bool VisitEnter(BaseDMSFile baseDMSFile);
public abstract void Visit(BaseDMSFile dmsfile);
public abstract void VisitLeave(BaseDMSFile baseDMSFile);
public abstract void Visit(BaseDmsVersion baseDmsVersion);
public abstract void GetNewEntries(out List<WsFolder> wsfolders, out List<WsFile> wsfiles);
public abstract bool VisitEnter(WsFolder wsFolder, out List<WsFolder> wsFolders, out List<WsFile> wsFiles);
public abstract void VisitLeave(WsFolder wsFolder);
public abstract void Visit(WsFolder wsFolder);
public abstract void Visit(WsFile file);
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/DmsItems/Visitors/PageIterator.cs
using System.Collections.Generic;
using System.Linq;
using Workshare.Integration.Interfaces;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration.Processor.DmsItems.Visitors
{
/// <summary>
/// Iterates through local items and new remote items; stops once a page-size number of new items has been visited.
/// </summary>
public class PageIterator
{
private readonly int _pageItems;
private readonly DMSVisitor _visitor;
private readonly BaseDMSItem _root;
public bool HasMore { private set; get; }
private readonly IItemsDetector _itemsDetector;
private int _prevTotalItemsCount;
private int _currentTotalItemsCount;
public PageIterator(int pageItems, DMSVisitor visitor, BaseDMSItem root, IItemsDetector counter)
{
_itemsDetector = counter;
_pageItems = pageItems;
_visitor = visitor;
_root = root;
HasMore = true;
}
private bool _skipping;
public void VisitNextPage()
{
_skipping = false;
_prevTotalItemsCount = _currentTotalItemsCount;
if (HasMore)
{
HasMore = false;
if (_root is BaseDMSFolder) Apply((BaseDMSFolder)_root);
else if (_root is BaseDMSFile) Apply((BaseDMSFile)_root);
}
_visitor.Iterator = this;
}
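// Asks the items detector how many items have been visited so far; once a full page of new items
// has been collected, switches to skipping mode (unless this is the last item) so that the
// remaining items are left for the next page.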
void CheckPage(bool isLastItem)
{
if (_itemsDetector!=null)
{
_currentTotalItemsCount = _itemsDetector.GetCurrentItemsCount(_visitor);
if (_currentTotalItemsCount - _prevTotalItemsCount >= _pageItems)
{
_skipping = !isLastItem;
}
}
}
private void Apply(SingleChildDMSFolder folder)
{
if (_visitor.VisitEnter(folder._folder))
{
if (folder.Next != null)
{
Apply(folder.Next);
}
if (folder.File != null)
{
Apply(folder.File);
}
}
_visitor.VisitLeave(folder._folder);
}
void Apply(BaseDMSFolder folder)
{
var singleChildFolder = folder as SingleChildDMSFolder;
if (singleChildFolder != null)
{
this.Apply(singleChildFolder);
return;
}
if (_visitor.VisitEnter(folder))
{
var folders = ((IDMSFolder)folder).SubFolders;
foreach (var subfolder in folders.OfType<BaseDMSFolder>())
{
if (_skipping)
{
HasMore = true;
continue;
}
Apply(subfolder);
}
//Task 22006. GroupBy is used to select only the latest version when WorkSite shows all versions in the Documents view.
var files = ((IDMSFolder)folder).Files.GroupBy(x => x.DMSId).Select(x => x.First().GetLatest()).OfType<BaseDMSFile>().ToArray();
foreach (var file in files)
{
if (_skipping)
{
HasMore = true;
continue;
}
Apply(file);
CheckPage(false);
}
List<WsFile> wsfiles;
List<WsFolder> wsfolders;
_visitor.GetNewEntries(out wsfolders, out wsfiles);
foreach (var wsFolder in wsfolders)
{
if (_skipping)
{
HasMore = true;
continue;
}
Apply(wsFolder);
}
foreach (var wsFile in wsfiles)
{
if (_skipping)
{
HasMore = true;
continue;
}
Apply(wsFile);
CheckPage(false);
}
_visitor.Visit(folder);
}
_visitor.VisitLeave(folder);
}
void Apply(WsFile file)
{
_visitor.Visit(file);
}
void Apply(WsFolder wsFolder)
{
List<WsFolder> wsFolders;
List<WsFile> wsFiles;
if (_visitor.VisitEnter(wsFolder, out wsFolders, out wsFiles))
{
foreach (var wsFld in wsFolders)
{
if (_skipping)
{
HasMore = true;
continue;
}
Apply(wsFld);
}
foreach (var wsFile in wsFiles)
{
if (_skipping)
{
HasMore = true;
continue;
}
Apply(wsFile);
CheckPage(false);
}
_visitor.Visit(wsFolder);
}
_visitor.VisitLeave(wsFolder);
}
void Apply(BaseDmsVersion version)
{
_visitor.Visit(version);
}
void Apply(BaseDMSFile file)
{
if (_visitor.VisitEnter(file))
{
var dmsFile = (IDMSFile)file;
var versions = dmsFile.GetVersions();
foreach (var ver in versions.OfType<BaseDmsVersion>())
{
Apply(ver);
}
_visitor.Visit(file);
}
_visitor.VisitLeave(file);
}
}
/// <summary>
/// Helps the iterator decide whether the iteration should end.
/// </summary>
public interface IItemsDetector
{
int GetCurrentItemsCount(DMSVisitor visitor);
}
}
<file_sep>/WSComponents/src/WSComponents/Enums/ImportOptions.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Workshare.Components.Enums
{
public enum ImportOptions
{
AsNewFile = 1,
AsNewVersion = 2,
AsRelatedDocument = 3
}
}
<file_sep>/WSComponents/src/WSIntegration/Exceptions/ExceptionExts.cs
using System;
using System.Net;
using System.Runtime.InteropServices;
using Workshare.Components.WSLogger;
using WorksharePlatform;
namespace Workshare.Integration.Exceptions
{
public static class ExceptionExts
{
public static bool IsConnectionError(this Exception ex)
{
var wex = ex as WebException;
if (wex != null)
{
if (wex.IsStatusCode((System.Net.HttpStatusCode) 502))
return true;
var responce = wex.Response as HttpWebResponse;
return responce == null;
}
var cex = ex as COMException;
if (cex != null)
{
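// COM error codes that this integration treats as connection failures.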
if (cex.ErrorCode == -2147221399 || cex.ErrorCode == -2147221402 || cex.ErrorCode == -2147221503)
{
Logger.WriteError((Exception) cex);
return true;
}
}
return ex is OfflineException;
}
public static bool IsConnectionTimeOutError(this Exception ex)
{
var wex = ex as WebException;
if (wex != null)
{
return wex.IsStatusCode(HttpStatusCode.GatewayTimeout);
}
return false;
}
public static bool IsCloudFolderAccessDenied(this Exception ex)
{
var wex = ex as WebException;
return ex is CloudFolderAccessDenied || wex.IsStatusCode(HttpStatusCode.Forbidden);
}
public static bool IsUnauthorized(this Exception ex)
{
var wex = ex as WebException;
return ex is CloudUnAuthorized || wex.IsStatusCode(HttpStatusCode.Unauthorized);
}
}
}
<file_sep>/WSComponents/src/WSComponents/Resources/injectedJS.js
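// Injected into the embedded Workshare page: listens for postMessage results from the page and
// forwards the selected folder id (or a cancel) to the hosting dialog through window.external.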
function init() {
if (window.addEventListener) {
window.addEventListener("message", receiver, false);
}
else {
window.attachEvent("onmessage", receiver);
}
try
{
window.onload = function () {
var el = document.body;
el.style.overflowY = "hidden";
}
}
catch (err) {
}
};
function jsLogger(e) {
window.external.Log(e);
}
function GetDialogResult(data) {
try {
var res = JSON.parse(data);
if (res) {
return res;
}
else {
return data;
}
}
catch (err) {
return data;
}
}
function receiver(e) {
var res = GetDialogResult(e.data);
if (res) {
if (res.cancel) {
window.external.onCancel();
}
else if (res.folder_id || res.folder_id==0) {
window.external.onOk(res.folder_id);
}
else {
alert("no folder id");
window.external.onCancel();
}
}
};
<file_sep>/SharePoint/src/WorkshareCloud.Common/ListItemExtension.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.SharePoint;
using System.IO;
using WorksharePlatform;
using System.Collections;
namespace WorkshareCloud.Common
{
public static class ListItemExtension
{
public static void CheckPermissionkOrThrow(this SPSecurableObject obj, SPBasePermissions permissions)
{
if (!obj.DoesUserHavePermissions(permissions))
{
throw new UnauthorizedAccessException();
}
}
public static void CheckWSFieldOrThrow(this SPList obj)
{
if (!obj.Fields.ContainsFieldWithStaticName(CloudPathFieldValue.CloudField))
{
throw new CloudFieldNotFound();
}
}
public static void CheckNotCheckedOutToOtherUserOrThrow(this SPListItem item)
{
if (item.File != null)
{
var checkoutedTo = item.File.CheckedOutByUser;
if (checkoutedTo != null && checkoutedTo.ID != item.Web.CurrentUser.ID) throw new FileCheckoutedToAnotherUser();
}
}
public static bool IsCheckedOutToCurrentUser(this SPListItem item)
{
if (item.File != null)
{
var checkoutedTo = item.File.CheckedOutByUser;
if (checkoutedTo != null && checkoutedTo.ID == item.Web.CurrentUser.ID)
{
return true;
}
}
return false;
}
public static CloudPathFieldValue GetCloudValue(this SPListItem item)
{
if (item.Fields.ContainsFieldWithStaticName(CloudPathFieldValue.CloudField))
{
return new CloudPathFieldValue((string)item[CloudPathFieldValue.CloudField]);
}
else
{
throw new CloudFieldNotFound(CloudPathFieldValue.CloudField);
}
}
public static bool IsFolder(this SPListItem item)
{
if (item.FileSystemObjectType == SPFileSystemObjectType.Folder)
{
return true;
}
return false;
}
public static CloudPathFieldValue SetCloudValue(this SPListItem item, FileDetails fileDetails)
{
if (item.Fields.ContainsFieldWithStaticName(CloudPathFieldValue.CloudField))
{
if (fileDetails != null)
{
var val = new CloudPathFieldValue(fileDetails, item);
item[CloudPathFieldValue.CloudField] = val.ToString();
return val;
}
else
{
item[CloudPathFieldValue.CloudField] = CloudPathFieldValue.Empty.ToString();
return null;
}
}
else
{
throw new CloudFieldNotFound(CloudPathFieldValue.CloudField);
}
}
public static void ClearCloudValue(this Hashtable item)
{
item[CloudPathFieldValue.CloudField] = string.Empty;
}
public static CloudPathFieldValue SetCloudValue(this SPListItem item, FolderDetails folderDetails)
{
if (item.Fields.ContainsFieldWithStaticName(CloudPathFieldValue.CloudField))
{
if (folderDetails != null)
{
var val = new CloudPathFieldValue(folderDetails, item);
item[CloudPathFieldValue.CloudField] = val.ToString();
return val;
}
else
{
item[CloudPathFieldValue.CloudField] = CloudPathFieldValue.Empty.ToString();
return null;
}
}
else
{
throw new CloudFieldNotFound(CloudPathFieldValue.CloudField);
}
}
public static SPFile AddWSFile(this SPFileCollection files, string name, string filePath)
{
var basename = name;
var filename = basename;
int fileaddings = 1;
while (true)
{
try
{
//TODO - rework to support adding files larger than Int32.MaxValue bytes
SPFile item = null;
int fileSize = (int)FileDetails.GetFileSizeByFilePath(filePath);
using (Stream stream = File.Open(filePath, FileMode.Open, FileAccess.Read, FileShare.None))
{
using (BinaryReader br = new BinaryReader(stream))
{
item = files.Add(files.Folder.ServerRelativeUrl + "/" + filename, br.ReadBytes(fileSize), false, "", false);
}
}
if (item != null)
item.Update();
//files.Folder.Item.SystemUpdate(false);
return item;
}
catch (SPException ex)
{
if (ex.ErrorCode == -2130575257)//a file with this name already exists
{
filename = string.Format("{0} - ({1})", Path.GetFileNameWithoutExtension(basename), fileaddings) + Path.GetExtension(basename);
}
else
{
throw;
}
}
fileaddings++;
}
}
public static byte[] GetFile(this SPListItem item)
{
using (var stream = item.File.OpenBinaryStream())
{
return stream.ReadFully();
}
}
public static byte[] ReadFully(this Stream input)
{
byte[] buffer = new byte[16 * 1024];
using (MemoryStream ms = new MemoryStream())
{
int read;
while ((read = input.Read(buffer, 0, buffer.Length)) > 0)
{
ms.Write(buffer, 0, read);
}
return ms.ToArray();
}
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncDialog/WorkUnits/BreakFolderLinkWorkUnit.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Workshare.Components.Common;
using Workshare.Components.Interfaces;
using Workshare.Components.Views.TrueSyncDialog.VMs;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Strategies;
namespace Workshare.Components.Views.TrueSyncDialog.WorkUnits
{
class BreakFolderLinkWorkUnit:WorkUnit
{
private readonly TrueSyncDialogViewModel _vm;
public BreakFolderLinkWorkUnit(TrueSyncDialogViewModel vm, IModuleView view):base(view)
{
_vm = vm;
}
public override void Execute()
{
var processor = _vm.Module.Resolve<Processor>();
processor.BreakLink(_vm.ScanResult);
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Interfaces/ISyncInfoFactory.cs
namespace Workshare.Integration.Interfaces
{
public interface ISyncInfoFactory
{
ISyncInfo CreateFileInfo();
ISyncInfo CreateFolderInfo();
IVersionSyncInfo CreateVersionInfo();
ISendDataInfo CreateSendDataInfo();
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncDialog/UserControls/ChangeTemplateSelector.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using Workshare.Components.WSLogger;
using Workshare.Integration.Processor.Changes;
namespace Workshare.Components.Views.TrueSyncDialog.UserControls
{
public class ChangeTemplateSelector:DataTemplateSelector
{
public override DataTemplate SelectTemplate(object item, DependencyObject container)
{
try
{
var change = item as FileActivityChange;
var element = container as FrameworkElement;
if (element != null && change != null)
{
var templateName = string.Empty;
switch (change.Type)
{
case ChangeType.FolderSharedWithOthers:
templateName = "FolderSharedWithOthersTemplete";
break;
default:
templateName = "CommonChangeTemplete";
break;
}
return element.FindResource(templateName) as DataTemplate;
}
return null;
}
catch (Exception ex)
{
Logger.WriteError(ex);
return null;
}
}
}
}
<file_sep>/WSComponents/src/WSIntegration/AsposePdf/AsposePdf.cs
using System.Collections.Generic;
using WorksharePlatform;
namespace Workshare.Integration.AsposePdf
{
public class AsposePdfHelper
{
public AsposePdfHelper()
{
}
public byte[] AddCommentInData(byte[] data, List<FileComment> comments)
{
if (comments.Count == 0)
{
return data;
}
//MemoryStream inStream = new MemoryStream(data);
//Document doc = new Document(inStream);
//var list = comments.OrderBy(x => x.ReplyToId == 0 ? x.Id : x.ReplyToId).ThenBy(x => x.CreatedAt);
//Aspose.Pdf.InteractiveFeatures.DefaultAppearance default_appearance = new DefaultAppearance("Arial", 10, System.Drawing.Color.Black);
//FreeTextAnnotation lastAnnotation;
//foreach (var comment in comments)
//{
// FreeTextAnnotation ann = new FreeTextAnnotation(doc.Pages[comment.PageNumber], new Aspose.Pdf.Rectangle(comment.Positional.x, comment.Positional.y, comment.Positional.width, comment.Positional.height), default_appearance);
// ann.Contents = comment.Creator.UserName + ":\n" + comment.Body;
// ann.Opacity = 0.0;
// ann.Modified = comment.CreatedAt;
// if (comment.ReplyToId == 0)
// {
// lastAnnotation = ann;
// }
// else
// {
// ann.InReplyTo = lastAnnotation;
// }
// doc.Pages[comment.PageNumber].Annotations.Add(ann);
//}
//MemoryStream stream = new MemoryStream();
//doc.Save(stream);
//return stream.ToArray();
return data;
}
}
}
<file_sep>/iManageIntegration/Common/VersionInfoBlock/AssemblyVersionInfoBlock.cs
using System;
using System.Reflection;
using System.Runtime.InteropServices;
// This common assembly info file is used by all assemblies to define version, etc.
// If you want to add an assembly-specific value, add a separate AssemblyInfo file
// to your project.
// When you add this file to new projects, use 'Add existing file' and choose
// the 'Link File' option from the dropdown arrow. Otherwise the file is copied
// into the local directory and won't pick up updates.
#if !THIRD_PARTY_LIBRARY
[assembly: AssemblyProduct("Workshare IManage Integration")]
#endif
<file_sep>/WSComponents/src/WSComponents/Views/CeaseCollaborationDialog/WorkUnits/BreakWorkUnit.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Windows.Forms.VisualStyles;
using Workshare.Components.Common;
using Workshare.Components.Interfaces;
using Workshare.Components.Views.TrueSyncDialog.VMs;
using Workshare.Components.WSLogger;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.DmsItems;
using Workshare.Integration.Processor.DmsItems.Visitors;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.Processor.Strategies.ActionStrategy;
namespace Workshare.Components.Views.CeaseCollaborationDialog.WorkUnits
{
public class BreakWorkUnit : WorkUnit
{
private readonly CeaseCollaborationDialogViewModel _vm;
public BreakWorkUnit(CeaseCollaborationDialogViewModel vm, IModuleView view)
: base(view)
{
_vm = vm;
}
public override void Execute()
{
var processor = _vm.Module.Resolve<Processor>();
processor.BreakLink(_vm.ScanResult.Maps.First());
}
}
}
<file_sep>/WSComponents/src/WSCloudService/Activity.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
namespace WorksharePlatform
{
public class Activity
{
public string Noun { get; set; }
public string Verb { get; set; }
public string UserName { get; set; }
public string FolderName { get; set; }
public string ChangesNameOld { get; set; }
public string ChangesNameNew { get; set; }
public string TimeCreated { get; set; }
public int ChangesVersionNew { get; set; }
public int ChangesVersionOld { get; set; }
public bool HistoryModify { get; private set; }
public int MemberId { get; set; }
public string MemberName { get; set; }
public int CurrentVersion { get; set; }
public string ActyvityId { get; set; }
public const string TimeFormat = "dd MMM yyyy HH:mm";
bool UserNameIsEmpty(string userName )
{
return string.Equals(userName, "null", StringComparison.InvariantCulture) || string.IsNullOrEmpty(userName);
}
private String GetUserNameOrAnonymousUser(string userName)
{
return UserNameIsEmpty(userName) ? "Unauthenticated user" : userName;
}
public string GetComment()
{
DateTime date;
DateTime.TryParse(TimeCreated, out date);
if (string.Equals(Noun, "File", StringComparison.InvariantCulture) && string.Equals(Verb, "Update", StringComparison.InvariantCulture))
{
HistoryModify = true;
if ((ChangesNameOld != null && ChangesNameNew != null) && !string.Equals(ChangesNameOld, ChangesNameNew, StringComparison.InvariantCulture))
return string.Format(Properties.Resources.STR_ACTIVITY_FILE_RENAMED, GetUserNameOrAnonymousUser(UserName), date.ToString(TimeFormat));
if (ChangesVersionOld < ChangesVersionNew)
return string.Format(Properties.Resources.STR_ACTIVITY_ADDED_NEW_VERSION, GetUserNameOrAnonymousUser(UserName), date.ToString(TimeFormat));
}
else if (string.Equals(Verb, "Download", StringComparison.InvariantCulture))
{
HistoryModify = false;
return string.Format(Properties.Resources.STR_ACTIVITY_FILE_DOWNLOADED, GetUserNameOrAnonymousUser(UserName), date.ToString(TimeFormat));
}
if (string.Equals(Noun, "Comment", StringComparison.InvariantCulture) && string.Equals(Verb, "Create", StringComparison.InvariantCulture))
{
HistoryModify = true;
return string.Format(Properties.Resources.STR_ACTIVITY_COMMENT_ADDED, date.ToString(TimeFormat));
}
if (string.Equals(Noun, "Member", StringComparison.InvariantCulture) &&
string.Equals(Verb, "Create", StringComparison.InvariantCulture))
{
HistoryModify = false;
if (!UserNameIsEmpty(UserName))
{
return string.Format(Properties.Resources.STR_ACTIVITY_USER_ADDED, UserName,
GetUserNameOrAnonymousUser(MemberName), date.ToString(TimeFormat));
}
else
{
return string.Format(Properties.Resources.STR_ACTIVITY_USER_ADDED_BY_SYSTEM,
GetUserNameOrAnonymousUser(MemberName), date.ToString(TimeFormat));
}
}
return string.Empty;
}
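// Hedged usage sketch (exact wording comes from the resource strings referenced above):
//   var activity = new Activity { Noun = "File", Verb = "Update", UserName = "null",
//                                 ChangesVersionOld = 1, ChangesVersionNew = 2,
//                                 TimeCreated = "24 Jan 2014 10:15" };
//   string comment = activity.GetComment();  // "added a new version" entry attributed to "Unauthenticated user"
//   bool modifies = activity.HistoryModify;  // true for this noun/verb combination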
}
}
<file_sep>/WSComponents/src/WSIntegration/Extensions/DMSItemExtensions.cs
using System.Collections.Generic;
using Workshare.Integration.Interfaces;
namespace Workshare.Integration.Extensions
{
public static class DMSItemExtensions
{
public static List<IDMSFolder> GetParentFolders(this IDMSItem item)
{
if (item is IDMSFile)
{
return ((IDMSFile)item).ParentFolders;
}
else if (item is IDMSFolder)
{
var parent = ((IDMSFolder)item).ParentFolder;
if (parent != null)
{
return new List<IDMSFolder> { parent };
}
}
return new List<IDMSFolder>();
}
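// Hedged usage sketch: works uniformly for files (which may have several parents) and folders:
//   foreach (IDMSFolder parent in item.GetParentFolders())
//   {
//       // walk upwards or collect parent names; an item without a parent yields an empty list
//   }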
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Changes/FileMapActivityChange.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration.Processor.Changes
{
public enum ChangeType {
NotModified,
BothChanged,
LocalChanged,
RemoteChanged,
RemoteAdded,
NotSent,
RemoteDeleted,
NamesDiffer,
Uploaded,
DocTypeChanged,
FolderSharedWithOthers,
NotSentVersion,
VersionChangedAfterSend }
public class FileActivityChange
{
public FileActivityChange()
{
this.ChangeDate = DateTime.Now;
}
public ChangeType Type { set; get; }
public DateTime ChangeDate { set; get; }
public string LocalName { set; get; }
public string WsName { set; get; }
public string LocalDocType { get; set; }
public string WsDocType { get; set; }
public string OtherMemberViewLink { get; set; }
public int OtherMemebersCount { set; get; }
List<WsVersion> _newerVersions = new List<WsVersion>();
public List<WsVersion> NewerVersions
{
get { return _newerVersions; }
set
{
_newerVersions = value ?? new List<WsVersion>();
}
}
public override string ToString()
{
return Type.ToString();
}
public FileMapActivity Parent { get; internal set; }
public DateTime SavedTime { get; set; }
public int VersionNumber { get; set; }
public string LocalVersionId { get; set; }
public WsUser FolderSharedBy { get; set; }
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncUploadFilesDialog/WorkUnits/ProcessWorkUnit.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using Workshare.Components.Common;
using Workshare.Components.Views.TrueSyncDialog.VMs;
using Workshare.Components.Views.TrueSyncUploadFilesDialog.VMs;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
namespace Workshare.Components.Views.TrueSyncUploadFilesDialog.WorkUnits
{
public class ProcessWorkUnit:WorkUnit
{
private TrueSyncFilesScanDialogVm vm;
private IEnumerable<DocumentActionVM> actions;
public ProcessWorkUnit(TrueSyncFilesScanDialogVm vm, IEnumerable<DocumentActionVM> actionsToExecute)
: base(vm.View)
{
this.vm = vm;
this.actions = actionsToExecute;
}
public override void OnAdded()
{
foreach (var actionVm in actions)
{
actionVm.activityVm.OnStartProcessing();
}
}
public override void Execute()
{
try
{
foreach (var documentActionVm in actions)
{
ParentScan parentScan = vm.ParentScans.First();
var processor = vm.Module.Resolve<Processor>();
var processOptions = new ProcessOptions();
processOptions.ActionsToApply.AddRange(actions.Select(a => a.action).ToList());
var processedScan = processor.Process(parentScan.Scan, processOptions);
var activityFinder = vm.Module.Resolve<ActivityFinder>();
var uploadActivities = parentScan.Activities.Select(a => a.data).OfType<UploadFileActivity>();
activityFinder.UpdateUploadActivities(processedScan, uploadActivities);
var toUpdate = actions.Select(a => a.activityVm).ToList();
vm.Dispatcher.Invoke(new Action(() => toUpdate.ForEach(a => a.PropertyChangedAll())));
if (uploadActivities.All(a => a.State == UploadFileActivityState.Uploaded))
{
vm.Dispatcher.BeginInvoke(new Action(() => vm.RaiseClose(true)));
return;
}
}
}
catch (Exception ex)
{
vm.View.ShowError(ex);
}
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Strategies/DmsScanStrategyBase.cs
using System;
using System.Collections.Generic;
using Workshare.Integration.Processor.DmsItems;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration.Processor.Strategies
{
public abstract class DmsScanStrategyBase
{
//scans file and populates map with appropriate values
public abstract ScanResult Scan(BaseDMSFile dmsFile, FileMap fileResult, DmsFileScanOptions args);
//scans the folder and populates the map with appropriate values
public abstract ScanResult Scan(BaseDMSFolder dmsFolder, FolderMap map, DmsFolderScanOptions args);
//scans the folder for new files on Workshare and adds new child maps to the folder map
//public abstract ScanResult ScanForNewFilesOnWs(FolderMap parentFolderMap, DmsFolderScanOptions args);
//scans document version and populates map with appropriate values
public abstract ScanResult Scan(BaseDmsVersion dmsVersion, VersionMap versionMap, DmsVersionScanOptions args);
public abstract ScanResult ScanForNewVersionsOnWs(BaseDMSFile dmsfile, FileMap currentFileMap, DmsFileScanOptions options);
public abstract ScanResult GetWsItemsToVisit(FolderMap parentFolderMap, out List<WsFolder> wsFoldersToVisit, out List<WsFile> wsFilesToVisit, bool useForceRequest = true);
}
public class ScanResult
{
public static ScanResult Scanned = new ScanResult(ProcessState.Scanned);
public static ScanResult Cancelled = new ScanResult(ProcessState.Cancelled);
public static ScanResult ErrorResult(Exception ex)
{
return new ScanResult(ProcessState.Error)
{
Error = ex
};
}
public ScanResult(ProcessState result)
{
Result = result;
}
public ProcessState Result { private set; get; }
public Exception Error { set; get; }
}
public class DmsFileScanOptions
{
public bool SkipChildren { get; set; }
public bool UseForceRequest { get; set; }
}
public class DmsFolderScanOptions
{
public bool SkipChildren { get; set; }
public bool UseForceRequest { get; set; }
}
public class DmsVersionScanOptions
{
public bool UseForceRequest { get; set; }
}
}
<file_sep>/WSComponents/src/WSIntegration/Interfaces/IDMSItems.cs
using System;
using System.Collections.Generic;
using Workshare.Integration.Common;
using WorksharePlatform;
namespace Workshare.Integration.Interfaces
{
public interface IDMSItemID
{
bool EqualTo(IDMSItemID obj);
}
public interface IDMSItem
{
IDMSItemID ID { get; }
int DMSId { get; }
string DMSItemKey { get; }
string Name { get; set; }
bool DoesUserHavePermissions(Enums.Permissions permissions);
bool CheckedOutToUser { get; }
bool CheckedOut { get; }
string DisplayName { get; set; }
IDMSFolder ParentFolder { get; }
bool WasUpdatedAfterSend2(bool useForceRequest = true);
void OnBeforeSending(OperationContext context);
void OnAfterSending(OperationContext context);
void OnSendError(object args, Exception e);
void AddHistory(string eventName, string eventComment, Workshare.Integration.Operations operation);
void AddHistories(List<Activity> activities);
IDMSFolder RootFolder();
}
public interface IDMSFolder:IDMSItem
{
IDMSFolder AddSubFolder(FolderDetails cloudFolder);
IEnumerable<IDMSFolder> SubFolders { get; set; }
IEnumerable<IDMSFile> Files { get; set; }
bool IsDeleted { get; }
}
public interface IDMSFile:IDMSItem
{
string GetFilePath();
DateTime Modified { get; }
bool IsCheckedOutFileExists();
List<IDMSFolder> ParentFolders { get; }
IDMSFile GetLatest();
void DiscardCheckout(bool deleteCheckOutFile = false);
IEnumerable<IDmsVersion> GetVersions();//all versions including latest version
IDmsVersion AddVersion(string filePath, FileDetails file, List<Activity> activities, string versionFriendlyName = null, bool checkInVersion = false, bool keepLocalState = false);
}
public interface IDmsVersion
{
string Id{get;}
int Number{get;}
object ID { get; }
DateTime EditTime { get; }
IDMSFile GetLatestFile();
bool WasChangedAfterSend2(bool useForcerequest = true );
IDMSFile AsFile();
}
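// Hedged usage sketch (dmsFile is an IDMSFile obtained from the hosting DMS; System.Linq assumed):
//   IEnumerable<IDmsVersion> versions = dmsFile.GetVersions();          // includes the latest version
//   IDmsVersion latest = versions.OrderByDescending(v => v.Number).FirstOrDefault();
//   IDMSFile latestFile = latest != null ? latest.GetLatestFile() : null;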
}
<file_sep>/WSComponents/src/WSCloudService/Member.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace WorksharePlatform
{
public class Member
{
public string MemberName { get; set; }
public int MemberId { get; set; }
public bool IsDeleted { get; set; }
public string Email { get; set; }
}
}
<file_sep>/WSComponents/src/WSCloudService/MultiPartDetails.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace WorksharePlatform
{
public class MultiPartDetails
{
public int PartNum { get; set; }
public string Action { get; set; }
public string ContentType { get; set; }
public string Authorisation { get; set; }
public string AmsDate { get; set; }
public long Loaded { get; set; }
}
}
<file_sep>/WSComponents/src/WSCloudService/PlatformService.cs
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using Microsoft.Win32;
namespace WorksharePlatform
{
class CompareStrAsDate : IComparer<string>
{
public int Compare(string x, string y)
{
var d1 = DateTime.Now;
var d2 = DateTime.Now;
if (DateTime.TryParse(x, out d1) && DateTime.TryParse(y, out d2))
{
return DateTime.Compare(d1, d2);
}
return 0;
}
}
public enum RoleOfDictionary { users, accounts }
public static class PlatformService
{
private static int _retried;
const int LimitEntriesPerPage = 100;
static PlatformService()
{
ServicePointManager.ServerCertificateValidationCallback += ((sender, certificate, chain, sslPolicyErrors) => true);
}
public static void SignUp(UserDetails user)
{
var queryStrings = new NameValueCollection();
queryStrings.Add("user[start_trial_on_validation]", "false");
queryStrings.Add("user[utm_source]", "");
queryStrings.Add("user[utm_content]", "");
queryStrings.Add("start_trial", "true");
queryStrings.Add("user[company_name]", user.Company);
queryStrings.Add("user[username]", user.UserName);
queryStrings.Add("user[email]", user.Email);
queryStrings.Add("user[password]", user.Password);
queryStrings.Add("user[password_confirmation]", user.ConfirmPassword);
using (var client = new CookieAwareClient(user))
{
var result = client.PostValues("sign_ups.json", queryStrings);
var returnResponse = Encoding.UTF8.GetString(result);
returnResponse.Contains(string.Format("\"name\":\"{0}\"", user.UserName));
}
// Response could be {"id":362,"name":"WPLTEST 01","email":"<EMAIL>","uuid":"cb25e9c7-b82f-4509-a228-dcef5340a870",
//"avatar":"https://skydox-public.s3.amazonaws.com/images/profile_pic.png","phone_number":null,"company_name":"Workshare Testing",
//"is_unknown_user":true,"is_deactivated":false,"is_admin":false,"is_enterprise_admin":false,"account_multipart_allowed":false,
//"start_trial_on_validation":true,"created_at":"2012-10-11T09:46:34Z","updated_at":"2012-10-11T10:34:21Z",
//"validated_at":null,"account":{"id":345,"name":"Workshare","created_at":"2012-10-11T09:46:34Z","domain":null,"custom_logo":null},
//"root_folder_id":3180,"inbox_folder_id":3181,"sent_folder_id":3182}
}
public static void RefreshUserIfNeeded(UserDetails user)
{
if (user.RootFolderId == 0)
{
RefreshUserData(user);
}
}
public static void RefreshUserData(UserDetails user)
{
using (var client = new CookieAwareClient(user))
{
var result = client.GetString4("current_user.json?includes=u.profile,u.folders,a.core");
var userData = JsonParser.ParseCurrentUserData(result);
user.UserId = userData.UserId;
user.AccountId = userData.AccountId;
user.AccountUuId = userData.AccountUuId;
user.RootFolderId = userData.RootFolderId;
user.Email = userData.Email;
user.UserName = userData.UserName;
user.Domain = userData.Domain;
}
}
public static List<UserDetails> GetAccountUsersData(UserDetails user)
{
using (var client = new CookieAwareClient(user))
{
var result = client.GetString3(string.Format("accounts/{0}/users.json", user.AccountId));
var usersData = JsonParser.ParseAccountUsersData(result);
var paginationDataDict = JsonParser.Parsepagination(result);
int pagesCount = paginationDataDict.ContainsKey("total_pages") ? paginationDataDict["total_pages"] : 0;
for (int i = 2; i <= pagesCount; i++)
{
result = client.GetString3(string.Format("accounts/{0}/users.json?page={1}", user.AccountId, i));
usersData.AddRange(JsonParser.ParseAccountUsersData(result));
}
usersData.ForEach(u =>
{
u.Domain = user.Domain;
u.AccountId = user.AccountId;
u.AccountUuId = user.AccountUuId;
});
return usersData;
}
}
public static void Login(UserDetails user)
{
var queryStrings = new NameValueCollection();
queryStrings.Add("user_session[email]", user.Email);
queryStrings.Add("user_session[password]", user.Password);
using (var client = new CookieAwareClient(user))
{
client.PostValues("user_sessions.json", queryStrings);
if (client.CookieContainer.Count == 0) //refactored
{
throw new UnauthorizedAccessException();
}
}
}
public static WSServer GetWSServer(UserDetails user, string type = "dms")
{
using (var client = new CookieAwareClient(user))
{
var result = client.GetString("push_settings.json?ctype=" + type);
if (client.CookieContainer.Count == 0) //refactored
{
throw new UnauthorizedAccessException();
}
return JsonParser.ParseWSServerData(result);
}
}
public static UserDetails GetUserByUserId(UserDetails user, string userid)
{
using (var client = new CookieAwareClient(user))
{
string result = client.GetString2(string.Format("users/find_by_token.json?token={0}", userid));
return JsonParser.ParseUserInfo(result);
}
}
public static List<FolderDetails> GetFolders(UserDetails user)
{
var found = new List<FolderDetails>();
RefreshUserIfNeeded(user);
using (var client = new CookieAwareClient(user))
{
string qry = "folders.json";
var result = client.GetString4(qry);
if (string.IsNullOrEmpty(result) == false)
{
found.AddRange(JsonParser.ParseFoldersResult(result));
}
}
return found;
}
public static bool IsFolderExists(UserDetails user, int folderId, int parentFolderId)
{
if (folderId == 0)
{
return true;
}
try
{
var folder = GetFolder(user, folderId);
return folder != null && !folder.IsDeleted;
}
catch (WebException ex)
{
if (ex.Response != null && ((HttpWebResponse)ex.Response).StatusCode == HttpStatusCode.NotFound)
{
return false;
}
throw;
}
}
public static FolderDetails GetFolder(UserDetails user, int folderId)
{
using (var client = new CookieAwareClient(user))
{
var result = client.GetString4(string.Format("folders/{0}.json", folderId));
if (!string.IsNullOrEmpty(result))
{
var folder = JsonParser.ParseFolderResult(result);
if (folder.ParentId != folder.Ancestry)
{
string parentResult = string.Empty;
try
{
parentResult = client.GetString4(string.Format("folders/{0}.json", folder.ParentId));
}
catch (WebException e)
{
// skip if we don't have access to the parent folder; otherwise rethrow the exception
if (!e.IsStatusCode(HttpStatusCode.Forbidden)) throw;
}
if (!string.IsNullOrEmpty(parentResult))
{
var parentFolder = JsonParser.ParseFolderResult(parentResult);
if (parentFolder.IsDeleted)
{
folder.IsDeleted = true;
}
}
}
UserDetails userDetails = GetUserByUserId(user, folder.Owner.UserId);
folder.Owner.UserName = userDetails.UserName;
return folder;
}
}
return null;
}
public static FolderDetails TryGetFolder(UserDetails user, int folderId)
{
FolderDetails folder;
try
{
folder = GetFolder(user, folderId);
}
catch (WebException ex)
{
if (ex.IsStatusCode(HttpStatusCode.Unauthorized)) throw;
if (ex.IsStatusCode(HttpStatusCode.NotFound))
{
return null;
}
throw;
}
return folder;
}
public static DialogSettings GetDialogSettings(UserDetails user)
{
using (var client = new CookieAwareClient(user))
{
string result = client.GetString2(string.Format("settings.json"));
if (string.IsNullOrEmpty(result) == false)
{
return JsonParser.ParseDialogSettings(result);
}
return null;
}
}
public static FolderMembers GetAncestor(UserDetails user, int folderId)
{
var members = new List<UserDetails>();
using (var client = new CookieAwareClient(user))
{
string result = client.GetString(string.Format("folders/{0}/members/ancestry.json", folderId));
members.AddRange(JsonParser.ParseMembersResult(result));
}
return new FolderMembers { Members = members };
}
public static FolderDetails CreateFolder(UserDetails user, string name, string description, int parentFolderId = 0, bool notifyOwner = true, DateTime? expireAt = null)
{
var queryStrings = new NameValueCollection
{
{"folder[name]", name},
{"folder[description]", description},
{"folder[notify_owner]", notifyOwner.ToString().ToLower()},
{"folder[expires_at]", (expireAt == null) ? "null" : expireAt.Value.ToUniversalTime().ToString(CultureInfo.InvariantCulture)},
{"folder[parent_id]", parentFolderId.ToString(CultureInfo.InvariantCulture)}
};
using (var client = new CookieAwareClient(user))
{
var result = client.PostValues4("folders.json", queryStrings);
if (result != null)
{
var folder = JsonParser.ParseFolderResult(result);
UserDetails userDetails = GetUserByUserId(user, folder.Owner.UserId);
folder.Owner.UserName = userDetails.UserName;
return folder;
}
}
return null;
}
private static void SetInheritFromParentFolderPermission(UserDetails user, FolderDetails folder, bool state)
{
string action = string.Format("folders/{0}/permission.json", folder.Id);
var queryStrings = new NameValueCollection
{
{"permission[inherits_from_parent]", state.ToString().ToLower()}
};
using (var client = new CookieAwareClient(user))
{
client.PutValues(action, queryStrings);
}
}
public static void ApplyPermissionToFolder(UserDetails user, FolderDetails folder)
{
SetInheritFromParentFolderPermission(user, folder, false);
string action = string.Format("folders/{0}/permission.json", folder.Id);
var queryStrings = new NameValueCollection
{
{"permission[can_download_original]", folder.Permissions.DownloadFiles.ToString().ToLower()},
{"permission[can_download_pdf]", folder.Permissions.DownloadFiles.ToString().ToLower()},
{"permission[can_upload_changes]", folder.Permissions.CanAddVersions.ToString().ToLower()},
{"permission[can_manipulate]", folder.Permissions.CanAddOrRemoveFiles.ToString().ToLower()},
{"permission[can_access_anonymously]", (!folder.Permissions.User_must_login).ToString().ToLower()},
{"permission[can_invite_with_link]", folder.Permissions.Member_may_invite.ToString().ToLower()}
};
using (var client = new CookieAwareClient(user))
{
client.PutValues(action, queryStrings);
}
}
public static void ShareFolderWith(UserDetails user, FolderDetails folder, string emails, string mailSubject, string mailBody)
{
string action = string.Format("folders/{0}/add_member_by_email.json", folder.Id);
var queryStrings = new NameValueCollection
{
{"invite_email_body", mailBody},
{"invite_email_subject", mailSubject},
{"invite_email_list", emails}
};
using (var client = new CookieAwareClient(user))
{
client.PostValues(action, queryStrings);
}
}
public static void DeleteFolder(UserDetails user, int folderId)
{
using (var client = new CookieAwareClient(user))
{
client.DeleteValues(string.Format("folders/{0}.json", folderId), new NameValueCollection());
}
}
public static void DeleteFile2(UserDetails user, int fileId)
{
try
{
using (var client = new CookieAwareClient(user))
{
client.DeleteValues2(string.Format("files/{0}.json", fileId), new NameValueCollection());
}
}
catch (WebException)
{
throw;
}
}
public static void CopyFile(UserDetails user, int sourceFileId, int destinationFolderId)
{
const string action = "files/copy_from.json";
var queryStrings = new NameValueCollection
{
{"source_file_id", sourceFileId.ToString(CultureInfo.InvariantCulture)},
{"target_folder_id", destinationFolderId.ToString(CultureInfo.InvariantCulture)}
};
using (var client = new CookieAwareClient(user))
{
client.PostValues(action, queryStrings);
}
}
public static void CreateVersionForNewFile3(UserDetails user, FileDetails file)
{
var queryStrings = new NameValueCollection { { "file_version[binary_file_file_name]", file.Name } };
if (file.IsChunkingRequired == false)
{
queryStrings.Add("file_version[complete_file_provide_upload_info]", "true");
}
queryStrings.Add("file_version[name]", file.FriendlyName);
queryStrings.Add("file_version[folder_id]", file.FolderId.ToString(CultureInfo.InvariantCulture));
using (var client = new CookieAwareClient(user))
{
var results = client.PostValuesVersion3("file_versions.json", queryStrings);
var currentVersion = JsonParser.ParseVersionResult3(results, file.IsChunkingRequired);
file.CurrentVersion = currentVersion;
file.Id = file.CurrentVersion.FileId;
}
}
public static void CreateNewVersionOfFile(UserDetails user, FileDetails file)
{
var queryStrings = new NameValueCollection { { "deck_version[binary_file_file_name]", file.Name } };
if (file.IsChunkingRequiredForUpload == false)
{
queryStrings.Add("deck_version[complete_file_provide_upload_info]", "true");
}
queryStrings.Add("deck_version[name]", file.FriendlyName);
queryStrings.Add("deck_version[folder_id]", file.FolderId.ToString(CultureInfo.InvariantCulture));
using (var client = new CookieAwareClient(user))
{
var results = client.PostValuesVersion3(string.Format("files/{0}/file_versions.json", file.Id), queryStrings);
file.CurrentVersion = JsonParser.ParseVersionResult3(results, file.IsChunkingRequiredForUpload);
file.CurrentVersion.FileId = file.Id;
}
}
public static void CommitNewFile(UserDetails user, FileDetails file)
{
var start = Stopwatch.StartNew();
try
{
var queryStrings = new NameValueCollection { { "file_version[apply_changes]", "true" } };
using (var client = new CookieAwareClient(user))
{
var results = client.PutValues(string.Format("file_versions/{0}.json", file.CurrentVersion.Id), queryStrings);
start.Stop();
var ver = JsonParser.ParseVersionResult(results, file.IsChunkingRequired);
if (ver != null)
{
file.CurrentVersion.Size = ver.Size;
_retried = 0;
return;
}
}
}
catch (WebException ex)
{
start.Stop();
var response = ex.Response as HttpWebResponse;
if (response != null)
{
if ((int)response.StatusCode == 422
&& _retried < 3)
{
//Debug.WriteLine(string.Format("Retrying error as it is 422"), "PLATFORMTESTS");
Thread.Sleep(4000);
_retried++;
CommitNewFile(user, file);
return; // the retry succeeded, so don't rethrow the original 422
}
else { throw; }
}
_retried = 0;
throw;
}
throw new Exception("Failed to commit the new file: no version information was returned.");
}
public static void CommitNewVersionOfFile(UserDetails user, FileDetails file)
{
var start = Stopwatch.StartNew();
try
{
var queryStrings = new NameValueCollection { { "deck_version[apply_changes]", "true" } };
using (var client = new CookieAwareClient(user))
{
var results = client.PutValues(string.Format("/deck_versions/{0}.json", file.CurrentVersion.Id), queryStrings);
start.Stop();
var ver = JsonParser.ParseVersionResult(results, file.IsChunkingRequired);
if (ver != null)
{
file.CurrentVersion.Size = ver.Size;
_retried = 0;
return;
}
}
}
catch (WebException ex)
{
start.Stop();
var response = ex.Response as HttpWebResponse;
if (response != null)
{
if ((int)response.StatusCode == 403
&& _retried < 3)
{
//Debug.WriteLine(string.Format("Retrying error as it is 403"), "PLATFORMTESTS");
Thread.Sleep(4000);
_retried++;
CommitNewVersionOfFile(user, file);
return; // the retry succeeded, so don't rethrow
}
else { throw; }
}
_retried = 0;
}
//return false;
throw new Exception("Failed to commit the new version of the file.");
}
public static void CreateOrUpdateWDSDictionary(UserDetails user, string dicName, string dicData, RoleOfDictionary role)
{
RefreshUserIfNeeded(user);
var id = role == RoleOfDictionary.users ? user.UserId : user.AccountUuId;
using(var client = new CookieAwareClient(user))
{
client.PutUserWDSValue(string.Format(CultureInfo.CurrentCulture, "{0}/{1}/dictionaries/{2}.json", role, id, dicName), dicData);
}
}
public static string GetWDSDictionary(UserDetails user, string dicName, RoleOfDictionary role)
{
RefreshUserIfNeeded(user);
var id = role == RoleOfDictionary.users ? user.UserId : user.AccountUuId;
using (var client = new CookieAwareClient(user))
{
var result = client.GetDictionaryString(string.Format(CultureInfo.CurrentCulture, "{0}/{1}/dictionaries/{2}.json", role, id, dicName));
if (result != null)
{
return result;
}
}
return null;
}
public static List<string> GetWDSDictionaries(UserDetails user, RoleOfDictionary role)
{
RefreshUserIfNeeded(user);
var results = new List<string>();
var id = role == RoleOfDictionary.users ? user.UserId : user.AccountUuId;
using (var client = new CookieAwareClient(user))
{
var result = client.GetDictionaryString(string.Format(CultureInfo.CurrentCulture, "{0}/{1}/dictionaries.json", role, id));
if (result != null)
{
var returnedStrings = result.Split(new[] { "}," }, StringSplitOptions.None).ToList();
int count = 0;
foreach (var item in returnedStrings)
{
var val = item;
count++;
if (returnedStrings.Count != count)
{
val = item + "}";
}
results.Add(val);
}
return results;
}
}
return null;
}
public static void DeleteWDSDictionary(UserDetails user, string dicName, RoleOfDictionary role)
{
RefreshUserIfNeeded(user);
var id = role == RoleOfDictionary.users ? user.UserId : user.AccountUuId;
using (var client = new CookieAwareClient(user))
{
client.DeleteDictionary(string.Format(CultureInfo.CurrentCulture, "{0}/{1}/dictionaries/{2}.json", role, id, dicName));
}
}
public static void PostToAwsSingle(UserDetails user, FileDetails file)
{
var queryStrings = new Dictionary<string, object>
{
{"key", file.CurrentVersion.Key},
{"Cache-Control", file.CurrentVersion.Cache},
{"AWSAccessKeyId", file.CurrentVersion.AwsAccessKey},
{"acl", file.CurrentVersion.AclType},
{"success_action_redirect", file.CurrentVersion.SuccessRedirect},
{"policy", file.CurrentVersion.Policy},
{"signature", file.CurrentVersion.Signature},
{"Content-Type", file.CurrentVersion.ContentType},
{"x-amz-server-side-encryption", file.CurrentVersion.Encryption},
{"file", file}
};
using (var client = new CookieAwareClient(user))
{
client.MultipartFormData(file.CurrentVersion.Action, queryStrings);
}
//return false;
}
public static void PostToAwsSingle3(UserDetails user, FileDetails file)
{
var queryStrings = new Dictionary<string, object>
{
{"success_action_redirect", file.CurrentVersion.SuccessRedirect},
{"file", file}
};
using (var client = new CookieAwareClient(user))
{
client.MultipartFormData3(file.CurrentVersion.Action, file, file.CurrentVersion.HttpVerb);
}
}
public static void InitiateChunking(UserDetails user, FileDetails file)
{
var queryStrings = new NameValueCollection { { "initiate_multipart", "true" } };
using (var client = new CookieAwareClient(user))
{
var result = client.PutValues(string.Format("/cloud_files/{0}.json", file.CurrentVersion.CompleteId), queryStrings);
file.CurrentVersion.MultiPartId = JsonParser.ParseMultiPartId(result);
}
}
public static void UploadFileInChunks(UserDetails user, FileDetails file)
{
long fileSize = FileDetails.GetFileSizeByFilePath(file.FilePath);
var parts = fileSize / FileDetails.ChunkSize;
if (fileSize % FileDetails.ChunkSize != 0)
{
// There was a remainder to the division, so add one to account for it.
parts++;
}
using (Stream stream = File.Open(file.FilePath, FileMode.Open, FileAccess.Read, FileShare.None))
{
using (BinaryReader br = new BinaryReader(stream))
{
for (int partNum = 1; partNum <= parts; partNum++)
{
GetMultiPartInfo(user, file, partNum);
long chunkSize = ((stream.Length - file.CurrentPart.Loaded) > FileDetails.ChunkSize) ? FileDetails.ChunkSize : (stream.Length - file.CurrentPart.Loaded);
UploadChunkToS3Bucket(user, file, br.ReadBytes((int)chunkSize));
}
}
}
}
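// Worked example of the chunk-count computation above: if FileDetails.ChunkSize were 5 MB
// (the real constant lives in FileDetails) and the file is 12 MB, 12 / 5 = 2 with a remainder,
// so parts = 3 and the final iteration uploads only the remaining 2 MB.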
public static void GetMultiPartInfo(UserDetails user, FileDetails file, int partNum)
{
using (var client = new CookieAwareClient(user))
{
// Notify Platform of the chunk to be uploaded
var p = string.Format("multipart_upload_id={0}&part_number={1}", file.CurrentVersion.MultiPartId, partNum);
var result = client.GetString(string.Format("/cloud_files/{0}.json?{1}"
, file.CurrentVersion.CompleteId, p));
var loaded = file.CurrentPart == null ? 0 : file.CurrentPart.Loaded;
file.CurrentPart = JsonParser.ParseMultiPart(result);
if (string.IsNullOrEmpty(file.CurrentPart.Action) == false)
{
file.CurrentPart.PartNum = partNum;
file.CurrentPart.Loaded = loaded;
}
}
}
public static List<FileVersionDetails> GetVersionDetails(UserDetails user, FileDetails file)
{
var versions = GetVersionDetails(user, file.Id);
var maxVersionId = versions.Max(p => p.Version);
file.Versions = versions;
if (file.CurrentVersion == null || file.CurrentVersion.Version != maxVersionId)
{
file.CurrentVersion = versions.FirstOrDefault(p => p.Version == maxVersionId);
}
return file.Versions;
}
public static List<FileVersionDetails> GetVersionDetails(UserDetails user, int fileId)
{
var res = new List<FileVersionDetails>();
GetFromAllPages(user, (client, paginationPart) =>
{
string action = string.Format("files/{0}/file_versions.json?{1}", fileId, paginationPart);
var result = client.GetString2(action);
res.AddRange(JsonParser.ParseVersionsResult(result).Except(res, new FileVersionIdComparer()));
return result;
});
return res;
}
public static List<FileComment> GetFileComments(UserDetails user, int file_id)
{
using (var client = new CookieAwareClient(user))
{
string action = string.Format("comments.json?file_id={0}", file_id.ToString());
var result = client.GetString(action);
return JsonParser.ParseFileCommentsResult(result);
}
}
public static List<FileVersionDetails> GetVersionDetails2(UserDetails user, FileDetails file)
{
var versions = GetVersionDetails2(user, file.Id);
var maxVersionId = versions.Max(p => p.Version);
file.Versions = versions;
if (file.CurrentVersion == null || file.CurrentVersion.Version != maxVersionId)
{
file.CurrentVersion = versions.FirstOrDefault(p => p.Version == maxVersionId);
}
return file.Versions;
}
public static List<FileVersionDetails> GetVersionDetails2(UserDetails user, int fileId)
{
var res = new List<FileVersionDetails>();
GetFromAllPages(user, (client, paginationPart) =>
{
string action = string.Format("files/{0}/file_versions.json?{1}", fileId, paginationPart);
var result = client.GetString(action);
res.AddRange(JsonParser.ParseVersionsResult(result).Except(res, new FileVersionIdComparer()));
return result;
});
return res;
}
private static void UploadChunkToS3Bucket(UserDetails user, FileDetails file, byte[] data)
{
Stopwatch start = Stopwatch.StartNew();
// S3 Upload
using (var s3 = new CookieAwareClient(user))
{
s3.Uri = new Uri(file.CurrentPart.Action);
s3.Headers.Clear();
s3.Headers.Add("Content-Type", file.CurrentPart.ContentType);
s3.Headers.Add("authorization", file.CurrentPart.Authorisation);
s3.Headers.Add("x-amz-date", file.CurrentPart.AmsDate);
file.CurrentPart.Loaded += data.Length;
s3.UploadData(s3.Uri, "PUT", data);
start.Stop();
}
}
public static void CompleteChunking(UserDetails user, FileDetails file)
{
var queryStrings = new NameValueCollection
{
{"complete_multipart", "true"},
{"multipart_upload_id", file.CurrentVersion.MultiPartId}
};
using (var client = new CookieAwareClient(user))
{
var result = client.PutValues(string.Format("/cloud_files/{0}.json", file.CurrentVersion.CompleteId), queryStrings);
file.CurrentVersion.MultiPartId = JsonParser.ParseMultiPartId(result);
if (string.IsNullOrEmpty(file.CurrentVersion.MultiPartId))
{
throw new Exception("Cannot Complete cunking");
}
}
}
public static void UploadFile3(UserDetails user, FileDetails file)
{
CreateVersionForNewFile3(user, file);
TryToUploadFile3(user, file);
CommitNewFile(user, file);
}
private static void TryToUploadFile3(UserDetails user, FileDetails file, int count = 0)
{
int counter = count;
try
{
if (file.IsChunkingRequired == false)
{
PostToAwsSingle3(user, file);
}
else
{
InitiateChunking(user, file);
UploadFileInChunks(user, file);
CompleteChunking(user, file);
}
}
catch (WebException e)
{
if (e.IsStatusCode(HttpStatusCode.RequestTimeout) || e.IsStatusCode(HttpStatusCode.GatewayTimeout))
{
counter++;
if (counter <= 3)
{
TryToUploadFile3(user, file, counter);
return; // the retry succeeded, so don't rethrow the original timeout
}
}
throw;
}
}
public static void UploadNewVersionOfFile(UserDetails user, FileDetails file)
{
CreateNewVersionOfFile(user, file);
if (file.IsChunkingRequired == false)
{
if (file.CurrentVersion.ApiVersion == ApiVersion.One)
{
PostToAwsSingle(user, file);
}
else { PostToAwsSingle3(user, file); }
}
else
{
InitiateChunking(user, file); //?
UploadFileInChunks(user, file);
CompleteChunking(user, file);
#region Old_code
//if (InitiateChunking(user, file))
//{
// UploadFileInChunks(user, file);
// CompleteChunking(user, file);
//}
#endregion
}
CommitNewVersionOfFile(user, file);
}
public static void UploadNewVersionOfFile3(UserDetails user, FileDetails file)
{
CreateNewVersionOfFile(user, file);
TryToUploadFile3(user, file);
CommitNewVersionOfFile(user, file);
}
public static FileDetails GetFile(UserDetails user, int fileId, int folderId)
{
var stop = Stopwatch.StartNew();
string result;
RefreshUserIfNeeded(user);
result = TryToGetFileResponse(user, fileId);
stop.Stop();
return JsonParser.ParseFileData(result, folderId);
}
private static string TryToGetFileResponse(UserDetails user, int fileId, int count = 0)
{
int counter = count;
try
{
var client = new CookieAwareClient(user);
return client.GetString2(string.Format("files/{0}.json", fileId));
}
catch (WebException e)
{
if (e.IsStatusCode(HttpStatusCode.RequestTimeout) || e.IsStatusCode(HttpStatusCode.GatewayTimeout))
{
counter++;
if (counter <= 3)
{
return TryToGetFileResponse(user, fileId, counter);
}
}
throw;
}
}
public static FileDetails TryGetFile(UserDetails user, int fileId, int parentFolderIdToCheck = -1)
{
try
{
if (parentFolderIdToCheck == 0) parentFolderIdToCheck = user.RootFolderId;
FileDetails file = GetFile(user, fileId, parentFolderIdToCheck);
if (file.IsDeleted || (parentFolderIdToCheck != -1 && file.FolderId != parentFolderIdToCheck))
{
return null;
}
return file;
}
catch (WebException ex)
{
if (ex.IsStatusCode(HttpStatusCode.Unauthorized)) throw;
if (ex.IsStatusCode(HttpStatusCode.NotFound))
{
return null;
}
throw;
}
}
public static void GetFromAllPages(UserDetails user, Func<CookieAwareClient, string, string> action)
{
using (var client = new CookieAwareClient(user))
{
var prevPage = 0;
while (true)
{
string result = action(client, string.Format("limit={0}&page={1}", LimitEntriesPerPage, prevPage + 1));
var pagination = JsonParser.ParsePagination(result);
if (prevPage >= pagination.CurrentPage || pagination.CurrentPage >= pagination.TotalPages)
{
break;
}
prevPage = pagination.CurrentPage;
}
}
}
class FileIdComparer : IEqualityComparer<FileDetails>
{
public bool Equals(FileDetails x, FileDetails y)
{
return x.Id == y.Id;
}
public int GetHashCode(FileDetails obj)
{
return obj.Id;
}
}
class FileVersionIdComparer : IEqualityComparer<FileVersionDetails>
{
public bool Equals(FileVersionDetails x, FileVersionDetails y)
{
return x.Id == y.Id;
}
public int GetHashCode(FileVersionDetails obj)
{
return obj.Id;
}
}
public static List<FileDetails> GetFiles(UserDetails user, int folderId)
{
var files = new List<FileDetails>();
RefreshUserIfNeeded(user);
GetFromAllPages(user, (client, arg) =>
{
string result = TryToGetFilesResponse(client, folderId, arg);
files.AddRange(JsonParser.ParseFilesResult(result, folderId).Except(files, new FileIdComparer()));
return result;
});
return files;
}
private static string TryToGetFilesResponse(CookieAwareClient client, int folderId, string arg, int count = 0)
{
int counter = count;
try
{
return client.GetString2(string.Format("folders/{0}/files.json?{1}", folderId, arg));
}
catch (WebException e)
{
if (e.IsStatusCode(HttpStatusCode.RequestTimeout) || e.IsStatusCode(HttpStatusCode.GatewayTimeout))
{
counter++;
if (counter <= 3)
{
return TryToGetFilesResponse(client, folderId, arg, counter);
}
}
throw;
}
}
public static string DownloadFile(UserDetails user, FileDetails file)
{
return DownloadFile(user, file.Id);
}
public static string DownloadFile(UserDetails user, int fileId)
{
using (var client = new CookieAwareClient(user))
{
client.Uri = new Uri(ServiceUrl + string.Format("/files/{0}/download", fileId));
return client.DownloadFile();
}
}
public static string DownloadFileVersion(UserDetails user, int fileId, int versionId)
{
using (var client = new CookieAwareClient(user))
{
client.Uri = new Uri(ServiceUrl2 + string.Format("/files/{0}/download?version={1}", fileId, versionId));
return client.DownloadFile();
}
}
public static string DownloadFileInPdf(UserDetails user, int file_id)
{
using (var client = new CookieAwareClient(user))
{
client.Uri = new Uri(ServiceUrl + string.Format("/files/{0}/download_pdf", file_id));
return client.DownloadFile();
}
}
public static string DownloadFileVersionInPdf(UserDetails user, int file_id, int version_id)
{
using (var client = new CookieAwareClient(user))
{
client.Uri = new Uri(ServiceUrl4 + string.Format("/files/{0}/download_pdf?version={1}", file_id, version_id));
return client.DownloadFile();
}
}
public static string DownloadFileVersionInPdfWithComment(UserDetails user, string password, int versionId)
{
using (var client = new CookieAwareClient(user))
{
client.Uri = new Uri(ServiceUrl4 + string.Format("/d/{0}/pdfcomments?version={1}", password, versionId));
return client.DownloadFile();
}
}
public static List<FolderDetails> GetChildFolders(UserDetails user, int Folder_Id)
{
var found = new List<FolderDetails>();
RefreshUserIfNeeded(user);
using (var client = new CookieAwareClient(user))
{
string action = string.Format("folders.json?folder_id={0}", Folder_Id);
var result = client.GetString(action);
if (string.IsNullOrEmpty(result) == false)
{
found.AddRange(JsonParser.ParseFoldersResult(result));
}
}
return found;
}
public static readonly string DEFAULT_SERVER = MY_SERVER;
public const string QA_SERVER = "qa.workshare.com";
public const string MY_SERVER = "my.workshare.com";
public const string DEV_SERVER = "dev2.workshare.com";
public static bool UseHostFromRegistry = false;
public static string _Host = null;
public static string Host
{
get
{
if (UseHostFromRegistry)
{
_Host = PlatformRegistryHelper.getFromRegisrty(RegData.Platform);
}
return _Host ?? DEFAULT_SERVER;
}
set
{
_Host = value;
}
}
public static Uri HostWithSchema
{
get { return new Uri(string.Format("https://{0}", Host)); }
}
public static string ServiceUrl { get { return string.Format(CultureInfo.CurrentCulture, "https://{0}/api/v1.1", Host); } }
public static string ServiceUrl2 { get { return string.Format(CultureInfo.CurrentCulture, "https://{0}/api/v1.2", Host); } }
public static string ServiceUrl3 { get { return string.Format(CultureInfo.CurrentCulture, "https://{0}/api/v1.3", Host); } }
public static string ServiceUrl4 { get { return string.Format(CultureInfo.CurrentCulture, "https://{0}/api/v1.4", Host); } }
public static string ServiceDictionaryUrl { get { return string.Format(CultureInfo.CurrentCulture, "https://{0}/dictionaries/api/v1.0", Host); } }
public static List<Activity> GetActivities(string request, UserDetails user, int folderId = 0, int fileId = 0, int version = 0)
{
Dictionary<string, int> pagination = null;
var activityList = new List<Activity>();
int page = 1;
int totalPages = -1;
RefreshUserIfNeeded(user);
using (var client = new CookieAwareClient(user))
{
while (true)
{
string result = client.GetString(request);
var activities = JsonParser.ParseActivities(result, folderId, fileId, version);
if (activities == null)
return null;
activityList.AddRange(activities);
if (pagination == null)
{
pagination = JsonParser.Parsepagination(result);
totalPages = pagination["total_pages"];
}
if (page >= totalPages || totalPages <= 0)
{
break;
}
page++;
}
}
return activityList;
}
public static List<Activity> GetFileActivitiesImanage(UserDetails user, FileDetails wsFile, int version = 0, string lastActyvityId = "")
{
try
{
var requestFileActivity = string.IsNullOrEmpty(lastActyvityId) ? string.Format("notifications.json?file_id={0}&limit={1}&page={2}", wsFile.Id, LimitEntriesPerPage, 1) :
string.Format("notifications.json?file_id={0}&limit={1}&page={2}&later_than_id={3}", wsFile.Id, LimitEntriesPerPage, 1, lastActyvityId);
var activityListFile = GetActivities(requestFileActivity, user, fileId: wsFile.Id);
if (activityListFile == null)
return null;
if (user.RootFolderId != wsFile.FolderId)
{
var requestBase = string.Format("notifications.json?folder_id={0}&limit={1}&page={2}", wsFile.FolderId, LimitEntriesPerPage, 1);
var request = string.IsNullOrEmpty(lastActyvityId)
? requestBase
: string.Format("{0}&later_than_id={1}", requestBase, lastActyvityId);
var activityListfolder = GetActivities(request, user, wsFile.FolderId);
if (activityListfolder.Any())
{
var sorted = activityListfolder.Where(p => p.Noun == "Member" && p.Verb == "Create").ToList();
var members = GetMembers(user, wsFile.FolderId);
foreach (var activity in sorted)
{
if (activity.CurrentVersion == 0)
{
if (wsFile.CurrentVersion == null)
{
GetVersionDetails(user, wsFile);
}
DateTime dt;
if (DateTime.TryParse(activity.TimeCreated, out dt))
{
var ver = wsFile.Versions.Where(v => v.CreateDate < dt)
.Where(r => r != null)
.OrderByDescending(t => t.CreateDate)
.FirstOrDefault();
activity.CurrentVersion = ver != null ? ver.Version : version;
}
else
{
activity.CurrentVersion = wsFile.CurrentVersion != null ? wsFile.CurrentVersion.Version : version;
}
}
var firstMember = members.FirstOrDefault(p => p.MemberId == activity.MemberId);
if (firstMember != null)
{
activity.MemberName = firstMember.MemberName;
}
}
activityListFile.AddRange(sorted);
return activityListFile.OrderBy(p => p.TimeCreated, new CompareStrAsDate()).ToList();
}
}
activityListFile.Reverse();
return activityListFile;
}
catch (WebException wex)
{
if (wex.IsStatusCode(HttpStatusCode.Unauthorized)) throw;
return new List<Activity>();
}
catch (Exception)
{
return new List<Activity>();
}
}
private static string GetRequestUrl(int fileId, int folderId, string lastActyvityId = "")
{
if (fileId != 0)
{
var requestFileActivity = string.IsNullOrEmpty(lastActyvityId)
? string.Format("notifications.json?file_id={0}&limit={1}&page={2}", fileId, LimitEntriesPerPage, 1)
: string.Format("notifications.json?file_id={0}&limit={1}&page={2}&later_than_id={3}", fileId, LimitEntriesPerPage, 1,
lastActyvityId);
return requestFileActivity;
}
var requestBase = string.Format("notifications.json?folder_id={0}&limit={1}&page={2}", folderId, LimitEntriesPerPage, 1);
var request = string.IsNullOrEmpty(lastActyvityId)
? requestBase
: string.Format("{0}&later_than_id={1}", requestBase, lastActyvityId);
return request;
}
public static List<Member> GetMembers(UserDetails user, int folderId)
{
string result;
Dictionary<string, int> pagination = null;
var members = new List<Member>();//null;
int page = 1;
RefreshUserIfNeeded(user);
using (var client = new CookieAwareClient(user))
{
while (true)
{
//GET /api/v1.2/folders/368384/members/ancestry.json?page=1&rqtime=1390580996217 HTTP/1.1
result = client.GetString(string.Format("folders/{0}/members/ancestry.json?limit={1}&page={2}", folderId, LimitEntriesPerPage, page));
members.AddRange(JsonParser.ParseMembers(result));
if (pagination == null)
{
pagination = JsonParser.Parsepagination(result);
}
page++;
if (pagination["current_page"] == pagination["total_pages"])
{
break;
}
}
}
return members;
}
public static bool IsStatusCode(this WebException ex, HttpStatusCode code)
{
if (ex == null) return false;
return ex.Response != null && ex.Response is HttpWebResponse && (((HttpWebResponse)ex.Response).StatusCode == code);
}
public static DateTime GetFolderDeleteTime(UserDetails user, int folderId)
{
var deleteFolderActivity = GetActivities(GetRequestUrl(0, folderId), user, folderId).FirstOrDefault(a => (a.Noun == "Folder" && a.Verb == "Delete"));
var folderDeleteTime = DateTime.Now;
if (deleteFolderActivity != null)
{
DateTime.TryParse(deleteFolderActivity.TimeCreated, out folderDeleteTime);
}
return folderDeleteTime.ToLocalTime();
}
}
public enum RegData { Platform, Username, Userpass }
static class PlatformRegistryHelper
{
private static Boolean keyValueShouldBeEncrypted(RegData key)
{
return key == RegData.Username || key == RegData.Userpass;
}
private static string getEncryptedValueFromRegisrty(RegData key)
{
String ret = getValueFromRegisrty(key);
if (!String.IsNullOrEmpty(ret))
{
//earlier versions did not encrypt this value - if we read an unencrypted value, encrypt it and re-save it in the registry
if (!PlatformCryptoHelper.IsStringEncrypted(ret))
setValueToRegisrty(key, PlatformCryptoHelper.GetEncryptedString(ret));
else
ret = PlatformCryptoHelper.GetDecryptedString(ret);
}
return ret;
}
private static string getValueFromRegisrty(RegData key)
{
var regKey = Registry.CurrentUser.CreateSubKey(@"Software\Workshare");
var platformValue = regKey.GetValue(key.ToString()) as string;
if (string.IsNullOrEmpty(platformValue) && key == RegData.Platform)
{
regKey.SetValue(key.ToString(), PlatformService._Host ?? PlatformService.DEFAULT_SERVER);
platformValue = PlatformService._Host;
}
regKey.Close();
return platformValue;
}
private static void setEncryptedValueToRegisrty(RegData key, string value)
{
value = PlatformCryptoHelper.GetEncryptedString(value);
setValueToRegisrty(key, value);
}
private static void setValueToRegisrty(RegData key, string value)
{
var regKey = Registry.CurrentUser.CreateSubKey(@"Software\Workshare");
regKey.SetValue(key.ToString(), value);
regKey.Close();
}
// <<<<<<< HEAD
public static string getFromRegisrty(RegData key)
{
if (keyValueShouldBeEncrypted(key))
return getEncryptedValueFromRegisrty(key);
else
return getValueFromRegisrty(key);
}
public static void setToRegisrty(RegData key, string value)
{
if (keyValueShouldBeEncrypted(key))
setEncryptedValueToRegisrty(key, value);
else
setValueToRegisrty(key, value);
}
}
static class PlatformCryptoHelper
{
private static readonly String base64Text = "BASE64";
private static readonly String publicKeyXML = @"<RSAKeyValue><<KEY>><Exponent>AQAB</Exponent></RSAKeyValue>";
private static readonly String privatePublicKeyPairXML = @"<<KEY>><Exponent>AQAB</Exponent><P>4<KEY>175Yzmwakng4ShPQ==</P><Q>xuf5HEv+3g/1ZIulF1EvT72j4OGdHYqAt/AiqDuJuDtJnRiid4v/qTsjv6Sn3bFg82vDW5f63vARnln/BLTgdw==</Q><DP>dy/kswGGkYmlD3glAQObQ165O/oIRkwD3kNLhdMsWZu4gVNTAdmAAK/2oRNN7+YZEVeqFwrh8Cck8oAGE5CFzQ==</DP><DQ>rcReps/les5qTUqKEdLFCF7eWkn/3pzvUsIhJOPOrvpslye8V8AgvBGEa5pHZK/fyQjuBIjDjBwi6DorRktWhQ==</DQ><InverseQ>LPP7hAqOwTLoGcu426KKtmoBS9LIYoVdM2NxLgjT7i2fc3H0rR4cv4n3LkOVk94kPfy/JlV5rpT6xvVcN9qprQ==</InverseQ><D>hZ4Z4h49UBWblOcY5b89CNxVHpO6MvyeIH+qDwKRfNlHivhxQ3R/XUvZM1dWxPKmOE8RCCPpi42TkEJ8GmijQzpfU5QtEOBlQn4GJ3Al9Yhzaok7s7ca+/g6rItplKrfN+LiG7qkuNZ5UVkkW/GZvUAlUfOFsoZHpo7L7bnm1OE=</D></RSAKeyValue>";
public static string EncryptString(string stringToEncrypt)
{
RSACryptoServiceProvider rsaProvider = new RSACryptoServiceProvider(new CspParameters() { ProviderType = 1 /*PROV_RSA_FULL */ });
// Import public key
rsaProvider.FromXmlString(PlatformCryptoHelper.publicKeyXML);
//Encrypt
Byte[] bytesToEncrypt = Encoding.ASCII.GetBytes(stringToEncrypt);
Byte[] encryptedBytes = rsaProvider.Encrypt(bytesToEncrypt, false);
return Convert.ToBase64String(encryptedBytes);
}
/// <summary>
/// Encrypt the string by using the RSA algorithm
/// </summary>
/// <param name="stringToEncrypt">String to be encrypted</param>
/// <returns>Encrypted string (Base-64 string)</returns>
public static string GetEncryptedString(string stringToEncrypt)
{
String ret = String.Empty;
try
{
//append the marker so that IsStringEncrypted can later verify a successful decryption
stringToEncrypt = stringToEncrypt + base64Text;
ret = EncryptString(stringToEncrypt);
}
//ignore - Workshare.Components.WSLogger is not accessible here, but logging should be added here eventually
catch (EncoderFallbackException)
{
}
catch (ArgumentNullException)
{
}
catch (CryptographicException)
{
}
return ret;
}
private static string DecryptString(string stringToDecrypt)
{
RSACryptoServiceProvider rsaProvider = new RSACryptoServiceProvider(new CspParameters() { ProviderType = 1 /*PROV_RSA_FULL */ });
// Import private/public key pair
rsaProvider.FromXmlString(PlatformCryptoHelper.privatePublicKeyPairXML);
//Decrypt
Byte[] bytesToDecrypt = Convert.FromBase64String(stringToDecrypt);
Byte[] decryptedBytes = rsaProvider.Decrypt(bytesToDecrypt, false);
return Encoding.ASCII.GetString(decryptedBytes);
}
/// <summary>
/// Decrypt the string by using the RSA algorithm
/// </summary>
/// <param name="stringToDecrypt">String to be decrypted</param>
/// <returns>Decrypted string (ASCII character set)</returns>
public static string GetDecryptedString(string stringToDecrypt)
{
String ret = String.Empty;
try
{
ret = PlatformCryptoHelper.DecryptString(stringToDecrypt);
//GetEncryptedString appends base64Text to the string before encryption - strip it here
ret = ret.Remove(ret.LastIndexOf(base64Text), (base64Text.Length));
}
//ignore - Workshare.Components.WSLogger is not accessible here, but logging should be added here eventually
catch (ArgumentOutOfRangeException)
{
}
catch (EncoderFallbackException)
{
}
catch (ArgumentNullException)
{
}
catch (CryptographicException)
{
}
return ret;
}
/// <summary>
/// Determine whether the string was produced by GetEncryptedString (i.e. is encrypted)
/// </summary>
/// <param name="value">String to examine</param>
/// <returns>true - the string is encrypted, false - otherwise</returns>
public static bool IsStringEncrypted(string value)
{
try
{
if (DecryptString(value).EndsWith(base64Text))
return true;
}
catch (Exception)
{
}
return false;
}
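// Hedged usage sketch of the round trip implemented above:
//   string cipher = PlatformCryptoHelper.GetEncryptedString("secret");  // RSA-encrypted, Base-64 encoded
//   bool encrypted = PlatformCryptoHelper.IsStringEncrypted(cipher);    // true: decrypts and finds the marker
//   string plain = PlatformCryptoHelper.GetDecryptedString(cipher);     // "secret" (marker stripped)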
}
}
<file_sep>/WSComponents/src/WSComponents/Views/Common/CommandInvoker.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using Workshare.Components.Common;
using Workshare.Components.Interfaces;
using Workshare.Integration.Extensions;
using Workshare.Integration.Interfaces;
namespace Workshare.Components.Views.Common
{
public class CommandInvoker
{
System.ComponentModel.BackgroundWorker worker = new BackgroundWorker();
Queue<IWorkUnit> workqueue = new Queue<IWorkUnit>();
AutoResetEvent workAdded = new AutoResetEvent(false);
public event Action<IWorkUnit> WorkUnitAdded;
public event Action<IWorkUnit> WorkCompleted;
public CommandInvoker()
{
worker.DoWork += worker_DoWork;
worker.RunWorkerAsync();
}
public void AddToQueue(IWorkUnit workUnit)
{
lock (workqueue)
{
workUnit.Status = WorkStatus.Pending;
workUnit.OnAdded();
if (WorkUnitAdded != null)
{
WorkUnitAdded(workUnit);
}
workqueue.Enqueue(workUnit);
workAdded.Set();
}
}
bool m_IsBusy = false;
public bool IsBusy
{
set
{
if (m_IsBusy != value)
{
m_IsBusy = value;
if (IsBusyChanged != null)
{
IsBusyChanged(m_IsBusy);
}
}
}
get
{
return m_IsBusy;
}
}
public List<IWorkUnit> GetAllWorks()
{
var res = new List<IWorkUnit>();
res.AddRange(workqueue.ToList());
var workinprogress = workInProgress;
if(workinprogress!=null)
{
res.Add(workinprogress);
}
return res;
}
IWorkUnit workInProgress = null;
void worker_DoWork(object sender, DoWorkEventArgs e)
{
try
{
while (true)
{
try
{
lock (workqueue)
{
if (workqueue.Count > 0)
{
IsBusy = true;
workInProgress = workqueue.Dequeue();
}
}
if (workInProgress == null)
{
IsBusy = false;
workAdded.WaitOne();
}
else
{
workInProgress.Status = WorkStatus.Processing;
workInProgress.Execute();
workInProgress.Status = WorkStatus.Success;
}
}
catch (ThreadAbortException)
{
throw;
}
catch (Exception ex)
{
Trace.TraceError(ex.ToString());
if (workInProgress != null)
{
workInProgress.StatusDescription = ex.Message;
}
}
finally
{
if (workInProgress != null)
{
if (workInProgress.Status != WorkStatus.Success)
{
workInProgress.Status = WorkStatus.Error;
}
if (WorkCompleted != null)
{
WorkCompleted(workInProgress);
}
workInProgress = null;
}
}
}
}
catch (Exception ex)
{
Trace.TraceError(ex.ToString());
}
}
public event Action<bool> IsBusyChanged;
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncDialog/WorkUnits/ProcessWorkUnit.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using Workshare.Components.Common;
using Workshare.Components.Concrete;
using Workshare.Components.Interfaces;
using Workshare.Components.Services;
using Workshare.Components.Views.TrueSyncDialog;
using Workshare.Components.Views.TrueSyncDialog.VMs;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.Components.Views.TrueSyncDialog.WorkUnits
{
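/// <summary>
/// Work unit that runs the Processor over the scan result for the selected document actions
/// (import from / upload to Workshare) and then pushes the updated activities and any errors
/// back to the TrueSync dialog view model, or to the error reporter if the view is already closed.
/// </summary>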
public class ProcessWorkUnit : WorkUnit
{
private readonly IEnumerable<DocumentActionVM> _actionsVmToProcess;
private readonly TrueSyncDialogViewModel _vm;
private readonly ErrorReporterService _errorsReporter;
private readonly ModuleBase _module;
public ProcessWorkUnit(ModuleBase module, IModuleView view, IEnumerable<DocumentActionVM> actionsVmToProcess,
TrueSyncDialogViewModel vm)
: base(view)
{
_module = module;
_actionsVmToProcess = actionsVmToProcess;
_vm = vm;
_errorsReporter = _module.Resolve<ErrorReporterService>();
var activity = _actionsVmToProcess.Select(a => a.activityVm).First();
if (activity is ImportFileActivityVm)
{
this.Name = string.Format("Importing from Workshare");
}
else if (activity is UploadFileActivityVm)
{
this.Name = string.Format("Uploading to Workshare");
}
else
{
this.Name = string.Format("Processing");
}
TargetItems = new TargetItem[]
{
new TargetItem(activity.Id)
{
Name = activity.Name
}
};
this.StatusDescription = "Importing...";
}
public override void OnAdded()
{
foreach (var documentAction in _actionsVmToProcess)
{
documentAction.activityVm.OnStartProcessing();
}
base.OnAdded();
}
public override void Execute()
{
try
{
var scanResult = _vm.ScanResult;
var processor = _module.Resolve<Processor>();
var processOptions = new ProcessOptions();
processOptions.ActionsToApply.AddRange(_actionsVmToProcess.Select(a => a.action));
scanResult = processor.Process(scanResult, processOptions);
var adapter = _module.Resolve<ActivityFinder>();
var errors = adapter.GetErrors(scanResult);
var processedIds = _actionsVmToProcess.Select(a => a.action.ActivityId).ToList();
var errorIds = _actionsVmToProcess.Select(a => a.action.Activity.MapId).ToList();
var activities = _vm.AllActivities.Select(a => a.data).OfType<FileMapActivity>();
adapter.UpdateActivities(scanResult, activities, processedIds);
var errorsForWorkUnit = errors.Where(e => errorIds.Contains(e.Id)).ToList();
if (_vm.IsViewClosed)
{
if (errorsForWorkUnit.Any())
{
_errorsReporter.AddRange(_vm.Id, Integration.Processor.Maps.Ext.GetItemErrorsList(errorsForWorkUnit));
}
//don't use Dispatcher !!! Outlook crashes if the user opens a pop-up menu during the sync process
//_vm.Dispatcher.Invoke(new Action(() =>
//{
if (!_vm.AllActivities.Any(a => ((IFileActivityBaseVm) a).IsInProgress))
{
_errorsReporter.ReportIfNeeded(_vm.Id);
}
//}));
}
else
{
_vm.OnCompleted(errors, activities, scanResult);
}
}
catch (Exception ex)
{
_view.ShowError(ex);
}
}
}
}
<file_sep>/WSComponents/src/WSIntegration/SyncInfo/FileSyncInfo.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web.Script.Serialization;
using Workshare.Components.WSLogger;
using Workshare.Integration.Interfaces;
namespace Workshare.Integration.SyncInfo
{
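/// <summary>
/// Per-file sync state serialized as JSON via JavaScriptSerializer: links a DMS file and its
/// versions to the corresponding Workshare file/versions and records the last imported and last
/// uploaded version identifiers.
/// </summary>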
internal class FileSyncInfo : ISyncInfo
{
public string DataType
{
get { return "FileSyncInfo"; }
set { }
}
public int ParentId
{
get;
set;
}
public int LastImportedWsVerId
{
get;
set;
}
public int LastUploadedDmsVerNum
{
get;
set;
}
public int ItemId
{
get;
set;
}
public string DMSItemId
{
get;
set;
}
public string Modified
{
get;
set;
}
public string ActivityId
{
get;
set;
}
public List<VersionInfo> VerInfos { set; get; }
public List<IVersionSyncInfo> GetvInfos()
{
return VerInfos.OfType<IVersionSyncInfo>().ToList();
}
public void AddVInfo(IVersionSyncInfo info)
{
if (!VerInfos.Contains((VersionInfo) info))
{
VerInfos.Add((VersionInfo) info);
}
}
public FileSyncInfo()
{
VerInfos = new List<VersionInfo>();
}
public override string ToString()
{
var ser = new JavaScriptSerializer();
return ser.Serialize(this);
}
public IVersionSyncInfo GetVersionInfo(string id)
{
return VerInfos.FirstOrDefault(a => a.DmsVerId == id);
}
public static FileSyncInfo Parse(string value)
{
try
{
if (string.IsNullOrEmpty(value)) return null;
var ser = new JavaScriptSerializer();
return ser.Deserialize<FileSyncInfo>(value);
}
catch (Exception ex)
{
Logger.WriteError(ex);
return null;
}
}
}
public class VersionInfo : IVersionSyncInfo
{
public VersionInfo()
{
WsVerIds=new List<string>();
}
public string DmsVerId { set; get; }
public string CurWsVerId { get; set; }
public List<string> WsVerIds { set; get; }
public string DmsEditTime { set; get; }
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Changes/Visitors/ImportActivityFinder.cs
using System;
using System.Collections.Generic;
using System.Linq;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.Processor.Strategies;
namespace Workshare.Integration.Processor.Changes.Visitors
{
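/// <summary>
/// ItemMapVisitor that builds ImportFileActivity entries for visited FileMaps: refreshes the
/// activity state from the map, runs the ChangesDetector to collect remote/local changes and
/// fills in the import actions available to the user.
/// </summary>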
public class ImportActivityFinder : ItemMapVisitor
{
readonly DiscardedService _discardedService;
readonly ChangesDetector _detector;
private readonly DmsWorkerBase _dmsWorker;
private ActivityFinder.Options _options;
public ActivityFinder.Options Options
{
get
{
return _options;
}
set
{
_options = value ?? new ActivityFinder.Options();
}
}
private SyncInfoService syncInfoService;
public ImportActivityFinder(DiscardedService discardedService, ChangesDetector detector, DmsWorkerBase dmsWorker, SyncInfoService syncInfoService)
{
this._discardedService = discardedService;
this._detector = detector;
this._dmsWorker = dmsWorker;
_options=new ActivityFinder.Options();
this.syncInfoService = syncInfoService;
}
public IEnumerable<string> ActivitiesToUpdate = null;
public List<ImportFileActivity> FoundedActivities = new List<ImportFileActivity>();
public void UpdateState(ImportFileActivity activity, FileMap fileMap)
{
if (fileMap.ProcessState == ProcessState.Error)
{
if (fileMap.Error is FileCheckoutedToAnotherUser)
{
activity.State = ImportFileActivityState.CheckedOutToAnother;
activity.CheckedOutTo = ((FileCheckoutedToAnotherUser)fileMap.Error).UserName;
}
else if (fileMap.Error is CheckedOutOnAnotherMachine)
{
activity.State = ImportFileActivityState.CheckedOutOnAnotherMachine;
activity.CheckOutMachine = ((CheckedOutOnAnotherMachine)fileMap.Error).MachineName;
}
else if (fileMap.Error is LockedByAnotherProcessException)
{
activity.State = ImportFileActivityState.LockedByAnotherProcess;
activity.Error = fileMap.Error;
}
else if (fileMap.Error.IsCloudFolderAccessDenied())
{
activity.State = ImportFileActivityState.NoAccessOnWorkshare;
activity.Error = fileMap.Error;
}
else if (fileMap.Error is DMSUnAuthorizedException)
{
activity.State = ImportFileActivityState.NoAccessOnDMS;
activity.Error = fileMap.Error;
}
else
{
activity.State = ImportFileActivityState.Error;
activity.Error = fileMap.Error;
}
}
else if (fileMap.ProcessState == ProcessState.Processed)
{
if (activity.State == ImportFileActivityState.Importing)
{
activity.State = ImportFileActivityState.Imported;
}
}
else if (fileMap.ProcessState == ProcessState.Scanned)
{
activity.State = ImportFileActivityState.Scanned;
}
else if (fileMap.ProcessState == ProcessState.Cancelled)
{
activity.State = ImportFileActivityState.Scanned;
}
}
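// Visits a FileMap, creating or updating the matching ImportFileActivity. Any exception thrown
// while refreshing is captured on the activity itself (State = Error) rather than propagated.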
public override void Visit(FileMap fileMap)
{
var existed = FoundedActivities.FirstOrDefault(a => a.MapId == fileMap.Id);
if (existed != null && _options.DoNotUpdate) return;
var importActivity = existed ?? new ImportFileActivity();
_detector.opt = new DetectorOptions() { UseForceRequest = _options.UseForceRequest };
try
{
if (ActivitiesToUpdate == null || ActivitiesToUpdate.Contains(importActivity.Id))
{
importActivity.Refresh(fileMap, _dmsWorker);
UpdateState(importActivity, fileMap);
if (!importActivity.InKindOfErrorState())
{
importActivity.ClearChanges();
_detector.AddSharedChangeIfDetected(fileMap, importActivity);
_detector.AddChangesIfDetected(fileMap, importActivity);
_detector.AddRenameChangeIfDetected(fileMap, importActivity);
_detector.AddTypeChangedIfDetected(fileMap, importActivity);
_detector.AddDeleted(fileMap, importActivity);
_detector.AddNotModifiedIfdetected(fileMap, importActivity);
_detector.AddAddedOnWs(fileMap, importActivity);
_detector.AddNotSent(fileMap, importActivity);
importActivity.IsDiscarded = _discardedService.IsDiscarded(importActivity);
}
importActivity.IsLinked = (syncInfoService.GetSyncInfo(fileMap, _options.UseForceRequest) != null);
this.RefreshAction(importActivity);
}
if (!FoundedActivities.Contains(importActivity) && ShouldDetectActivity(importActivity))
{
FoundedActivities.Add(importActivity);
}
}
catch (Exception ex)
{
importActivity.Error = ex;
importActivity.State = ImportFileActivityState.Error;
}
}
public virtual bool ShouldDetectActivity(ImportFileActivity activity)
{
return true;
}
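// Rebuilds the action list offered for a scanned activity: the "All versions" / "Latest version only"
// import groups are only added when the detected changes make them applicable.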
private void RefreshAction(ImportFileActivity activity)
{
activity.Actions = new List<ItemMapActivityAction>();
if (activity.State == ImportFileActivityState.Scanned)
{
var allVersionAsNewVersion = new ImportDocumentAction(activity, "As new version", ConflictVersionOptions.All, ImportType.AsNewVersion);
var allversionsAsNewDocument = new ImportDocumentAction(activity, "As new file", ConflictVersionOptions.All, ImportType.AsNewDocument);
var allversionsAsRelatedDocument = new ImportDocumentAction(activity, "As related file", ConflictVersionOptions.All, ImportType.AsRelatedDocument);
var latestVersionAsNewVersion = new ImportDocumentAction(activity, "As new version", ConflictVersionOptions.Latest, ImportType.AsNewVersion);
var latestVersionAsNewDocument = new ImportDocumentAction(activity, "As new file", ConflictVersionOptions.Latest, ImportType.AsNewDocument);
var latestVersionAsRelatedDocument = new ImportDocumentAction(activity, "As related file", ConflictVersionOptions.Latest, ImportType.AsRelatedDocument);
var allVersionsGroup = new ItemMapActivityActionGroup("All versions");
var latestVersionGroup = new ItemMapActivityActionGroup("Latest version only");
if (activity.Changes.Any(p => new List<ChangeType> { ChangeType.BothChanged, ChangeType.RemoteChanged }.Contains(p.Type)))
{
allVersionsGroup.AddAction(new[] { allVersionAsNewVersion, allversionsAsNewDocument, allversionsAsRelatedDocument });
latestVersionGroup.AddAction(new[] { latestVersionAsNewVersion, latestVersionAsNewDocument, latestVersionAsRelatedDocument });
}
if (activity.Changes.Any(p => new List<ChangeType> { ChangeType.RemoteAdded }.Contains(p.Type)))
{
latestVersionGroup.AddAction(latestVersionAsNewDocument);
allVersionsGroup.AddAction(allversionsAsNewDocument);
}
var import = new ItemMapActivityActionGroup("Import");
if (allVersionsGroup.Actions.Any()) import.AddAction(allVersionsGroup);
if (latestVersionGroup.Actions.Any()) import.AddAction(latestVersionGroup);
foreach (var topLevel in new[] { import })
{
if (topLevel.Actions.Any())
{
activity.Actions.Add(topLevel);
}
}
}
}
}
}
<file_sep>/WSComponents/src/WSComponents/Common/WorkshareCustomEventArgs.cs
using System;
using System.Collections.Generic;
using Workshare.Integration.Common;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Changes.Activities;
namespace Workshare.Components
{
public class ItemSentArgs : EventArgs
{
public ItemSentArgs(IDMSItem item)
{
}
}
public class ItemUpdatedLocallyArgs : EventArgs
{
public ItemUpdatedLocallyArgs(IDMSItem item)
{
}
}
public class ItemUpdatedOnWSArgs : EventArgs
{
public ItemUpdatedOnWSArgs(IDMSItem item)
{
}
}
public class TraceArgs : EventArgs
{
public TraceArgs()
{
}
}
public class ItemAddedFromWSArgs : EventArgs
{
public ItemAddedFromWSArgs(IDMSItem item)
{
}
}
public class SyncItemsClickedArgs
{
public IEnumerable<SyncItemInformation> Items { private set; get; }
public IEnumerable<FileMapActivity> ItemsToSkip { get; set; }
public SyncItemsClickedArgs(IEnumerable<SyncItemInformation> items)
{
Items = items;
ItemsToSkip = new List<FileMapActivity>();
}
}
public class SendItemsClickedArgs
{
public IEnumerable<IDMSItem> Items { private set; get; }
public SendItemsClickedArgs(IEnumerable<IDMSItem> items)
{
Items = items;
}
}
public class CollaborationItemsClickedArgs
{
public IEnumerable<CollaborationItemInformation> Items { private set; get; }
public IEnumerable<FileMapActivity> ItemsToSkip { get; set; }
public CollaborationItemsClickedArgs(IEnumerable<CollaborationItemInformation> items)
{
Items = items;
ItemsToSkip = new List<FileMapActivity>();
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Modularity/ModuleBase.cs
using Microsoft.Practices.Unity;
namespace Workshare.Integration.Modularity
{
public abstract class ModuleBase
{
UnityContainer _Container;
protected UnityContainer Container
{
get
{
if (_Container == null)
{
_Container = new UnityContainer();
}
return _Container;
}
}
public ModuleBase()
{
this.Initialize();
}
protected virtual void Initialize()
{
}
public T Resolve<T>()
{
return Container.Resolve<T>();
}
}
}
<file_sep>/WSComponents/src/WSCloudService.Tests/TestUtils.cs
using System;
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
using WorksharePlatform;
namespace WorkshareCloud.ServiceProxy.Tests
{
class TestUtils
{
public const string USER1="<EMAIL>";
public const string USER1_PASS = "<PASSWORD>";
public const string USER2 = "<EMAIL>";
public const string USER2_PASS = "<PASSWORD>";
}
class UserContext:IDisposable
{
public static implicit operator UserDetails(UserContext m)
{
return m.User;
}
public UserDetails User { get; private set; }
public UserContext(string email,string pass)
{
User = new UserDetails() { Email = email, Password = pass };
PlatformService.Login(User);
}
public void Dispose()
{
}
}
public class Permisions
{
public bool Invite=true;
public bool User_must_login=true;
public bool DownloadFiles=true;
public bool CanAddVersions=true;
}
public static class WS
{
public static FileDetails GetTestFile()
{
return new FileDetails()
{
Id=-1,
FolderId=-1,
Data=WSCloudService.Tests.Properties.Resources.testdoc1,
Name="testdoc1.docx"
};
}
public static FolderDetails CreateFolder(UserDetails user, Permisions perm, IEnumerable<string> shareTo = null, string fodlername = "testfolder")
{
var createdFolder = PlatformService.CreateFolder(user, fodlername, fodlername);
Assert.IsNotNull(createdFolder);
createdFolder.Permissions.Member_may_invite = perm.Invite;
createdFolder.Permissions.User_must_login = perm.User_must_login;
createdFolder.Permissions.DownloadFiles = perm.DownloadFiles;
createdFolder.Permissions.CanAddVersions = perm.CanAddVersions;
PlatformService.ApplyPermissionToFolder(user, createdFolder);
string users = string.Empty;
if (shareTo != null)
{
shareTo.ToList().ForEach(p => users += p + ";");
PlatformService.ShareFolderWith(user, createdFolder, users, string.Empty, string.Empty);
}
return createdFolder;
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/ModuleViewBase.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.InteropServices;
using System.Threading;
using System.Windows;
using System.Windows.Forms;
using System.Windows.Interop;
using System.Windows.Threading;
using Workshare.Components.Common;
using Workshare.Components.Helpers;
using Workshare.Components.Interfaces;
using Workshare.Components.Presenter;
using Workshare.Components.Views.Authentication;
using Workshare.Components.Views.Common;
using Workshare.Components.Views.Progress;
using Workshare.Components.Views.SelectFolder;
using Workshare.Components.Views.SyncDialog;
using Workshare.Components.WSLogger;
using Workshare.Integration;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.Services;
using WorksharePlatform;
using CheckOut_Dialog = Workshare.Components.Views.CheckOutDialog;
namespace Workshare.Components.Views
{
public abstract class ModuleViewBase : IModuleView
{
const int THREAD_SLEEP_TIME = 2;
public const int IE8MAJORVERSION = 8;
protected Dispatcher dispatcher;
public ModuleViewBase()
{
Debug.Assert(Thread.CurrentThread.GetApartmentState()==ApartmentState.STA);
dispatcher = Dispatcher.CurrentDispatcher;
}
public ModuleViewBase(IWSIntegration integration):this()
{
m_presenter = CreatePresenter(integration);
}
public abstract ModulePresenterBase CreatePresenter(IWSIntegration integration);
[DllImport("user32.dll")]
[return: MarshalAs(UnmanagedType.Bool)]
static extern bool IsWindow(IntPtr hWnd);
protected IntPtr _MainWindowHandle;
public virtual IntPtr ActivieWindowHandle
{
get
{
var res = IntPtr.Zero;
if (!IsWindow(_MainWindowHandle))
{
_MainWindowHandle = Process.GetCurrentProcess().MainWindowHandle;
}
res = _MainWindowHandle;
if (this.TopWindow != IntPtr.Zero && IsWindow(this.TopWindow))
{
res = TopWindow;
}
else
{
TopWindow = IntPtr.Zero;
}
return res;
}
set
{
_MainWindowHandle = value;
}
}
public IntPtr MainWindowHandle
{
get
{
var res = IntPtr.Zero;
if (!IsWindow(_MainWindowHandle))
{
_MainWindowHandle = Process.GetCurrentProcess().MainWindowHandle;
}
res = _MainWindowHandle;
return res;
}
}
#region Errors processing
public virtual void ShowErrors(IEnumerable<ItemException> errors)
{
SendWMClose("#32768");
IEnumerable<ItemException> errs = errors;
ItemException offlineException = errs.Where(e => e.Error is OfflineException).FirstOrDefault();
if (offlineException != null)
{
errs = errs.Where(e => !(e.Error is OfflineException));
errs = errs.Union( new List<ItemException> {offlineException});
}
var act = new Action(() =>
{
var dlg = new ErrorsWindow.ErrorsWindow(errs);
SetMainAsParent(dlg);
dlg.DisableOwner = true;
dlg.ShowDialog();
});
ExecuteInAppropriateThread(act);
}
public virtual void ShowError(Exception ex)
{
Logger.WriteError(ex);
if (ex is MultiItemsException)
{
SendWMClose("#32768");
var multi = (MultiItemsException)ex;
var act = new Action(() =>
{
var dlg = new ErrorsWindow.ErrorsWindow(multi.errorList.AsQueryble());
SetMainAsParent(dlg);
dlg.DisableOwner = true;
dlg.ShowDialog();
});
ExecuteInAppropriateThread(act);
}
else
{
if (ex is BaseException)
{
ShowError((BaseException) ex);
}
else
{
ShowError(ex.ToString());
}
}
}
public virtual void ShowError(BaseException ex)
{
Logger.WriteError(ex);
var exceptions = new List<ItemException>()
{
new ItemException() { Error = ex}
};
ShowErrors(exceptions);
}
public void ShowError(string message)
{
SendWMClose("#32768");
ExecuteInAppropriateThread(() =>
{
#if DEBUG
System.Windows.Forms.MessageBox.Show(message, "FIX ME PLEASE!!!!.");
#else
System.Windows.Forms.MessageBox.Show(string.Format("Unexpected error occured:\n{0}\n Please try again and if error is persisted contact support.", message), "Unexpected error");
#endif
});
}
#endregion
protected ProgressWindow progressWindow;
public virtual void ShowProgressWindow(CommandInvoker invoker)
{
Logger.WriteTrace("ShowProgressWindow");
progressWindow = new ProgressWindow(invoker);
new WindowInteropHelper(progressWindow).Owner = Process.GetCurrentProcess().MainWindowHandle;
progressWindow.Show();
}
public IModulePresenter m_presenter
{
get;
set;
}
public virtual void OnSendItemsClicked(SendItemsClickedArgs args)
{
m_presenter.OnSendItemsClicked(args);
}
public virtual void OnSynchItemsClicked(SyncItemsClickedArgs args)
{
m_presenter.OnSyncItemsClicked(args);
}
public virtual void OnCollaborationItemsClicked(CollaborationItemsClickedArgs args)
{
m_presenter.OnCollaborationItemsClicked(args);
}
public virtual bool ShowLogin(out UserDetails user, DialogSettings settings)
{
Logger.Write("ModuleView::ShowLogin() START", Severity.Information);
WinInetHelper.ResetSession();
using (var loginWindow2 = new AuthentificationForm())
{
WinFormHelper helper = new WinFormHelper(ActivieWindowHandle);
loginWindow2.AutoSize = true;
loginWindow2.AutoSizeMode = System.Windows.Forms.AutoSizeMode.GrowOnly;
loginWindow2.ClientSize = new System.Drawing.Size((settings.dialog_login_width > 0) ? settings.dialog_login_width : loginWindow2.Width, (settings.dialog_login_height > 0) ? settings.dialog_login_height : Math.Min(loginWindow2.Height, 700));
loginWindow2.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
loginWindow2.MaximizeBox = loginWindow2.MinimizeBox = false;
var res=loginWindow2.ShowDialog(helper);
user = loginWindow2.User;
if (res == System.Windows.Forms.DialogResult.Abort && loginWindow2.Error != null)
{
WSApplication.Instance.View.ShowError((BaseException)loginWindow2.Error);
}
return res == System.Windows.Forms.DialogResult.OK;
}
}
public virtual int ShowSelectFolder(DialogSettings settings)
{
Logger.Write("ModuleView::ShowSelectFolder() START", Severity.Information);
using (var folder = new SelectFolderForm())
{
folder.Width = (settings.dialog_select_folder_width > 0) ? settings.dialog_select_folder_width : folder.Width;
folder.Height = (settings.dialog_select_folder_height > 0) ? settings.dialog_select_folder_height : folder.Height;
folder.ShowDialog(GetMainWindow());
return folder.SelectedFolderId;
}
}
public virtual void ShowFileSyncDialog(object file, EventOperation operation, string command)
{ }
private bool IsFolderHasFiles(IDMSFolder fld)
{
if (fld.Files.Count() > 0)
{
return true;
}
foreach (var f in fld.SubFolders)
{
if (IsFolderHasFiles(f))
{
return true;
}
}
return false;
}
public virtual CheckOutOptions ShowCheckOutDialogIfNeed(SendItemsClickedArgs args)
{
string filename = "";
var items = args.Items.ToList();
if (items.Count() == 1)
{
if (((items[0] is IDMSFolder) && (IsFolderHasFiles(items[0] as IDMSFolder))) || ((items[0] is IDMSFile) && !(items[0] as IDMSFile).CheckedOut))
{
filename = items[0].Name;
}
else
{
return CheckOutOptions.CheckOut;
}
}
else
{
if (!(items.Any(p => p is IDMSFolder) || items.Any(p => p is IDMSFile && !(p as IDMSFile).CheckedOut)))
{
return CheckOutOptions.CheckOut;
}
}
var checkOutDialog = new CheckOut_Dialog.CheckOutDialog();
checkOutDialog.FileName = filename;
SetMainAsParent(checkOutDialog);
checkOutDialog.DisableOwner = true;
if (checkOutDialog.ShowDialog() == true)
{
return checkOutDialog.Result;
}
else
{
return CheckOutOptions.None;
}
}
#region protected
protected void SetMainAsParent(Window window, bool currentActiveWindow = true)
{
var helper = new WindowInteropHelper(window);
helper.Owner = currentActiveWindow ? ActivieWindowHandle:MainWindowHandle;
}
protected System.Windows.Forms.IWin32Window GetMainWindow()
{
return new WinFormHelper(ActivieWindowHandle);
}
protected void ExecuteInAppropriateThread(Action action)
{
ExecuteInAppropriateThread(() =>
{
Logger.Write("Execute in appropriate thread", Severity.Information);
action();
return "";
});
}
protected T ExecuteInAppropriateThread<T>(Func<T> func)
{
if (dispatcher.CheckAccess())
{
return func();
}
return (T)dispatcher.Invoke(func);
}
#endregion
public IntPtr TopWindow
{
get;set;
}
public void SuppressProgressDialog(bool p)
{
if(this.progressWindow!=null)
{
this.progressWindow.Suppress(p);
}
}
[DllImport("user32.dll", SetLastError = true)]
public static extern IntPtr FindWindow(string lpClassName, string lpWindowName);
[DllImport("user32.dll", CharSet = CharSet.Auto)]
static extern IntPtr SendMessage(IntPtr hWnd, int Msg, IntPtr wParam, IntPtr lParam);
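// Sends WM_CLOSE to every window of the given class until none is left; used with class "#32768"
// (the standard Win32 pop-up menu class) to dismiss open context menus before showing a dialog.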
private void SendWMClose(string lpClassName, string lpWindowName = null)
{
ExecuteInAppropriateThread(() =>
{
var window = FindWindow(lpClassName, lpWindowName);
var WM_CLOSE = 0x10;
while (window != IntPtr.Zero)
{
SendMessage(window, WM_CLOSE, IntPtr.Zero, IntPtr.Zero);
Thread.Sleep(50);
window = FindWindow(lpClassName, lpWindowName);
}
});
}
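// Presents the sync dialog for the given import activities and returns the actions the user picked.
// Remotely deleted files are handled up-front: their history is annotated and their sync link is
// broken instead of being shown in the dialog.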
public List<ImportDocumentAction> SelectActions(List<ImportFileActivity> importActivities)
{
SendWMClose("#32768");
return ExecuteInAppropriateThread(() =>
{
if (importActivities.Any())
{
var deletedFiles = importActivities.Where(a => a.Changes.Any(c => c.Type == ChangeType.RemoteDeleted)).ToList();
var activitiesToProcess = importActivities.Where(a => deletedFiles.Where(d => d.Equals(a)).Count() == 0).ToList();
if (deletedFiles.Any())
{
foreach (var deletedFile in deletedFiles)
{
if (deletedFile.State != ImportFileActivityState.LinkIsDeleted)
{
var syncInfo = WSApplication.Instance.Module.Resolve<SyncInfoService>();
try
{
deletedFile.DmsFile.AddHistory("FileDeletedOrMoved", "Synced from Workshare on " + DateTime.Now.ToString(Activity.TimeFormat) + ". File has been deleted or moved", Operations.Sync);
syncInfo.BreakLink(deletedFile.DmsFile);
}
catch (FileCheckoutedToAnotherUser ex)
{
WSApplication.Instance.View.ShowErrors(new ItemException[] { new ItemException() { Item = deletedFile.DmsFile, Error = ex } });
}
}
}
}
if (activitiesToProcess.Any())
{
var dlg = new SyncDialogWindow2(activitiesToProcess);
SetMainAsParent(dlg, false);
if (dlg.ShowDialog() == true)
{
return dlg.GetSelectedActions().ToList();
}
}
}
return new List<ImportDocumentAction>();
});
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Services/SyncInfoService.cs
using System;
using System.Collections.Generic;
using System.Linq;
using Workshare.Components.WSLogger;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Strategies;
using Workshare.Integration.SyncInfo;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration.Processor.Services
{
/// TODO: move all sync-info related operations to this service, extract an interface and move this class to IManage.
/// Also, this class should depend on FolderMap/FileMap instead of relying directly on IDmsFolder/IDmsFile.
public class SyncInfoService
{
private readonly DmsWorkerBase _dmsWorker;
private readonly ISyncInfoFactory _syncInfoFactory;
private readonly WebDataStorageService _wdsService;
private readonly string FolderSyncInfoKey = "IMFolderSyncInfo:";
private readonly string FolderSendDataKey = "IMFolderSendDataInfo:";
private readonly string FileSyncInfoKey = "IMFileSyncInfo:";
public SyncInfoService(DmsWorkerBase dmsWorker, ISyncInfoFactory syncInfoFactory, WebDataStorageService wdsService)
{
_dmsWorker = dmsWorker;
_syncInfoFactory = syncInfoFactory;
_wdsService = wdsService;
}
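// Sync/send info is stored in the account web data storage (WebDataStorageService), keyed by a
// fixed prefix (IMFileSyncInfo:/IMFolderSyncInfo:/IMFolderSendDataInfo:) plus the item's DMSItemKey.
// Connection failures surface as OfflineException; other storage failures as
// CannotGetDataFromWebDataStorageExeption / CannotUpdateDataOnWebDataStorageExeption.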
public virtual ISyncInfo GetSyncInfo(IDMSItem item, bool ForceRequest = true)
{
if (item is IDMSFile)
{
return GetSyncInfo((IDMSFile)item, ForceRequest);
}
else if (item is IDMSFolder)
{
return GetSyncInfo((IDMSFolder)item, ForceRequest);
}
else
{
return null;
}
}
public virtual ISyncInfo GetSyncInfo(IDMSFolder folder, bool ForceRequest = true)
{
if (folder == null) return null;
try
{
string key = FolderSyncInfoKey + folder.DMSItemKey;
var _data = _wdsService.GetAccountData(key, DateTime.MinValue, ForceRequest);
var currentprops = FolderSyncInfo.Parse(_data);
if (currentprops != null && currentprops.DMSItemId == folder.DMSId.ToString())
{
return currentprops;
}
}
catch (Exception ex)
{
Logger.WriteError("Error when get sync info for folder" + folder.ID.ToString(), ex);
if (ex.IsConnectionError())
throw new OfflineException();
else
throw new CannotGetDataFromWebDataStorageExeption(ex.Message);
}
return null;
}
public virtual ISyncInfo GetSyncInfo(IDMSFile file, bool ForceRequest = true)
{
if (file == null) return null;
try
{
string key = FileSyncInfoKey + file.DMSItemKey;
var _data = _wdsService.GetAccountData(key,file.Modified, ForceRequest);
return FileSyncInfo.Parse(_data);
}
catch (Exception ex)
{
Logger.WriteError("Error on get sync file info", ex);
if (ex.IsConnectionError())
throw new OfflineException();
else
throw new CannotGetDataFromWebDataStorageExeption(ex.Message);
}
}
public virtual IEnumerable<int> GetSendData(IDMSFolder iDMSFolder, bool ForceRequest = true)
{
try
{
string key = FolderSendDataKey + iDMSFolder.DMSItemKey;
var _data = _wdsService.GetAccountData(key, DateTime.MinValue, ForceRequest);
var sdi = SendDataInfo.Parse(_data);
return sdi.Items;
}
catch (Exception ex)
{
Logger.WriteError("Error during get send data info", ex);
if (ex.IsConnectionError())
throw new OfflineException();
else
throw new CannotGetDataFromWebDataStorageExeption(ex.Message);
}
}
public IVersionSyncInfo GetSyncInfo(IDmsVersion dmsVersion, bool ForceRequest = true)
{
if (dmsVersion == null) return null;
var fileInfo = (FileSyncInfo)GetSyncInfo(dmsVersion.AsFile(), ForceRequest);
if (fileInfo != null)
{
return fileInfo.GetVersionInfo(dmsVersion.Id);
}
return null;
}
public virtual void SaveSyncInfo(IDMSFile file, ISyncInfo info)
{
try
{
string key = FileSyncInfoKey + file.DMSItemKey;
if (info == null)
{
_wdsService.DeleteAccountData(key);
}
else
{
_wdsService.AddAccountData(key, info.ToString(), file.Modified);
}
}
catch (Exception ex)
{
Logger.WriteError("Error on Save sync file info", ex);
if (ex.IsConnectionError())
throw new OfflineException();
else
throw new CannotUpdateDataOnWebDataStorageExeption(ex.Message);
}
}
public virtual void SaveSyncInfo(IDMSFolder folder, ISyncInfo info)
{
try
{
string key = FolderSyncInfoKey + folder.DMSItemKey;
if (info == null)
{
_wdsService.DeleteAccountData(key);
}
else
{
_wdsService.AddAccountData(key, info.ToString(), DateTime.MinValue);
}
}
catch (Exception ex)
{
Logger.WriteError("Error on Save sync file info", ex);
if (ex.IsConnectionError())
throw new OfflineException();
else
throw new CannotUpdateDataOnWebDataStorageExeption(ex.Message);
}
}
public virtual void SaveSendDataInfo(IDMSFolder folder, IEnumerable<int> itemsToStore)
{
try
{
var sdi = _syncInfoFactory.CreateSendDataInfo();
sdi.Items = itemsToStore.ToList();
sdi.FolderID = folder.DMSId.ToString();
string _data = sdi.ToString();
string key = FolderSendDataKey + folder.DMSItemKey.ToString();
_wdsService.AddAccountData(key, _data, DateTime.MinValue);
}
catch (Exception ex)
{
Logger.WriteError("Error during Save send data info", ex);
if (ex.IsConnectionError())
throw new OfflineException();
else
throw new CannotUpdateDataOnWebDataStorageExeption(ex.Message);
}
}
public virtual void UpdateSendDataInfo(IDMSFolder iDMSFolder, List<int> newIds, bool ForceRequest = true)
{
var oldIds = GetSendData(iDMSFolder, ForceRequest);
SaveSendDataInfo(iDMSFolder, oldIds.Union(newIds));
}
/// <summary>
/// Should be called when a version has been imported or uploaded. Updates only file-level information, as it knows nothing about which versions were actually processed.
/// Maybe this method should be removed, leaving only the version- and folder-level overloads.
/// </summary>
/// <param name="file"></param>
/// <param name="context"></param>
public virtual void UpdateSyncInfo(IDMSFile file, ActionContext context)
{
context = context ?? ActionContext.Default;
switch (context.Type)
{
case ActionType.Import:
{
var info = GetSyncInfoOrCreate(file);
info.DMSItemId = file.DMSId.ToString();
var wsFile = context.WsFile;
if (wsFile != null)
{
info.ItemId = wsFile.Id;
info.ParentId = wsFile.FolderId;
info.Modified = file.Modified.Ticks.ToString();
var lastImportedVersion = wsFile.Versions.FirstOrDefault(x => x.Id == info.LastImportedWsVerId);
if (lastImportedVersion == null || lastImportedVersion.Version < wsFile.CurrentVersion.Version)
{
info.LastImportedWsVerId = wsFile.CurrentVersion.Id;
}
}
info.ActivityId = context.LastActivity ?? info.ActivityId;
SaveSyncInfo(file, info);
break;
}
case ActionType.Upload:
{
var info = GetSyncInfoOrCreate(file);
info.DMSItemId = file.DMSId.ToString();
var wsFile = context.WsFile;
if (wsFile != null)
{
info.ItemId = wsFile.Id;
info.ParentId = wsFile.FolderId;
info.Modified = file.Modified.Ticks.ToString();
}
info.ActivityId = context.LastActivity ?? info.ActivityId;
SaveSyncInfo(file, info);
break;
}
}
}
public ISyncInfo GetSyncInfoOrCreate(IDMSFile file, bool ForceRequest = true)
{
return GetSyncInfo(file, ForceRequest) ?? _syncInfoFactory.CreateFileInfo();
}
public ISyncInfo GetSyncInfoOrCreate(IDMSFolder folder, bool ForceRequest = true)
{
return GetSyncInfo(folder, ForceRequest) ?? _syncInfoFactory.CreateFolderInfo();
}
IVersionSyncInfo GetVersionInfoOrCreate(ISyncInfo info, IDmsVersion version, bool ForceRequest = true)
{
return info.GetvInfos().FirstOrDefault(a => a.DmsVerId == version.Id.ToString()) ?? _syncInfoFactory.CreateVersionInfo();
}
/// <summary>
/// Should be called when a version has been processed. Can update both file-level and version-level information, as it knows which version was processed.
/// </summary>
/// <param name="version"></param>
/// <param name="context"></param>
public virtual void UpdateSyncInfo(IDmsVersion version, ActionContext context)//NEW
{
context = context ?? ActionContext.Default;
var file = version.GetLatestFile();
switch (context.Type)
{
case ActionType.Import:
{
var info = GetSyncInfoOrCreate(file);
info.DMSItemId = file.DMSId.ToString();
var versionInfo=GetVersionInfoOrCreate(info, version);
versionInfo.DmsVerId = version.ID.ToString();
versionInfo.DmsEditTime = version.EditTime.Ticks.ToString();
var wsVersion = context.WsVersion;
if (wsVersion != null)
{
var wsVersionId = wsVersion.Id.ToString();
info.LastImportedWsVerId = wsVersion.Id;
versionInfo.CurWsVerId = wsVersionId;
AddIfNotExists(versionInfo.WsVerIds,wsVersionId);
}
info.AddVInfo(versionInfo);
SaveSyncInfo(file, info);
break;
}
case ActionType.Upload:
{
var info = GetSyncInfoOrCreate(file);
info.DMSItemId = file.DMSId.ToString();
info.LastUploadedDmsVerNum = (info.LastUploadedDmsVerNum < version.Number)
? version.Number
: info.LastUploadedDmsVerNum;
var versionInfo = GetVersionInfoOrCreate(info, version);
var wsVersion = context.WsVersion;
versionInfo.DmsVerId = version.ID.ToString();
versionInfo.DmsEditTime = version.EditTime.Ticks.ToString();
if (wsVersion != null)
{
var wsVersionId = wsVersion.Id.ToString();
versionInfo.CurWsVerId = wsVersionId;
AddIfNotExists(versionInfo.WsVerIds, wsVersionId);
}
info.AddVInfo(versionInfo);
SaveSyncInfo(file, info);
break;
}
}
}
void AddIfNotExists<T>(ICollection<T> list, T item)
{
if (!list.Contains(item))
{
list.Add(item);
}
}
public virtual void UpdateSyncInfo(IDMSFolder folder, ActionContext context)
{
context = context ?? ActionContext.Default;
switch (context.Type)
{
case ActionType.Import:
case ActionType.Upload:
{
var wsFolder = context.WsFolder;
if (wsFolder != null)
{
var info = GetSyncInfoOrCreate(folder);
info.DMSItemId = folder.DMSId.ToString();
info.ItemId = wsFolder.Id;
info.ParentId = wsFolder.ParentId;
info.ActivityId = context.LastActivity ?? info.ActivityId;
SaveSyncInfo(folder, info);
}
break;
}
}
}
public void BreakLink(IDMSFolder iDMSFolder)
{
if (iDMSFolder == null) return;
SaveSyncInfo(iDMSFolder, null);
SaveSendDataInfo(iDMSFolder, new int[0]);
}
public void BreakLink(IDMSFile iDMSFile)
{
if (iDMSFile == null) return;
try
{
var syncinfo = GetSyncInfo(iDMSFile);
var senddata = GetSendData(iDMSFile.ParentFolder);
SaveSendDataInfo(iDMSFile.ParentFolder, senddata.Where(f => f != syncinfo.ItemId));
}
catch (Exception ex)
{
Logger.WriteError("Error in BreakLink() for file", ex);
}
SaveSyncInfo(iDMSFile, null);
if (!iDMSFile.WasUpdatedAfterSend2())
iDMSFile.DiscardCheckout(true);
}
public bool IsLinked(IDMSFolder iDMSFolder)
{
if (iDMSFolder == null) return false;
return GetSyncInfo(iDMSFolder) != null;
}
public bool IsLinked(IDMSFile iDMSFile, bool ForceRequest = true)
{
if (iDMSFile == null) return false;
return GetSyncInfo(iDMSFile, ForceRequest) != null;
}
internal void CanUpdateSyncInfoOrThrow(IDMSFile iDMSFile, Operations operation)
{
if (iDMSFile == null) return;
_dmsWorker.CanUpdateFileOrThrow(iDMSFile, operation);
}
public void CanUpdateSendDataOrThrow(FolderMap folderMap)
{
if (folderMap == null) return;
_dmsWorker.CanUpdateFolderOrThrow(folderMap.LocalFolder);
}
public void CanUpdateSyncInfoOrThrow(VersionMap versionMap, Operations operation)
{
if(versionMap==null)return;
_dmsWorker.CanUpdateVersionOrThrow(versionMap.LocalVersion, operation);
}
}
public class ActionContext
{
public ActionContext(ActionType type)
{
this.Type = type;
}
public ActionType Type { private set; get; }
public WsFile WsFile { set; get; }
public WsVersion WsVersion { set; get; }
public string LastActivity { set; get; }
public WsFolder WsFolder { get; set; }
public static readonly ActionContext Default =new ActionContext(ActionType.Unknown);
}
public enum ActionType
{
Unknown,
Import,
Upload
}
}
<file_sep>/WSComponents/src/WSCloudService/ChunkInfo.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
namespace WorksharePlatform
{
public class ChunkInfo
{
public MemoryStream Stream { get; set; }
public int PartNumber { get; set; }
}
}
<file_sep>/WSComponents/src/WSComponents/Views/ConflictResolver/ConflictResolverWindow.xaml.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Windows.Data;
using System.Windows.Input;
using System.Windows.Media;
using Workshare.Components.Helpers;
using Workshare.Components.Views.Common;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using RES = Workshare.Components.Properties.Resources;
namespace Workshare.Components.Views.ConflictResolver
{
/// <summary>
/// Interaction logic for ConflictResolverWindow.xaml
/// </summary>
public partial class ConflictResolverWindow : OwnWindow
{
public ConflictOptions Result
{
get
{
return ((ConflictResolverViewModel)DataContext).Result;
}
}
public ConflictResolverWindow()
{
InitializeComponent();
}
private void DockPanel_MouseDown_1(object sender, MouseButtonEventArgs e)
{
base.OnTitleMouseDown(sender, e);
}
public ConflictResolverWindow(ConflictItemData wsFile, ConflictItemData localFile, string DMSName)
: this()
{
this.DataContext = new ConflictResolverViewModel(wsFile, localFile, DMSName);
}
}
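/// <summary>
/// Maps a file name to the medium-size file-type icon shown in the conflict resolver, based on
/// its extension; converted icons are cached per extension.
/// </summary>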
class NameToIconConverter : IValueConverter
{
private static readonly string[] WordExt = { ".docx", ".docm", ".dotx", ".dotm", ".doc", ".dot", ".rtf" };
private static readonly string[] ExcelExt = {
".xl", ".xlsx", ".xlsm", ".xlsb", ".xlam", ".xltx", ".xltm", ".xls",
".xlt"
};
private static readonly string[] PptExt = {
".pptx", ".ppt", ".pptm", ".ppsx", ".pps", ".ppsm", ".potx", ".pot",
".potm", ".odp"
};
private static readonly string[] PDFExt = { ".pdf" };
private static readonly string[] AudioExt = { ".mp3", ".wav", ".flac", ".aac" };
private static readonly string[] VideoExt = { ".avi", ".mp4", ".mov", ".mkv" };
private static readonly string[] ImageExt = { ".bmp", ".jpg", ".jpeg", ".jpe", ".jfif", ".ico", ".png", ".tif", ".tiff", ".dib", ".raw", ".gif" };
private static readonly string[] TextExt = { ".txt" };
Dictionary<string, ImageSource> cache = new Dictionary<string, ImageSource>();
public object Convert(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture)
{
System.Drawing.Image image = null;
var str = value as string;
str = str ?? string.Empty;
string ext = string.IsNullOrEmpty(str) ? "" : FileUtils.GetExtension(str).ToLower();
if (!cache.ContainsKey(ext))
{
if (WordExt.Contains(ext))
{
image = RES.doc_medium;
}
else if (ExcelExt.Contains(ext))
{
image = RES.xls_medium;
}
else if (PptExt.Contains(ext))
{
image = RES.ppt_medium;
}
else if (PDFExt.Contains(ext))
{
image = RES.pdf_medium;
}
else if (ImageExt.Contains(ext))
{
image = RES.image_medium;
}
else if (AudioExt.Contains(ext))
{
image = RES.audio_medium;
}
else if (VideoExt.Contains(ext))
{
image = RES.video_medium;
}
else if (TextExt.Contains(ext))
{
image = RES.txt_medium;
}
else
{
image = RES.generic_medium;
}
cache[ext] = Utils.Convert(image, System.Drawing.Imaging.ImageFormat.Png);
}
return cache[ext];
}
public object ConvertBack(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture)
{
throw new NotImplementedException();
}
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/WorkshareIntegration.cs
using Workshare.Integration;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Services;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.IManage.Integration
{
public class WorkshareIntegration : WSIntegrationBase
{
public WorkshareIntegration(IAuthProvider authProvider,SyncInfoService syncService)
: base(authProvider,syncService)
{}
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/Registering.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Text.RegularExpressions;
using System.Windows.Forms;
using Microsoft.Win32;
namespace Workshare.IManage
{
[ComVisible(false)]
public class Registering
{
enum App { DeskSite, FIleSite , ImanExt, IIntegrationDlg}
static char DELIMITER = ',';
static readonly string[] OPEN_COMMANDS = new[] { "iManExt3.NewOpenCmd", "iManExt.OpenCmd", "@33979", "iManExt.ViewCmd", "IManExt2.NewImportCmd","iManExt.ImportNewVersionCmd" };
static readonly string COMMANDS_VALUE_NAME="Commands";
static readonly string CUSTOM_FIELD_PATH = @"SOFTWARE\Workshare\Autonomy\CustomFieldToUse";
static readonly string CUSTOM_ICON_FIELD_PATH = @"SOFTWARE\Workshare\Autonomy\CustomIconFieldToUse";
static readonly string SENDDOC_TO_WS_PROGID = typeof(DocumentSendToWSCommand).FullName;
static readonly string SYNCDOC_FROM_WS_PROGID = typeof(DocumentSyncItemsCommand).FullName;
static readonly string SENDFLD_TO_WS_PROGID = typeof(FolderSendToWSCommand).FullName;
static readonly string SYNCFLD_FROM_WS_PROGID = typeof(FolderSyncItemsCommand).FullName;
static readonly string TRUESYNCFLD_FROM_WS_PROGID = typeof(FolderTrueSyncItemsCommand).FullName;
static readonly string COLLDOC_FROM_WS_PROGID = typeof(DocumentCeaseCollaborationCommand).FullName;
static readonly string COLLFLD_FROM_WS_PROGID = typeof(FolderCeaseCollaborationCommand).FullName;
static readonly string[] DOC_COMMANDS = new[] { SYNCDOC_FROM_WS_PROGID, SENDDOC_TO_WS_PROGID };
static readonly string[] FLD_COMMANDS = new[] { COLLFLD_FROM_WS_PROGID, SYNCFLD_FROM_WS_PROGID, SENDFLD_TO_WS_PROGID };
static readonly string[] TOOLBAR_COMMANDS = new[] { TRUESYNCFLD_FROM_WS_PROGID };
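// Registry sub-keys (relative to the per-application key from GetAppKey) whose "Commands" value
// lists the context-menu commands into which the Workshare commands are injected.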
static string[] GetMenuSubKeys(App app)
{
switch (app)
{
case App.DeskSite: return new[]
{
@"Commands\Menus\Document",
@"Commands\Popup Menus\Folder",
@"Commands\Menus\File"
};
case App.ImanExt: return new[]
{
@"Browse Dialog\Popup Menus\Folder",
@"Browse Dialog\Popup Menus\Document",
@"Express Search\Popup Menus\Document",
@"Enhanced Integration Dialog\Popup Menus\Document",
@"Enhanced Integration Dialog\Popup Menus\Folder"
};
case App.FIleSite: return new[]
{
@"Commands\Document",
@"Commands\Folder",
@"Commands\IManageMenu\"
};
case App.IIntegrationDlg: return new[]
{
@"Commands\PopupMenu",
@"Commands\TreePopupMenu\Folder"
};
}
return new string[0];
}
static string[] GetAppKey(App app)
{
switch (app)
{
case App.ImanExt: return new[] {
@"SOFTWARE\Interwoven\Worksite\Client\iManExt",
@"SOFTWARE\Interwoven\Worksite\8.0\iManExt",
};
case App.DeskSite: return new[] {
@"SOFTWARE\Interwoven\Worksite\Client\DeskSite",
@"SOFTWARE\Interwoven\Worksite\8.0\DeskSite",
};
case App.FIleSite: return new[] {
@"SOFTWARE\Interwoven\Worksite\Client\FileSite",
@"SOFTWARE\Interwoven\Worksite\8.0\FileSite",
};
case App.IIntegrationDlg: return new[] {
@"SOFTWARE\Interwoven\Worksite\Client\iIntegrationDlg",
@"SOFTWARE\Interwoven\Worksite\8.0\iIntegrationDlg",
};
}
return new string[0];
}
struct RegValue
{
public string Key;
public string Value;
}
static RegValue[] GetRegKey()
{
return new RegValue[]
{
new RegValue() { Key = "IManExt3.FreezeDocumentCmd", Value = "{8848D5D3-F377-4D2D-A003-5B054DC0017D}"},
new RegValue() { Key = "IManExt.ImportNewVersionCmd", Value = "{C6158533-AD43-449A-A480-C72174D03A5B}" },
new RegValue() { Key = "IManExt2.NewImportCmd", Value = "{315C27E3-EEE9-46AE-B37B-37B0B24B19B2}" },
new RegValue() { Key = "IManExt2.IManMoveContentsCmd", Value = "{44B80FA0-1E93-45D3-8A56-C2E03D83DA8E}" },
new RegValue() { Key = "IManExt2.IManMoveFolderCmd", Value = "{F311F3A1-E00E-48F3-AA23-A4580A97B8B3}" }
};
}
public static int? RegisterReg()
{
RegValue[] keys = GetRegKey();
foreach (RegValue regValue in keys)
{
ReplaceRegistry(regValue.Key, regValue.Value);
}
return null;
}
static int? ReplaceRegistry(string key, string newValue)
{
try
{
RegistryKey regCR = Registry.ClassesRoot;
RegistryKey regKey = regCR.OpenSubKey(key, true);
RegistryKey regCLSID = regKey.OpenSubKey("CLSID", true);
Registry.SetValue(regCLSID.Name, "", newValue);
}
catch (Exception)
{
//ignore - the key may be missing or not writable; registration silently skips it
}
return null;
}
public static int? UnRegisterReg()
{
RegValue[] keys = GetRegKey();
foreach (RegValue regValue in keys)
{
string clsid = FindCLSIDByProgID(regValue.Key);
if (clsid != null)
{
ReplaceRegistry(regValue.Key, clsid);
}
}
return null;
}
static string FindCLSIDByProgID(string progID)
{
RegistryKey regCR = Registry.ClassesRoot;
RegistryKey regIDs = regCR.OpenSubKey("CLSID");
String[] names = regIDs.GetSubKeyNames();
foreach (String name in names)
{
try
{
RegistryKey regCLSID = regIDs.OpenSubKey(name + "\\ProgID");
string value = (string)regCLSID.GetValue("");
string regexstr = String.Format(@"^{0}(\.\d)?$", progID);
Regex regex = new Regex(regexstr, RegexOptions.IgnoreCase);
if (regex.Matches(value).Count == 1)
{
return name;
}
}
catch (Exception)
{
//ignore CLSID entries without a readable ProgID value and keep scanning
}
}
return null;
}
public static int? GetCustomFieldValue()
{
var customFieldKey = Registry.LocalMachine.OpenSubKey(CUSTOM_FIELD_PATH);
if (customFieldKey != null)
{
int res;
if (int.TryParse(customFieldKey.GetValue("index").ToString(), out res))
return res;
}
return null;
}
public static int? GetCustomIconFieldValue()
{
var customFieldKey = Registry.LocalMachine.OpenSubKey(CUSTOM_ICON_FIELD_PATH);
if (customFieldKey != null)
{
int res;
if (int.TryParse(customFieldKey.GetValue("index").ToString(), out res))
return res;
}
return null;
}
public static void SetCustomFieldValue(int value)
{
var cfKey = Registry.LocalMachine.CreateSubKey(CUSTOM_FIELD_PATH);
cfKey.SetValue("index", value);
}
public static void SetCustomIconFieldValue(int value)
{
var cfKey = Registry.LocalMachine.CreateSubKey(CUSTOM_ICON_FIELD_PATH);
cfKey.SetValue("index", value);
}
public static void RemoveCustomFieldValues()
{
if (Registry.LocalMachine.OpenSubKey(CUSTOM_FIELD_PATH) != null)
Registry.LocalMachine.DeleteSubKey(CUSTOM_FIELD_PATH);
}
public static void RemoveCustomIconFieldValues()
{
if (Registry.LocalMachine.OpenSubKey(CUSTOM_ICON_FIELD_PATH) != null)
Registry.LocalMachine.DeleteSubKey(CUSTOM_ICON_FIELD_PATH);
}
static App[] SupportedApps = new[] { App.DeskSite ,App.FIleSite,App.IIntegrationDlg,App.ImanExt};
public const string WorkshareRegKey = "Workshare";
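// Inserts the command into the menu's "Commands" value: it is placed immediately after the leading
// run of known open/import commands (OPEN_COMMANDS) so the built-in open actions keep their
// position, or prepended when the list does not start with one.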
static void AppendCommand(App app, string command,RegistryKey menuKey)
{
if (NeedRegister(app, command, menuKey))
{
var value = menuKey.GetValue(COMMANDS_VALUE_NAME) as string;
if (!value.Contains(command))
{
//bool hasOpenCommandAsFirst = false;
var cmdArr = value.Split(new char[] { DELIMITER });
var i = 0;
while (i<cmdArr.Length && OPEN_COMMANDS.Contains(cmdArr[i])) i++;
if (i == 0)
{
value = command + DELIMITER + value;
}
else
{
value = value.Replace(cmdArr[i - 1], cmdArr[i-1] + DELIMITER + command);
}
}
menuKey.SetValue(COMMANDS_VALUE_NAME, value);
}
}
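// Adjusts the DeskSite toolbar "Buttons" value (a packed array of 5-byte button records) so the
// Workshare true-sync button (bytes 234,128) appears after the existing Workshare buttons
// (bytes 241,128 and 242,128), appending records when they are missing. The record layout and
// button ids are inferred from this code, not from documented WorkSite registry layout.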
public static void SetToolbarOrderByKey(RegistryKey _key)
{
App app = App.DeskSite;
foreach (var appKey in GetAppKey(app))
{
using (var key = _key.OpenSubKey(appKey + @"\toolbar\toolbar-Bar4", true))
{
if (key != null)
{
var v = key.GetValue("Buttons");
if (v != null)
{
var arr = v as byte[];
if (arr != null && arr.Length > 0)
{
int ar1 = -1, ar2 = -1, ts = -1;
int i = 0;
while (i + 1 < arr.Length)
{
if (arr[i] == 241 && arr[i + 1] == 128) ar1 = i;
if (arr[i] == 242 && arr[i + 1] == 128) ar2 = i;
if (arr[i] == 234 && arr[i + 1] == 128) ts = i;
i += 5;
}
if (ts < 0)
{
if (ar1 > 0 && ar2 > 0)
{
int max = ar1 < ar2 ? ar2 : ar1;
if ((max + 6) < arr.Length)
{
arr[max + 5] = 234;
arr[max + 6] = 128;
}
else
{
arr = arr.Concat(new byte[] { 234, 128, 0, 0, 0 }).ToArray();
}
}
else
{
if (ar1 < 0) arr = arr.Concat(new byte[] { 241, 128, 0, 0, 0 }).ToArray();
if (ar2 < 0) arr = arr.Concat(new byte[] { 242, 128, 0, 0, 0 }).ToArray();
arr = arr.Concat(new byte[] { 234, 128, 0, 0, 0 }).ToArray();
}
}
key.SetValue("Buttons", arr);
}
}
}
}
}
}
public static void SetToolbarOrder()
{
SetToolbarOrderByKey(Registry.CurrentUser);
}
public static void RegisterMenus()
{
RegisterReg();
//TODO refactor
//System.Diagnostics.Debugger.Launch();
foreach (var app in SupportedApps)
{
foreach (var appKey in GetAppKey(app))
{
var appRegKey = Registry.LocalMachine.OpenSubKey(appKey, true);
if (appRegKey != null)
{
foreach (var menuRelativePath in GetMenuSubKeys(app))
{
if (menuRelativePath.ToLower().Contains("IManageMenu".ToLower()))
{
using (var wsSubKey = appRegKey.CreateSubKey(menuRelativePath))
{
var commands=wsSubKey.GetValue(COMMANDS_VALUE_NAME, WorkshareRegKey) as string;
if (!commands.Contains(WorkshareRegKey))
{
commands = commands.TrimEnd(',') + "," + WorkshareRegKey;
}
wsSubKey.SetValue(COMMANDS_VALUE_NAME, commands);
}
using (var wsSubKey = appRegKey.CreateSubKey(menuRelativePath+@"\"+WorkshareRegKey))
{
wsSubKey.SetValue("MenuText", WorkshareRegKey);
wsSubKey.SetValue(COMMANDS_VALUE_NAME, TRUESYNCFLD_FROM_WS_PROGID);
}
continue;
}
//setup true sync button
if (app == App.DeskSite && menuRelativePath.ToLower().Contains("file"))
{
using (var menuKey = appRegKey.OpenSubKey(menuRelativePath, true))
{
if (menuKey != null)
{
foreach (var command in TOOLBAR_COMMANDS)
{
AppendCommand(app, command, menuKey);
SetToolbarOrderByKey(Registry.LocalMachine);
}
}
}
continue;
}
using (var menuKey = appRegKey.OpenSubKey(menuRelativePath, true))
{
if (menuKey != null)
{
if (menuRelativePath.ToLower().Contains("folder"))
{
foreach (var command in FLD_COMMANDS)
{
AppendCommand(app, command, menuKey);
}
}
else
{
foreach (var command in DOC_COMMANDS)
{
AppendCommand(app, command, menuKey);
}
}
}
}
}
}
}
}
}
private static bool NeedRegister(App app, string command, RegistryKey menuKey)
{
return true;
}
public static void UnRegisterMenus()
{
UnRegisterReg();
foreach (var app in SupportedApps)
{
foreach (var appKey in GetAppKey(app))
{
var appRegKey = Registry.LocalMachine.OpenSubKey(appKey, true);
if (appRegKey != null)
{
foreach (var menuRelativePath in GetMenuSubKeys(app))
{
using (var commands = appRegKey.OpenSubKey(menuRelativePath, true))
{
if (commands != null)
{
var value = commands.GetValue(COMMANDS_VALUE_NAME) as string;
foreach (var command in FLD_COMMANDS.Concat(DOC_COMMANDS).Concat(TOOLBAR_COMMANDS))
{
if (value != null && value.Contains(command))
{
value = value.Replace(command + DELIMITER, "");
value = value.Replace(DELIMITER + command, "");
commands.SetValue(COMMANDS_VALUE_NAME, value);
}
}
}
}
}
}
}
}
}
public static void ForceToUseIe8Standarts()
{
var procName = Process.GetCurrentProcess().MainModule.ModuleName;
Registry.SetValue(@"HKEY_CURRENT_USER\SOFTWARE\Microsoft\Internet Explorer\MAIN\FeatureControl\FEATURE_BROWSER_EMULATION", procName, 8000);
}
}
}
<file_sep>/OpenText/src/Workshare.OpenText/Concrete/OTFolder.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Hummingbird.DM.Extensions.Interop.DOCSObjects;
using Hummingbird.DM.Server.Interop.PCDClient;
using Workshare.Components.Exceptions;
using Workshare.Components.WSLogger;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Interfaces;
using WorksharePlatform;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.OpenText.Contrete
{
class ManFolder : IDMSFolder
{
IProject m_folder;
IProfile fld
{
get
{
return m_folder as IProfile;
}
}
public ManFolder(IProject folder)
{
m_folder = folder;
InitializeName();
}
private void InitializeName()
{
if (m_folder != null)
{
_name = m_folder.Name ?? null;
}
}
public bool IsDeleted
{
get
{
return false;
}
}
private bool DisplayImportDialog(WorksharePlatform.FileDetails file)
{
return false;
}
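/// <summary>
/// Creates a new document profile in the current OpenText library for the downloaded Workshare
/// file, uploads the file content as its first version and attaches the document to this folder.
/// When useProfileDialog is set, the import-dialog path is used instead (currently a no-op stub).
/// </summary>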
public IDMSFile AddFile(WorksharePlatform.FileDetails file, List<WorksharePlatform.Activity> activities, IEnumerable<string> versionIdsToSkip, bool useProfileDialog = false, ConflictVersionOptions SyncVerOption = ConflictVersionOptions.Latest, int StartVersion = 2, bool update = true)
{
var a = true;
return AddFile(file, out a, activities,versionIdsToSkip, useProfileDialog, SyncVerOption,StartVersion,update);
}
public IDMSFile AddFile(WorksharePlatform.FileDetails file, out bool added, List<WorksharePlatform.Activity> activities, IEnumerable<string> versionIdsToSkip, bool useProfileDialog = false, ConflictVersionOptions SyncVerOption = ConflictVersionOptions.Latest, int StartVersion = 2, bool update = true)
{
Logger.Write("ManFolder::AddFile() START ", Severity.Information);
added = false;
try
{
Logger.Write("ManFolder::AddFile() useProfileDialog=" + useProfileDialog.ToString(), Severity.Information);
if (useProfileDialog)
{
var resadd = added;
Workshare.OpenText.Presenter.ModulePresenter.InvokeInSTAThreadIfNeed(() =>
{
resadd = DisplayImportDialog(file);
});
added = resadd;
return null;
}
else
{
IProfile doc = Application.OTInstance.CurrentLibrary.CreateProfile("DEF_PROF");
string tmp = Path.GetTempFileName();
File.Copy(file.FilePath, tmp, true);
string FileType = Application.OTInstance.GetDocClassByFileExtension(Path.GetExtension(file.Name));
string AppType = Application.OTInstance.GetAppIDByFileExtension(Path.GetExtension(file.Name), Application.OTInstance.CurrentLibrary);
if (string.IsNullOrEmpty(AppType) && FileType == "IMG") // if no application type was resolved for an image file, fall back to the default image type
{
AppType = "DOCSIMAGE";
}
if (string.IsNullOrEmpty(AppType))
{
Logger.Write("ManFolder::AddFile() cannot detect AppType for file=" + useProfileDialog.ToString(), Severity.Information);
throw new FileNotCreatedException(string.Format(Properties.Resources.STR_CANNOT_ADD_NEW_FILE_WRONG_TYPE_ERROR_F, file.Name));
}
string FileName = Application.OTInstance.GetItemName(file.FriendlyName);
doc.Columns["TYPE_ID"].Value = FileType;
doc.Columns["DOCNAME"].Value = FileName;
doc.Columns["APP_ID"].Value = AppType;
doc.Columns["AUTHOR_ID"].Value = Application.OTInstance.CurrentUser;
doc.Columns["TYPIST_ID"].Value = Application.OTInstance.CurrentUser;
doc.Save(5);
if (doc.ErrCode != 0)
{
throw new FileNotCreatedException(string.Format(Properties.Resources.STR_CANNOT_ADD_NEW_FILE, FileName, doc.ErrText));
}
Logger.Write("Trying to upload a file into DMS: " + tmp, Severity.Information);
if (doc.Versions!= null && doc.Versions.Count > 0)
{
var version = doc.Versions[1];
File.Copy(tmp, version.FilePath);
var Results = version.UploadFile();
version.UnlockVersion((short)tagVersionStatus.VS_NORMAL);
Logger.Write("Trying to upload a file into DMS: Succeded " + tmp, Severity.Information);
}
doc.ReleaseDoc();
if (m_folder != null)
{
m_folder.AddDocument(doc);
}
var createdFile = new OTFile(doc, m_folder);
added = true;
Logger.Write("ManFolder::AddFile() COMPLETE ", Severity.Information);
return createdFile;
}
}
catch (FileNotCreatedException)
{
Logger.Write("ManFolder::AddFile() File Not Created Exception", Severity.Information);
throw;
}
catch (Exception ex)
{
Logger.Write("ManFolder::AddFile() Exception="+ex.Message, Severity.Information);
throw new FileNotCreatedException(ex.Message, ex);
}
}
public IDMSFolder AddSubFolder(WorksharePlatform.FolderDetails cloudFolder)
{
try
{
Logger.Write("ManFolder::AddSubFolder() START ", Severity.Information);
if (cloudFolder == null) throw new ArgumentNullException("cloudFolder");
bool isAdded = false;
var fld = m_folder.Library.CreateProject();
string FolderName = Application.OTInstance.GetItemName(cloudFolder.Name);
(fld as IProfile).Columns["TYPE_ID"].Value = "FOLDER";
(fld as IProfile).Columns["DOCNAME"].Value = FolderName;
(fld as IProfile).Columns["APP_ID"].Value = "FOLDER";
(fld as IProfile).Columns["AUTHOR_ID"].Value = m_folder.Library.UserName;
(fld as IProfile).Columns["TYPIST_ID"].Value = m_folder.Library.UserName;
(fld as IProfile).Save(5);
if (fld.ErrCode == 0)
{
isAdded = m_folder.AddSubProject(fld);
}
if (isAdded)
{
var addedFolder = m_folder.SubProjects[FolderName];
var createdFolder = new ManFolder(addedFolder);
createdFolder.UpdateSyncInfo(cloudFolder, true);
Logger.Write("ManFolder::AddSubFolder() COMPLETE ", Severity.Information);
return createdFolder;
}
Logger.Write("ManFolder::AddSubFolder() COMPLETE WITH NO SUCCESS ", Severity.Information);
return null;
}
catch (Exception ex)
{
Logger.Write("ManFolder::AddSubFolder() Exception= "+ex.Message, Severity.Information);
throw new FolderNotCreatedException(string.Format(RES.STR_CANNOT_ADD_NEW_FOLDER_ERROR_F, cloudFolder.Name, ex.Message));
}
}
public void UpdateDisplayName(string name)
{
if (fld != null && fld.Name != name)
{
_name = Application.OTInstance.GetItemName(name);
fld.Columns["DOCNAME"].Value = _name;
fld.Save(0);
}
}
public void UpdateSyncInfo(WorksharePlatform.FolderDetails folderDetails, bool silentUpdate)
{
//Logger.Write("ManFolder::UpdateSyncInfo() START ", Severity.Information);
//try
//{
// string fieldvalue = string.Empty;
// if (folderDetails != null)
// {
// var info = OTSyncInfo.Create(folderDetails, m_folder);
// if (info != null)
// {
// fieldvalue = info.ToString();
// }
// }
// Logger.Write("Sync info created ", Severity.Information);
// if (fld != null)
// {
// fld.Columns[Application.OTInstance.SENDINFO_FIELD].Value = fieldvalue;
// fld.Save(0);
// }
// else
// {
// PCDDocObject doc = new PCDDocObject();
// doc.SetDST(Application.OTInstance.UserDST);
// doc.SetObjectType("cyd_defprof");
// doc.SetProperty("%TARGET_LIBRARY", m_folder.Library.Name);
// doc.SetProperty("%OBJECT_IDENTIFIER", m_folder.ProjectID.ToString());
// doc.Fetch();
// Logger.Write("Target folder found ", Severity.Information);
// if (doc.ErrNumber == 0)
// {
// doc.SetProperty(Application.OTInstance.SENDINFO_FIELD, fieldvalue);
// if (folderDetails == null || string.IsNullOrEmpty(fieldvalue))
// {
// doc.SetProperty(Application.OTInstance.DATASTORE_FIELD, "");
// }
// doc.Update();
// Logger.Write("Sync info updated ", Severity.Information);
// }
// else
// {
// throw new CannotUpdateSyncInfoException(m_folder.ErrText);
// }
// }
// if (m_folder.ErrCode != 0)
// {
// throw new CannotUpdateSyncInfoException(m_folder.ErrText);
// }
//}
//catch (CannotUpdateSyncInfoException)
//{
// throw;
//}
//catch (Exception ex)
//{
// throw new CannotUpdateSyncInfoException(ex.Message, ex);
//}
//Logger.Write("ManFolder::UpdateSyncInfo() Complete ", Severity.Information);
}
public ISyncInfo GetSyncInfo()
{
if (fld != null)
{
var res = (OTSyncInfo)OTSyncInfo.Parse(fld.Columns[Application.OTInstance.SENDINFO_FIELD].Value.ToString());
if (res != null && res.DMSItemId == m_folder.ProjectID)
return res;
}
return null;
}
public bool WasUpdatedAfterSend(ISyncInfo synhInfo)
{
//TODO
return false;
}
public IDMSItemID ID
{
get { return new ManItemID(m_folder); }
}
string _name = null;
public string Name
{
get
{
if (string.IsNullOrEmpty(_name))
{
InitializeName();
}
return _name;
}
set
{
}
}
public string Description
{
get
{
if (m_folder != null)
{
return m_folder.Caption;
}
else
{
return "";
}
}
set
{
}
}
public bool DoesUserHavePermissions(Workshare.Integration.Enums.Permissions permissions)
{
/*switch (permissions)
{
case Integration.Enums.Permissions.EditItem: return (m_folder.EffectiveAccess & imAccessRight.imRightReadWrite) == imAccessRight.imRightReadWrite;
}*/
return true;
}
public bool CheckedOutToUser
{
get { return true; }
}
public bool CheckedOut
{
get
{
return (fld == null)?false:(fld.Columns["STATUS"].Value.ToString() != "0");
}
}
public string DisplayName
{
get
{
if (m_folder != null)
{
return m_folder.Name;
}
else
{
return "";
}
}
set
{
}
}
public IEnumerable<IDMSFolder> SubFolders
{
get
{
Logger.Write("ManFolder::SubFolders GET ", Severity.Information);
var list = new List<IDMSFolder>();
if (m_folder != null)
{
foreach (IProject subFolder in m_folder.SubProjects)
{
list.Add(new ManFolder(subFolder));
}
}
else
{
foreach (IProject subFolder in Application.OTInstance.CurrentLibrary.Projects)
{
list.Add(new ManFolder(subFolder));
}
}
return list;
}
set
{
}
}
public IEnumerable<IDMSFile> Files
{
get
{
Logger.Write("ManFolder::Files GET ", Severity.Information);
var list = new List<IDMSFile>();
if (m_folder != null)
{
foreach (IProfile content in m_folder.SubDocuments)
{
                        // the SubDocuments property contains both documents and folders
if (content.Columns["APP_ID"].Value.ToString() != "FOLDER")
{
//content.Fetch();
list.Add(new OTFile(content, m_folder));
}
}
}
else
{
foreach (IProfile content in Application.OTInstance.CurrentLibrary.Projects)
{
if (content!=null && content.Columns["APP_ID"].Value.ToString() != "FOLDER")
{
//content.Fetch();
list.Add(new OTFile(content, m_folder));
}
}
}
return list;
}
set
{
}
}
public IDMSFolder ParentFolder
{
get
{
Logger.Write("ManFolder::ParentFolder GET ", Severity.Information);
if (fld != null && fld.ParentProjects.Count > 0)
{
return new ManFolder(fld.ParentProjects[1]);
}
                return new ManFolder(null); // this can happen when the folder is located directly in the root library
}
}
public void OnBeforeSending(OperationContext context)
{
// throw new NotImplementedException();
}
public void SaveSendDataInfo2(IEnumerable<int> allItems)
{
throw new NotImplementedException();
}
public void ClearSendDataInfo()
{
            // not required at the moment
Logger.Write("ManFolder::ClearSendDataInfo() START ", Severity.Information);
if (fld != null)
{
if (fld.Columns[Application.OTInstance.DATASTORE_FIELD].Value.ToString() != "")
{
fld.Columns[Application.OTInstance.DATASTORE_FIELD].Value = "";
}
fld.Save(0);
fld.Fetch();
Logger.Write("ManFolder::ClearSendDataInfo() COMPLETE ", Severity.Information);
}
}
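        // The DATASTORE_FIELD column holds the ids of items that were sent, as a
        // semicolon-separated list (e.g. "101;102;103;"); UpdateSendDataInfo appends to
        // that list and GetSendDataInfo parses it back into integers. The example ids
        // are illustrative only.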
public void UpdateSendDataInfo(IEnumerable<int> itemsID)
{
Logger.Write("ManFolder::UpdateSendDataInfo() START ", Severity.Information);
List<int> data = new List<int>();
if (fld != null)
{
if (fld.Columns[Application.OTInstance.DATASTORE_FIELD].Value.ToString() != "")
{
string[] items = fld.Columns[Application.OTInstance.DATASTORE_FIELD].Value.ToString().Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
items.ToList().ForEach(x => { int a = 0; int.TryParse(x, out a); data.Add(a); });
fld.Columns[Application.OTInstance.DATASTORE_FIELD].Value = "";
}
data.AddRange(itemsID);
string datastring = "";
data.ForEach(x => datastring += x.ToString() + ";");
fld.Columns[Application.OTInstance.DATASTORE_FIELD].Value = datastring;
fld.Save(0);
Logger.Write("Item sent information updated ", Severity.Information);
fld.Fetch();
}
Logger.Write("ManFolder::UpdateSendDataInfo() COMPLETE ", Severity.Information);
}
public IEnumerable<int> GetSendDataInfo()
{
Logger.Write("ManFolder::GetSendDataInfo() START ", Severity.Information);
List<int> data = new List<int>();
if (fld != null)
{
fld.Fetch();
if (fld.Columns[Application.OTInstance.DATASTORE_FIELD].Value.ToString() != "")
{
string[] items = fld.Columns[Application.OTInstance.DATASTORE_FIELD].Value.ToString().Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
items.ToList().ForEach(x => { int a = 0; int.TryParse(x, out a); data.Add(a); });
}
}
Logger.Write("ManFolder::GetSendDataInfo() COMPLETE ", Severity.Information);
return data;
}
public void OnSendError(object args, Exception e)
{
}
public void OnAfterAdd(OperationContext args)
{
}
public void OnAfterSending(OperationContext context)
{
}
public void OnBeforeSync(OperationContext args)
{
}
public void AddHistory(string eventName, string eventComment, Workshare.Integration.Operations operation)
{
if (m_folder !=null)
{
(m_folder as IProfile).AddDocumentActivity(eventComment);
m_folder.Save(0);
}
}
public void AddHistories(List<Activity> activities)
{
throw new NotImplementedException();
}
public void AddHistories(List<WorksharePlatform.Activity> activities, int version)
{
}
public bool WasUpdatedAfterSend2(bool useForceRequest = true)
{
return false;
}
public void UpdateSendDataInfo2(IEnumerable<int> allItems)
{
}
public int DMSId
{
get { return -1; }
}
public IDMSFolder RootFolder()
{
return null;
}
public string DMSItemKey
{
get { return ""; }
}
}
}
<file_sep>/OpenText/src/RegisterMenus/Program.cs
using Workshare.OpenText;
namespace RegisterMenus
{
class Program
{
static void Main(string[] args)
{
Registering.RegisterMenus();
}
}
}
<file_sep>/WSComponents/src/WSIntegration/WsProxies/WsMember.cs
using WorksharePlatform;
namespace Workshare.Integration.WsProxies
{
public class WsMember
{
private Member _member;
private UserDetails _user;
public WsMember(UserDetails user, Member member)
{
_member = member;
_user = user;
}
public string Name
{
get { return _member.MemberName; }
}
public int Id
{
get { return _member.MemberId; }
}
public string Email
{
get
{
return _member.Email;
}
}
}
}
<file_sep>/WSComponents/src/WSComponents/Enums/ItemState.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Workshare.Components.Enums
{
public enum ItemState
{
Added = 1,
UpdatedAsNewVersion = 2,
Deleted = 3
}
}
<file_sep>/WSComponents/src/WSIntegration/Interfaces/ISyncInfo.cs
using System.Collections.Generic;
using System.Linq;
namespace Workshare.Integration.Interfaces
{
public interface IWebDataStorageData
{
string DataType { get; set; }
}
public interface ISendDataInfo : IWebDataStorageData
{
string FolderID { get; set; }
List<int> Items { get; set; }
}
public interface ISyncInfo : IWebDataStorageData
{
/// <summary>
        /// Parent id on Workshare
/// </summary>
int ParentId { get; set; }
/// <summary>
/// Last workshare version id that was imported
/// </summary>
int LastImportedWsVerId { get; set; }
/// <summary>
/// last dms version number that was uploaded to Workshare
/// </summary>
int LastUploadedDmsVerNum { get; set; }
/// <summary>
/// Id of item on workshare
/// </summary>
int ItemId { get; set; }
/// <summary>
        /// Id of the item on DMS (should be independent of versions)
/// </summary>
string DMSItemId { get; set; }
/// <summary>
        /// Modified time of the DMS item when it was last synced
/// </summary>
string Modified { get; set; }
/// <summary>
        /// Last activity id on Workshare that was downloaded from ws
/// </summary>
string ActivityId { get; set; }
/// <summary>
        /// Returns the info collection for versions. Most likely we should move this from here
/// </summary>
/// <returns></returns>
List<IVersionSyncInfo> GetvInfos();
void AddVInfo(IVersionSyncInfo info);
}
public static class Ext
{
public static bool IsLinked()
{
return false;
}
}
public interface IVersionSyncInfo
{
/// <summary>
        /// Dms version Id - used as the identifier of this version info
/// </summary>
string DmsVerId { set; get; }
/// <summary>
/// Last ws version to which dms version was uploaded
/// </summary>
string CurWsVerId { set; get; }
/// <summary>
/// List of all ws version ids that were created from this dms version
/// </summary>
List<string> WsVerIds { set; get; }
/// <summary>
/// Dms edit time during last synchronization process of this version
/// </summary>
string DmsEditTime { get; set; }
}
public static class Exts
{
public static bool AnyVersionIsLinkedWithWsVersion(this ISyncInfo info,int wsVersionId)
{
if (info == null) return false;
return info.GetvInfos().Any(a => a.AnyVersionIsLinkedWithWsVersion(wsVersionId));
}
public static bool AnyVersionIsLinkedWithWsVersion(this IVersionSyncInfo info, int wsVersionId)
{
if (info == null) return false;
return info.WsVerIds.Any(a => string.Equals(a, wsVersionId.ToString()));
}
public static List<string> GetAllWsIds(this ISyncInfo info)
{
if (info == null) return new List<string>();
return info.GetvInfos().Where(b => b != null).SelectMany(a => a.WsVerIds).ToList();
}
}
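    // A minimal usage sketch of the extension methods above; illustrative only, the
    // syncInfo/wsVersionId names are hypothetical and not part of this codebase:
    //
    //   ISyncInfo syncInfo = dmsFile.GetSyncInfo();
    //   if (syncInfo.AnyVersionIsLinkedWithWsVersion(wsVersionId))
    //   {
    //       // this Workshare version was already created from a DMS version, so skip it
    //   }
    //   List<string> knownWsVersionIds = syncInfo.GetAllWsIds();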
}
<file_sep>/WSComponents/src/Framework4Adapter/AsposePdfAdapter.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using WorksharePlatform;
namespace Framework4Adapter
{
    // Example class demonstrating work with Framework4
[ComVisible(true)]
[Guid("A7D39745-C7C1-4DD1-86BB-CC45DEB009F5")]
public class AsposePdfAdapter : IAsposePdfAdapter
{
public AsposePdfAdapter()
{
}
public byte[] AddCommentInData(byte[] data, List<FileComment> comments)
{
return data;
}
}
}
<file_sep>/iManageIntegration/Src/Workshate.HookEvents/HookDeclareAsRecordCmd.cs
using System;
using System.Runtime.InteropServices;
using System.Runtime.InteropServices.ComTypes;
using System.Windows.Forms;
using Com.Interwoven.Worksite.iManExt;
using Com.Interwoven.Worksite.iManExt3;
namespace Workshare.HookEvents
{
[ClassInterface(ClassInterfaceType.None)]
[Guid("8848D5D3-F377-4D2D-A003-5B054DC0017D")]
[ComVisible(true)]
public class HookDeclareAsRecordCmd : ICommand, Com.Interwoven.Worksite.iManExt3._ICommandEvents_Event
{
public event Com.Interwoven.Worksite.iManExt3._ICommandEvents_OnCancelEventHandler OnCancel;
public event Com.Interwoven.Worksite.iManExt3._ICommandEvents_OnInitDialogEventHandler OnInitDialog;
public event Com.Interwoven.Worksite.iManExt3._ICommandEvents_PostOnOKEventHandler PostOnOK;
public event Com.Interwoven.Worksite.iManExt3._ICommandEvents_PreOnOKEventHandler PreOnOK;
private FreezeDocumentCmd cmd;
private bool isCancel = true;
public HookDeclareAsRecordCmd()
{
//MessageBox.Show("Declare As Record");
cmd = new FreezeDocumentCmd();
//MessageBox.Show("Declare As Record: new");
try
{
cmd.OnCancel += cmd_OnCancel;
//MessageBox.Show("Declare As Record: event OnCancel");
cmd.OnInitDialog += cmd_OnInitDialog;
//MessageBox.Show("Declare As Record: event OnInitDialog");
cmd.PostOnOK += cmd_PostOnOK;
//MessageBox.Show("Declare As Record: event PostOnOk");
cmd.PreOnOK += cmd_PreOnOK;
//MessageBox.Show("Declare As Record: event PreOnOk");
}
catch (Exception ex)
{
//MessageBox.Show("Declare As Record: event" + ex);
}
}
        void cmd_OnCancel(object pMyInterface)
        {
            isCancel = true;
            if (OnCancel != null)
            {
                OnCancel(pMyInterface);
            }
        }
void cmd_PreOnOK(object pMyInterface)
{
if (PreOnOK != null)
{
PreOnOK(pMyInterface);
}
}
void cmd_PostOnOK(object pMyInterface)
{
if (PostOnOK != null)
{
PostOnOK(pMyInterface);
}
}
void cmd_OnInitDialog(object pMyInterface)
{
if (OnInitDialog != null)
{
OnInitDialog(pMyInterface);
}
}
public int Accelerator
{
get
{
return cmd.Accelerator;
}
set
{
cmd.Accelerator = value;
}
}
public object Bitmap
{
get
{
return cmd.Bitmap;
}
set
{
cmd.Bitmap = value;
}
}
public ContextItems Context
{
get
{
return cmd.Context;
}
}
public void Execute()
{
cmd.Execute();
if (!isCancel)
{
try
{
//var pr = Process.GetCurrentProcess();
////Context.OfType<object>().ToList().ForEach(p => Trace.TraceInformation((p.GetType().InvokeMember("Name", System.Reflection.BindingFlags.GetProperty, null, p, new object[0]) ?? "-").ToString()));
//var docs = new List<object>() {Context.Item("Declare as Record")};
//ProcessIManageEvents.ProcessEvent(docs, EventOperation.DeclareAsRecord, "Declare as Record");
}
catch (Exception ex)
{
}
}
}
public string HelpFile
{
get
{
return cmd.HelpFile;
}
set
{
cmd.HelpFile = value;
}
}
public int HelpID
{
get
{
return cmd.HelpID;
}
set
{
cmd.HelpID = value;
}
}
public string HelpText
{
get
{
return cmd.HelpText;
}
set
{
cmd.HelpText = value;
}
}
private ContextItems ctx;
public void Initialize(ContextItems Context)
{
cmd.Initialize(Context);
}
public string MenuText
{
get
{
return cmd.MenuText;
}
set
{
cmd.MenuText = value;
}
}
public string Name
{
get
{
return cmd.Name;
}
set
{
cmd.Name = value;
}
}
public int Options
{
get
{
return cmd.Options;
}
set
{
cmd.Options = value;
}
}
public int Status
{
get
{
return cmd.Status;
}
set
{
cmd.Status = value;
}
}
public Commands SubCommands
{
get
{
return cmd.SubCommands;
}
set
{
cmd.SubCommands = value;
}
}
public string Title
{
get
{
return cmd.Title;
}
set
{
cmd.Title = value;
}
}
public CommandType Type
{
get
{
return cmd.Type;
}
set
{
cmd.Type = value;
}
}
public void Update()
{
cmd.Update();
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Maps/Visitors/ItemMapVisitor.cs
using Workshare.Integration.Processor.Maps;
namespace Workshare.Integration.Processor
{
public abstract class ItemMapVisitor
{
public virtual bool VisitEnter(FileMap fileMap) { return true; }
public virtual void Visit(FileMap fileMap) { }
public virtual void VisitLeave(FileMap fileMap) { }
public virtual bool VisitEnter(FolderMap folderMap) { return true; }
public virtual void Visit(FolderMap foldermap) { }
public virtual void VisitLeave(FolderMap folderMap) { }
public virtual void Visit(VersionMap versionMap) { }
}
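    // A minimal sketch of a concrete visitor, shown only to illustrate how the hooks
    // above are meant to be overridden (CountingVisitor is hypothetical, not part of
    // the product):
    //
    //   class CountingVisitor : ItemMapVisitor
    //   {
    //       public int Files;
    //       public int Folders;
    //       public override void Visit(FileMap fileMap) { Files++; }
    //       public override void Visit(FolderMap folderMap) { Folders++; }
    //   }
    //
    //   // rootMap.Apply(new CountingVisitor()) then walks the whole map tree.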
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncDialog/UserControls/WsActivitiesViewer.xaml.cs
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using Workshare.Components.Helpers;
using Workshare.Components.Views.TrueSyncDialog.VMs;
using Workshare.Components.WSLogger;
using Workshare.Integration.Processor.Changes;
namespace Workshare.Components.Views.TrueSyncDialog.UserControls
{
/// <summary>
/// Interaction logic for WsActivitiesViewer.xaml
/// </summary>
public partial class WsActivitiesViewer : UserControl
{
public static readonly DependencyProperty ActivitySourceProperty = DependencyProperty.Register(
"ActivitySource", typeof (IEnumerable), typeof (WsActivitiesViewer));
public static readonly DependencyProperty DiscardCommandProperty = DependencyProperty.Register(
"DiscardCommand", typeof(ICommand), typeof(WsActivitiesViewer));
public WsActivitiesViewer()
{
InitializeComponent();
}
public bool HasItems { get { return CurrentActivityItems.HasItems; } }
public static readonly DependencyProperty NoItemsTextProperty = DependencyProperty.Register(
"NoItemsText",
typeof(string),
typeof(WsActivitiesViewer));
public string NoItemsText
{
get
{
return (string)GetValue(NoItemsTextProperty);
}
set
{
SetValue(NoItemsTextProperty, value);
}
}
public ICommand DiscardCommand
{
get
{
return (ICommand)GetValue(DiscardCommandProperty);
}
set
{
SetValue(DiscardCommandProperty, value);
}
}
public IEnumerable ActivitySource
{
get
{
return (IEnumerable)GetValue(ActivitySourceProperty);
}
set
{
SetValue(ActivitySourceProperty, value);
}
}
private void UIElement_OnMouseUp(object sender, MouseButtonEventArgs e)
{
var dataContext = ((FrameworkElement) sender).DataContext;
var command = this.DiscardCommand;
if (command != null)
{
if (command.CanExecute(dataContext))
{
command.Execute(dataContext);
}
}
}
public static Window GetParentWindow(DependencyObject child)
{
var parentObject = VisualTreeHelper.GetParent(child);
if (parentObject == null)
{
return null;
}
var parent = parentObject as Window;
if (parent != null)
{
return parent;
}
return GetParentWindow(parentObject);
}
private void Hyperlink_OnRequestNavigate(object sender, RequestNavigateEventArgs e)
{
try
{
ProcessUtils.StartProcess(e.Uri.ToString());
}
catch (Exception ex)
{
Logger.WriteError(ex);
}
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncDialog/VMs/UploadFileActivityVm.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Workshare.Components.Views.TrueSyncDialog.VMs;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
namespace Workshare.Components.Views.TrueSyncDialog
{
public class UploadFileActivityVm : FileActivityBaseVm<ImportFileActivityVm>
{
internal UploadFileActivity _data;
public UploadFileActivityVm()
: base(null)
{
}
public UploadFileActivityVm(UploadFileActivity data)
: base(data)
{
_data = data;
}
public IEnumerable<FileActivityChange> Changes
{
get
{
var res = _data.Changes.ToList();
return res;
}
}
public string DefaultTitle
{
get
{
return "Upload";
}
}
public override bool IsInProgress
{
get { return State == UploadFileActivityState.Uploading; }
}
public UploadFileActivityState State
{
get
{
return _data.State;
}
set
{
_data.State = value;
this.PropertyHasChanged(s => s.State);
}
}
public override void OnStartProcessing()
{
State=UploadFileActivityState.Uploading;
}
}
}
<file_sep>/OpenText/src/Workshare.OpenText/Concrete/HistoryHelper.cs
using Hummingbird.DM.Extensions.Interop.DECore;
using System;
using System.Collections.Generic;
using System.Linq;
using Workshare.Components.WSLogger;
namespace Workshare.OpenText.Concrete
{
public struct HistoryRowInfo
{
public DateTime DateOp;
public string User;
public string Operate;
}
public class HistoryHelper
{
public static List<HistoryRowInfo> GetHistory(BCObject obj)
{
List<HistoryRowInfo> _history = new List<HistoryRowInfo>();
if (obj != null)
{
try
{
Logger.Write("START getting history", Severity.Information);
Hummingbird.DM.Extensions.Interop.DECore.IBCEnumObjects eo;
BCCoreCtx ctx = new BCCoreCtx();
ctx.RegisterVariantParam("SID", "HistorySID");
ctx.RegisterVariantParam("UIContext", "History");
eo = obj.EnumChildObjects(ctx);
eo.OrderBy("START_DATE, START_TIME");
Hummingbird.DM.Extensions.Interop.DECore.IBCObject hr;
eo.Next(1, out hr);
while (hr != null)
{
string dat = hr.GetProperty("History", "HistoryDates", "START_DATE").ToString();
string tm = hr.GetProperty("History", "HistoryDates", "START_TIME").ToString();
string dsc = hr.GetProperty("History", "HistoryDates", "DESCRIPTION").ToString();
string usr = hr.GetProperty("History", "HistoryDates", "TYPIST_ID").ToString();
_history.Add(new HistoryRowInfo() { DateOp = DateTime.Parse(dat + " " + tm), Operate = dsc, User = usr });
eo.Next(1, out hr);
}
Logger.Write("FINISH getting history", Severity.Information);
}
catch
{
Logger.Write("ERROR getting history", Severity.Information);
}
}
return _history;
}
public static string GetLastCheckedOutUser(BCObject obj)
{
var _hist = GetHistory(obj);
var checkedout = _hist.Where(x => x.Operate.Equals("Check-out", StringComparison.InvariantCultureIgnoreCase)).OrderByDescending(x=>x.DateOp);
var operate = checkedout.FirstOrDefault();
return operate.User;
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/Common/Utils.cs
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Windows;
using System.Windows.Interop;
using System.Windows.Media;
using System.Windows.Media.Imaging;
namespace Workshare.Components.Views.Common
{
static class Utils
{
public static ImageSource Convert(Image image, ImageFormat format)
{
var bitmap = new BitmapImage();
bitmap.BeginInit();
var memoryStream = new MemoryStream();
// Save to a memory stream...
image.Save(memoryStream, format);
memoryStream.Seek(0, SeekOrigin.Begin);
bitmap.StreamSource = memoryStream;
bitmap.EndInit();
return bitmap;
}
public static ImageSource ToImageSource(Icon icon)
{
ImageSource imageSource = Imaging.CreateBitmapSourceFromHIcon(
icon.Handle,
Int32Rect.Empty,
BitmapSizeOptions.FromEmptyOptions());
return imageSource;
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Maps/Visitors/ProcessVisitor.cs
using System;
using System.Collections.Generic;
using System.Linq;
using Workshare.Integration.Processor.Strategies;
namespace Workshare.Integration.Processor.Maps.Visitors
{
    // for now, only file and folder maps are processed directly; version maps are not
public class ProcessVisitor:ItemMapVisitor
{
private ProcessOptions _options;
readonly DmsProcessStrategyBase _dmsProcessStrategy;
public ProcessVisitor(DmsProcessStrategyBase dmsProcessStrategy)
{
_dmsProcessStrategy = dmsProcessStrategy;
}
public void Initialize(ProcessOptions options)
{
_options = options;
}
IEnumerable<ItemMapActivityAction> GetActionsForMap(ItemMap map)
{
return _options.ActionsToApply
.Where(a => (a.Activity == null ? a.ActivityId : a.Activity.MapId) == map.Id).Distinct();
}
public override void Visit(FileMap fileMap)
{
try
{
var forthisNode = GetActionsForMap(fileMap);
var res = _dmsProcessStrategy.Process(fileMap, new DmsProcessOptions(forthisNode));
fileMap.ProcessState = res.Result;
}
catch (Exception ex)
{
fileMap.ProcessState = ProcessState.Error;
fileMap.Error = ex;
}
}
public override void Visit(FolderMap foldermap)
{
try
{
var forthisNode = GetActionsForMap(foldermap);
var res = _dmsProcessStrategy.Process(foldermap, new DmsProcessOptions(forthisNode));
foldermap.ProcessState = res.Result;
}
catch (Exception ex)
{
foldermap.ProcessState = ProcessState.Error;
foldermap.Error = ex;
}
}
public override void VisitLeave(FolderMap folderProcessResult)
{
}
public override void VisitLeave(FileMap fileMap)
{
}
public override void Visit(VersionMap versionMap)
{
}
}
}
<file_sep>/WSComponents/src/WSComponents/Interfaces/IWSComponentsApp.cs
using System;
using Workshare.Components.Interfaces;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
namespace Workshare.Components.Interfaces
{
public interface IWSComponentsApp
{
IWSIntegration Integration { get; }
IAuthProvider AuthProvider { get; }
IModuleView View { get; set; }
IModulePresenter Presenter { get; }
ModuleBase Module { get; }
Uri Server { get; }
string APP_CODE { get; }
string ServerKey { get; set; }
}
}
<file_sep>/SharePoint/src/WorksharePointCloud/Layouts/WorkshareCloud/Auth_Token_Handler.aspx.cs
using System;
using Microsoft.SharePoint;
using Microsoft.SharePoint.WebControls;
using WorkshareCloud.Common;
namespace WorkshareCloud.Layouts.WorkshareCloud
{
public partial class Auth_Token_Handler : LayoutsPageBase
{
public const string TMP_AUTH_TOKEN = "<PASSWORD>";
protected void Page_Load(object sender, EventArgs e)
{
var authToken=this.Request.QueryString[TMP_AUTH_TOKEN];
if (!string.IsNullOrEmpty(authToken))
{
var user = CloudAuthenication.GetCurrentUser();
user.AuthToken = authToken;
CloudAuthenication.Update(user);
}
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Common/ConflictItemData.cs
using System;
namespace Workshare.Integration.Common
{
public class ConflictItemData
{
public string Name { set; get; }
public string Modifier { set; get; }
public string DateStr { set; get; }
public DateTime Date { set; get; }
public int VersionsNumber { get; set; }
}
}
<file_sep>/OpenText/src/Workshare.OpenText/Concrete/OTSynhInfo.cs
using System;
using System.Collections.Generic;
using Workshare.Integration.Interfaces;
namespace Workshare.OpenText.Contrete
{
class OTSyncInfo : ISyncInfo
{
OTSyncInfo()
{
}
public int ParentFolderId
{
get;
set;
}
public int VersionId
{
get;
set;
}
public int ParentId { get; set; }
public int LastImportedWsVerId { get; set; }
public int ItemId
{
get;
set;
}
public string Modified
{
get;
set;
}
public string ActivityId { get; set; }
public List<IVersionSyncInfo> GetvInfos()
{
throw new NotImplementedException();
}
public void AddVInfo(IVersionSyncInfo info)
{
throw new NotImplementedException();
}
public IVersionSyncInfo GetOrCreate(string p)
{
throw new NotImplementedException();
}
public IVersionSyncInfo CreateVersionInfo()
{
throw new NotImplementedException();
}
public int DMSItemId
{
get;
set;
}
public long CheckOutDate
{
get;
set;
}
public bool WasAlreadyCheckedOut
{
get;
set;
}
public bool NeedToCheckedOut
{
get;
set;
}
const char DELIMITER = ';';
public const long NOT_CHECKED_OUT_DATE = -1;
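        // Serialized layout produced by ToString and consumed by Parse (fields joined by
        // DELIMITER, in this order):
        // ParentFolderId;ItemId;VersionId;Modified;DMSItemId;CheckOutDate;WasAlreadyCheckedOut;NeedToCheckedOut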
public override string ToString()
{
return string.Format("{1}{0}{2}{0}{3}{0}{4}{0}{5}{0}{6}{0}{7}{0}{7}", DELIMITER, ParentFolderId, ItemId, VersionId, Modified, DMSItemId, CheckOutDate, WasAlreadyCheckedOut.ToString(), NeedToCheckedOut);
}
internal static OTSyncInfo Create(WorksharePlatform.FileDetails fileDetailes, DateTime ModifyDate, global::Hummingbird.DM.Extensions.Interop.DOCSObjects.IProfile m_document, bool IsAlreadyCheckedout, bool needToCheckOut)
{
var file = new OTFile(m_document);
return new OTSyncInfo()
{
ItemId = fileDetailes.CurrentVersion.FileId,
Modified = ModifyDate.Ticks.ToString(),
ParentFolderId = fileDetailes.FolderId,
VersionId = (fileDetailes.CurrentVersion != null) ? fileDetailes.CurrentVersion.Id : 0,
DMSItemId = m_document.DocNumber,
CheckOutDate = (file.CheckoutTime.HasValue) ? file.CheckoutTime.Value.Ticks : NOT_CHECKED_OUT_DATE,
WasAlreadyCheckedOut = IsAlreadyCheckedout,
NeedToCheckedOut = needToCheckOut
};
}
internal static ISyncInfo Parse(string p)
{
if (!string.IsNullOrEmpty(p))
{
var splits = p.Split(DELIMITER);
                if (splits.Length == 8)
{
return new OTSyncInfo()
{
ParentFolderId = int.Parse(splits[0]),
ItemId = int.Parse(splits[1]),
VersionId = int.Parse(splits[2]),
Modified = splits[3],
DMSItemId = int.Parse(splits[4]),
CheckOutDate = long.Parse(splits[5]),
WasAlreadyCheckedOut = bool.Parse(splits[6]),
NeedToCheckedOut = bool.Parse(splits[7])
};
}
}
return null;
}
internal static OTSyncInfo Create(WorksharePlatform.FolderDetails folderDetails, global::Hummingbird.DM.Extensions.Interop.DOCSObjects.IProject m_folder)
{
if (folderDetails == null) return null;
return new OTSyncInfo()
{
ItemId = folderDetails.Id,
ParentFolderId = folderDetails.ParentId,
VersionId = 0,
Modified = "0",
DMSItemId = m_folder.ProjectID
};
}
public string LastActivityId
{
get;set;
}
string ISyncInfo.DMSItemId
{
get
{
throw new NotImplementedException();
}
set
{
throw new NotImplementedException();
}
}
public System.Collections.Generic.List<int> ProcessedVersionIdss
{
get
{
throw new NotImplementedException();
}
set
{
throw new NotImplementedException();
}
}
public int LastUploadedDmsVerNum
{
get
{
throw new NotImplementedException();
}
set
{
throw new NotImplementedException();
}
}
public string DataType
{
get
{
throw new NotImplementedException();
}
set
{
throw new NotImplementedException();
}
}
}
}
<file_sep>/WSComponents/src/WSComponents.Tests/TestWSApplication.cs
using NUnit.Framework;
using Workshare.Components.Concrete;
namespace WSComponents.Tests
{
[TestFixture]
public class TestWSApplication
{
[TestFixtureSetUp]
public void Setup()
{
// WSApplication.Instance = new WSApplication();
}
// [Test]
public void Properties_NotNull()
{
Assert.IsNotNull(WSApplication.Instance.AuthProvider);
Assert.IsNotNull(WSApplication.Instance.Integration);
Assert.IsNotNull(WSApplication.Instance.View);
Assert.IsNotNull(WSApplication.Instance.Presenter);
Assert.IsNotNull(WSApplication.Instance.Server);
}
}
}
<file_sep>/WSComponents/src/WSComponents.Tests/TestPresenterBase.cs
using System;
using System.Collections.Generic;
using System.Linq;
using Moq;
using NUnit.Framework;
using Workshare.Components;
using Workshare.Components.Common;
using Workshare.Components.Concrete;
using Workshare.Components.Interfaces;
using Workshare.Components.Presenter;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Changes.Activities;
using WorksharePlatform;
namespace WSComponents.Tests
{
class TestPresenter : ModulePresenterBase
{
public TestPresenter(IModuleView view, IWSIntegration integration)
: base(view, integration)
{ }
public override bool LoginIfNeeded()
{
return true;
}
public override DialogSettings GetCurrentDialogSettings(UserDetails user)
{
return DefaultDialogSettings;
}
}
[TestFixture]
public class TestPresenterBase
{
TestPresenter presenter;
Mock<IModuleView> view;
Mock<IWSIntegration> integration;
Mock<IDMSFolder> fld;
Mock<IDMSFile> file;
Mock<IWSComponentsApp> app;
[SetUp]
public void Setup()
{
app = new Mock<IWSComponentsApp>();
WSApplication.Instance = app.Object;
view = new Mock<IModuleView>();
integration = new Mock<IWSIntegration>();
app.Setup(x => x.View).Returns(view.Object);
app.Setup(x => x.Integration).Returns(integration.Object);
presenter = new TestPresenter(app.Object.View, app.Object.Integration);
}
[Test]
public void TestShowProgress()
{
view.Verify(x => x.ShowProgressWindow(It.IsAny<Workshare.Components.Views.Common.CommandInvoker>()), Times.Once());
}
// [Test]
public void TestError_ShowOnlyErrorMessage()
{
ItemsErrorsCollection lst = new ItemsErrorsCollection(null);
bool errorWasShown = false;
fld = new Mock<IDMSFolder>();
lst.Add(fld.Object, new CloudFolderNotFound("Folder not found"));
file = new Mock<IDMSFile>();
lst.Add(file.Object, new CloudFileNotFound("File not found"));
MultiItemsException ex = new MultiItemsException(lst, null);
view.Setup(x => x.ShowErrors(It.IsAny<IEnumerable<ItemException>>())).Callback<IEnumerable<ItemException>>((y) => { errorWasShown = true; Assert.IsTrue(y.Count() == 2); });
presenter.res_OnError(null, new WorkUnitErrorEventArgs(null, ex));
Assert.IsTrue(errorWasShown);
view.Verify(x => x.ShowError(It.IsAny<Exception>()), Times.Never());
view.Verify(x => x.ShowError(It.IsAny<string>()), Times.Never());
view.Verify(x => x.ShowError(It.IsAny<BaseException>()), Times.Never());
}
// [Test]
public void TestError_ShowOnlyOneMessageWhenFolderAccesDeny()
{
ItemsErrorsCollection lst = new ItemsErrorsCollection(null);
bool errorWasShown = false;
file = new Mock<IDMSFile>();
lst.Add(file.Object, new CloudFileNotFound("Folder not found"));
lst.Add(file.Object, new CloudFolderAccessDenied("Access Deny", "Folder Access Deny"));
lst.Add(file.Object, new CannotProcessItemException());
MultiItemsException ex = new MultiItemsException(lst, null);
view.Setup(x => x.ShowErrors(It.IsAny<IEnumerable<ItemException>>())).Callback<IEnumerable<ItemException>>((y) => { errorWasShown = true; Assert.IsTrue(y.Count() == 1); });
presenter.res_OnError(null, new WorkUnitErrorEventArgs(null, ex));
Assert.IsTrue(errorWasShown);
view.Verify(x => x.ShowError(It.IsAny<Exception>()), Times.Never());
view.Verify(x => x.ShowError(It.IsAny<string>()), Times.Never());
view.Verify(x => x.ShowError(It.IsAny<BaseException>()), Times.Never());
}
// [Test]
public void TestError_ShowCheckinDialog()
{
ItemsErrorsCollection lst = new ItemsErrorsCollection(null);
file = new Mock<IDMSFile>();
var err = new FileOnCloudIsNewerException(file.Object, "CloudName", DateTime.Now, "Clouduser", 2);
var file2 = new Mock<IDMSFile>();
var err2 = new FileOnCloudIsNewerException(file2.Object, "CloudName2", DateTime.Now, "Clouduser2", 2);
lst.Add(file.Object, err);
lst.Add(file2.Object, err2);
var ex = new MultiItemsException(lst, new OperationContext(new List<IDMSItem>() { file.Object, file2.Object }, new List<FileMapActivity>()));
int callCount = 0;
bool canContinue = false;
view.Setup(x => x.ShowCheckinDlg(It.IsAny<ConflictItemData>(), It.IsAny<IDMSFile>(), It.IsAny<bool>(), It.IsAny<int>(), It.IsAny<bool>())).Callback(() => { callCount++; }).Returns(new CheckinDlgResult() { ApplyToAll = false, option = Workshare.Integration.Enums.ConflictOptions.Replace });
integration.Setup(x => x.SyncItems(It.IsAny<IEnumerable<SyncItemInformation>>(), It.IsAny<OperationContext>())).Callback(() => canContinue = true);
Assert.IsFalse(presenter.IsAnyItemInProgress(), "We should not have any ityem in progress now, but in fact we have"); // we should add item to progress
presenter.res_OnError(null, new WorkUnitErrorEventArgs(null, ex));
Assert.IsTrue(callCount == 2, "Dialog should be displayed twice but it was not"); // we should show dialog twice
view.Verify(x => x.ShowError(It.IsAny<Exception>()), Times.Never());
view.Verify(x => x.ShowError(It.IsAny<string>()), Times.Never());
view.Verify(x => x.ShowError(It.IsAny<BaseException>()), Times.Never());
Utility.WaitWithPeriodicalCallback(ref canContinue, Utility.DEFAULT_TIMEOUT, System.Windows.Forms.Application.DoEvents);
integration.Verify(x => x.SyncItems(It.IsAny<IEnumerable<SyncItemInformation>>(), It.IsAny<OperationContext>()), Times.Once(), "Sync method should be called");
callCount = 0;
            // return ApplyToAll = true and the dialog should be displayed only once
view.Setup(x => x.ShowCheckinDlg(It.IsAny<ConflictItemData>(), It.IsAny<IDMSFile>(), It.IsAny<bool>(), It.IsAny<int>(), It.IsAny<bool>())).Callback(() => { callCount++; }).Returns(new CheckinDlgResult() { ApplyToAll = true, option = Workshare.Integration.Enums.ConflictOptions.Replace });
presenter.res_OnError(null, new WorkUnitErrorEventArgs(null, ex));
Assert.IsTrue(callCount == 1, "Dialog should be displayed once this time but it was not"); // we should show dialog once
}
// [Test]
public void TestError_ShowConflictDialog()
{
ItemsErrorsCollection lst = new ItemsErrorsCollection(null);
file = new Mock<IDMSFile>();
var err = new FileConflictException(file.Object, "CloudName", DateTime.Now, "Clouduser", 1);
var file2 = new Mock<IDMSFile>();
var err2 = new FileConflictException(file2.Object, "CloudName2", DateTime.Now, "Clouduser2", 1);
lst.Add(file.Object, err);
lst.Add(file2.Object, err2);
var ex = new MultiItemsException(lst, new OperationContext(new List<IDMSItem>() { file.Object, file2.Object }, new List<FileMapActivity>()));
int callCount = 0;
bool canContinue = false;
integration.Setup(x => x.SyncItems(It.IsAny<IEnumerable<SyncItemInformation>>(), It.IsAny<OperationContext>())).Callback(() => canContinue = true);
view.Setup(x => x.ShowConflictDialog(It.IsAny<ConflictItemData>(), It.IsAny<ConflictItemData>())).Callback(() => { callCount++; }).Returns(ConflictOptions.KeepBoth);
presenter.res_OnError(null, new WorkUnitErrorEventArgs(null, ex));
view.Verify(x => x.ShowError(It.IsAny<Exception>()), Times.Never());
view.Verify(x => x.ShowError(It.IsAny<string>()), Times.Never());
view.Verify(x => x.ShowError(It.IsAny<BaseException>()), Times.Never());
Assert.IsTrue(callCount == 2, "Dialog should be displayed twice but it was not"); // we should show dialog twice
Utility.WaitWithPeriodicalCallback(ref canContinue, Utility.DEFAULT_TIMEOUT, System.Windows.Forms.Application.DoEvents);
integration.Verify(x => x.SyncItems(It.IsAny<IEnumerable<SyncItemInformation>>(), It.IsAny<OperationContext>()), Times.Once(), "Sync method should be called");
}
// [Test]
public void TestSendToWorkshare()
{
bool canContinue = false;
fld = new Mock<IDMSFolder>();
file = new Mock<IDMSFile>();
List<IDMSItem> lst = new List<IDMSItem>();
lst.Add(file.Object);
lst.Add(fld.Object);
view.Setup(x => x.ShowSelectFolder(presenter.GetCurrentDialogSettings(null))).Returns(10);
integration.Setup(x => x.SendItems(lst, It.IsAny<FolderDetails>(), It.IsAny<OperationContext>())).Callback(() =>
{
Assert.IsTrue(presenter.IsAnyItemInProgress(), "Items should be in progress in that moment");
canContinue = true;
});
var auth = new Mock<IAuthProvider>();
auth.Setup(x => x.GetCurrentWSUser()).Returns(new UserDetails());
app.Setup(x => x.AuthProvider).Returns(auth.Object);
Assert.IsFalse(presenter.IsAnyItemInProgress(), "Items should not be in progress in that moment");
presenter.OnSendItemsClicked(new SendItemsClickedArgs(lst));
Utility.WaitWithPeriodicalCallback(ref canContinue, Utility.DEFAULT_TIMEOUT, System.Windows.Forms.Application.DoEvents);
view.Verify(x => x.ShowSelectFolder(presenter.GetCurrentDialogSettings(null)), Times.Once(), "Select folder dialog was not shown");
integration.Verify(x => x.SendItems(lst,It.IsAny<FolderDetails>(), It.IsAny<OperationContext>()), Times.Once(), "Send method should be called");
Assert.IsFalse(presenter.IsAnyItemInProgress(), "Items should not be in progress in that moment");
}
// [Test]
public void TestSyncToWorkshare()
{
bool canContinue = false;
fld = new Mock<IDMSFolder>();
file = new Mock<IDMSFile>();
List<SyncItemInformation> lst = new List<SyncItemInformation>();
lst.Add(new SyncItemInformation(file.Object, ConflictOptions.None, ConflictVersionOptions.None));
lst.Add(new SyncItemInformation(fld.Object, ConflictOptions.None, ConflictVersionOptions.None));
integration.Setup(x => x.SyncItems(It.IsAny<IEnumerable<SyncItemInformation>>(), It.IsAny<OperationContext>()))
.Callback<IEnumerable<SyncItemInformation>, OperationContext>((a, b) => {
Assert.IsTrue(presenter.IsAnyItemInProgress(), "Items should be in progress in that moment");
if (a.Count() == 2)
{
canContinue = true;
}
});
var auth = new Mock<IAuthProvider>();
auth.Setup(x => x.GetCurrentWSUser()).Returns(new UserDetails());
app.Setup(x => x.AuthProvider).Returns(auth.Object);
Assert.IsFalse(presenter.IsAnyItemInProgress(), "Items should not be in progress in that moment");
presenter.OnSyncItemsClicked(new SyncItemsClickedArgs(lst));
Utility.WaitWithPeriodicalCallback(ref canContinue, Utility.DEFAULT_TIMEOUT, System.Windows.Forms.Application.DoEvents);
integration.Verify(x => x.SyncItems(lst, It.IsAny<OperationContext>()), Times.Once(), "Sync method should be called");
Assert.IsFalse(presenter.IsAnyItemInProgress(), "Items should not be in progress in that moment");
}
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/Application.cs
using System.Diagnostics;
using Microsoft.Win32;
using Workshare.Components.Common;
using Workshare.Components.Concrete;
using Workshare.Components.Interfaces;
using Workshare.IManage.Integration;
using Workshare.IManage.Views;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
namespace Workshare.IManage
{
public class Application : WSApplication
{
Application():base()
{
Registering.ForceToUseIe8Standarts();
WorksharePlatform.PlatformService.Host = WorksharePlatform.PlatformService.MY_SERVER;
WorksharePlatform.PlatformService.UseHostFromRegistry = true;
}
public override IModulePresenter Presenter
{
get
{
return Application.iManInstance.View.m_presenter;
}
}
public static Application iManInstance
{
get
{
if (Instance == null)
{
Instance = new Application();
}
return Instance as Application;
}
}
ModuleBase _module;
public override ModuleBase Module
{
get { return _module ?? (_module = new IManageModule()); }
}
private IModuleView _view;
public override IModuleView View
{
get { return _view ?? (_view = Module.Resolve<IModuleView>()); }
set { _view = value; }
}
public override string APP_CODE { get { return "Workshare-iManage"; } }
public string SENDINFO_FIELD = "WORKSHARECLOUD";
public string DATASTORE_FIELD = "WORKSHARESENDINFO";
public string SUBCLASSNAME = "WORKSHARE";
public iManType ClientType = iManType.DeskSite;
private string _clientVersion = "";
public string ClientVersion
{
get
{
if (string.IsNullOrEmpty(_clientVersion))
{
var reg = Registry.LocalMachine.OpenSubKey("SOFTWARE\\Interwoven\\WorkSite\\Client\\Common\\InstallRoot");
if (reg == null)
{
reg = Registry.LocalMachine.OpenSubKey("SOFTWARE\\Interwoven\\WorkSite\\8.0\\Common\\InstallRoot");
if (reg == null)
{
_clientVersion = "7";
}
else
{
_clientVersion = "8";
}
}
else
{
_clientVersion = "9";
}
}
return _clientVersion;
}
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncUploadFilesDialog/TrueSyncUploadFilesDialog.xaml.cs
using System;
using System.ComponentModel;
using System.Windows;
using Workshare.Components.Common;
using Workshare.Components.Views.Common;
using Workshare.Components.Views.TrueSyncUploadFilesDialog.VMs;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
using RES = Workshare.Components.Properties.Resources;
namespace Workshare.Components.Views.TrueSyncUploadFilesDialog
{
/// <summary>
    /// Interaction logic for TrueSyncUploadFilesDialog.xaml
/// </summary>
public partial class TrueSyncUploadFilesDialog : OwnWindow
{
public TrueSyncUploadFilesDialog()
{
InitializeComponent();
}
private readonly TrueSyncFilesScanDialogVm _vm;
public TrueSyncUploadFilesDialog(ModuleBase module, TrueSyncFilesScanDialogVm vm)
{
InitializeComponent();
this._vm = vm;
Closing += TrueSyncUploadFilesDialog_Closing;
}
private void TrueSyncUploadFilesDialog_Closing(object sender, CancelEventArgs e)
{
bool needClose = true;
try
{
Integration.Processor.Changes.Activities.UploadFileActivityState state = _vm.ParentScans[0].Activities[0].State;
if (state == Integration.Processor.Changes.Activities.UploadFileActivityState.Uploading)
{
needClose = false;
}
}
catch (Exception ex)
{
needClose = ( _vm.State != TrueSyncFilesScanDialogVm.StateEnum.Loading );
}
if (!needClose)
{
WSApplication.Instance.View.ShowError(new BaseException(RES.STR_WORKSHARE_UPLOADING_DOCUMENT, RES.STR_WAIT_UPLOAD_COMPLETE));
e.Cancel = true;
}
}
public void Initiailize(IDMSFile file)
{
_vm.Initialize(file);
DataContext = _vm;
}
private void OwnWindow_Loaded(object sender, RoutedEventArgs e)
{
_vm.RescanCommand.Execute(null);
}
}
}
<file_sep>/OpenText/src/Workshare.OpenText/Exceptions/ExceptionExts.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Text;
namespace Workshare.OpenText.Exceptions
{
static class ExceptionExts
{
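        // A WebException that carries no HTTP response is treated as a connection-level
        // failure (DNS, timeout, refused connection); if a response exists, the request
        // reached the server and this is not a connectivity problem.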
public static bool IsConnectionError(this Exception ex)
{
var wex = ex as WebException;
if (wex != null)
{
                var response = wex.Response as HttpWebResponse;
                return response == null;
}
return false;
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncDialog/VMs/ImportFileActivityVm.cs
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.Windows.Documents;
using System.Windows.Input;
using Workshare.Components.Enums;
using Workshare.Components.Views.Common;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes.Activities;
using RES = Workshare.Components.Properties.Resources;
using System.Linq;
using System.Windows;
using System.Diagnostics;
using Workshare.Integration.Exceptions;
using System.Net;
using Workshare.Integration.Enums;
using Workshare.Integration.Processor.Changes;
using Workshare.Components.Views.TrueSyncDialog.VMs;
namespace Workshare.Components.Views.TrueSyncDialog
{
public class ImportFileActivityVm : FileActivityBaseVm<ImportFileActivityVm>
{
internal ImportFileActivity _data;
public ImportFileActivityVm()
: base(null)
{
}
public ImportFileActivityVm(ImportFileActivity data)
: base(data)
{
_data = data;
}
public IEnumerable<FileActivityChange> Changes
{
get
{
var res = _data.Changes.
Where(p => p.Type != ChangeType.NotModified
&& p.Type != ChangeType.DocTypeChanged
&& p.Type != ChangeType.NamesDiffer).ToList();
return res;
}
}
public string DefaultTitle
{
get
{
//don't forget - returned value is used in WsSplitButton.xaml
if (_data.Changes.Any(p => p.Type == ChangeType.RemoteDeleted))
return "Break Link";
else
return "Import";
}
}
public override bool IsInProgress
{
get
{
return State == ImportFileActivityState.Importing;
}
}
public ImportFileActivityState State
{
get
{
return _data.State;
}
set
{
if (_data.State != value)
{
_data.State = value;
this.PropertyHasChanged(p => p.State);
}
}
}
public override void OnStartProcessing()
{
this.State = ImportFileActivityState.Importing;
}
}
public interface IItemMapActivityVm
{
string Id { get; }
string MapId { get; }
void PropertyChangedAll();
object data { get; }
string Name { get; }
bool IsDiscarded { get; set; }
void OnStartProcessing();
}
public abstract class ItemMapActivityVm<T> : BasePropertyChanged<T>, IItemMapActivityVm
{
public abstract string Id { get; }
public abstract string MapId { get; }
public abstract object data { get; }
public abstract string Name { get; }
public abstract bool IsDiscarded { get; set; }
public abstract void OnStartProcessing();
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/Strategies/DmsWorker.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using Com.Interwoven.WorkSite.iManage;
using Workshare.Components.Exceptions;
using Workshare.Components.Helpers;
using Workshare.IManage.Contrete;
using Workshare.Integration;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Strategies;
using WorksharePlatform;
namespace Workshare.IManage.Strategies
{
/// <summary>
/// Should depends only on DMS stuff
/// </summary>
public class DmsWorker : DmsWorkerBase
{
readonly ModuleBase _module;
public DmsWorker(ModuleBase module)
{
_module = module;
}
public override void CanImportNewFileOrThrow(NewFileOptions options)
{
var folder = (ManFolder)options.ParentFolder;
if (folder != null)
{
if (!folder.HasRightToAddNewFiles())
{
throw new DMSUnAuthorizedException();
}
}
}
public override void ImportNewFile(NewFileOptions options)
{
try
{
if (options.VersionList == null) return;
ManFile dmsFile = null;
var parentFolder = (ManFolder)options.ParentFolder;
bool first = true;
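                // The first entry in VersionList creates the DMS document via the import
                // dialog; each subsequent entry is added as a new version of that document.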
foreach (var verInfo in options.VersionList)
{
if (first)
{
var dlg = _module.Resolve<ImportDialog>();
var datebase = (parentFolder != null)
? (parentFolder.m_folder.Database)
: ((options.BasedOnFile != null)
? ((ManFile)options.BasedOnFile).m_document.Database
: null);
dlg.Initialize(parentFolder, verInfo.FilePath, verInfo.FriendlyName, datebase);
dlg.ShowDialog();
dmsFile = dlg.AddedFile;
if (dmsFile != null)
{
verInfo.createdVersion = dmsFile.GetVersions().Last();
//dmsFile.Description = options.WsFile.FriendlyName;
dmsFile.AddHistory(string.Empty,
string.Format(Workshare.Integration.Properties.Resources.STR_HISTORY_ADDFILETOFOLDER,
DateTime.Now.ToString(Activity.TimeFormat)), Operations.Sync);
dmsFile.AddHistories(verInfo.Activities.ToList());
}
options.CreatedFile = dmsFile;
}
else if (dmsFile != null)
{
verInfo.createdVersion = dmsFile.GetLatest().AddVersion(verInfo.FilePath, verInfo.file.file, verInfo.Activities.ToList(), dmsFile.DisplayName);
}
first = false;
}
if (dmsFile != null)
{
dmsFile.DiscardCheckout();
}
}
catch (WebException)
{
throw;
}
catch (FileNotCreatedException)
{
throw;
}
catch (Exception ex)
{
throw new FileNotCreatedException(ex.Message, ex);
}
finally
{
//in case of exception we should delete temp files from temporary folder anyway
foreach (var verInfo in options.VersionList)
{
FileUtils.SafeDelete(verInfo.FilePath);
verInfo.file.file.DeleteFile();
}
}
}
public override void ImportNewVersion(NewVersionsOptions options)
{
if (options.VersionList == null) return;
var dmsFile = (ManFile)options.File;
if (dmsFile.CheckedOut)
{
if (FileUtils.IsFileLocked(dmsFile.CheckoutPath))
{
throw new LockedByAnotherProcessException(dmsFile.Name);
}
if (dmsFile.IsLocalFileWasEdited())
{
var version = dmsFile.CheckInFile(true);
dmsFile = (ManFile)((ManVersion)version).AsFile();
}
}
foreach (var info in options.VersionList)
{
try
{
IDmsVersion addedVersion = dmsFile.AddVersion(info.FilePath, info.file.file, info.Activities.ToList(), info.FriendlyName);
info.createdVersion = addedVersion;
dmsFile = (ManFile)((ManVersion)addedVersion).AsFile();
}
finally
{
//in case of exception we should delete temp files from temporary folder anyway
FileUtils.SafeDelete(info.FilePath);
info.file.file.DeleteFile();
}
}
}
public override void CanImportNewVersionOrThrow(IDMSFile dmsFile)
{
var file = (ManFile)dmsFile;
file.Refresh();
var latestVersion = (ManFile)file.GetLatest();
latestVersion.Refresh();
if (latestVersion.CheckedOut && !latestVersion.CheckedOutToUser)
{
throw new FileCheckoutedToAnotherUser(file.GetCheckoutUser());
}
}
public override void CanUpdateFileOrThrow(IDMSFile localFile, Operations operation)
{
var file = (ManFile)localFile;
file.Refresh();
if (!file.DoesUserHavePermissions(imAccessRight.imRightReadWrite))
{
throw new DMSUnAuthorizedException();
}
if (file.CheckedOut && !file.CheckedOutToUser)
{
throw new FileCheckoutedToAnotherUser(operation, file.GetCheckoutUser());
}
}
public override void CanUpdateFolderOrThrow(IDMSFolder localFolder)
{
var folder = (ManFolder)localFolder;
if (!folder.DoesUserHavePermissions(imAccessRight.imRightReadWrite))
{
throw new DMSUnAuthorizedException();
}
}
public override void CanUpdateVersionOrThrow(IDmsVersion localVersion, Operations operation)
{
var version = (ManVersion)localVersion;
CanUpdateFileOrThrow(version.AsFile(), operation);
}
public override void RelateDocument(IDMSFile mainFile, IDMSFile relatedFile, string comment)
{
var file = (ManFile)mainFile;
file.RelateDocument(relatedFile, comment);
}
public override IDMSFolder CreateSubFolder(IDMSFolder folder, NewFolderOptions options)
{
return folder.AddSubFolder(options.WsFolder._folder);
}
public override string GetName(IDMSFile iDMSFile)
{
return iDMSFile.Name;
}
public override string GetFilePath(IDMSFile iDMSFile)
{
var file = (ManFile)iDMSFile;
return file.GetCurrentCheckedInFile();
}
public override string GetFilePath(IDmsVersion dmsVersion)
{
var version = (ManVersion)dmsVersion;
return version.GetCurrentCheckedInFile();
}
public override string GetName(IDmsVersion dmsVersion)
{
var version = (ManVersion)dmsVersion;
return version.Name;
}
public override string GetFriendlyName(IDmsVersion dmsVersion)
{
var version = (ManVersion)dmsVersion;
return version.DisplayName;
}
public override string GetFriendlyName(IDMSFolder folder)
{
var fld = (ManFolder)folder;
return fld.DisplayName;
}
public override string GetFriendlyName(IDMSFile iDMSFile)
{
var fld = (ManFile)iDMSFile;
return fld.Name;
}
public override DateTime GetEditTime(IDmsVersion dmsVersion)
{
var version = (ManVersion)dmsVersion;
return version.EditTime;
}
public override DateTime GetEditTime(IDMSFile dmsFile)
{
var file = (ManFile)dmsFile;
return file.GetLatest().Modified;
}
public override IEnumerable<BaseDMSItem> GetParents(BaseDMSFile item)
{
var file = (ManFile)item;
return file.ParentFolders.OfType<BaseDMSItem>();
}
public override void AddHistory(IDmsVersion dmsVersion, string eventName, string comment, Operations operation)
{
((ManVersion)dmsVersion).AddHistory(eventName, comment, operation);
}
public override void UnlockIfNotEdited(IDMSFile localFile)
{
var dmsFile = localFile as ManFile;
if (dmsFile.CheckedOut)
{
if (FileUtils.IsFileLocked(dmsFile.CheckoutPath))
{
throw new LockedByAnotherProcessException(dmsFile.Name);
}
if (!dmsFile.IsLocalFileWasEdited())
{
dmsFile.CheckInFileEx(false, imCheckinDisposition.imCheckinReplaceOriginal);
}
}
}
}
}
<file_sep>/WSComponents/src/WSComponents/Common/WSApplication.cs
using System;
using System.Net;
using Workshare.Components.Interfaces;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
namespace Workshare.Components.Common
{
public abstract class WSApplication : IWSComponentsApp
{
public virtual string APP_CODE { get { return ""; } }
public string ServerKey { get; set; }
//protected static WSApplication m_app = null;
public static IWSComponentsApp Instance
{
get; protected set;
}
public Uri Server
{
get
{
return new Uri("https://" + WorksharePlatform.PlatformService.Host);
}
}
public virtual IModulePresenter Presenter
{
get
{
return Instance.View.m_presenter;
}
}
public abstract IModuleView View
{
get; set;
}
protected IWSIntegration _integration;
public virtual IWSIntegration Integration
{
get
{
if (_integration == null)
{
ServicePointManager.DefaultConnectionLimit = int.MaxValue;//TODO remove this line. fix problem with multiple files uploading
_integration = Module.Resolve<IWSIntegration>();
}
return _integration;
}
}
public abstract ModuleBase Module { get; }
private IAuthProvider _authProvider;
public virtual IAuthProvider AuthProvider
{
get { return _authProvider ?? (_authProvider = Module.Resolve<IAuthProvider>()); }
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Changes/Activities/ItemMapActivity.cs
using System;
using Workshare.Integration.Processor.Maps;
namespace Workshare.Integration.Processor.Changes
{
public abstract class ItemMapActivity
{
public ItemMapActivity()
{
Id = Guid.NewGuid().ToString();
}
protected void Refresh(ItemMap map)
{
this.MapId = map.Id;
}
public string Id { set; get; }
public string MapId { set; get; }
public bool IsDiscarded { get; set; }
public string Type { get; set; }
public abstract bool InKindOfErrorState();
public abstract bool InKindOfProceeedState();
public abstract bool InKindOfProcessingState();
}
}
<file_sep>/WSComponents/src/WSIntegration/WsProxies/WsVersion.cs
using System;
using WorksharePlatform;
namespace Workshare.Integration.WsProxies
{
public class WsVersion
{
internal FileVersionDetails version;
UserDetails user;
public WsVersion(FileVersionDetails version, UserDetails user)
{
this.version = version;
this.user = user;
}
public int Id { get { return version.Id; } }
public int Version { get { return version.Version; } }
public WsUser Creator { get { return new WsUser(version.Creator); } }
public DateTime CreateDate { get { return version.CreateDate; } }
public string DownloadFileVersion()
{
return PlatformService.DownloadFileVersion(user, version.FileId, version.Version);
}
public string DownloadFileVersionInPdf()
{
return PlatformService.DownloadFileVersionInPdf(user, version.FileId, version.Version);
}
public string DownloadFileVersionInPdfWithComment(string password)
{
return PlatformService.DownloadFileVersionInPdfWithComment(user, password, version.Version);
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Changes/Activities/UploadFileActivity.cs
namespace Workshare.Integration.Processor.Changes.Activities
{
public class UploadFileActivity : FileMapActivity
{
public UploadFileActivityState State { set; get; }
public UploadFileActivity()
{
Type = "UploadActivity";
}
public override bool CanExecute(ItemMapActivityAction action)
{
if (this.Actions == null) return false;
if (!(action is UploadDocumentAction)) return false;
if (this.State != UploadFileActivityState.Scanned)
{
return false;
}
if (!Actions.Contains(action))
{
return false;
}
return true;
}
public override bool InKindOfErrorState()
{
return State == UploadFileActivityState.Error
|| State == UploadFileActivityState.CheckedOutOnAnotherMachine
|| State == UploadFileActivityState.CheckedOutToAnother
|| State == UploadFileActivityState.NoAccessOnWorkshare;
}
public override bool InKindOfProcessingState()
{
return State == UploadFileActivityState.Uploading;
}
public override bool InKindOfProceeedState()
{
return State == UploadFileActivityState.Uploaded;
}
public override string ToString()
{
return string.Format("UploadFileActivity State={0}; Id={1}; Changes={2}", State, MapId,Changes.AsString());
}
}
public enum UploadFileActivityState { Scanned, Uploaded, Error, Uploading, CheckedOutToAnother, CheckedOutOnAnotherMachine, NoAccessOnWorkshare };
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Maps/FolderMap.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Workshare.Integration.Interfaces;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration.Processor.Maps
{
public class FolderMap : ItemMap
{
public IDMSFolder LocalFolder { set; get; }
public WsFolder WsFolder { set; get; }
public override void Apply(ItemMapVisitor visitor)
{
if (visitor.VisitEnter(this))
{
foreach (var item in this.Maps.ToList())
{
item.Apply(visitor);
}
visitor.Visit(this);
}
visitor.VisitLeave(this);
}
public override string GetId()
{
return GetMapId(this);
}
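// Builds a composite key from the local and remote folder ids; -1 marks a missing side.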
public static string GetMapId(FolderMap map)
{
if (map.LocalFolder == null)
{
return map.WsFolder == null ? string.Format("localfolder:{0};wsfolder:{1}", -1, -1) : string.Format("localfolder:{0};wsfolder:{1}", -1, map.WsFolder.Id);
}
if (map.WsFolder == null)
{
return string.Format("localfolder:{0};wsfolder:{1}", map.LocalFolder.ID, -1);
}
return string.Format("localfolder:{0};wsfolder:{1}", map.LocalFolder.ID, map.WsFolder.Id);
}
}
}
<file_sep>/WSComponents/src/WSComponents/Exceptions/FileNotCreatedException.cs
using System;
using Workshare.Integration.Exceptions;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.Components.Exceptions
{
public class FileNotCreatedException : BaseException
{
public FileNotCreatedException(string message)
: base(RES.STR_UNABLESYNC_CAPTION, message)
{
}
public FileNotCreatedException(string message, Exception inner)
: base(RES.STR_UNABLESYNC_CAPTION, message, inner)
{
}
}
public class FolderNotCreatedException : BaseException
{
public FolderNotCreatedException(string message)
: base(RES.STR_UNABLESYNC_CAPTION, message)
{
}
public FolderNotCreatedException(string message, Exception inner)
: base(RES.STR_UNABLESYNC_CAPTION, message, inner)
{
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Strategies/ActionStrategy/ActionStrategy.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
namespace Workshare.Integration.Processor.Strategies.ActionStrategy
{
public abstract class ActionStrategy
{
protected SyncInfoService SyncInfoService;
public ActionStrategy(SyncInfoService syncInfoService)
{
this.SyncInfoService = syncInfoService;
}
public abstract ProcessResult Process(Maps.FileMap fileMap, ItemMapActivityAction action);
public abstract ProcessResult Process(FolderMap folderMap, ItemMapActivityAction action);
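// Keeps the folder's "sent items" record consistent: drops ids that no longer exist on Workshare
// (when remote data is available) and records the newly sent/imported id.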
protected void UpdateSendInfoForFolder(FolderMap foldermap, int newId)
{
var ids = SyncInfoService.GetSendData(foldermap.LocalFolder).Distinct().ToList();
if (foldermap.WsFolder != null)
{
var existedOnWsIds = foldermap.WsFolder.SubFolders.Select(p => p.Id)
.Concat(foldermap.WsFolder.Files.Select(f => f.Id).ToList());
ids = existedOnWsIds.Intersect(ids).ToList();
}
if (ids == null)
{
ids = new List<int>();
}
if (!ids.Contains(newId))
{
ids.Add(newId);
}
SyncInfoService.UpdateSendDataInfo(foldermap.LocalFolder, ids);
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Changes/Visitors/MapFinder.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Workshare.Integration.Processor.Maps;
namespace Workshare.Integration.Processor.Changes.Visitors
{
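// Visitor that walks the map tree looking for the ItemMap with the given id.
// VisitEnter returns false once a match is found, so deeper branches are skipped.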
class MapFinder : ItemMapVisitor
{
private string id;
public MapFinder(string id)
{
this.id = id;
}
public ItemMap Result { private set; get; }
public override bool VisitEnter(FolderMap folderMap)
{
return Result == null;
}
public override void Visit(FolderMap foldermap)
{
if (foldermap.Id == id)
{
Result = foldermap;
}
}
public override void Visit(FileMap fileMap)
{
if (fileMap.Id == id)
{
Result = fileMap;
}
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/ErrorsWindow/ErrorsWindow.xaml.cs
using System.Collections.Generic;
using System.Text.RegularExpressions;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Documents;
using System.Windows.Input;
using Workshare.Components.Views.Common;
using Workshare.Integration.Exceptions;
namespace Workshare.Components.Views.ErrorsWindow
{
/// <summary>
/// Interaction logic for ErrorsWindow.xaml
/// </summary>
public partial class ErrorsWindow : OwnWindow
{
public ErrorsWindow()
{
InitializeComponent();
}
public ErrorsWindow(IEnumerable<ItemException> errors):this()
{
this.DataContext = new ErrorsViewModel(errors);
}
private void TitleBar_MouseDown(object sender, MouseButtonEventArgs e)
{
this.OnTitleMouseDown(sender, e);
}
private void richTB_Loaded_1(object sender, RoutedEventArgs e)
{
var richTextBox = (RichTextBox)sender;
var context = richTextBox.DataContext as ItemErrorViewModel;
if (context != null)
{
var document = richTextBox.Document ?? new FlowDocument();
document.PagePadding = new Thickness(0);
var paragraph = new Paragraph();
foreach (var block in GetDescriptionBlocks(context))
{
paragraph.Inlines.Add(block);
}
document.Blocks.Add(paragraph);
richTextBox.Document = document;
}
}
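// Splits the error message on $...$ placeholders; the $NAME$ token is rendered in bold with the
// failing item's name, everything else is emitted as plain runs.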
private IList<Inline> GetDescriptionBlocks(ItemErrorViewModel context)
{
var blocks = new List<Inline>();
var regEx = new Regex("(\\$[a-z]+?\\$)", RegexOptions.IgnoreCase);
var errorMessage = context.Error.Message;
if (!string.IsNullOrEmpty(errorMessage))
{
if (context.Item == null)
{
errorMessage = errorMessage.Replace("$NAME$", "");
errorMessage = Regex.Replace(errorMessage, " +", " ");
var run = new Run();
run.Text = errorMessage;
blocks.Add(run);
}
else
{
var matches = regEx.Matches(errorMessage);
var splits = regEx.Split(errorMessage);
foreach (var s in splits)
{
var run = new Run();
if (string.Equals("$NAME$", s))
{
run.FontWeight = FontWeights.Bold;
run.Text = (context.Item != null) ? (context.Item.Name ?? context.Item.DisplayName) : string.Empty;
}
else
{
run.Text = s;
}
blocks.Add(run);
}
}
}
return blocks;
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Strategies/ActionStrategy/ImportStrategy.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.Properties;
using Workshare.Integration.WsProxies;
using WorksharePlatform;
namespace Workshare.Integration.Processor.Strategies.ActionStrategy
{
public class ImportStrategy : ActionStrategy
{
readonly WsProxy _wsProxy;
readonly IAuthProvider _auth;
readonly DmsWorkerBase _dmsWorker;
readonly SyncInfoService _syncInfoService;
public ImportStrategy(WsProxy wsProxy, IAuthProvider auth, DmsWorkerBase dmsWorker, SyncInfoService syncInfoService)
: base(syncInfoService)
{
this._wsProxy = wsProxy;
this._auth = auth;
this._dmsWorker = dmsWorker;
this._syncInfoService = syncInfoService;
}
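// Collects the Workshare version ids that are already present locally (mapped version pairs plus
// anything recorded in the sync info) so those versions can be skipped during import.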
List<string> GetWsVersionIdsFromDms(FileMap fileMap, ISyncInfo syncInfo)
{
var res =
fileMap.Maps.OfType<VersionMap>()
.Where(a => a.WsVersion != null && a.LocalVersion != null)
.Select(a => a.WsVersion.Id.ToString()).ToList();
if (syncInfo != null)
{
res.AddRange(syncInfo.GetAllWsIds());
}
return res;
}
public override ProcessResult Process(FileMap fileMap, ItemMapActivityAction action)
{
try
{
var importAction = (ImportDocumentAction)action;
var user = _auth.GetCurrentWSUser2();
if (fileMap.WsFile != null)
{
var wsFileInfo = fileMap.WsFile;
var syncInfo = _syncInfoService.GetSyncInfo(fileMap);
var lastActivityId = (syncInfo == null) ? "" : syncInfo.ActivityId;
var activities =
new Func<List<Activity>>(
() => _wsProxy.GetFileActivities(user, wsFileInfo, lastActivityId).ToList());
var versionsToSkip = GetWsVersionIdsFromDms(fileMap, syncInfo);
if (importAction.ImportType == ImportType.AsNewDocument ||
importAction.ImportType == ImportType.AsRelatedDocument)
{
var parentFolderMap = fileMap.GetLocalParent();
CreateAncestorFoldersIfNeeded(fileMap);
if (parentFolderMap.HasLocal())
{
var startVersion = 1;
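// Resume from the first Workshare version newer than the last imported one; if none is newer
// (or there is no sync record), start from version 1.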
if (syncInfo != null)
{
startVersion =
fileMap.WsFile.Versions.Where(p => p.Id == syncInfo.LastImportedWsVerId)
.Select(a => a.Version)
.FirstOrDefault();
startVersion =
fileMap.WsFile.Versions.OrderBy(a => a.Version)
.Where(p => p.Version > startVersion)
.Select(a => a.Version)
.FirstOrDefault();
if (startVersion == 0)
{
startVersion = 1;
}
}
var keepLinkForNewFile = !fileMap.HasLocal();
var newFile = ImportAsNewFile(wsFileInfo, parentFolderMap.LocalFolder, action, activities,
startVersion, fileMap.LocalFile, keepLinkForNewFile, versionsToSkip);
if (newFile != null)
{
UpdateSendInfoForFolder(parentFolderMap, wsFileInfo.Id);
if (!fileMap.HasLocal())
{
fileMap.LocalFile = newFile;
}
else
{
if (importAction.ImportType == ImportType.AsRelatedDocument)
{
RelateDocument(fileMap.LocalFile, newFile);
}
var newMap = new FileMap();
newMap.LocalFile = newFile;
if (_syncInfoService.GetSyncInfo(newMap) != null)
{
newMap.WsFile = fileMap.WsFile;
newMap.ProcessState = ProcessState.Processed;
}
else
{
newMap.ProcessState = ProcessState.Scanned;
}
parentFolderMap.AddIf(newMap);
}
return ProcessResult.Processed;
}
else
{
return ProcessResult.Cancelled;
}
}
}
else if (importAction.ImportType == ImportType.AsNewVersion)
{
if (fileMap.HasLocal())
{
ImportAsNewVersion(fileMap.LocalFile, syncInfo, wsFileInfo, action, activities, versionsToSkip);
return ProcessResult.Processed;
}
else
{
Debug.Assert(false, "new version for null local file?");
}
}
else
{
Debug.Assert(false, "New ImportType?");
}
}
else
{
Debug.Assert(false, "Importing deleted file");
}
return ProcessResult.Processed;
}
catch (WebException ex)
{
if (ex.IsStatusCode(HttpStatusCode.Forbidden) || ex.IsStatusCode(HttpStatusCode.Unauthorized))
{
throw new CloudFolderAccessDenied(Resources.STR_UNABLE_SYNC, Resources.STR_UNABLESYNCFILE_TEXT, ex);
}
if (ex.IsConnectionError())
{
throw new OfflineException();
}
throw;
}
}
private void CreateAncestorFoldersIfNeeded(FileMap fileMap)
{
if (fileMap == null) return;
var curFolderMap = fileMap.Parent as FolderMap;
var foldersToCreate = new List<FolderMap>();
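// Walk up the hierarchy collecting ancestors that exist on Workshare but not in the DMS,
// then create them top-down below the first ancestor that does exist locally.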
while (curFolderMap != null && curFolderMap.HasRemote() && !curFolderMap.HasLocal())
{
foldersToCreate.Add(curFolderMap);
curFolderMap = curFolderMap.Parent as FolderMap;
}
if (curFolderMap != null && curFolderMap.HasLocal()) // guard: the hierarchy may have no local ancestor
{
foldersToCreate.Reverse();
foreach (var folderToCreateMap in foldersToCreate)
{
var newDmsFolder = _dmsWorker.CreateSubFolder(curFolderMap.LocalFolder, new NewFolderOptions
{
WsFolder = folderToCreateMap.WsFolder
});
if (newDmsFolder != null)
{
_syncInfoService.UpdateSyncInfo(newDmsFolder, new ActionContext(Services.ActionType.Upload)
{
WsFolder = folderToCreateMap.WsFolder
});
folderToCreateMap.LocalFolder = newDmsFolder;
curFolderMap = folderToCreateMap;
}
else
{
Debug.Assert(false, "Why?");
break;
}
}
}
}
private void RelateDocument(IDMSFile mainFile, IDMSFile relatedFile)
{
_dmsWorker.RelateDocument(mainFile, relatedFile, "Related using Workshare");
}
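// Creates a new DMS file from the Workshare file: builds the list of versions to materialise
// (all versions from startVersionIncluding, or just the latest one, depending on the action),
// asks the DMS worker to import them, and records sync info when the link should be kept.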
private IDMSFile ImportAsNewFile(WsFile wsFile, IDMSFolder dmsFolder, ItemMapActivityAction action, Func<List<Activity>> activitiesFunc, int startVersionIncluding, IDMSFile basedOnFile, bool keepLinkForNewFile, List<string> versionsToSkip)
{
var versionsToCreate = new List<CreateversionInfo>();
if (basedOnFile != null)
{
_syncInfoService.CanUpdateSyncInfoOrThrow(basedOnFile, Operations.Sync);
}
var importAction = (ImportDocumentAction)action;
var activities = activitiesFunc();
var lastActivity = activities.LastOrDefault();
if (importAction.ImportVersionsAction == ConflictVersionOptions.All)
{
bool firstVersion = true;
var versionsToAdd =
wsFile.Versions.Where(x => x.Version >= startVersionIncluding && !ShouldBeSkipped(versionsToSkip,x))
.OrderBy(x => x.Version).ToArray();
foreach (var ver in versionsToAdd)
{
var activitiesForCurrentVersion = new List<Activity>();
if (firstVersion)
{
activitiesForCurrentVersion.AddRange(activities.Where(x => x.CurrentVersion < ver.Version).ToList());
}
activitiesForCurrentVersion.AddRange(activities.Where(x => x.CurrentVersion == ver.Version).ToList());
versionsToCreate.Add(new CreateversionInfo
{
Activities = activitiesForCurrentVersion,
file = wsFile,
version = ver,
FriendlyName = wsFile.FriendlyName,
FilePath = ver.DownloadFileVersion()
});
firstVersion = false;
}
}
else if (importAction.ImportVersionsAction == ConflictVersionOptions.Latest)
{
var lastVersion = wsFile.CurrentVersion;
var startVersion = (wsFile.Versions.Select(p => p.Version).FirstOrDefault());
startVersion = (startVersion == startVersionIncluding ? startVersion : startVersionIncluding - 1);
var activitiesForVersion = activities.ToList();
if (basedOnFile != null)//importing new versions as new file
{
activitiesForVersion = activities.Where(a => a.CurrentVersion >= startVersion).ToList();
}
versionsToCreate.Add(new CreateversionInfo
{
Activities = activitiesForVersion,
file = wsFile,
version = lastVersion,
FriendlyName = wsFile.FriendlyName,
FilePath = lastVersion.DownloadFileVersion()
});
}
var options = new NewFileOptions()
{
WsFile = wsFile,
ParentFolder = dmsFolder,
BasedOnFile = basedOnFile,
VersionList = versionsToCreate
};
_dmsWorker.CanImportNewFileOrThrow(options);
_dmsWorker.ImportNewFile(options);
if (options.CreatedFile != null)
{
var importedVersionsIds = new List<int>();
foreach (var verInfo in options.VersionList)
{
if (keepLinkForNewFile)
{
_syncInfoService.UpdateSyncInfo(verInfo.createdVersion, new ActionContext(Services.ActionType.Import)
{
WsVersion = verInfo.version
});
}
importedVersionsIds.Add(verInfo.version.Id);
}
if (options.BasedOnFile != null)
{
_syncInfoService.UpdateSyncInfo(options.BasedOnFile, new ActionContext(Services.ActionType.Import)
{
WsFile = wsFile,
LastActivity = lastActivity.IdOrNull()
});
}
if (keepLinkForNewFile)
{
_syncInfoService.UpdateSyncInfo(options.CreatedFile, new ActionContext(Services.ActionType.Import)
{
WsFile = wsFile,
LastActivity = lastActivity.IdOrNull()
});
//_syncInfoService.UpdateSyncInfo(options.CreatedFile, wsFile, lastActivity);
}
}
return options.CreatedFile;
}
bool ShouldBeSkipped(IEnumerable<string> toSkip, WsVersion ver)
{
return toSkip.Any(a => string.Equals(a, ver.Id.ToString()));
}
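// Adds the missing Workshare versions to an existing DMS file, attaching the related activities
// to each created version and updating the file's sync info afterwards.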
private void ImportAsNewVersion(IDMSFile dmsFile, ISyncInfo syncInfo, WsFile wsFile, ItemMapActivityAction action, Func<List<Activity>> allActivitiesFunc, IEnumerable<string> versionIdsToSkip)
{
_syncInfoService.CanUpdateSyncInfoOrThrow(dmsFile, Operations.Sync);
_dmsWorker.CanImportNewVersionOrThrow(dmsFile);
var allActivities = allActivitiesFunc();
var lastActivity = allActivities.LastOrDefault();
var importAction = (ImportDocumentAction)action;
var versionInfos = new List<CreateversionInfo>();
var versionId = (syncInfo == null) ? 0 : syncInfo.LastImportedWsVerId;
var startFromVersion = wsFile.Versions.Where(p => p.Id == versionId).Select(p => p.Version).FirstOrDefault();
if (startFromVersion == 0)
{
startFromVersion = 1;
}
var versionsToAdd = wsFile.Versions
.Where(x => x.Version > startFromVersion && !versionIdsToSkip.Contains(x.Id.ToString()))
.OrderBy(x => x.Version).ToArray();
dmsFile.AddHistories(allActivities.Where( x => !(versionsToAdd.Any(version => version.Version == x.CurrentVersion)) ).ToList());
if (importAction.ImportVersionsAction == ConflictVersionOptions.Latest)
{
versionsToAdd = wsFile.Versions.Where(x => x.Version > startFromVersion && !ShouldBeSkipped(versionIdsToSkip,x))
.OrderBy(x => x.Version).ToArray();
versionInfos.Add(new CreateversionInfo
{
Activities = allActivities.Where(activity => versionsToAdd.Any(version => version.Version == activity.CurrentVersion)),
file = wsFile,
FriendlyName = wsFile.FriendlyName,
version = wsFile.CurrentVersion,
FilePath = wsFile.CurrentVersion.DownloadFileVersion()
});
}
else if (importAction.ImportVersionsAction == ConflictVersionOptions.All)
{
foreach (var wsVersion in versionsToAdd)
{
versionInfos.Add(new CreateversionInfo()
{
Activities = allActivities.Where(a => a.CurrentVersion == wsVersion.Version).ToList(),
file = wsFile,
FriendlyName = wsFile.FriendlyName,
version = wsVersion,
FilePath = wsVersion.DownloadFileVersion()
});
}
}
var options = new NewVersionsOptions
{
File = dmsFile,
WsFile = wsFile,
VersionList = versionInfos
};
_dmsWorker.ImportNewVersion(options);
foreach (var ver in options.VersionList)
{
_syncInfoService.UpdateSyncInfo(ver.createdVersion, new ActionContext(Services.ActionType.Import)
{
WsVersion = ver.version
});
}
_syncInfoService.UpdateSyncInfo(dmsFile, new ActionContext(Services.ActionType.Import)
{
WsFile = wsFile,
LastActivity = lastActivity.IdOrNull()
});
}
public override ProcessResult Process(FolderMap folderMap, ItemMapActivityAction action)
{
return ProcessResult.Processed;
}
}
}
<file_sep>/OpenText/src/Workshare.OpenText/Concrete/OTFile.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Hummingbird.DM.Extensions.Interop.DECore;
using Hummingbird.DM.Extensions.Interop.DOCSObjects;
using Workshare.Components.Exceptions;
using Workshare.Components.Helpers;
using Workshare.Components.WSLogger;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.OpenText.Concrete;
using WorksharePlatform;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.OpenText.Contrete
{
class OTFile : IDMSFile
{
IProfile m_document = null;
IProject m_Folder = null;
//IManDocumentType newdoctype = null;
bool wasAlreadyCheckedOut = false;
bool m_bNeedToCleanTemp = false;
string m_sTempCheckOutPath = "";
List<HistoryRowInfo> _history = new List<HistoryRowInfo>();
public OTFile(IProfile doc)
{
Logger.Write("ManFile::Create()", Severity.Information);
m_document = doc;
RefreshHistory();
}
public OTFile(IProfile doc, IProject folder)
: this(doc)
{
m_Folder = folder;
}
private void RefreshHistory()
{
_history = HistoryHelper.GetHistory(m_document as BCObject);
}
public IDmsVersion AddVersion(string filePath, FileDetails file, List<Activity> activities, string versionFriendlyName = null,
bool checkInVersion = false, bool keepLocalState = true)
{
throw new NotImplementedException();
}
public void UpdateSyncInfo(WorksharePlatform.FileDetails fileDetailes, bool silentUpdate, OperationContext ctx, WorksharePlatform.Activity last)
{
//Logger.Write("ManFile::UpdateSyncInfo() START", Severity.Information);
//try
//{
// string fieldvalue = string.Empty;
// if (fileDetailes != null && CheckedOutToUser)
// {
// DateTime LastEditDate = (ctx != null && ctx.CheckOutOption == Workshare.Integration.Enums.CheckOutOptions.CheckOut) ? EditDate : Modified;
// var info = OTSyncInfo.Create(fileDetailes, LastEditDate, m_document, wasAlreadyCheckedOut, (ctx == null) ? false : ctx.CheckOutOption == Workshare.Integration.Enums.CheckOutOptions.CheckOut);
// if (info != null)
// {
// fieldvalue = info.ToString();
// }
// }
// Logger.Write("ManFile::UpdateSyncInfo() Try fetch document from OpenText", Severity.Information);
// /*PCDDocObject doc = new PCDDocObject();
// doc.SetDST(Application.UserDST);
// doc.SetObjectType("cyd_defprof");
// doc.SetProperty("%TARGET_LIBRARY", m_document.Library.Name);
// doc.SetProperty("%OBJECT_IDENTIFIER", m_document.DocNumber.ToString());
// doc.SetProperty("%VERSION_ID", m_document.CurrentVersion.VersionID);
// doc.Fetch();
// if (doc.ErrNumber == 0)
// {
// doc.SetProperty(Application.SENDINFO_FIELD, fieldvalue);
// doc.Update();
// }
// else
// {
// throw new CannotUpdateSyncInfoException(m_document.ErrText);
// }*/
// m_document.Columns[Application.OTInstance.SENDINFO_FIELD].Value = fieldvalue;
// m_document.Save(0);
// if (m_document.ErrCode != 0)
// {
// throw new CannotUpdateSyncInfoException(m_document.ErrText);
// }
// Logger.Write("ManFile::UpdateSyncInfo() FINISH", Severity.Information);
//}
//catch (CannotUpdateSyncInfoException)
//{
// Logger.Write("ManFile::UpdateSyncInfo() CannotUpdate error", Severity.Information);
// throw;
//}
//catch (Exception ex)
//{
// Logger.Write("ManFile::UpdateSyncInfo() Exception error", Severity.Information);
// throw new CannotUpdateSyncInfoException(ex.Message, ex) ;
//}
}
public void RemoveTempFileIfNeed()
{
if (m_bNeedToCleanTemp && !string.IsNullOrEmpty(m_sTempCheckOutPath))
{
if (File.Exists(m_sTempCheckOutPath))
File.Delete(m_sTempCheckOutPath);
m_bNeedToCleanTemp = false;
m_sTempCheckOutPath = string.Empty;
}
}
public DateTime? CheckoutTime
{
get
{
Logger.Write("ManFile::CheckOutTime GET", Severity.Information);
try
{
DateTime? res = null;
if (CheckedOut)
{
var checkedout = _history.Where(x => x.Operate.Equals("Check-out", StringComparison.InvariantCultureIgnoreCase));
if (checkedout != null)
{
res = checkedout.Max(x => x.DateOp);
}
}
return res;
}
catch (Exception ex)
{
Logger.Write(ex, Severity.Verbose);
return null;
}
}
}
public ISyncInfo GetSyncInfo()
{
Logger.Write("ManFile::GetSyncInfo() START", Severity.Information);
m_document.Fetch();
var res = (OTSyncInfo)OTSyncInfo.Parse(m_document.Columns[Application.OTInstance.SENDINFO_FIELD].Value.ToString());
// TODO Remove that for NEW WORKFLOW
if ((!CheckedOutToUser) || (res != null && res.CheckOutDate == OTSyncInfo.NOT_CHECKED_OUT_DATE) || !CheckoutTime.HasValue || (res != null && res.CheckOutDate != CheckoutTime.Value.Ticks))
{
return null;
}
else
{
return res;
}
}
public IDMSItemID ID
{
get { return new ManItemID(m_document); }
}
string _name = null;
public string Name
{
get
{
if (_name == null)
{
string ext = Path.GetExtension(m_document.Name);
string def_ext = Path.GetExtension(m_document.CurrentVersion.FilePath);
if (string.IsNullOrEmpty(ext) || string.Compare(ext.ToLower(), def_ext.ToLower(), StringComparison.InvariantCultureIgnoreCase) != 0)
{
_name = m_document.Name + def_ext;
}
else
{
_name = m_document.Name;
}
}
return _name;
}
set
{
}
}
public string Description
{
get
{
return m_document.CurrentVersion.Comment;
}
set
{
m_document.CurrentVersion.Comment = value;
}
}
public bool DoesUserHavePermissions(Workshare.Integration.Enums.Permissions permissions)
{
/*switch (permissions)
{
case Integration.Enums.Permissions.EditItem: return (m_docuemnt.EffectiveAccess & imAccessRight.imRightReadWrite) == imAccessRight.imRightReadWrite;
}*/
return true;
}
public bool CheckedOutToUser
{
get
{
Logger.Write("ManFile::CheckedOutToUser GET", Severity.Information);
RefreshHistory();
var res = false;
if (CheckedOut)
{
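// The profile history is the source of truth here: take the most recent "Check-out" entry
// and compare the user who performed it with the current library user.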
var checkedout = _history.Where(x => x.Operate.Equals("Check-out"));
if (checkedout != null)
{
DateTime lastCheckOutDate = checkedout.Max(z => z.DateOp);
var users = _history.Where(x => x.Operate.Equals("Check-out") && x.DateOp == lastCheckOutDate);
string checkedOutUser = users.Select(x => x.User).FirstOrDefault();
Logger.Write("NamFile::CheckedOutToUser Checkeduser="+checkedOutUser+" library user="+m_document.Library.UserName, Severity.Information);
res = string.Compare(checkedOutUser,m_document.Library.UserName, StringComparison.InvariantCultureIgnoreCase) == 0;
}
}
return res;
}
}
public bool IsCheckedOutFileExists()
{
string checkedoutpath;
if (!string.IsNullOrEmpty(m_document.CheckoutPath))
{
checkedoutpath = m_document.CheckoutPath + Path.GetFileName(m_document.CurrentVersion.FilePath);
}
else
{
checkedoutpath = m_document.CurrentVersion.FilePath;
}
if (CheckedOut && !string.IsNullOrEmpty(checkedoutpath))
{
return File.Exists(checkedoutpath);
}
else
{
return false;
}
}
public bool CheckedOut
{
get
{
Logger.Write("ManFile::CheckedOut GET", Severity.Information);
m_document.Fetch();
return !(m_document.Columns["STATUS"].Value.ToString() == "0");
}
}
public void UpdateDisplayName(string name)
{
if (name != m_document.Columns["DOCNAME"].Value.ToString())
{
_name = Application.OTInstance.GetItemName(name);
m_document.Columns["DOCNAME"].Value = _name;
m_document.Save(0);
_name = null;
}
}
public void AddVersion(string filePath, WorksharePlatform.FileDetails file, List<WorksharePlatform.Activity> activities, bool checkInVersion = false)
{
Logger.Write("ManFile::Update START", Severity.Information);
string path = (string.IsNullOrEmpty(m_document.CheckoutPath)) ? m_document.CurrentVersion.FilePath : m_document.CheckoutPath + Path.GetFileName(m_document.CurrentVersion.FilePath);
try
{
Logger.Write("Trying to update checkouted file: " + path, Severity.Information);
if (!string.IsNullOrEmpty(path))
{
File.Copy(filePath, path, true);
var res = m_document.CreateVersionFromFile(0, path);
if (res == null)
{
throw new FileCannotBeCheckedInException("Error check in");
}
}
}
catch (FileNotUpdatedException)
{
throw;
}
catch (IOException ex)
{
Logger.Write("Failded to update checkouted file: " + path, Severity.Error);
if (ex.Message.Contains("The process cannot access the file"))
{
throw new FileNotUpdatedException(RES.STR_FILE_LOCKED_REASON, ex);
}
else
{
throw new FileNotUpdatedException(ex.Message, ex);
}
}
}
public bool WasUpdatedAfterSend(ISyncInfo syncInfo)
{
return syncInfo != null && !string.Equals(Modified.Ticks.ToString(), ((OTSyncInfo)syncInfo).Modified);
}
public DateTime EditDate
{
get
{
if (CheckedOut)
{
string localfilepath = string.Empty;
try
{
localfilepath = m_document.CheckoutPath+Path.GetFileName(m_document.CurrentVersion.FilePath);
var localfile = new FileInfo(localfilepath);
return localfile.LastWriteTime;
}
catch (IOException ex)
{
Logger.Write("Failded to access to checkouted file: " + localfilepath, Severity.Error);
throw new FileNotFoundException(ex.Message, ex);
}
}
else
{
return m_document.CurrentVersion.LastEditDate;
}
}
}
public string GetFilePath()
{
Logger.Write("OTFile::GetFilePath() START", Severity.Information);
string localCopy = string.Empty;
try
{
if (CheckedOut && !string.IsNullOrEmpty(m_document.CheckoutPath))
{
Logger.Write("NamFile::GetFile() Gettin checked out file", Severity.Information);
localCopy = m_document.CheckoutPath + Path.GetFileName(m_document.CurrentVersion.FilePath);
}
else
{
localCopy = m_document.CurrentVersion.FilePath;
Logger.Write("Trying to get copy of a file: " + localCopy, Severity.Information);
// download to make sure we have the latest version of the file
m_document.CurrentVersion.DownloadFile();
Logger.Write("Succeded: " + localCopy, Severity.Information);
}
Logger.Write("OTFile::GetFilePath() reading file data", Severity.Information);
return localCopy;
}
catch (IOException ex)
{
Logger.Write("Failded to access to checkouted file: " + localCopy, Severity.Error);
throw new LocalFileNotFound();
}
catch (Exception ex)
{
Logger.Write("Error due getting of copy: " + localCopy, Severity.CriticalError);
throw;
}
}
public string DisplayName
{
get
{
return Path.GetFileNameWithoutExtension(Name);
}
set
{
}
}
public IDMSFolder ParentFolder
{
get
{
if (m_Folder == null && m_document.ParentProjects.Count == 1)
{
m_Folder = m_document.ParentProjects[1];
}
if (m_Folder != null)
{
return new ManFolder(m_Folder);
}
return null; // this can happen when the file is located directly in the root library
}
}
public string Modifier
{
get
{
return m_document.CurrentVersion.Author;
}
}
public DateTime Modified
{
get
{
return m_document.CurrentVersion.LastEditDate;
}
}
const int INFINITY_DAYS = 10000;
public void OnBeforeSending(OperationContext context)
{
Logger.Write("ManFile::OnBeforeSending() START", Severity.Information);
if (!CheckedOut)
{
GetFileToCheckout();
}
else if (!CheckedOutToUser)
{
Logger.Write("ManFile::OnBeforeSending() File checked out to another user", Severity.Information);
throw new FileCheckoutedToAnotherUser(Workshare.Integration.Operations.Send);
}
else
{
wasAlreadyCheckedOut = true;
}
}
public void OnAfterSending(OperationContext context)
{
if (context.CheckOutOption == Workshare.Integration.Enums.CheckOutOptions.DontCheckOut && !wasAlreadyCheckedOut)
{
DiscardCheckout();
}
}
public void OnAfterSync(OperationContext args)
{
Logger.Write("ManFile::OnAfterSync() START", Severity.Information);
var si = (OTSyncInfo)OTSyncInfo.Parse(m_document.Columns[Application.OTInstance.SENDINFO_FIELD].Value.ToString());
UpdateSyncInfo(null, true, null,null);
if ((si == null || !si.WasAlreadyCheckedOut || !si.NeedToCheckedOut) && !WasUpdatedAfterSend(si))
{
DiscardCheckout();
m_bNeedToCleanTemp = true;
m_sTempCheckOutPath = m_document.CheckoutPath;
RemoveTempFileIfNeed();
}
}
public void OnAfterAdd(OperationContext args)
{
m_document.ReleaseDoc();
}
public void OnSendError(object args, Exception ex)
{
Logger.Write(ex, Severity.Error);
if (!wasAlreadyCheckedOut)
{
DiscardCheckout();
}
}
private void UpdateDocTypeIfNeed()
{
}
public void DiscardCheckout(bool deleteCheckOutFile = false)
{
Logger.Write("ManFile::DiscardCheckout() START", Severity.Information);
try
{
m_document.UnlockProfile(0, 0, "", 0);
if (deleteCheckOutFile)
FileUtils.SafeDelete(m_document.CheckoutPath);
}
catch
{
throw new FileUnlockFailedException();
}
}
public List<IDMSFolder> ParentFolders
{
get
{
var res = new List<IDMSFolder>();
foreach (IProject folder in m_document.ParentProjects)
{
res.Add(new ManFolder(folder));
}
return res;
}
}
public IDMSFile AddFile(WorksharePlatform.FileDetails file, out bool added, List<WorksharePlatform.Activity> activities, IEnumerable<string> versionIdsToSkip, bool useProfileDialog = false, ConflictVersionOptions SyncVerOption = ConflictVersionOptions.Latest, int StartVersion = 2, bool update = true)
{
Logger.Write("ManFolder::AddFile() START ", Severity.Information);
added = false;
try
{
Logger.Write("ManFolder::AddFile() useProfileDialog=" + useProfileDialog.ToString(), Severity.Information);
/*if (useProfileDialog)
{
var resadd = added;
Workshare.OpenText.Presenter.ModulePresenter.InvokeInSTAThreadIfNeed(() =>
{
resadd = DisplayImportDialog(file);
});
added = resadd;
return null;
}
else
{*/
IProfile doc = Application.OTInstance.CurrentLibrary.CreateProfile("DEF_PROF");
string FileType = Application.OTInstance.GetDocClassByFileExtension(Path.GetExtension(file.Name));
string AppType = Application.OTInstance.GetAppIDByFileExtension(Path.GetExtension(file.Name), Application.OTInstance.CurrentLibrary);
if (string.IsNullOrEmpty(AppType) && FileType == "IMG") // if noone else type for image file did not set - set the default image type
{
AppType = "DOCSIMAGE";
}
if (string.IsNullOrEmpty(AppType))
{
Logger.Write("ManFolder::AddFile() cannot detect AppType for file=" + useProfileDialog.ToString(), Severity.Information);
throw new FileNotCreatedException(string.Format(Properties.Resources.STR_CANNOT_ADD_NEW_FILE_WRONG_TYPE_ERROR_F, file.Name));
}
string FileName = Application.OTInstance.GetItemName(file.FriendlyName);
doc.Columns["TYPE_ID"].Value = FileType;
doc.Columns["DOCNAME"].Value = FileName;
doc.Columns["APP_ID"].Value = AppType;
doc.Columns["AUTHOR_ID"].Value = Application.OTInstance.CurrentUser;
doc.Columns["TYPIST_ID"].Value = Application.OTInstance.CurrentUser;
doc.Save(5);
if (doc.ErrCode != 0)
{
throw new FileNotCreatedException(string.Format(Properties.Resources.STR_CANNOT_ADD_NEW_FILE, FileName, doc.ErrText));
}
if (doc.Versions != null && doc.Versions.Count > 0)
{
var version = doc.Versions[1];
Logger.Write("Trying to upload a file into DMS: " + version.FilePath, Severity.Information);
File.Copy(file.FilePath, version.FilePath, true);
var Results = version.UploadFile();
version.UnlockVersion((short)tagVersionStatus.VS_NORMAL);
Logger.Write("Trying to upload a file into DMS: Succeded " + version.FilePath, Severity.Information);
}
//doc.ReleaseDoc();
if (m_Folder != null)
{
m_Folder.AddDocument(doc);
}
var createdFile = new OTFile(doc, m_Folder);
added = true;
Logger.Write("ManFolder::AddFile() COMPLETE ", Severity.Information);
return createdFile;
//}
}
catch (FileNotCreatedException)
{
Logger.Write("ManFolder::AddFile() File Not Created Exception", Severity.Information);
throw;
}
catch (Exception ex)
{
Logger.Write("ManFolder::AddFile() Exception=" + ex.Message, Severity.Information);
throw new FileNotCreatedException(ex.Message, ex);
}
}
public IDMSFile AddFile(WorksharePlatform.FileDetails file, List<WorksharePlatform.Activity> activities, IEnumerable<string> versionIdsToSkip, bool useProfileDialog = false, ConflictVersionOptions SyncVerOption = ConflictVersionOptions.Latest, int StartVersion = 2, bool update = true)
{
var a = true;
return AddFile(file, out a, activities,versionIdsToSkip, useProfileDialog, SyncVerOption,StartVersion, update);
}
private bool DisplayImportDialog(WorksharePlatform.FileDetails file)
{
return false;
}
public void AddHistory(string eventName, string eventComment, Workshare.Integration.Operations operation)
{
m_document.AddDocumentActivity(eventComment);
m_document.Save(0);
}
public void AddHistories(List<Activity> activities)
{
throw new NotImplementedException();
}
public void GetFileToCheckout()
{
Logger.Write("ManFile::OnBeforeSending() Try to check out file", Severity.Information);
m_document.CurrentVersion.CheckOut(DateTime.Now.AddDays(INFINITY_DAYS), "Workshare automatic Check-out", Path.GetTempPath());
}
public void OnBeforeSync(OperationContext args)
{
}
public void AddHistories(List<WorksharePlatform.Activity> activities, int version)
{
}
public void RelateDocument(IDMSFile file, string comments)
{
//TODO
}
public void GetFileToCheckoutLatest()
{
}
public void UpdateSyncInfo2(WorksharePlatform.FileDetails fileDetailes, string lastActivity)
{
}
public bool WasUpdatedAfterSend2(bool useForceRequest = true)
{
return false;
}
public int DMSId
{
get { return -1; }
}
public IDMSFolder RootFolder()
{
return null;
}
IDmsVersion AddVersion(string filePath, WorksharePlatform.FileDetails file, List<WorksharePlatform.Activity> activities, string versionFriendlyName = null)
{
throw new NotImplementedException();
}
public IEnumerable<IDmsVersion> GetVersions()
{
throw new NotImplementedException();
}
public void CanUpdateProfileOrThrow()
{
throw new NotImplementedException();
}
public bool IsFileLocked()
{
throw new NotImplementedException();
}
public bool IsLocalFileWasEdited()
{
throw new NotImplementedException();
}
public IDmsVersion CheckInFile(bool keepCheckOut)
{
throw new NotImplementedException();
}
public IDMSFile GetLatest()
{
throw new NotImplementedException();
}
public string DMSItemKey
{
get { return ""; }
}
}
}
<file_sep>/WSComponents/src/WSCloudService/FileComment.cs
using System;
namespace WorksharePlatform
{
public struct Positional
{
public int x;
public int y;
public int width;
public int height;
}
public class FileComment
{
public int Id { get; set; }
public DateTime CreatedAt { get; set; }
public int FileId { get; set; }
public int PageNumber { get; set; }
public int FileVersion { get; set; }
public string Body { get; set; }
public int ReplyToId { get; set; }
public string UserName { get; set; }
public Positional Positional { get; set; }
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/DmsItems/BaseDMSFile.cs
namespace Workshare.Integration.Processor
{
public abstract class BaseDMSFile : BaseDMSItem
{
//TODO remove
}
}
<file_sep>/SharePoint/src/WorkshareCloud.Common/CloudAuthenication.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using WorksharePlatform;
using System.Web;
using System.Net;
namespace WorkshareCloud.Common
{
public static class CloudAuthenication
{
public static void CheckIfAuth(UserDetails user)
{
if (!(user != null && (!string.IsNullOrEmpty(user.AuthToken) || (user.SessionCookies != null && user.SessionCookies.Count > 1))))
{
throw new CloudUnAuthorized();
}
}
static public void Login(UserDetails user)
{
PlatformService.Login(user);
Update(user);
// AuthCurrentUser(user);
}
//static void AuthCurrentUser(UserDetails userDetails)
//{
// var responce = HttpContext.Current.Response;
// foreach (Cookie cookie in userDetails.SessionCookies.GetCookies(new Uri(userDetails.ServiceUrl)))
// {
// responce.Cookies.Add(new HttpCookie(cookie.Name, cookie.Value));
// }
// responce.Cookies.Add(new HttpCookie(cloudemail, userDetails.Email));
//}
//static void RemoveAuth()
//{
// var responce = HttpContext.Current.Response;
// responce.Cookies.Add(new HttpCookie(cloudemail, string.Empty) { Expires = DateTime.Now.AddDays(-10) });
// responce.Cookies.Add(new HttpCookie(user_credentials, string.Empty) { Expires = DateTime.Now.AddDays(-10) });
// responce.Cookies.Add(new HttpCookie(_session_id, string.Empty) { Expires = DateTime.Now.AddDays(-10) });
//}
//static void RemoveAuth()
//{
// var responce = HttpContext.Current.Response;
// responce.Cookies.Add(new HttpCookie(CookieAwareClient.TMP_AUTH_TOKEN, string.Empty) { Expires = DateTime.Now.AddDays(-10) });
// responce.Cookies.Add(new HttpCookie(cloudemail, string.Empty) { Expires = DateTime.Now.AddDays(-10) });
// responce.Cookies.Add(new HttpCookie(user_credentials, string.Empty) { Expires = DateTime.Now.AddDays(-10) });
// responce.Cookies.Add(new HttpCookie(_session_id, string.Empty) { Expires = DateTime.Now.AddDays(-10) });
//}
//public static void SetAuth(string auth_token)
//{
// var responce = HttpContext.Current.Response;
// responce.Cookies.Add(new HttpCookie(CookieAwareClient.TMP_AUTH_TOKEN, auth_token) { Expires = DateTime.Now.AddHours(1) });
//}
//public static UserDetails GetCurrentUser()
//{
// var userCookies = HttpContext.Current.Request.Cookies;
// if (userCookies.AllKeys.Contains(user_credentials) && userCookies.AllKeys.Contains(_session_id))
// {
// var user = new UserDetails()
// {
// SessionCookies = new CookieContainer()
// };
// user.SessionCookies.Add(new Cookie(userCookies[user_credentials].Name, userCookies[user_credentials].Value, "/", user.Host));
// user.SessionCookies.Add(new Cookie(userCookies[_session_id].Name, userCookies[_session_id].Value, "/", user.Host));
// user.Email = userCookies[cloudemail].Value;
// return user;
// }
// else
// {
// return null;
// }
//}
/// <summary>
/// get current user from cookies
/// </summary>
/// <returns></returns>
public static UserDetails GetCurrentUser()
{
UserDetails user = new UserDetails();
var userCookies = HttpContext.Current.Request.Cookies;
if (userCookies.AllKeys.Contains(CookieAwareClient.TmpAuthToken) && !string.IsNullOrEmpty(userCookies[CookieAwareClient.TmpAuthToken].Value))
{
user = new UserDetails()
{
AuthToken = userCookies[CookieAwareClient.TmpAuthToken].Value,
SessionCookies = new CookieContainer()
};
}
if (userCookies.AllKeys.Contains(CookieAwareClient.user_credentials) && userCookies.AllKeys.Contains(CookieAwareClient._session_id))
{
user = new UserDetails()
{
SessionCookies = new CookieContainer()
};
user.SessionCookies.Add(new Cookie(userCookies[CookieAwareClient.user_credentials].Name, userCookies[CookieAwareClient.user_credentials].Value, "/", user.Host));
user.SessionCookies.Add(new Cookie(userCookies[CookieAwareClient._session_id].Name, userCookies[CookieAwareClient._session_id].Value, "/", user.Host));
}
return user;
}
public static void Logout()
{
//PlatformService.Logout();//TODO
// RemoveAuth();
Update(null);
}
/// <summary>
/// update cookies for current user
/// </summary>
/// <param name="user"></param>
public static void Update(UserDetails user)
{
var response = HttpContext.Current.Response;
if (user == null || string.IsNullOrEmpty(user.AuthToken))
{
response.Cookies.Add(new HttpCookie(CookieAwareClient.TmpAuthToken, string.Empty) { Expires = DateTime.Now.AddDays(-10) });
}
else
{
if (response.Cookies[CookieAwareClient.TmpAuthToken] == null || !string.Equals(response.Cookies[CookieAwareClient.TmpAuthToken].Value, user.AuthToken))
{
response.Cookies.Add(new HttpCookie(CookieAwareClient.TmpAuthToken, user.AuthToken) { Expires = DateTime.Now.AddMinutes(55) });
}
}
CookieCollection cookies = null;
if (user != null && user.SessionCookies != null)
{
cookies = user.SessionCookies.GetCookies(PlatformService.HostWithSchema);
}
if (!HasCookie(cookies, CookieAwareClient.user_credentials) || !HasCookie(cookies, CookieAwareClient._session_id))
{
response.Cookies.Add(new HttpCookie(CookieAwareClient.cloudemail, string.Empty) { Expires = DateTime.Now.AddDays(-10) });
response.Cookies.Add(new HttpCookie(CookieAwareClient.user_credentials, string.Empty) { Expires = DateTime.Now.AddDays(-10) });
response.Cookies.Add(new HttpCookie(CookieAwareClient._session_id, string.Empty) { Expires = DateTime.Now.AddDays(-10) });
}
else
{
response.Cookies.Add(new HttpCookie(CookieAwareClient.cloudemail, user.Email));
response.Cookies.Add(new HttpCookie(CookieAwareClient.user_credentials, cookies[CookieAwareClient.user_credentials].Value));
response.Cookies.Add(new HttpCookie(CookieAwareClient._session_id, cookies[CookieAwareClient._session_id].Value));
}
}
}
static bool HasCookie(CookieCollection col, string cookieName)
{
return col!=null&&col[cookieName] != null && !string.IsNullOrEmpty(col[cookieName].Value);
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/Progress/VMs/ProgressItemVm.cs
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Text;
using Workshare.Components.Views.Common;
namespace Workshare.Components.Views.Progress.VMs
{
public class ProgressItemVm:BasePropertyChanged<ProgressItemVm>
{
public string Title { set; get; }
public ObservableCollection<TargetItemVm> TargetItems { set; get; }
public StateEnum State { set; get; }
public ProgressItemVm()
{
TargetItems = new ObservableCollection<TargetItemVm>();
}
public string Name { get; set; }
public string TargetItemsToolTip
{
get
{
var itemsText = "Items:";
foreach (var targetItemVm in TargetItems)
{
itemsText += Environment.NewLine + targetItemVm.Name;
}
return itemsText;
}
}
}
public enum StateEnum
{
Loading,
Success,
Error
}
}
<file_sep>/OpenText/src/Workshare.OpenText/WorkshareIntegration.cs
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Net;
using Workshare.Integration;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Extensions;
using Workshare.Integration.Interfaces;
using WorksharePlatform;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.OpenText.Integration
{
public class WorkshareIntegration : WSIntegrationBase
{
public WorkshareIntegration(IAuthProvider authProvider)
: base(authProvider,null)
{
}
//protected override List<int> AddNewFileToFolder(IDMSFolder folder, UserDetails user, SyncOptions options, OperationContext context, ItemsErrorsCollection errorlist, List<FileDetails> filesOnCloud, List<int> localFiles)
//{
// List<int> addedFiles = new List<int>();
// var processedFiles = folder.GetSendDataInfo();
// foreach (var fileID in filesOnCloud.Select(x => x.Id).Except(localFiles))
// {
// if (!processedFiles.Contains(fileID))
// {
// SafeFuncCall(errorlist, folder, new ExceptionListSignal() { IsAll = true, IsWeb = true, FileError = true }, Operations.Sync, () =>
// {
// FileDetails cloudFile = filesOnCloud.Where(x => x.Id == fileID).First();
// PlatformService.GetVersionDetails(user, cloudFile);
// cloudFile.Data = PlatformService.DownloadFile(user, fileID);
// IDMSFile newFile = folder.AddFile(cloudFile,null);
// if (newFile != null)
// {
// newFile.Description = cloudFile.FriendlyName;
// newFile.AddHistory("",RES.STR_HISTORY_ADDFILETOFOLDER, Operations.Sync);
// }
// addedFiles.Add(cloudFile.Id);
// });
// }
// }
// return addedFiles;
//}
//protected override void UpdateLocalFileName(IDMSFile localFile, FileDetails wsFile)
//{
// localFile.UpdateDisplayName(wsFile.FriendlyName);
//}
//protected override void LocalFileNoMatter_wsFileWasChanged_KeepBothFiles(UserDetails user, IDMSFile localFile, FileDetails wsFile, OperationContext context, ISyncInfo syncInfo, ConflictVersionOptions SyncVerOption)
//{
// if (localFile.ParentFolder != null)
// {
// // add new copy of the file to OpenText
// wsFile.Data = PlatformService.DownloadFile(user, wsFile.Id);
// bool newFileIsAdded = false;
// IDMSFile newFile = ((IDMSFolder)localFile.ParentFolder).AddFile(wsFile,out newFileIsAdded,null/*, true*/);// NOTE: display Profile dialog hire..
// if (newFileIsAdded)
// {
// if (newFile != null)
// {
// newFile.AddHistory("", string.Format(RES.STR_HISTORY_SAVEDFROM, localFile.Name), Operations.Sync);
// }
// UpdateLocalFileName(localFile, wsFile);
// localFile.AddHistory("", string.Format(RES.STR_HISTORY_SAVEDNEW, newFile.Name), Operations.Sync);
// // if local file was changed then we check in changes, otherwise we discard check out for file
// if (localFile.WasUpdatedAfterSend(syncInfo))
// {
// localFile.OnAfterSync(context);
// }
// else
// {
// localFile.UpdateSyncInfo(null, true, null,null);
// }
// }
// }
//}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Strategies/ActionStrategy/CeaseCollaborationStrategy.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.Properties;
using Workshare.Integration.WsProxies;
using WorksharePlatform;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.Integration.Processor.Strategies.ActionStrategy
{
public class CeaseCollaborationStrategy : ActionStrategy
{
readonly WsProxy _wsProxy;
readonly IAuthProvider _auth;
readonly DmsWorkerBase _dmsWorker;
readonly SyncInfoService _syncInfoService;
public CeaseCollaborationStrategy(WsProxy wsProxy, IAuthProvider auth, DmsWorkerBase dmsWorker, SyncInfoService syncInfoService)
: base(syncInfoService)
{
this._wsProxy = wsProxy;
this._auth = auth;
this._dmsWorker = dmsWorker;
this._syncInfoService = syncInfoService;
}
public override ProcessResult Process(Maps.FileMap fileMap, ItemMapActivityAction action)
{
try
{
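// Optionally snapshot the Workshare file locally as PDF (with or without comments),
// optionally delete it on Workshare, then unlock the DMS file and break the sync link.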
CeaseCollaborationAction _action = action as CeaseCollaborationAction;
if (fileMap.HasRemote())
{
_syncInfoService.GetSyncInfo(fileMap);
if (_action.ImportAction == CeaseCollaborationImportType.WithComments)
{
ImportAsNewFile(fileMap.WsFile, fileMap.LocalFile.ParentFolder, fileMap.LocalFile, true);
}
else if (_action.ImportAction == CeaseCollaborationImportType.WithoutComments)
{
ImportAsNewFile(fileMap.WsFile, fileMap.LocalFile.ParentFolder, fileMap.LocalFile, false);
}
if (_action.FileAction == CeaseCollaborationType.Delete)
{
WsUser wsUser = _auth.GetCurrentWSUser2();
_wsProxy.DeleteFile(wsUser, fileMap.WsFile.Id);
fileMap.LocalFile.AddHistory(string.Empty, string.Format(RES.STR_HISTORY_ITEMDELETED, wsUser.UserName), Operations.Deleted);
}
}
_dmsWorker.UnlockIfNotEdited(fileMap.LocalFile);
_syncInfoService.BreakLink(fileMap.LocalFile);
return ProcessResult.Processed;
}
catch (WebException ex)
{
if (ex.IsCloudFolderAccessDenied())
{
throw new CloudFolderAccessDenied(Resources.STR_UNABLE_SYNC, Resources.STR_UNABLESYNCFILE_TEXT, ex);
}
throw;
}
}
public override ProcessResult Process(Maps.FolderMap folderMap, ItemMapActivityAction action)
{
try
{
CeaseCollaborationAction _action = action as CeaseCollaborationAction;
if (folderMap.HasRemote())
{
if (_action.FileAction == CeaseCollaborationType.Delete)
{
_wsProxy.DeleteFolder(_auth.GetCurrentWSUser2(), folderMap.WsFolder.Id);
}
}
_syncInfoService.BreakLink(folderMap.LocalFolder);
return ProcessResult.Processed;
}
catch (WebException ex)
{
if (ex.IsStatusCode(HttpStatusCode.Forbidden))
{
throw new CloudFolderAccessDenied(Resources.STR_UNABLE_SYNC, Resources.STR_UNABLESYNCFILE_TEXT, ex);
}
throw;
}
}
private IDMSFile ImportAsNewFile(WsFile wsFile, IDMSFolder dmsFolder, IDMSFile basedOnFile, bool withComments)
{
var versionsToCreate = new List<CreateversionInfo>();
foreach (var ver in wsFile.Versions.OrderBy(x => x.Version).ToArray())
{
var activitiesForCurrentVersion = new List<Activity>();
versionsToCreate.Add(new CreateversionInfo
{
Activities = activitiesForCurrentVersion,
file = wsFile,
version = ver,
FriendlyName = wsFile.FriendlyName,
FilePath = withComments ? ver.DownloadFileVersionInPdfWithComment((string)wsFile.DownloadPassword) : ver.DownloadFileVersionInPdf()
});
}
var options = new NewFileOptions()
{
WsFile = wsFile,
ParentFolder = dmsFolder,
BasedOnFile = basedOnFile,
VersionList = versionsToCreate
};
_dmsWorker.CanImportNewFileOrThrow(options);
_dmsWorker.ImportNewFile(options);
return options.CreatedFile;
}
}
}
<file_sep>/OpenText/src/Workshare.OpenText.Setup.CustomAction/CustomActions.cs
using Microsoft.Deployment.WindowsInstaller;
using System;
using System.Reflection;
namespace Workshare.OpenText
{
public class CustomActions
{
private static ActionResult ExecuteAction(Func<ActionResult> action, string methodName, Session session)
{
#if MESSAGES
MessageBox.Show(string.Format("Method {0} STARTED", methodName));
#endif
session.Log(string.Format("Method {0} STARTED", methodName));
try
{
return action();
}
catch (Exception ex)
{
#if MESSAGES
MessageBox.Show(ex.ToString(), string.Format("{0} : {1}", methodName, ex.Message));
#endif
session.Log(string.Format("Method {0} caused Exception\n{1}\n{2}", methodName, ex.Message, ex.ToString()));
}
finally
{
session.Log(string.Format("Method {0} FINISHED", methodName));
}
return ActionResult.Failure;
}
[CustomAction]
public static ActionResult RegisterMenusAction(Session session)
{
return ExecuteAction(() =>
{
Registering.RegisterMenus();
return ActionResult.Success;
}, MethodBase.GetCurrentMethod().Name, session);
}
[CustomAction]
public static ActionResult UnRegisterMenusAction(Session session)
{
return ExecuteAction(() =>
{
//Registering.UnRegisterMenus();
return ActionResult.Success;
}, MethodBase.GetCurrentMethod().Name, session);
}
}
}
<file_sep>/WSComponents/src/WSIntegration/WsProxies/WsFile.cs
using System;
using System.Collections.Generic;
using System.Linq;
using WorksharePlatform;
namespace Workshare.Integration.WsProxies
{
/// <summary>
/// Should load data on demand from server
/// </summary>
public class WsFile
{
public FileDetails file;
UserDetails user;
public WsFile(FileDetails file, UserDetails user)
{
if (file == null) throw new ArgumentNullException("file");
if (user == null) throw new ArgumentNullException("user");
this.file = file;
this.user=user;
}
public int FolderId { get { return file.FolderId; } }
public int Id { get { return file.Id; } }
public string RemoteUrl { get { return file.RemoteUrl; } }
public object DownloadPassword { get { return file.DownloadPassword; } }
public string FriendlyName { get { return file.FriendlyName; } }
public string Name { get { return file.Name; } }
public WsUser Creator { get { return new WsUser(file.Creator); } }
public DateTime CreatedAt { get { return file.CreatedAt; } }
public DateTime UpdateDate { get { return file.UpdateDate; } }
IEnumerable<WsVersion> _Versions;
public IEnumerable<WsVersion> Versions
{
get
{
if(_Versions==null)
{
_Versions=PlatformService.GetVersionDetails(user,file).Select(a=>new WsVersion(a,user));
}
return _Versions;
}
internal set
{
_Versions = value;
}
}
public WsVersion CurrentVersion
{
get
{
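// The current version is the one with the highest version number; it is also stored back
// on the underlying FileDetails.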
var versions = Versions;
var maxVersionId = versions.Max(p => p.Version);
var res= versions.First(p => p.Version == maxVersionId);
file.CurrentVersion = res.version;
return res;
}
}
public bool IsDeleted {
get { return file.IsDeleted; }
}
}
}
<file_sep>/WSComponents/src/WSIntegration/SettingsStorage/SettingsStorage.cs
using Microsoft.Win32;
using Workshare.Components.WSLogger;
namespace Workshare.Integration.SettingsStorage
{
public class SettingsStorage
{
private const string RegKey = @"Software\Workshare\IManage\Settings";
public void SaveOption(string name, string value)
{
Logger.WriteTrace(string.Format("Saving {0}={1}", name, value));
using (var reg=Registry.CurrentUser.CreateSubKey(RegKey))
{
if (reg != null) reg.SetValue(name,value);
}
}
public string GetString(string name, string defaultvalue=null)
{
using (var reg = Registry.CurrentUser.OpenSubKey(RegKey))
{
if (reg != null)
{
return reg.GetValue(name) as string;
}
}
return defaultvalue;
}
public void SaveOption(string name, int value)
{
Logger.WriteTrace(string.Format("Saving {0}={1}", name, value));
using (var reg = Registry.CurrentUser.CreateSubKey(RegKey))
{
if (reg != null) reg.SetValue(name, value,RegistryValueKind.DWord);
}
}
public int GetInt(string name, int defaultvalue = 0)
{
using (var reg = Registry.CurrentUser.OpenSubKey(RegKey))
{
if (reg != null)
{
return (int) reg.GetValue(name, defaultvalue);
}
}
return defaultvalue;
}
}
}
<file_sep>/OpenText/src/Workshare.OpenText/Presenter/ModulePresenter.cs
using Workshare.Components;
using Workshare.Components.Interfaces;
using Workshare.Components.Presenter;
namespace Workshare.OpenText.Presenter
{
public class ModulePresenter :ModulePresenterBase
{
public ModulePresenter(IModuleView view) : base(view)
{
}
public override void OnSyncItemsClicked(SyncItemsClickedArgs args)
{
throw new System.NotImplementedException();
}
}
}
<file_sep>/WSComponents/src/WSComponents/Common/CheckInDialogResult.cs
using Workshare.Integration.Enums;
namespace Workshare.Components.Concrete
{
public class CheckinDlgResult
{
public bool ApplyToAll { set; get; }
public ConflictOptions option { set; get; }
public ConflictVersionOptions versions { get; set; }
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Maps/FileMap.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Workshare.Integration.Interfaces;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration.Processor.Maps
{
public class FileMap : ItemMap
{
public IDMSFile LocalFile { set; get; }
public WsFile WsFile { set; get; }
/// <summary>
/// If a file was sent on its own to a folder other than its parent, this property is set to the folder that actually contains it.
/// </summary>
public FolderMap IndirectParent { set; get; }
public override void Apply(ItemMapVisitor visitor)
{
if (visitor.VisitEnter(this))
{
foreach (var item in this.Maps)
{
item.Apply(visitor);
}
visitor.Visit(this);
}
visitor.VisitLeave(this);
}
public static string GetMapId(FileMap map)
{
if (map.LocalFile == null)
{
return map.WsFile == null ? string.Format("localfile:{0};wsfile:{1}", -1, -1) : string.Format("localfile:{0};wsfile:{1}", -1, map.WsFile.Id);
}
if (map.WsFile == null)
{
return string.Format("localfile:{0};wsfile:{1}", map.LocalFile.ID, -1);
}
return string.Format("localfile:{0};wsfile:{1}", map.LocalFile.ID, map.WsFile.Id);
}
public override string GetId()
{
return GetMapId(this);
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/ConflictResolver/ConflictResolverViewModel.cs
using System;
using System.Windows.Input;
using System.Windows.Media;
using Workshare.Components.Concrete;
using Workshare.Components.Views.Common;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using RES = Workshare.Components.Properties.Resources;
using RES_INT = Workshare.Integration.Properties.Resources;
namespace Workshare.Components.Views.ConflictResolver
{
class ConflictResolverViewModel : OwnViewModel
{
public ICommand ReplaceCommand { set; get; }
public ICommand KeepBothCommand { set; get; }
public ICommand CancelCommand { set; get; }
public ConflictResolverViewModel()
{
WsData = new ConflictItemDataViewModel(RES_INT.STR_CONFLICT_DLG_WORKSHAREFILE_TITLE,new ConflictItemData()
{
Date=DateTime.Now,
Modifier="ModifierValue",
Name="testDoc.docx"
});
ManData = new ConflictItemDataViewModel("Custom DMS",new ConflictItemData()
{
Date = DateTime.Now,
Modifier = "Man ModifierValue",
Name = "Man testDoc.docx"
});
}
public ConflictResolverViewModel(ConflictItemData wsFile, ConflictItemData localFile, string DMSTitle)
{
WsData = new ConflictItemDataViewModel(RES_INT.STR_CONFLICT_DLG_WORKSHAREFILE_TITLE, wsFile);
ManData = new ConflictItemDataViewModel(DMSTitle, localFile);
CancelCommand = new RelayCommand((p) =>
{
Close(ConflictOptions.None, false);
});
ReplaceCommand = new RelayCommand((p) =>
{
Close(ConflictOptions.Replace, true);
});
KeepBothCommand = new RelayCommand((p) =>
{
Close(ConflictOptions.KeepBoth, true);
});
}
void Close(ConflictOptions result, bool? dialogResult)
{
Result = result;
RaiseClose(dialogResult);
}
public ConflictOptions Result { set; get; }
public ConflictItemDataViewModel WsData { set; get; }
public ConflictItemDataViewModel ManData { set; get; }
ImageSource m_ArrowImage = null;
public ImageSource ArrowImage
{
get
{
if (m_ArrowImage == null)
{
m_ArrowImage = Utils.Convert(RES.conflict_arrow, System.Drawing.Imaging.ImageFormat.Png);
}
return m_ArrowImage;
}
}
}
class ConflictItemDataViewModel
{
ConflictItemData m_Data;
public ConflictItemDataViewModel(string title, ConflictItemData data)
{
m_Data = data;
this.Title = title;
}
public string Title { get; private set; }
public string Name { get { return m_Data.Name; } }
public string Modifier { get { return m_Data.Modifier; } }
public string Date { get { return m_Data.Date.ToString(); } }
}
}
<file_sep>/SharePoint/src/WorksharePointCloud/Features/WorkshareCloudIntegration/WorkshareCloudIntegration.EventReceiver.cs
using System;
using System.Runtime.InteropServices;
using System.Security.Permissions;
using Microsoft.SharePoint;
using Microsoft.SharePoint.Security;
using WorkshareCloud.Common;
using System.Linq;
using WorkshareCloud.Common.Receivers;
namespace WorkshareCloud.Features.WorkshareCloudIntegration
{
/// <summary>
/// This class handles events raised during feature activation, deactivation, installation, uninstallation, and upgrade.
/// </summary>
/// <remarks>
/// The GUID attached to this class may be used during packaging and should not be modified.
/// </remarks>
[Guid("ebd7b255-328c-4217-8495-f37b42ef6d4c")]
public class WorkshareCloudIntegrationEventReceiver : SPFeatureReceiver
{
// Handles the event raised after the feature has been activated.
public override void FeatureActivated(SPFeatureReceiverProperties properties)
{
WorkshareCloud.Common.Logging.Logger.WriteTrace("Feature Activated", Microsoft.SharePoint.Administration.TraceSeverity.Verbose, Common.Logging.Category.CloudService);
}
public override void FeatureDeactivating(SPFeatureReceiverProperties properties)
{
var FieldName = CloudPathFieldValue.CloudField;
using (SPWeb v = (SPWeb)properties.Feature.Parent)
{
SPList l;
for (int i = 0; i < v.Lists.Count; i++)
{
try
{
l = v.Lists[i];
if (l is SPDocumentLibrary && l.Hidden == false)
{
if (l.Fields.ContainsField(FieldName))
{
l.Fields.Delete(FieldName);
l.Update();
}
for (int j = 0; j < l.Views.Count; j++)
{
try
{
var view = l.Views[j];
if (view.ViewFields.Exists(FieldName))
{
view.ViewFields.Delete(FieldName);
// view.XslLink = "";
view.Update();
}
}
catch { }
}
l.EventReceivers.OfType<SPEventReceiverDefinition>().Where(p => string.Equals(p.Class, typeof(WorkshareReceiver).FullName)).ToList().ForEach(receiver => receiver.Delete());
}
}
catch { }
}
};
}
// Uncomment the method below to handle the event raised after a feature has been installed.
//public override void FeatureInstalled(SPFeatureReceiverProperties properties)
//{
//}
// Uncomment the method below to handle the event raised before a feature is uninstalled.
//public override void FeatureUninstalling(SPFeatureReceiverProperties properties)
//{
//}
// Uncomment the method below to handle the event raised when a feature is upgrading.
//public override void FeatureUpgrading(SPFeatureReceiverProperties properties, string upgradeActionName, System.Collections.Generic.IDictionary<string, string> parameters)
//{
//}
}
}
<file_sep>/WSComponents/src/WSComponents/Exceptions/FileNotUpdatedException.cs
using System;
using Workshare.Integration.Exceptions;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.Components.Exceptions
{
public class FileNotUpdatedException : BaseException
{
public FileNotUpdatedException(string reasonMessage)
: base(RES.STR_UNABLE_SYNC, reasonMessage)
{
}
public FileNotUpdatedException(string reasonMessage, Exception inner)
: base(RES.STR_UNABLE_SYNC, string.Format(RES.STR_FILE_CANNOT_BE_UPDATED, reasonMessage), inner)
{
}
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/Contrete/ManFile.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using Com.Interwoven.WorkSite.iManage;
using Workshare.Components.Exceptions;
using Workshare.Components.Helpers;
using Workshare.Components.WSLogger;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Extensions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.WsProxies;
using WorksharePlatform;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.IManage.Contrete
{
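/// <summary>
/// Wraps an iManage document (IManDocument) and exposes it through the IDMSFile abstraction:
/// check-out/check-in, versioning, history entries and sync-related metadata.
/// </summary>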
class ManFile : BaseDMSFile, IDMSFile
{
internal IManDocument m_document = null;
IManFolder m_Folder = null;
IManDocumentType newdoctype = null;
bool wasAlreadyCheckedOut = false;
bool m_bNeedToCleanTemp = false;
string m_sTempCheckOutPath = "";
public IDMSFile GetLatest()
{
try
{
m_document.Refresh();
}
catch(Exception ex)
{
Logger.Write(ex, Severity.Verbose);
throw new OfflineException();
}
var l = m_document.LatestVersion;
return new ManFile(l, m_Folder);
}
public ManFile(IManDocument doc)
{
m_document = doc;
}
public ManFile(IManDocument doc, IManFolder folder)
: this(doc)
{
m_Folder = folder;
}
public void RelateDocument(IDMSFile file, string comments)
{
var manFile = (ManFile)file;
m_document.RelatedDocuments.AddRelation(manFile.m_document.Number, comments);
}
public IDMSItemID ID
{
get { return new ManItemID(m_document); }
}
public void RemoveTempFileIfNeed()
{
if (m_bNeedToCleanTemp && !string.IsNullOrEmpty(m_sTempCheckOutPath))
{
if (File.Exists(m_sTempCheckOutPath))
File.Delete(m_sTempCheckOutPath);
m_bNeedToCleanTemp = false;
m_sTempCheckOutPath = string.Empty;
}
}
public DateTime? CheckoutTime
{
get
{
try
{
if (m_document.CheckedOut)
{
return m_document.CheckoutTime;
}
else
{
return null;
}
}
catch (Exception ex)
{
Logger.Write(ex, Severity.Verbose);
return null;
}
}
set{}
}
string _name = null;
public string Name
{
get
{
if (_name == null)
{
_name = (string.IsNullOrEmpty(m_document.LatestVersion.Name)) ? m_document.LatestVersion.Description : m_document.LatestVersion.Name;
string ext = FileUtils.GetExtension(_name);
string iManExt = "."+m_document.LatestVersion.Extension.ToLower();
if (string.IsNullOrEmpty(ext) || string.Compare(ext.ToLower(), iManExt.ToLower(), StringComparison.InvariantCultureIgnoreCase) != 0)
{
_name = _name + iManExt;
}
}
return _name;
}
set
{
}
}
public bool DoesUserHavePermissions(imAccessRight permissions)
{
return (m_document.EffectiveAccess & permissions) == permissions;
}
public bool DoesUserHavePermissions(Workshare.Integration.Enums.Permissions permissions)
{
switch (permissions)
{
case Workshare.Integration.Enums.Permissions.EditItem: return (m_document.EffectiveAccess & imAccessRight.imRightReadWrite) == imAccessRight.imRightReadWrite;
}
return true;
}
public bool CheckedOutToUser
{
get
{
var user = m_document.InUseBy;
if (user != null)
{
return user.Name == m_document.Database.Session.UserID;
}
return false;
}
}
public string CheckoutPath { get { return m_document.CheckoutPath; } }
public bool CheckedOut
{
get { return m_document.CheckedOut; }
}
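/// <summary>
/// Checks the document out if it is not already, checks the supplied file in as a new version,
/// writes Workshare history entries, and re-checks the document out afterwards if it was checked out before the call.
/// </summary>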
public IDmsVersion AddVersion(string filePath, FileDetails file, List<Activity> activities, string versionFriendlyName = null, bool checkInVersion = false, bool keepLocalState = false)
{
Logger.Write("ADD VERSION: Start Add version", Severity.Information);
string tmp_filename = (file.CurrentVersion.Name.Length < 15) ? file.CurrentVersion.Name : FileUtils.GetFileNameWithoutExtension(file.CurrentVersion.Name).Substring(0, 8) + "." + FileUtils.GetExtension(file.CurrentVersion.Name);
bool wasCheckedOut = true;
if (!m_document.CheckedOut)
{
m_document.CheckOut(Path.GetTempFileName(), imCheckOutOptions.imReplaceExistingFile, DateTime.Now.AddDays(INFINITY_DAYS), "Workshare adding new versions");
wasCheckedOut = false;
}
var oldCheckOutPath = m_document.CheckoutPath;
var oldCheckOutComment = m_document.CheckoutComment;
if ((newdoctype = m_document.Database.GetDocumentTypeFromPath(filePath)) == null)
{
if (m_document.Type != null)
{
newdoctype = m_document.Type;
}
else
{
Logger.Write("ADD VERSION: Cannot get new doc type", Severity.Error);
throw new FileNotUpdatedException(string.Format(Properties.Resources.STR_CANNOT_ADD_NEW_FILE_WRONG_TYPE_ERROR_F, file.Name));
}
}
UpdateDocTypeIfNeed();
Logger.Write("ADD VERSION: Try check in with result", Severity.Information);
file.FilePath = Path.GetTempPath() + tmp_filename;
File.Copy(filePath, file.FilePath, true);
FileUtils.SafeDelete(filePath);
IManCheckinResult res = m_document.CheckInWithResults(file.FilePath, imCheckinDisposition.imCheckinNewVersion, imCheckinOptions.imDontKeepCheckedOut);
if (!res.Succeeded)
{
var resErr = new StringBuilder();
resErr.AppendFormat("ErrorCode : {0}. ErrorMessage: {1}", res.ErrorCode, res.ErrorMessage);
foreach (IManProfileError manProfileError in res.ErrorList)
{
resErr.Append(Environment.NewLine);
resErr.AppendFormat("{0} : {1}", manProfileError.AttribID, manProfileError.Description);
}
Logger.Write("ManFile::AddVersion(): CheckInWithResults return error: " + resErr, Severity.Information);
throw new FileCannotBeCheckedInException(res.ErrorMessage);
}
var checkedInVersion = res.Result;
//add history to current version
if (activities != null)
{
var last = activities.LastOrDefault(p => p.Noun == "Comment");
foreach (var activity in activities)
{
if (activity.Noun == "Comment" && activity != last)
continue;
var comment = activity.GetComment();
if (!string.IsNullOrEmpty(comment))
res.Result.HistoryList.Add(activity.HistoryModify ? imHistEvent.imHistoryModify : imHistEvent.imHistoryView, 0, 0, RES.STR_WORKSHARE_TITLE, comment, activity.FolderName, string.Empty, string.Empty);
}
}
res.Result.HistoryList.Add(imHistEvent.imHistoryEchoSync, 0, 0, RES.STR_WORKSHARE_TITLE, string.Format(RES.STR_HISTORY_ITEMSYNC, DateTime.Now.ToString(Activity.TimeFormat)), Environment.MachineName, string.Empty, string.Empty);
if (versionFriendlyName != null)
{
res.Result.Description = versionFriendlyName;
}
res.Result.Update();
var addedver = new ManVersion(new ManFile(res.Result, m_Folder));
if (!checkedInVersion.CheckedOut && wasCheckedOut)
{
var tmp = Path.GetTempFileName();//Path.GetTempPath() + tmp_filename;
checkedInVersion.CheckOut(tmp, imCheckOutOptions.imReplaceExistingFile, DateTime.Now.AddDays(INFINITY_DAYS), oldCheckOutComment);
var fi = new FileInfo(tmp);
// iManage sets this date incorrectly; restore it because the correct value is needed later
fi.LastWriteTime = addedver.EditTime;
fi.LastAccessTime = addedver.EditTime;
if (keepLocalState)
{
checkedInVersion.CheckoutPath = oldCheckOutPath;
checkedInVersion.Update();
FileUtils.SafeDelete(tmp);
}
}
FileUtils.SafeDelete(oldCheckOutPath);
return addedver;
}
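/// <summary>
/// Returns true when the document's edit time is newer than the modification time recorded in its sync info.
/// </summary>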
public bool WasUpdatedAfterSend2(bool useForceRequest = true)
{
this.m_document.Refresh();
var syncInfo = Application.iManInstance.Module.Resolve<SyncInfoService>().GetSyncInfo(this, useForceRequest);
if (syncInfo == null) return false;
long oldModified;
if (long.TryParse(syncInfo.Modified, out oldModified))
{
return EditDate.Ticks > oldModified;
}
return false;
}
public bool IsLocalFileWasEdited()
{
try
{
if (CheckedOut)
{
if (!string.IsNullOrEmpty(CheckoutPath) && File.Exists(CheckoutPath))
{
var fi = new FileInfo(CheckoutPath);
return fi.LastWriteTime != Modified;
}
}
return false;
}
catch
{
return false;
}
}
public IDmsVersion CheckInFileEx(bool keepCheckOut, imCheckinDisposition checkinDisposition)
{
if (CheckedOut)
{
string checkoutpath = CheckoutPath;
var oldCheckOutComment = m_document.CheckoutComment;
IManCheckinResult res = m_document.CheckInWithResults(checkoutpath, checkinDisposition, imCheckinOptions.imDontKeepCheckedOut);
if (!res.Succeeded)
{
var resErr = new StringBuilder();
resErr.AppendFormat("ErrorCode : {0}. ErrorMessage: {1}", res.ErrorCode, res.ErrorMessage);
foreach (IManProfileError manProfileError in res.ErrorList)
{
resErr.Append(Environment.NewLine);
resErr.AppendFormat("{0} : {1}", manProfileError.AttribID, manProfileError.Description);
}
Logger.Write("ManFile::AddVersion(): CheckInWithResults return error: " + resErr, Severity.Information);
throw new FileCannotBeCheckedInException(res.ErrorMessage);
}
var checkedInVersion = res.Result;
var addedver = new ManVersion(new ManFile(res.Result, m_Folder));
if (keepCheckOut)
{
var tmp = Path.GetTempFileName();
checkedInVersion.CheckOut(tmp, imCheckOutOptions.imReplaceExistingFile, DateTime.Now.AddDays(INFINITY_DAYS), oldCheckOutComment);
var fi = new FileInfo(tmp);
// iManage sets this date incorrectly; restore it because the correct value is needed later
fi.LastWriteTime = addedver.EditTime;
fi.LastAccessTime = addedver.EditTime;
//checkedInVersion.Update();
}
return addedver;
}
else
{
return null;
}
}
public IDmsVersion CheckInFile(bool keepCheckOut)
{
return CheckInFileEx(keepCheckOut, imCheckinDisposition.imCheckinNewVersion);
}
public bool IsCheckedOutFileExists()
{
if (m_document.CheckedOut && !string.IsNullOrEmpty(m_document.CheckoutPath))
{
return File.Exists(m_document.CheckoutPath);
}
else
{
return true;
}
}
public DateTime EditDate
{
get
{
if (CheckedOut)
{
string localfilepath = string.Empty;
try
{
localfilepath = m_document.CheckoutPath;
var localfile = new FileInfo(localfilepath);
return localfile.LastWriteTime;
}
catch (IOException ex)
{
Logger.Write("Failded to access to checkouted file: " + localfilepath, Severity.Error);
throw new FileNotFoundException(ex.Message, ex);
}
}
else
{
return Modified;
}
}
}
public string GetFilePath()
{
string localCopy = string.Empty;
try
{
if (CheckedOut)
{
localCopy = m_document.CheckoutPath;
}
else
{
localCopy = Path.GetTempFileName();
Logger.Write("Trying to get copy of a file: " + localCopy, Severity.Information);
m_document.GetCopy(localCopy, imGetCopyOptions.imNativeFormat);
Logger.Write("Succeded: " + localCopy, Severity.Information);
}
return localCopy;
}
catch (IOException ex)
{
Logger.Write("Failded to access to checkouted file: " + localCopy, Severity.Error);
throw new LocalFileNotFound();
}
catch (Exception ex)
{
Logger.Write("Error due getting of copy: " + localCopy, Severity.CriticalError);
throw;
}
}
public string DisplayName
{
get
{
return FileUtils.GetFileNameWithoutExtension(Name);
}
set
{
}
}
public IDMSFolder ParentFolder
{
get
{
if (m_Folder == null && ParentFolders.Count == 1)
{
return ParentFolders[0];
}
if (m_Folder != null)
{
return new ManFolder(m_Folder);
}
return null;
}
}
public string Modifier
{
get
{
if (m_document.Author != null)
{
return m_document.Author.Name;
}
return string.Empty;
}
}
public DateTime Modified
{
get
{
try
{
var dateedit = (DateTime)m_document.GetAttributeValueByID(imProfileAttributeID.imProfileEditDate);
var timeedit = (DateTime)m_document.GetAttributeValueByID(imProfileAttributeID.imProfileEditTime);
var Edit = new DateTime(dateedit.Year, dateedit.Month, dateedit.Day, timeedit.Hour, timeedit.Minute, timeedit.Second, timeedit.Millisecond);
return Edit;
}
catch (Exception ex)
{
return m_document.LatestVersion.AccessTime;
}
}
}
const int INFINITY_DAYS = 10000;
public void OnBeforeSending(OperationContext context)
{
if (!CheckedOut)
{
GetFileToCheckout();
if (context.CheckOutOption != Workshare.Integration.Enums.CheckOutOptions.CheckOut)
{
m_bNeedToCleanTemp = true;
m_sTempCheckOutPath = CheckoutPath;
}
}
else if (!CheckedOutToUser)
{
throw new FileCheckoutedToAnotherUser(Workshare.Integration.Operations.Send);
}
else
{
wasAlreadyCheckedOut = true;
}
}
public void OnAfterSending(OperationContext context)
{
if (context.CheckOutOption == Workshare.Integration.Enums.CheckOutOptions.DontCheckOut && !wasAlreadyCheckedOut)
{
DiscardCheckout();
}
RemoveTempFileIfNeed();
}
public void OnSendError(object args, Exception ex)
{
Logger.Write(ex, Severity.Error);
if (!wasAlreadyCheckedOut)
{
m_document.UnlockContent();
}
}
private void UpdateDocTypeIfNeed()
{
if (newdoctype!=null && newdoctype.Name != m_document.Type.Name)
{
Logger.Write("Update doc type", Severity.Information);
m_document.SetAttributeByID(imProfileAttributeID.imProfileType, newdoctype);
}
}
public void DiscardCheckout(bool deleteCheckOutFile = false)
{
if (!this.IsCheckedOutToOtherUser())
{
if (m_document.CheckedOut)
{
bool res;
if (m_document.LatestVersion.CheckedOut)
{
res = m_document.LatestVersion.UnlockContent();
if (!res)
{
throw new FileUnlockFailedException();
}
m_document.LatestVersion.Update();
}
m_document.Refresh();
if (m_document.CheckedOut)
{
res = m_document.UnlockContent();
if (!res)
{
throw new FileUnlockFailedException();
}
m_document.Update();
}
if (deleteCheckOutFile)
FileUtils.SafeDelete(m_document.CheckoutPath);
}
}
}
public List<IDMSFolder> ParentFolders
{
get
{
var res = new List<IDMSFolder>();
foreach (IManFolder folder in m_document.Folders)
{
res.Add(new ManFolder(folder));
}
return res;
}
}
private ManFile DisplayImportDialog(WorksharePlatform.FileDetails file)
{
var dlg = Application.Instance.Module.Resolve<ImportDialog>();
dlg.Initialize(null, file.FilePath, file.FriendlyName ?? file.Name, m_document.Database);
dlg.ShowDialog();
return dlg.AddedFile;
}
public IDMSFile AddFile(WorksharePlatform.FileDetails file, List<Activity> actnivities, IEnumerable<string> versionIdsToSkip, bool useProfileDialog = false, ConflictVersionOptions SyncVerOption = ConflictVersionOptions.Latest, int StartVersion = 2, bool updateSyncInfoForVersion = true)
{
var a = true;
return AddFile(file, out a, actnivities,versionIdsToSkip, useProfileDialog, SyncVerOption, StartVersion,updateSyncInfoForVersion);
}
SyncInfoService GetService()
{
return Application.Instance.Module.Resolve<SyncInfoService>();
}
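/// <summary>
/// Imports a Workshare file into iManage: downloads the required version(s), creates the document
/// either via the profile dialog or directly against the database, then adds any remaining versions and history entries.
/// </summary>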
public IDMSFile AddFile(WorksharePlatform.FileDetails file, out bool added, List<Activity> actnivities,IEnumerable<string> versionIdsToSkip , bool useProfileDialog = false, ConflictVersionOptions SyncVerOption = ConflictVersionOptions.Latest, int StartVersion = 2, bool updateSyncInfoForVersions = true)
{
Logger.Write("Adding document to folder", Severity.Trace);
added = false;
try
{
//actnivities = PlatformService.GetFileActivitiesImanage(Application.iManInstance.AuthProvider.GetCurrentWSUser(), file.FolderId, file.Id);
if (SyncVerOption == ConflictVersionOptions.All)
{
file.DeleteFile();
file.FilePath = PlatformService.DownloadFileVersion(Application.iManInstance.AuthProvider.GetCurrentWSUser(), file.Id, StartVersion);
}
else
{
var firstVersionToImport =
file.Versions.OrderBy(a => a.Version)
.LastOrDefault(a => !ManFile.ShouldSkip(versionIdsToSkip, a)) ?? file.CurrentVersion;
file.DeleteFile();
file.FilePath =
PlatformService.DownloadFileVersion(Application.iManInstance.AuthProvider.GetCurrentWSUser(),
file.Id, firstVersionToImport.Version);
}
ManFile createdFile;
if (useProfileDialog)
{
createdFile= DisplayImportDialog(file);
added = createdFile!=null;
if (createdFile != null && updateSyncInfoForVersions)
{
var version = SyncVerOption == ConflictVersionOptions.All ? file.Versions.FirstOrDefault(v => v.Version == StartVersion) : file.Versions.OrderBy(a => a.Version).LastOrDefault();
if (version != null)
{
GetService().UpdateSyncInfo(createdFile.GetVersions().First(),new ActionContext(Workshare.Integration.Processor.Services.ActionType.Import)
{
WsVersion = new WsVersion(version, null)
});
}
}
}
else
{
IManDocument doc = m_document.Database.CreateDocument();
IManUser oUser = m_document.Database.GetUser(m_document.Database.Session.UserID);
var oDocClasses = m_document.Database.SearchDocumentClasses("", imSearchAttributeType.imSearchBoth, true);
var oDocClass = (oDocClasses.Contains("DOC")) ? oDocClasses.ItemByName("DOC") : oDocClasses.ItemByIndex(0);
var doctype = doc.Database.GetDocumentTypeFromPath(file.FilePath);
if (doctype == null)
{
throw new FileNotCreatedException(string.Format(Properties.Resources.STR_CANNOT_ADD_NEW_FILE_WRONG_TYPE_ERROR_F, file.FriendlyName));
}
doc.SetAttributeByID(imProfileAttributeID.imProfileType, doctype);
doc.SetAttributeByID(imProfileAttributeID.imProfileClass, oDocClass);
doc.SetAttributeByID(imProfileAttributeID.imProfileAuthor, oUser);
doc.SetAttributeByID(imProfileAttributeID.imProfileDescription, FileUtils.GetFileNameWithoutExtension(file.FriendlyName));
doc.SetAttributeByID(imProfileAttributeID.imProfileOperator, oUser);
Logger.Write("Trying to upload a file into DMS: " + file.FilePath, Severity.Information);
var Results = doc.CheckInWithResults(file.FilePath, imCheckinDisposition.imCheckinNewDocument, imCheckinOptions.imDontKeepCheckedOut);
if (!Results.Succeeded)
{
var resErr = new StringBuilder();
resErr.AppendFormat("ErrorCode : {0}. ErrorMessage : {1}", Results.ErrorCode, Results.ErrorMessage);
foreach (IManProfileError manProfileError in Results.ErrorList)
{
resErr.Append(Environment.NewLine);
resErr.AppendFormat("AttribID : {0}. Description : {1}", manProfileError.AttribID, manProfileError.Description);
}
Logger.Write("CheckInWithResults return error: " + resErr.ToString(), Severity.Information);
throw new FileNotCreatedException(Results.ErrorMessage);
}
Logger.Write("Trying to upload a file into DMS: Succeded " + file.FilePath, Severity.Information);
added = true;
createdFile = new ManFile(doc, m_Folder);
}
if (createdFile != null)
{
if (SyncVerOption == ConflictVersionOptions.All)
{
if (!createdFile.CheckedOut)
{
createdFile.GetFileToCheckout();
}
// TODO: remove this in the future
if (StartVersion == 2)
{
createdFile.AddHistories(actnivities.Where(x => x.CurrentVersion == 1).ToList());
}
createdFile.AddHistories(actnivities.Where(x => x.CurrentVersion == StartVersion).ToList());
foreach (var ver in file.Versions.Where(x => x.Version > StartVersion && !ShouldSkip(versionIdsToSkip, x)).OrderBy(x => x.Version).ToArray())
{
file.FilePath = PlatformService.DownloadFileVersion(Application.iManInstance.AuthProvider.GetCurrentWSUser(), file.Id, ver.Version);
var version = createdFile.AddVersion(file.FilePath, file, actnivities.Where(x => x.CurrentVersion == ver.Version).ToList());
if (updateSyncInfoForVersions)
{
GetService().UpdateSyncInfo(version, new ActionContext(Workshare.Integration.Processor.Services.ActionType.Import)
{
WsVersion = new WsVersion(ver, null)
});
}
}
}
else if (SyncVerOption == ConflictVersionOptions.Latest)
{
var versionsIdsToAdd =
file.Versions.Where(x => x.Version >= StartVersion).Select(x => x.Version).ToArray();
createdFile.AddHistories(actnivities.Where(act=>versionsIdsToAdd.Contains(act.CurrentVersion)).ToList());
}
}
return createdFile;
}
catch (FileNotCreatedException)
{
throw;
}
catch (Exception ex)
{
throw new FileNotCreatedException(ex.Message, ex);
}
}
static public bool ShouldSkip(IEnumerable<string> list,FileVersionDetails verDetails )
{
if (list == null) return false;
return list.Any(a => string.Equals(a, verDetails.Id.ToString()));
}
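/// <summary>
/// Maps a Workshare operation to the corresponding iManage history event type.
/// </summary>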
private imHistEvent GetHistEventByOperation(Workshare.Integration.Operations operation)
{
switch (operation)
{
case Workshare.Integration.Operations.Send:
return imHistEvent.imHistoryExport;
case Workshare.Integration.Operations.Sync:
return imHistEvent.imHistoryEchoSync;
case Workshare.Integration.Operations.Deleted:
return imHistEvent.imHistoryDelete;
default:
throw new CannotAddHistoryException("Operation is not correct");
}
}
public void AddHistory(string eventName, string eventComment, Workshare.Integration.Operations operation)
{
if(this.IsCheckedOutToOtherUser())
{
throw new FileCheckoutedToAnotherUser(operation, GetCheckoutUser());
}
m_document.HistoryList.Add(GetHistEventByOperation(operation), 0, 0, RES.STR_WORKSHARE_TITLE, eventComment, Environment.MachineName, eventName, string.Empty);
var res = m_document.UpdateWithResults();
if (!res.Succeeded)
{
throw new CannotAddHistoryException(res.ErrorList.ItemByIndex(0).Description);
}
}
public void AddHistories(List<Activity> activities)
{
try
{
if (activities == null)
throw new CannotAddHistoryException("activity list is empty");
var last = activities.LastOrDefault(p => p.Noun == "Comment");
foreach (var activity in activities)
{
if (activity.Noun == "Comment" && activity != last)
continue;
var comment = activity.GetComment();
//if (activity.Noun != "Member" && activity.CurrentVersion != version)
//continue;
if (!string.IsNullOrEmpty(comment))
m_document.HistoryList.Add(
activity.HistoryModify ? imHistEvent.imHistoryModify : imHistEvent.imHistoryView,
0, 0, RES.STR_WORKSHARE_TITLE, comment, activity.FolderName, string.Empty, string.Empty);
}
var res = m_document.UpdateWithResults();
if (!res.Succeeded)
{
throw new CannotAddHistoryException(res.ErrorList.ItemByIndex(0).Description);
}
}
catch (COMException ex)
{
if (ex.ErrorCode ==-2147221199) //Document in use
{
throw new FileCheckoutedToAnotherUser(GetCheckoutUser());
}
throw;
}
}
public void GetFileToCheckout()
{
string filename = Path.GetTempFileName();
m_document.CheckOut(filename, imCheckOutOptions.imReplaceExistingFile, DateTime.Now.AddDays(INFINITY_DAYS), string.Empty);
var fi = new FileInfo(filename);
// iManage sets this date incorrectly; restore it because the correct value is needed later
fi.LastWriteTime = Modified;
fi.LastAccessTime = Modified;
}
public override string ToString()
{
return string.Format("File with ID={0}, Name={1}", ID, DisplayName);
}
public IDMSFolder RootFolder()
{
return new ManFolder(m_document.Database.Root);
}
public IEnumerable<IDmsVersion> GetVersions()
{
var res = new List<IDmsVersion>();
m_document.Refresh();
var latest=m_document.LatestVersion;
var versions = latest.Versions;
versions.Refresh();
foreach (IManDocument doc in versions)
{
res.Add(new ManVersion(new ManFile(doc,m_Folder)));
}
return res;
}
public int DMSId
{
get
{
return m_document.Number;
}
}
internal string GetCheckoutUser()
{
var user = m_document.InUseBy;
if (user != null)
{
return user.Name;
}
return string.Empty;
}
internal string GetCurrentCheckedInFile()
{
var localCopy = Path.GetTempFileName();
m_document.GetCopy(localCopy, imGetCopyOptions.imNativeFormat);
return localCopy;
}
public bool IsFileLocked()
{
if (CheckedOut)
{
return FileUtils.IsFileLocked(CheckoutPath);
}
return false;
}
internal void Refresh()
{
m_document.Refresh();
}
public string DMSItemKey
{
get { return (m_document == null) ? "" : Application.iManInstance.ServerKey + ":!DatabaseName:" + m_document.Database.Name + ":!Document:" + this.DMSId; }
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Interfaces/Logger/Logger.cs
using System;
using System.Diagnostics;
using Microsoft.Win32;
namespace Workshare.Components.WSLogger
{
// TODO: find a better place for the logger
public enum Severity
{
Trace,
Warning,
Information,
Error,
CriticalError,
Verbose
}
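/// <summary>
/// Static trace-based logger. The active level is read from HKCU\Software\Workshare ("logging" value)
/// and messages are written through a single listener to WorkshareIntegration.log in the temp folder.
/// Example (illustrative): Logger.Write("Upload started", Severity.Information);
/// </summary>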
public static class Logger
{
static SourceLevels GetLogSeverity()
{
try
{
using (var rekKey = Registry.CurrentUser.OpenSubKey(@"Software\Workshare"))
{
if (rekKey != null)
{
var value = Convert.ToInt32(rekKey.GetValue("logging"));
if (Enum.IsDefined(typeof (SourceLevels), value))
{
var level = (SourceLevels) value;
return level;
}
}
}
return SourceLevels.Error;
}
catch (Exception)
{
return SourceLevels.Error;
}
}
static Listener _listener;
static Logger()
{
WorkShareTrace = new TraceSource(TRACESOURCENAME) {Switch = {Level = GetLogSeverity()}};
_listener = new Listener(WorkShareTrace);
}
public static readonly string LOGFILENAME = "WorkshareIntegration.log";
static readonly string TRACESOURCENAME = "WS";
static private readonly TraceSource WorkShareTrace;
static TraceEventType SeverityRemap(Severity severity)
{
switch (severity)
{
case Severity.CriticalError: return TraceEventType.Critical;
case Severity.Error: return TraceEventType.Error;
case Severity.Information: return TraceEventType.Information;
case Severity.Trace: return TraceEventType.Resume;
case Severity.Verbose: return TraceEventType.Verbose;
case Severity.Warning: return TraceEventType.Warning;
default: return TraceEventType.Verbose;
}
}
static string MessageRemap(string message)
{
return DateTime.Now + " " + message;
}
static string MessageRemap(Exception message)
{
return DateTime.Now + " " + message.ToString();
}
static string MessageRemap(string message, Exception ex)
{
return DateTime.Now + " " + message + " " + ex.ToString();
}
#region WriteMethods
public static void Write(string message, Severity severity)
{
WorkShareTrace.TraceEvent(SeverityRemap(severity), (int)severity, MessageRemap(message));
WorkShareTrace.Flush();
}
public static void Write(Exception exception, Severity severity)
{
WorkShareTrace.TraceEvent(SeverityRemap(severity), (int)severity, MessageRemap(exception));
WorkShareTrace.Flush();
}
public static void Write(string message, Exception exception, Severity severity)
{
WorkShareTrace.TraceEvent(SeverityRemap(severity), (int)severity, MessageRemap(message, exception));
WorkShareTrace.Flush();
}
public static void WriteError(string message, Exception exception)
{
WorkShareTrace.TraceEvent(SeverityRemap(Severity.Error), (int)Severity.Error, MessageRemap(message, exception));
WorkShareTrace.Flush();
}
public static void WriteTrace(string message)
{
WorkShareTrace.TraceEvent(SeverityRemap(Severity.Trace), (int)Severity.Trace, message);
WorkShareTrace.Flush();
}
public static void WriteError(Exception exception)
{
try
{
WorkShareTrace.TraceEvent(SeverityRemap(Severity.Error), (int)Severity.Error, MessageRemap("", exception));
WorkShareTrace.Flush();
}
catch (Exception )
{
}
}
#endregion
public static void WriteWarning(string p)
{
try
{
WorkShareTrace.TraceEvent(SeverityRemap(Severity.Warning), (int)Severity.Warning, p);
WorkShareTrace.Flush();
}
catch (Exception)
{
}
}
}
public class Listener
{
public Listener(TraceSource listenSource)
{
if (instance == null)
{
instance = new TextWriterTraceListener(Path);
instance.Name = "Workshare Logging Listener";
}
if (!listenSource.Listeners.Contains(instance))
{
listenSource.Listeners.Add(instance);
}
}
readonly TextWriterTraceListener instance = null;
static string _Path;
public static string Path
{
get
{
if (_Path == null)
{
_Path = System.IO.Path.Combine(System.IO.Path.GetTempPath(), Logger.LOGFILENAME);
}
return _Path;
}
}
}
}
<file_sep>/WSComponents/src/WSCloudService/FolderDetails.cs
using System;
using System.Collections.Generic;
namespace WorksharePlatform
{
public class FolderDetails
{
private int _id;
public FolderDetails()
{
Permissions = new Permissions();
}
public int Id
{
get { return _id; }
set
{
_id = value;
Permissions.FolderId = _id;
}
}
public int Ancestry { get; set; }
public string Name { get; set; }
public string Description { get; set; }
public int ParentId { get; set; }
public bool NotifyOwner { get; set; }
public DateTime? ExpiresAt { get; set; }
public int ChildFoldersCounter { get; set; }
public int FileCounter { get; set; }
public Permissions Permissions { get; set; }
public UserDetails Creator { get; set; }
public UserDetails Owner { get; set; }
public bool IsDeleted { get; set; }
public string Url { get; set; }
public override string ToString()
{
return string.Format("Folder ID={0}, Name={1}", Id, Name);
}
public int MemberCount { get; set; }
}
public class FolderMembers
{
public List<UserDetails> Members { get; set; }
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/Strategies/ScanStrategy.cs
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.Processor.Strategies;
using Workshare.Integration.WsProxies;
namespace Workshare.IManage.Strategies
{
class ScanStrategy : Workshare.Integration.Processor.Strategies.ScanStrategy
{
public ScanStrategy(WsProxy wsProxy, IAuthProvider auth, DmsWorkerBase dmsWorker,SyncInfoService syncInfoService):base(wsProxy,auth,syncInfoService)
{
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/CheckOutDialog/ContentConverter.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows.Data;
namespace Workshare.Components.Views.CheckOutDialog
{
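/// <summary>
/// Converter for the check-out dialog text: returns the single-document message when a file name
/// is supplied and the multi-document message otherwise. ConvertBack is not supported.
/// </summary>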
public class ContentConverter:IValueConverter
{
public object Convert(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture)
{
var filename = value as string;
if (string.IsNullOrEmpty(filename))
{
return Properties.Resources.CHECKOUT_TEXT_IF_DOCUMENTS;
}
else
{
return string.Format(Properties.Resources.CHECKOUT_TEXT_IF_DOCUMENT, filename);
}
}
public object ConvertBack(object value, Type targetType, object parameter, System.Globalization.CultureInfo culture)
{
throw new NotImplementedException();
}
}
}
<file_sep>/WSComponents/src/WSComponents/Common/DMSAuthProvider.cs
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.WsProxies;
using WorksharePlatform;
namespace Workshare.Components.Common
{
public class DMSAuthProvider:IAuthProvider
{
public const string DeviceCredential = CookieAwareClient.device_credential;
public UserDetails CheckIfAuth()
{
var user = GetCurrentWSUser();
if (user == null)
throw new CloudUnAuthorized();
return user;
}
private UserDetails _currentUser;
public UserDetails GetCurrentWSUser()
{
return _currentUser ?? (_currentUser = new UserDetails());
}
public void SetCurrentUser(UserDetails user)
{
_currentUser = user;
}
public WsUser GetCurrentWSUser2()
{
var u = GetCurrentWSUser();
if (u != null)
{
PlatformService.RefreshUserIfNeeded(u);
}
return u == null ? null : new WsUser(u);
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncDialog/UserControls/WsSplitButton.xaml.cs
using System;
using System.ComponentModel;
using System.Linq;
using System.Windows;
using System.Windows.Controls;
using Workshare.Components.Common;
using Workshare.Integration;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.Services;
using WorksharePlatform;
namespace Workshare.Components.Views.TrueSyncDialog.UserControls
{
/// <summary>
/// Interaction logic for WsSplitButton.xaml
/// </summary>
public partial class WsSplitButton : UserControl
{
private BackgroundWorker breakLinkWorker = new BackgroundWorker();
public WsSplitButton()
{
InitializeComponent();
breakLinkWorker.DoWork += breakLinkWorker_DoWork;
}
private void BtnImport_OnClick(object sender, RoutedEventArgs e)
{
var parent = ((Panel)(((Button)sender).Parent)).FindName("MenuImport");
if (parent != null && ((Menu)parent).Items.Count > 0)
{
var item = (MenuItem)((Menu)parent).ItemContainerGenerator.ContainerFromItem(((Menu)parent).Items[0]);
if (item == null) return;
if (item.Role == MenuItemRole.TopLevelHeader)
{
item.IsSubmenuOpen = true;
}
else if (item.Role == MenuItemRole.TopLevelItem)
{
if (item.Command != null)
{
item.Command.Execute(item.CommandParameter);
}
}
}
else if(parent!=null)
{
ImportFileActivityVm importFileActivityVm = ((Menu)parent).DataContext as ImportFileActivityVm;
if (importFileActivityVm != null && importFileActivityVm.State == ImportFileActivityState.Scanned && importFileActivityVm.Changes.Any(c => c.Type == ChangeType.RemoteDeleted))
{
ImportFileActivity importFileActivity = importFileActivityVm.data as ImportFileActivity;
if (importFileActivity != null)
{
importFileActivityVm.State = ImportFileActivityState.DeletingLink;
if (!breakLinkWorker.IsBusy)
{
breakLinkWorker.RunWorkerAsync(importFileActivityVm);
}
}
}
}
}
private void breakLinkWorker_DoWork(object sender, DoWorkEventArgs e)
{
ImportFileActivityVm importFileActivityVm = e.Argument as ImportFileActivityVm;
if (importFileActivityVm != null)
{
ImportFileActivity importFileActivity = importFileActivityVm.data as ImportFileActivity;
if (importFileActivity != null)
{
try
{
var syncInfo = WSApplication.Instance.Module.Resolve<SyncInfoService>();
importFileActivity.DmsFile.AddHistory("FileDeletedOrMoved", "Synced from Workshare on " + DateTime.Now.ToString(Activity.TimeFormat) + ". File has been deleted or moved", Operations.Sync);
syncInfo.BreakLink(importFileActivity.DmsFile);
importFileActivityVm.State = ImportFileActivityState.LinkIsDeleted;
}
catch (Exception ex)
{
if (ex is FileCheckoutedToAnotherUser)
{
importFileActivityVm.State = ImportFileActivityState.CheckedOutToAnother;
importFileActivity.CheckedOutTo = ((FileCheckoutedToAnotherUser)ex).UserName;
}
else
{
importFileActivityVm.State = ImportFileActivityState.Error;
importFileActivity.Error = ex;
}
}
}
}
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Maps/Ext.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.Remoting.Messaging;
using System.Text;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.Processor.Strategies;
namespace Workshare.Integration.Processor.Maps
{
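/// <summary>
/// Extension helpers for reading sync info, identifiers and local/remote state from file, folder and version maps.
/// </summary>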
public static class Ext
{
public static ISyncInfo GetSyncInfo(this SyncInfoService dmsWorker, FileMap map, bool ForceRequest = true)
{
return (map == null || map.LocalFile == null) ? null : dmsWorker.GetSyncInfo(map.LocalFile, ForceRequest);
}
public static ISyncInfo GetSyncInfo(this SyncInfoService dmsWorker, FolderMap map, bool ForceRequest = true)
{
return (map == null || map.LocalFolder == null) ? null : dmsWorker.GetSyncInfo(map.LocalFolder, ForceRequest);
}
public static IEnumerable<int> GetSendData(this FolderMap map, SyncInfoService syncService, bool ForceRequest = true)
{
return (map.LocalFolder == null) ? new int[0] : syncService.GetSendData(map.LocalFolder, ForceRequest) ?? new int[0];
}
public static IEnumerable<int> GetSendData(this SyncInfoService syncService, FolderMap map, bool ForceRequest = true)
{
return (map.LocalFolder == null) ? new int[0] : syncService.GetSendData(map.LocalFolder, ForceRequest) ?? new int[0];
}
public static int? GetWsId(this FileMap map)
{
return map == null || map.WsFile == null ? (int?)null : map.WsFile.Id;
}
public static string GetLocalId(this FileMap map)
{
return map == null || map.LocalFile == null ? null : map.LocalFile.DMSId.ToString();
}
public static string GetLocalId(this FolderMap map)
{
return map == null || map.LocalFolder == null ? null : map.LocalFolder.DMSId.ToString();
}
public static int? GetWsId(this FolderMap map)
{
return map == null || map.WsFolder == null ? (int?)null : map.WsFolder.Id;
}
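/// <summary>
/// Returns the highest-numbered local version that has no matching Workshare version, or null if every version has been uploaded.
/// </summary>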
public static VersionMap GetLatestLocalVersionNotUploadedToWs(this FileMap fileMap)
{
return fileMap.Maps.OfType<VersionMap>()
.Where(a => a.LocalVersion!=null && a.WsVersion==null)
.OrderByDescending(a => a.LocalVersion.Number)
.FirstOrDefault();
}
public static bool ContainsChildWsVersion(this FileMap map, int wsId)
{
return map.Maps.OfType<VersionMap>()
.Any(p => p.WsVersion != null && p.WsVersion.Id == wsId);
}
public static List<ItemException> GetItemErrorsList(List<ItemMap> errors)
{
var errorsList = new List<ItemException>();
foreach (var ex in errors)
{
var folderMap = ex as FolderMap;
var fileMap = ex as FileMap;
if (folderMap != null)
{
errorsList.Add(new ItemException
{
Error = folderMap.LocalFolder == null
? ((ex.Error is BaseException)
? ex.Error
: new BaseException(string.Empty, ex.Error.Message, ex.Error))
: ((ex.Error is BaseException)
? ex.Error
: new BaseException(folderMap.LocalFolder.Name, ex.Error.Message, ex.Error)),
Item = folderMap.LocalFolder
});
}
else if (fileMap != null)
{
errorsList.Add(new ItemException
{
Error = fileMap.LocalFile == null
? ((ex.Error is BaseException)
? ex.Error
: new BaseException(string.Empty, ex.Error.Message, ex.Error))
: ((ex.Error is BaseException)
? ex.Error
: new BaseException(fileMap.LocalFile.Name, ex.Error.Message, ex.Error)),
Item = fileMap.LocalFile
});
}
else
{
errorsList.Add(new ItemException
{
Error = ex.Error
});
}
}
return errorsList;
}
public static bool HasLocal(this FolderMap map)
{
if (map == null) return false;
return map.LocalFolder != null;
}
public static bool HasRemote(this FolderMap map)
{
if (map == null) return false;
return map.WsFolder != null && !map.WsFolder.IsDeleted;
}
public static bool HasLocal(this FileMap map)
{
if (map == null) return false;
return map.LocalFile != null;
}
public static bool HasRemote(this FileMap map)
{
if (map == null) return false;
return map.WsFile != null && !map.WsFile.IsDeleted;
}
public static bool HasRemote(this VersionMap map)
{
if (map == null) return false;
return map.WsVersion != null;
}
public static bool HasLocal(this VersionMap map)
{
if (map == null) return false;
return map.LocalVersion != null;
}
public static FolderMap GetActualParent(this FileMap map)
{
if (map == null) return null;
return map.IndirectParent ?? map.Parent as FolderMap;
}
public static FolderMap GetLocalParent(this FileMap map)
{
var res = map.Parent as FolderMap;
if (res == null && map.HasLocal())
{
res = new FolderMap
{
LocalFolder = map.LocalFile.ParentFolder,
ProcessState = ProcessState.Scanned
};
}
return res;
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/SelectFolder/SelectFolderForm.cs
using System;
using System.Diagnostics;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Security.Permissions;
using System.Timers;
using System.Windows.Forms;
using System.Windows.Threading;
using mshtml;
using Workshare.Components.Common;
using Workshare.Components.Concrete;
using RES = Workshare.Components.Properties.Resources;
using Workshare.Components.WSLogger;
using Microsoft.Win32;
namespace Workshare.Components.Views.SelectFolder
{
public partial class SelectFolderForm : Form
{
[DllImport("urlmon.dll", CharSet = CharSet.Ansi)]
private static extern int UrlMkSetSessionOption(int dwOption, string pBuffer, int dwBufferLength, int dwReserved);
const int URLMON_OPTION_USERAGENT = 0x10000001;
JSCallback callbacks = new JSCallback();
readonly string DialogRelUri = "home/dialogs.sdif?caller=imanage&extcall=hello";
public int SelectedFolderId { private set; get; }
Uri DialogUri
{
get
{
return new Uri(WSApplication.Instance.Server.ToString() + DialogRelUri);
}
}
public void HideScriptErrors(WebBrowser wb, bool hide)
{
try
{
var fiComWebBrowser = typeof(WebBrowser).GetField("webBrowser1", BindingFlags.Instance | BindingFlags.NonPublic);
if (fiComWebBrowser == null) return;
var objComWebBrowser = fiComWebBrowser.GetValue(wb);
if (objComWebBrowser == null)
{
wb.Navigated += (o, s) => HideScriptErrors(wb, hide); // In case we are too early
return;
}
objComWebBrowser.GetType().InvokeMember("Silent", BindingFlags.SetProperty, null, objComWebBrowser, new object[] { hide });
}
catch (Exception ex)
{
Logger.Write(ex, Severity.Error);
}
}
public SelectFolderForm()
{
SelectedFolderId = -1;
InitializeComponent();
this.Width = 594;
this.Height = 510;
this.StartPosition = FormStartPosition.CenterParent;
this.Load += SelectFolderView_Loaded;
this.AutoScroll = false;
this.Disposed += SelectFolderForm_Disposed;
webBrowser1.ScrollBarsEnabled = false;
webBrowser1.Navigated += webBrowser_Navigated;
callbacks.OnCancel += callbacks_OnCancel;
callbacks.OnOk += callbacks_OnOk;
webBrowser1.ObjectForScripting = callbacks;
webBrowser1.ScriptErrorsSuppressed = true;
//webBrowser1.NavigateError += webBrowser1_NavigateError;
webBrowser1.Navigated += new WebBrowserNavigatedEventHandler(
(object sender, WebBrowserNavigatedEventArgs args) =>
{
Action<HtmlDocument> blockAlerts = (HtmlDocument d) =>
{
HtmlElement h = d.GetElementsByTagName("head")[0];
HtmlElement s = d.CreateElement("script");
IHTMLScriptElement e = (IHTMLScriptElement)s.DomElement;
e.text = "window.alert=function(){};";
h.AppendChild(s);
};
WebBrowser b = sender as WebBrowser;
blockAlerts(b.Document);
for (int i = 0; i < b.Document.Window.Frames.Count; i++)
try { blockAlerts(b.Document.Window.Frames[i].Document); }
catch (Exception) { };
});
ChangeUserAgent();
}
void webBrowser1_NavigateError(object sender, NewWebBrowser.WebBrowserNavigateErrorEventArgs e)
{
MessageBox.Show(e.StatusCode.ToString());
}
public void ChangeUserAgent()
{
string ua = @"User-Agent:Mozilla/4.0 (compatible; MSIE 8.0)";
UrlMkSetSessionOption(URLMON_OPTION_USERAGENT, ua, ua.Length, 0);
}
void SelectFolderForm_Disposed(object sender, EventArgs e)
{
webBrowser1.Dispose();
}
void m_closeTimer_Elapsed(object sender, ElapsedEventArgs e)
{
Logger.Write("Inside m_closeTimer_Elapsed", Severity.Information);
Dispatcher.CurrentDispatcher.BeginInvoke(new Action(() =>
{
Logger.Write("Inside Dispatcher", Severity.Information);
this.DialogResult = System.Windows.Forms.DialogResult.OK;
this.Close();
Logger.Write("ThisClose Executed", Severity.Information);
}));
}
void callbacks_OnOk(int obj)
{
try
{
Logger.Write("Inside Callback_OnOk", Severity.Verbose);
this.SelectedFolderId = obj;
this.Close();
}
catch (Exception ex)
{
MessageBox.Show(ex.Message);
}
}
bool? Res;
void callbacks_OnCancel()
{
try
{
this.Close();
}
catch (Exception ex)
{
MessageBox.Show(ex.Message);
}
}
void webBrowser_Navigated(object sender, WebBrowserNavigatedEventArgs e)
{
var doc = webBrowser1.Document;
var str =Authentication.CookieReader.Reader.GetCookie(DialogUri.ToString());
if (!str.Contains(DMSAuthProvider.DeviceCredential))
{
this.SelectedFolderId = -10;
this.Close();
}
if (doc != null)
{
var head = doc.GetElementsByTagName("head")[0]; // Head is one and only element as a rule, so no problems here...
if (head == null)
{
Logger.Write("Html Document Head is null in SelectFolderForm", Severity.CriticalError);
return;
}
var script = doc.CreateElement("script");
var scrElement = (IHTMLScriptElement)script.DomElement;
scrElement.text = RES.injectedJS.Replace("\r", "").Replace("\n", "").Replace("\t", "");
head.AppendChild(script);
doc.InvokeScript("init");
}
pbLoader.Visible = false;
}
void SelectFolderView_Loaded(object sender, EventArgs e)
{
Logger.WriteTrace(string.Format("Web browser control version: {0}", webBrowser1.Version));
if (webBrowser1.Version.Major <= ModuleViewBase.IE8MAJORVERSION )
{
webBrowser1.Navigate(DialogUri, null, null, @"User-Agent:Mozilla/4.0 (compatible; MSIE 8.0)");
}
else
{
webBrowser1.Navigate(DialogUri);
}
}
private void btn_Refresh_Click(object sender, System.Windows.RoutedEventArgs e)
{
webBrowser1.Refresh();
}
private void btn_Dlg_Click(object sender, System.Windows.RoutedEventArgs e)
{
webBrowser1.Navigate(DialogUri);
}
private void btn_Home_Click(object sender, System.Windows.RoutedEventArgs e)
{
webBrowser1.Navigate(WSApplication.Instance.Server);
}
private void btn_Url_Click(object sender, System.Windows.RoutedEventArgs e)
{
}
private void btn_Test_Click(object sender, System.Windows.RoutedEventArgs e)
{
}
}
[PermissionSet(SecurityAction.Demand, Name = "FullTrust")]
[System.Runtime.InteropServices.ComVisibleAttribute(true)]
public class JSCallback
{
public event Action OnCancel;
public event Action<int> OnOk;
public void onCancel()
{
if (OnCancel != null)
{
OnCancel();
}
}
public void onOk(int folder_id)
{
if (OnOk != null)
{
OnOk(folder_id);
}
}
public void jsLogger(string msg)
{
var regKey = Registry.CurrentUser.OpenSubKey(@"Software\Workshare");
if (regKey != null)
{
string logFromWeb = regKey.GetValue("UseWebLog") as string;
if (!string.IsNullOrEmpty(logFromWeb) && logFromWeb == "1")
{
Logger.Write("WEB LOG: " + msg, Severity.Information);
}
}
}
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/Contrete/ManVersion.cs
using System;
using Workshare.Integration;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.DmsItems;
using Workshare.Integration.Processor.Services;
namespace Workshare.IManage.Contrete
{
internal class ManVersion : BaseDmsVersion, IDmsVersion
{
public string Id { set; get; }
public int Number
{
get
{
return doc.m_document.Version;
}
}
public object ID {
get
{
return doc.ID;
}
set
{
}
}
private ManFile doc;
public ManVersion(ManFile doc)
{
this.doc = doc;
Id = doc.m_document.ObjectID;
}
public IDMSFile GetLatestFile()
{
return doc.GetLatest();
}
public string Name
{
get
{
return doc.Name;
}
}
public bool WasChangedAfterSend2(bool useForcerequest = true)
{
var syncInfo = Application.iManInstance.Module.Resolve<SyncInfoService>().GetSyncInfo(this, useForcerequest);
if (syncInfo == null) return false;
return doc.EditDate.Ticks.ToString() != syncInfo.DmsEditTime;
}
public string DisplayName
{
get { return doc.DisplayName; }
}
internal string GetCurrentCheckedInFile()
{
return doc.GetCurrentCheckedInFile();
}
public DateTime EditTime
{
get
{
return doc.Modified;
}
}
public void AddHistory(string eventName, string coment, Operations operation)
{
doc.AddHistory(eventName, coment, operation);
}
public IDMSFile AsFile()
{
return doc;
}
}
}
<file_sep>/SharePoint/src/WorksharePointCloud/Layouts/WorkshareCloud/WorkshareCloudService.aspx.cs
using System;
using Microsoft.SharePoint;
using Microsoft.SharePoint.WebControls;
using System.Web;
using System.Threading;
using System.IO;
using System.Web.Services;
using System.Web.Script.Serialization;
using System.Web.Script.Services;
using WorksharePlatform;
using WorkshareCloud.Common;
using System.Net;
using System.Collections.Generic;
namespace WorkshareCloud.Layouts.WorkshareCloud
{
public partial class WorkshareCloudService : LayoutsPageBase
{
protected void Page_Load(object sender, EventArgs e)
{
}
[WebMethod]
[ScriptMethod(ResponseFormat = ResponseFormat.Json)]
public static object Send()
{
return Execute(() =>
{
var user = CloudAuthenication.GetCurrentUser();
try
{
string data = string.Empty;
var sendInfos = Deserialize<SendItemInformations>(HttpContext.Current, out data);
if (sendInfos != null && sendInfos.infos != null)
{
WorkshareIntegration.Instance.SendItems(sendInfos.infos, user);
}
else
{
throw new BadRequest(data);
}
return string.Empty;
}
finally
{
CloudAuthenication.Update(user);
}
});
}
static object Execute(Func<object> action)
{
try
{
return action();
}
catch (ListProcessException ex)
{
HttpContext.Current.Response.StatusCode = (int)HttpStatusCode.InternalServerError;
return new ErrorInfo(ErrorCode.ListItemSyncError, ex.Message, ex.ToJSON());
}
catch (BadRequest ex)
{
HttpContext.Current.Response.StatusCode = (int)HttpStatusCode.BadRequest;
return new ErrorInfo(ex);
}
catch (ItemNotFound ex)
{
HttpContext.Current.Response.StatusCode = (int)HttpStatusCode.NotFound;
return new ErrorInfo(ex);
}
catch (CloudUnAuthorized)
{
CloudAuthenication.Logout();
HttpContext.Current.Response.StatusCode = (int)HttpStatusCode.ServiceUnavailable;
return new ErrorInfo(ErrorCode.CloudUnAuth);
}
catch (OfflineException ex)
{
HttpContext.Current.Response.StatusCode = (int)HttpStatusCode.ServiceUnavailable;
return new ErrorInfo(ex);
}
catch (FileCheckoutedToAnotherUser)
{
HttpContext.Current.Response.StatusCode = (int)HttpStatusCode.MethodNotAllowed;
return new ErrorInfo(ErrorCode.FileCheckedOut);
}
catch (UnauthorizedAccessException ex)
{
HttpContext.Current.Response.StatusCode = (int)HttpStatusCode.Forbidden;
return new ErrorInfo(ex);
}
catch (CloudFolderAccessDenied)
{
HttpContext.Current.Response.StatusCode = (int)HttpStatusCode.Forbidden;
return new ErrorInfo(ErrorCode.CloudItemIsForbidden);
}
catch (AlreadySentException ex)
{
HttpContext.Current.Response.StatusCode = (int)HttpStatusCode.MethodNotAllowed;
return new ErrorInfo(ex);
}
catch (CloudFolderNotFound)
{
HttpContext.Current.Response.StatusCode = (int)HttpStatusCode.NotFound;
return new ErrorInfo(ErrorCode.CloudFolderNotFound);
}
catch (CloudFileNotFound)
{
HttpContext.Current.Response.StatusCode = (int)HttpStatusCode.NotFound;
return new ErrorInfo(ErrorCode.CloudFileNotFound);
}
catch (CloudFieldNotFound)
{
HttpContext.Current.Response.StatusCode = (int)HttpStatusCode.NotFound;
return new ErrorInfo(ErrorCode.CloudFieldNotFound);
}
catch (Exception ex)
{
HttpContext.Current.Response.StatusCode = (int)HttpStatusCode.InternalServerError;
return new ErrorInfo(ex);
}
}
[WebMethod]
[ScriptMethod(ResponseFormat = ResponseFormat.Json)]
public static object Sync()
{
return Execute(() =>
{
var user = CloudAuthenication.GetCurrentUser();
try
{
var data=string.Empty;
var syncInfos = Deserialize<SyncItemInformations>(HttpContext.Current, out data);
if (syncInfos.infos != null)
{
foreach (var syncInfo in syncInfos.infos)
{
Guid ListGuid;
if (syncInfo != null && Extensions.GuidTryParse(syncInfo.listGuid, out ListGuid))
{
WorkshareIntegration.Instance.SyncItems(syncInfos.infos, user);
}
else
{
throw new BadRequest(data);
}
}
}
return string.Empty;
}
finally
{
CloudAuthenication.Update(user);
}
});
}
[WebMethod]
[ScriptMethod(ResponseFormat=ResponseFormat.Json)]
public static object Update()
{
return Execute(() =>
{
string data = string.Empty;
var sendInfo = Deserialize<SendItemInformation>(HttpContext.Current, out data);
Guid ListGuid;
if (Extensions.GuidTryParse(sendInfo.listGuid, out ListGuid))
{
WorkshareIntegration.Instance.EnsureListInitialized(SPContext.Current.Web.Lists[ListGuid]);
return "";
}
else
{
throw new BadRequest(string.Format("ListGuid={0}",sendInfo.listGuid));
}
});
}
[WebMethod]
[ScriptMethod(ResponseFormat = ResponseFormat.Json)]
public static object DialogSettings()
{
return Execute(() =>
{
try
{
var res = PlatformService.GetDialogSettings(new UserDetails());
return new
{
dialog_create_folder_height = res.dialog_create_folder_height,
dialog_create_folder_width = res.dialog_create_folder_width,
dialog_login_height = res.dialog_login_height,
dialog_login_width = res.dialog_login_width,
dialog_select_folder_height = res.dialog_select_folder_height,
dialog_select_folder_width = res.dialog_select_folder_width
};
}
finally
{
PlatformService._Host = null;
}
});
}
[WebMethod]
public static object Login(string email, string password)
{
return Execute(() =>
{
if (string.IsNullOrEmpty(email) || string.IsNullOrEmpty(password))
{
HttpContext.Current.Response.StatusCode = (int)HttpStatusCode.ServiceUnavailable;
return new ErrorInfo(ErrorCode.WrongCredentials);
}
try
{
CloudAuthenication.Login(new UserDetails()
{
Email = email,
Password = password
});
return string.Empty;
}
catch (WebException ex)
{
HttpWebResponse res = (HttpWebResponse)ex.Response;
if (res.StatusCode == HttpStatusCode.NotFound || (int)res.StatusCode == 422) // 422 - unprocessable entity
{
HttpContext.Current.Response.StatusCode = (int)HttpStatusCode.ServiceUnavailable;
return new ErrorInfo(ErrorCode.WrongCredentials);
}
else throw;
}
});
}
[WebMethod]
public static object Logout()
{
return Execute(() =>
{
CloudAuthenication.Logout();
return string.Empty;
});
}
static T Deserialize<T>(HttpContext context, out string data)
{
data = string.Empty;
try
{
context.Request.InputStream.Position = 0;
using (var reader = new StreamReader(context.Request.InputStream, context.Request.ContentEncoding))
{
data = reader.ReadToEnd();
return new JavaScriptSerializer().Deserialize<T>(data);
}
}
catch
{
return default(T);
}
}
}
public class ErrorInfo
{
public ErrorInfo(ErrorCode error)
{
code = (int)error;
switch (error)
{
case ErrorCode.General: description = Common.Properties.Resources.STR_ERROR_HAPPEN; break;
case ErrorCode.WrongCredentials: description = Common.Properties.Resources.STR_WRONGAUTH_CAPTION; break;
case ErrorCode.CloudUnAuth: description = Common.Properties.Resources.STR_WRONGAUTH_CAPTION; break;
case ErrorCode.CloudFileNotFound: description = Common.Properties.Resources.STR_CLOUDFILENOTFOUND; break;
case ErrorCode.CloudFolderNotFound: description = Common.Properties.Resources.STR_CLOUDFOLDERNOTFOUND; break;
case ErrorCode.CloudItemIsForbidden: description = Common.Properties.Resources.STR_UNABLESENDITEM_TEXT; break;
case ErrorCode.FileCheckedOut: description = Common.Properties.Resources.STR_FILEISCHECKEDOUTBYANOTHERUSER_TEXT; break;
case ErrorCode.CloudFieldNotFound: description = Common.Properties.Resources.STR_VIEWNOTINITIALIZED_TEXT; break;
case ErrorCode.ItemNotFound: description = Common.Properties.Resources.STR_LOCALFILENOTFOUND_TEXT; break;
default: description = Common.Properties.Resources.STR_ERROR; break;
}
}
public ErrorInfo(Exception ex):this(ErrorCode.General,string.IsNullOrEmpty(ex.Message)?ex.GetType().Name:ex.Message)
{
}
public ErrorInfo(ErrorCode error, string text)
{
code = (int)error;
description = text;
}
public ErrorInfo(ErrorCode error, string text, object data)
: this(error, text)
{
this.data = data;
}
public int code { get; private set; }
public string description { get; private set; }
public object data { get; private set; }
}
public static class HttpExt
{
public static void WriteObj(this HttpResponse response, object obj)
{
using (var writer = new StreamWriter(response.OutputStream))
{
writer.Write(new JavaScriptSerializer().Serialize(obj));
}
}
public static void WriteErrorAndEndRequest(this HttpResponse response, ErrorCode error)
{
response.ClearContent();
response.WriteObj(new ErrorInfo(error));
}
}
public enum ErrorCode
{
General=1,
ListItemSyncError = 5,
CloudUnAuth=100,
WrongCredentials=200,
FileCheckedOut=300,
CloudItemIsForbidden=403,
CloudFileNotFound=404,
CloudFolderNotFound=405,
CloudItemNotFound=406,
FileConflict = 503,
SPFileNewer = 502,
CloudFieldNotFound=506,
ItemNotFound = 310
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/IManageModule.cs
using Microsoft.Practices.Unity;
using Workshare.Components.Common;
using Workshare.Components.Interfaces;
using Workshare.Components.Services;
using Workshare.IManage.Integration;
using Workshare.IManage.Strategies;
using Workshare.IManage.Views;
using Workshare.Integration;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.Processor.Strategies;
namespace Workshare.IManage
{
class IManageModule : WsIntegrationModule
{
protected override void Initialize()
{
base.Initialize();
this.Container.RegisterInstance<ModuleBase>(this);
this.Container.RegisterType<Workshare.Components.Views.Common.CommandInvoker>();
this.Container.RegisterType<DmsWorkerBase, DmsWorker>(new ContainerControlledLifetimeManager());
this.Container.RegisterType<IAuthProvider, DMSAuthProvider>(new ContainerControlledLifetimeManager());
this.Container.RegisterType<IModuleView, ModuleView>(new ContainerControlledLifetimeManager());
this.Container.RegisterType<IWSIntegration, WorkshareIntegration>(new ContainerControlledLifetimeManager());
this.Container.RegisterType<DiscardedService, DiscardedService>(new ContainerControlledLifetimeManager());
this.Container.RegisterType<SyncInfoService, SyncInfoService>(new ContainerControlledLifetimeManager());
this.Container.RegisterType<WebDataStorageService, WebDataStorageService>(new ContainerControlledLifetimeManager());
this.Container.RegisterType<ErrorReporterService>(new ContainerControlledLifetimeManager());
this.Container.RegisterType<Workshare.Integration.Processor.Strategies.ActionStrategy.ImportStrategy>();
this.Container.RegisterType<Workshare.Integration.Processor.Strategies.ActionStrategy.UploadStrategy>();
this.Container.RegisterType<Workshare.Integration.Processor.Strategies.DmsScanStrategyBase, Workshare.IManage.Strategies.ScanStrategy>();
this.Container.RegisterType<Workshare.Integration.Processor.Strategies.DmsProcessStrategyBase, Workshare.IManage.Strategies.ProcessStrategy>();
this.Container.RegisterType<Workshare.IManage.Contrete.ImportDialog>();
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Maps/MapController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Workshare.Integration.Processor.Changes.Visitors;
namespace Workshare.Integration.Processor.Maps
{
public class MapController
{
public ItemMap FindMapById(ItemMap rootElement, string id)
{
var finder = new MapFinder(id);
rootElement.Apply(finder);
return finder.Result;
}
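// Walks upwards from the map with the given id and returns the nearest folder map whose item
// exists both locally and on Workshare; a file map with an indirect parent short-circuits to it.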
public FolderMap GetFirstParentLinkedWithExistedOnWs(ItemMap rootElement, string mapId)
{
var map = FindMapById(rootElement,mapId);
if (map == null) return null;
if (map is FileMap)
{
var fileMap = (FileMap) map;
if (fileMap.IndirectParent != null)
{
return fileMap.IndirectParent;
}
}
var currentMap = map;
while (currentMap!=null)
{
var folderMap = currentMap as FolderMap;
if (folderMap != null && folderMap.HasRemote() && folderMap.HasLocal())
{
return folderMap;
}
currentMap = currentMap.Parent;
}
return null;
}
}
}
<file_sep>/SharePoint/src/WorkshareCloud.Common/CloudFieldValue.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.SharePoint;
using WorksharePlatform;
namespace WorkshareCloud.Common
{
public class CloudPathFieldValue
{
public readonly static string CloudField = "CloudPath";
const char SEPARATOR = '/';
public int ItemId { get; private set; }
public int ParentFolderId { get; private set; }
public int VersionId { get; private set; }
public long ItemModifiedBy { get; private set; }
CloudPathFieldValue()
{
ItemId = -1;
ParentFolderId = -1;
VersionId = -1;
ItemModifiedBy = -1;
}
public CloudPathFieldValue(FileDetails value, SPListItem item)
: this()
{
var fileDetailes = (FileDetails)value;
ParentFolderId = fileDetailes.FolderId;
if (fileDetailes.CurrentVersion != null)
{
ItemId = fileDetailes.CurrentVersion.FileId;
VersionId = fileDetailes.CurrentVersion.Id;
}
var DateModify = (DateTime)item[SPBuiltInFieldId.Modified];
ItemModifiedBy = DateModify.Ticks;
}
public CloudPathFieldValue(FolderDetails value, SPListItem item)
: this()
{
var folderDetailes = (FolderDetails)value;
ParentFolderId = folderDetailes.ParentId;
ItemId = folderDetailes.Id;
var DateModify = (DateTime)item[SPBuiltInFieldId.Modified];
ItemModifiedBy = DateModify.Ticks;
}
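// Parses a value persisted in the CloudPath field. The serialized format (see ToString) is
// "ParentFolderId/ItemId/VersionId/ModifiedTicks", separated by SEPARATOR.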
public CloudPathFieldValue(string value)
: this()
{
if (!string.IsNullOrEmpty(value))
{
var parts = value.Split(SEPARATOR);
ParentFolderId = Int32.Parse(parts[0]);
ItemId = Int32.Parse(parts[1]);
VersionId = Int32.Parse(parts[2]);
ItemModifiedBy =long.Parse(parts[3]);
}
}
public static CloudPathFieldValue Empty
{
get
{
return new CloudPathFieldValue();
}
}
public override string ToString()
{
if (HasValue)
{
return string.Format("{1}{0}{2}{0}{3}{0}{4}", SEPARATOR, ParentFolderId, ItemId, VersionId, ItemModifiedBy);
}
else
{
return string.Empty;
}
}
public bool HasValue { get { return (ItemId > -1 && VersionId > -1) || ParentFolderId > -1; } }
}
}
<file_sep>/OpenText/README.md
OTIntegration
=============
OTIntegration
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncDialog/TrueSyncDialogWindow.xaml.cs
using System;
using System.Collections.Generic;
using System.Windows;
using System.Windows.Controls;
using Workshare.Components.Interfaces;
using Workshare.Components.Views.Common;
using Workshare.Components.Views.TrueSyncDialog.VMs;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
using Workshare.Integration.Processor.Changes.Activities;
namespace Workshare.Components.Views.TrueSyncDialog
{
/// <summary>
/// Interaction logic for TrueSyncDialogWindow.xaml
/// </summary>
public partial class TrueSyncDialogWindow
{
public TrueSyncDialogWindow()
{
InitializeComponent();
}
private readonly TrueSyncDialogViewModel _vm;
private readonly IModuleView _view;
public TrueSyncDialogWindow(ModuleBase module, CommandInvoker commandInvoker, IEnumerable<IDMSItem> items, IModuleView view)
{
InitializeComponent();
_view = view;
DataContextChanged += TrueSyncDialogWindow_DataContextChanged;
DataContext = _vm = new TrueSyncDialogViewModel(module, commandInvoker, items, view);
Loaded += TrueSyncDialogWindow_Loaded;
Closed += TrueSyncDialogWindow_Closed;
}
void TrueSyncDialogWindow_DataContextChanged(object sender, DependencyPropertyChangedEventArgs e)
{
var model = e.NewValue as TrueSyncDialogViewModel;
if (model != null)
{
var vm = model;
vm.ScanCompleted += vm_ScanCompleted;
}
}
void vm_ScanCompleted(object sender, EventArgs e)
{
if (importActive.HasItems)
{
SyncTab.IsSelected = true;
}
else if (uploadActive.HasItems)
{
SendTab.IsSelected = true;
}
}
void TrueSyncDialogWindow_Closed(object sender, EventArgs e)
{
_view.TopWindow = IntPtr.Zero;
_view.SuppressProgressDialog(false);
}
void TrueSyncDialogWindow_Loaded(object sender, RoutedEventArgs e)
{
_view.SuppressProgressDialog(true);
if (_vm != null)
{
if (_vm.RescanCommand.CanExecute(""))
{
_vm.RescanCommand.Execute("");
}
Title = string.Format("{0}{1}{2}", FindResource("RES_TITLE_BAR_PART_1"), _vm.WorkspaceName, FindResource("RES_TITLE_BAR_PART_2"));
}
var handle=new System.Windows.Interop.WindowInteropHelper(this).Handle;
_view.TopWindow = handle;
if (AsDropDown)
{
Focusable = false;
WindowStartupLocation = WindowStartupLocation.Manual;
WindowStyle = WindowStyle.None;
bar.Visibility = Visibility.Visible;
UpdateLocation();
Deactivated += TrueSyncDialogWindow_Deactivated;
Closing += TrueSyncDialogWindow_Closing;
SizeChanged += DropDownTrueSyncDialogWindow_SizeChanged;
}
this.KeepPositionOnResizing = !AsDropDown;
}
void DropDownTrueSyncDialogWindow_SizeChanged(object sender, SizeChangedEventArgs e)
{
UpdateLocation();
}
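// Aligns the drop-down's right edge with the ribbon button and places it just below, clamping
// the position to the screen bounds and leaving a small offset for the task bar.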
void UpdateLocation()
{
int taskBarYVirtualOffset = 50;
var left = ButtonLeft + ButtonWidth - ActualWidth;
var top = ButtonTop + ButtonHeight;
left = (left >= 0) ? left : 0;
left = (left + ActualWidth > SystemParameters.PrimaryScreenWidth) ? SystemParameters.VirtualScreenWidth - ActualWidth : left;
top = (top >= 0) ? top : 0;
top = ((top + (int)ActualHeight) >= SystemParameters.PrimaryScreenHeight - taskBarYVirtualOffset) ? (int)SystemParameters.VirtualScreenHeight - (int)ActualHeight - taskBarYVirtualOffset : top;
Left = left;
Top = top;
}
void TrueSyncDialogWindow_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
Deactivated -= TrueSyncDialogWindow_Deactivated;
}
void TrueSyncDialogWindow_Deactivated(object sender, EventArgs e)
{
Close();
}
private void ImportSwitchButton_Click(object sender, RoutedEventArgs e)
{
SwitchDiscarded(importActive, importDiscarded, btnImportSwitch);
}
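// Toggles between the active and discarded activity lists, updating the switch button caption
// and the corresponding flag on the view model.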
void SwitchDiscarded(Control activeConterol, Control discardedControl, Button switchButton)
{
if (activeConterol.Visibility == Visibility.Visible)
{
activeConterol.Visibility = Visibility.Collapsed;
discardedControl.Visibility = Visibility.Visible;
switchButton.Content = "View current activities";
if (activeConterol == uploadActive) _vm.IsShowUploadDiscarded = true;
else _vm.IsShowImportDiscarded = true;
}
else
{
activeConterol.Visibility = Visibility.Visible;
discardedControl.Visibility = Visibility.Collapsed;
if (activeConterol == uploadActive) _vm.IsShowUploadDiscarded = false;
else _vm.IsShowImportDiscarded = false;
switchButton.Content = "View discarded activities";
}
}
private void UploadSwitch_Click(object sender, RoutedEventArgs e)
{
SwitchDiscarded(uploadActive, uploadDiscarded, btnUploadSwitch);
}
public int ButtonLeft { get; set; }
public int ButtonTop { get; set; }
public bool AsDropDown { get; set; }
public int ButtonWidth { get; set; }
public int ButtonHeight { get; set; }
public IEnumerable<FileMapActivity> ItemsToSkip
{
get { return _vm.ItemsToSkip; }
}
private void OwnWindow_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
_vm.OnViewClosing();
}
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/Views/ModuleView.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Runtime.InteropServices;
using System.Threading;
using Com.Interwoven.WorkSite.iManage;
using CSOfficeRibbonAccessibility;
using Microsoft.Office.Core;
using Workshare.Components;
using Workshare.Components.Common;
using Workshare.Components.Interfaces;
using Workshare.Components.Presenter;
using Workshare.Components.Properties;
using Workshare.Components.Views;
using Workshare.Components.Views.Common;
using Workshare.Components.Views.TrueSyncDialog;
using Workshare.Components.Views.TrueSyncUploadFilesDialog;
using Workshare.Components.WSLogger;
using Workshare.IManage.Contrete;
using Workshare.IManage.Presenter;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.WsProxies;
namespace Workshare.IManage.Views
{
public class ModuleView: ModuleViewBase
{
ModuleBase _module;
readonly WsProxy _wsProxy;
public ModuleView(ModuleBase module, IWSIntegration integration)
: base(integration)
{
_module = module;
ActivieWindowHandle = Process.GetCurrentProcess().MainWindowHandle;
_wsProxy = new WsProxy();
}
public virtual void OnTrueSyncItemsClicked(SyncItemsClickedArgs args, bool fromRibbon = false)
{
((ModulePresenter)m_presenter).OnSyncItemsClicked(args, fromRibbon);
}
override public ModulePresenterBase CreatePresenter(IWSIntegration integration)
{
return new ModulePresenter(this);
}
int CHILDID_SELF = 0;
TrueSyncDialogWindow dlg = null;
internal void ShowTrueSyncDialog(CommandInvoker commandInvoker, IEnumerable<IDMSItem> items, Action<bool?, IEnumerable<FileMapActivity>> closed, bool fromRibbon)
{
if (dlg != null)
{
dlg.Close();
dlg = null;
return;
}
dlg = new TrueSyncDialogWindow(_module, commandInvoker, items, this);
bool asDropDown=true;
int left = 0, top = 0, width = 0, height = 0;
if (fromRibbon)
{
try
{
var topW = MSAAHelper.GetAccessibleObjectFromHandle(Process.GetCurrentProcess().MainWindowHandle);
var list = new List<IAccessible>();
IAccessible rre = MSAAHelper.GetAccessibleObjectByNameAndRole(topW, new System.Text.RegularExpressions.Regex("^Sync"), "push button", true);
if (rre == null)
{
Thread.Sleep(100);
rre = MSAAHelper.GetAccessibleObjectByNameAndRole(topW, new System.Text.RegularExpressions.Regex("^Sync"), "push button", true);
}
rre.accLocation(out left, out top, out width, out height, CHILDID_SELF);
Marshal.ReleaseComObject(topW);
}
catch (Exception ex)
{
Logger.Write(ex, Severity.Error);
asDropDown = false;
}
}
else
{
asDropDown = false;
}
dlg.Closed += (s, e) =>
{
if (closed != null)
{
closed(((OwnWindow)s).OwnDialogResult, dlg.ItemsToSkip);
}
dlg = null;
};
if (asDropDown)
{
dlg.ButtonLeft = left;
dlg.ButtonTop = top;
dlg.ButtonWidth = width;
dlg.ButtonHeight = height;
dlg.AsDropDown = true;
dlg.Show();
}
else
{
SetMainAsParent(dlg);
dlg.AsDropDown = false;
dlg.ShowDialog();
}
}
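// Returns true when the document's last access time falls within roughly two seconds after its
// modified time, which presumably means it was changed during that last access.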
private bool CheckDocModDuringLastAccess(ManFile doc)
{
if (!(DateTime.Compare(doc.m_document.AccessTime, doc.Modified) < 0))
{
TimeSpan interval = doc.m_document.AccessTime - doc.Modified;
return (interval.CompareTo(new TimeSpan(0, 0, 2)) < 0);
}
return false;
}
private bool LocalFileWasChanged(ManFile doc)
{
if (!string.IsNullOrEmpty(doc.CheckoutPath))
{
// for checked out documents EditDate returns lastWriteTime for local file
return DateTime.Compare(doc.EditDate, doc.Modified) == 0;
}
else
{
return false;
}
}
public override void ShowFileSyncDialog(object file, EventOperation operation, string command)
{
var act = new Action(() =>
{
if (file is IManDocument)
{
Logger.Write("Start ShowFileSyncDialog", Severity.Information);
var doc = new ManFile(file as IManDocument);
var curversion = new ManFile(doc.m_document.Versions.ItemByIndex(doc.m_document.Version) as IManDocument);
var lastversionDoc = new ManFile(doc.m_document.Versions.ItemByIndex(doc.m_document.Versions.Count) as IManDocument);
var flds = doc.ParentFolders;
Logger.Write("Get required variables", Severity.Information);
if (!Application.iManInstance.Presenter.LoginIfNeeded())
{
return;
}
if (flds != null && flds.Any(i => IsFolderExist(i)))
{
Logger.Write("If at least one folder has been sent to Workshare", Severity.Information);
if (Application.iManInstance.Presenter.LoginIfNeeded())
{
var dlg = WSApplication.Instance.Module.Resolve<TrueSyncUploadFilesDialog>();
SetMainAsParent(dlg);
doc.Refresh();
dlg.Initiailize(doc);
Logger.Write("Show dialog", Severity.Information);
dlg.ShowDialog();
}
}
else
{
if (flds != null && flds.Any() && flds[0].ParentFolder != null)
{
Logger.Write("If no parent folder has been sent to Workshare", Severity.Information);
var cFolder = GetSyncFolder(flds[0].ParentFolder);
if (cFolder != null)
{
if (Application.iManInstance.Presenter.LoginIfNeeded())
{
if (operation == EventOperation.MoveFolder)
{
if (!IsFolderExist(cFolder)) return;
}
else
{
if (!IsFolderExist(flds[0].ParentFolder)) return;
}
var dlg = WSApplication.Instance.Module.Resolve<TrueSyncUploadFilesDialog>();
SetMainAsParent(dlg);
dlg.Initiailize(doc);
dlg.ShowDialog();
}
}
}
}
}
});
ExecuteInAppropriateThread(act);
}
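// Returns true when the folder is linked to a Workshare folder that still exists; an
// access-denied response from Workshare is treated as "exists".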
private bool IsFolderExist(IDMSFolder folder)
{
try
{
if (folder != null)
{
var sis = Application.iManInstance.Module.Resolve<SyncInfoService>();
var syncInfo = sis.GetSyncInfo(folder, false);
if (syncInfo != null)
{
return Workshare.Integration.WsProxies.WsProxy.IsFolderExist(WSApplication.Instance.AuthProvider.GetCurrentWSUser2(), syncInfo.ItemId);
}
else
{
return false;
}
}
else
{
return false;
}
}
catch (WebException ex)
{
if (ex.IsCloudFolderAccessDenied())
return true;
else
throw;
}
}
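// Walks up the parent chain and returns the first folder that has sync info, or null if none does.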
public IDMSFolder GetSyncFolder(IDMSFolder fld)
{
IDMSFolder folder = fld;
if (folder != null)
{
if (Application.iManInstance.Module.Resolve<SyncInfoService>().GetSyncInfo(folder) != null)
{
return folder;
}
return GetSyncFolder(folder.ParentFolder);
}
return null;
}
}
}
<file_sep>/WSComponents/src/WSIntegration/SyncInfo/Factory.cs
using Workshare.Integration.Interfaces;
namespace Workshare.Integration.SyncInfo
{
class Factory:ISyncInfoFactory
{
public ISyncInfo CreateFileInfo()
{
return new FileSyncInfo();
}
public ISyncInfo CreateFolderInfo()
{
return new FolderSyncInfo();
}
public IVersionSyncInfo CreateVersionInfo()
{
return new VersionInfo();
}
public ISendDataInfo CreateSendDataInfo()
{
return new SendDataInfo();
}
}
}
<file_sep>/iManageIntegration/Src/Workshate.HookEvents/HookMoveContentsCmd.cs
using System;
using System.Collections;
using System.Diagnostics;
using System.Runtime.InteropServices;
using Com.Interwoven.Worksite.iManExt;
using Com.Interwoven.Worksite.iManExt2;
using Workshare.Components.Interfaces;
using Workshare.Components.WSLogger;
namespace Workshare.HookEvents
{
[ClassInterface(ClassInterfaceType.None)]
[Guid("44B80FA0-1E93-45D3-8A56-C2E03D83DA8E")]
[ComVisible(true)]
public class HookMoveContentsCmd : ICommand, Com.Interwoven.Worksite.iManExt2._ICommandEvents_Event
{
IManMoveContentsCmd cmd;
bool isCancel = true;
public HookMoveContentsCmd()
{
cmd = new IManMoveContentsCmd();
cmd.OnCancel += cmd_OnCancel;
cmd.OnInitDialog += cmd_OnInitDialog;
cmd.PostOnOK += cmd_PostOnOK;
cmd.PreOnOK += cmd_PreOnOK;
}
public int Accelerator
{
get
{
return cmd.Accelerator;
}
set
{
cmd.Accelerator = value;
}
}
public object Bitmap
{
get
{
return cmd.Bitmap;
}
set
{
cmd.Bitmap = value;
}
}
public ContextItems Context
{
get
{
return cmd.Context;
}
}
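// Runs the wrapped IManMoveContentsCmd; when the dialog was not cancelled, the Workshare
// move-file event is processed afterwards.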
public void Execute()
{
cmd.Execute();
if (!isCancel)
{
try
{
ProcessIManageEvents.ProcessEvent(Context, EventOperation.MoveFile, "HookMoveContentsCmd -> Execute");
}
catch (Exception exc)
{
Logger.Write(exc, Severity.Error);
}
}
}
public string HelpFile
{
get
{
return cmd.HelpFile;
}
set
{
cmd.HelpFile = value;
}
}
public int HelpID
{
get
{
return cmd.HelpID;
}
set
{
cmd.HelpID = value;
}
}
public string HelpText
{
get
{
return cmd.HelpText;
}
set
{
cmd.HelpText = value;
}
}
public void Initialize(ContextItems Context)
{
cmd.Initialize(Context);
}
public string MenuText
{
get
{
return cmd.MenuText;
}
set
{
cmd.MenuText = value;
}
}
public string Name
{
get
{
return cmd.Name;
}
set
{
cmd.Name = value;
}
}
public int Options
{
get
{
return cmd.Options;
}
set
{
cmd.Options = value;
}
}
public int Status
{
get
{
return cmd.Status;
}
set
{
cmd.Status = value;
}
}
public Commands SubCommands
{
get
{
return cmd.SubCommands;
}
set
{
cmd.SubCommands = value;
}
}
public string Title
{
get
{
return cmd.Title;
}
set
{
cmd.Title = value;
}
}
public CommandType Type
{
get
{
return cmd.Type;
}
set
{
cmd.Type = value;
}
}
public void Update()
{
cmd.Update();
}
void cmd_PreOnOK(object pMyInterface)
{
isCancel = false;
if (PreOnOK != null)
{
PreOnOK(pMyInterface);
}
}
void cmd_PostOnOK(object pMyInterface)
{
if (PostOnOK != null)
{
PostOnOK(pMyInterface);
}
}
void cmd_OnInitDialog(object pMyInterface)
{
if (OnInitDialog != null)
{
OnInitDialog(pMyInterface);
}
}
void cmd_OnCancel(object pMyInterface)
{
isCancel = true;
if (OnCancel != null)
{
OnCancel(pMyInterface);
}
}
public event Com.Interwoven.Worksite.iManExt2._ICommandEvents_OnCancelEventHandler OnCancel;
public event Com.Interwoven.Worksite.iManExt2._ICommandEvents_OnInitDialogEventHandler OnInitDialog;
public event Com.Interwoven.Worksite.iManExt2._ICommandEvents_PostOnOKEventHandler PostOnOK;
public event Com.Interwoven.Worksite.iManExt2._ICommandEvents_PreOnOKEventHandler PreOnOK;
}
}
<file_sep>/WSComponents/src/WSComponents/Interfaces/IModulePresenter.cs
using System.Collections.Generic;
using Workshare.Components.Common;
using Workshare.Components.Views.Common;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Changes.Activities;
using WorksharePlatform;
namespace Workshare.Components.Interfaces
{
public interface IModulePresenter
{
bool IsAnyItemInProgress();
void OnSendItemsClicked(SendItemsClickedArgs args);
void OnSyncItemsClicked(SyncItemsClickedArgs args);
void OnCollaborationItemsClicked(CollaborationItemsClickedArgs args);
WorkUnit GetSendCommand(IEnumerable<IDMSItem> items, IEnumerable<FileMapActivity> itemsToSkip, int destFolderId, CheckOutOptions option);
bool LoginIfNeeded();
DialogSettings GetCurrentDialogSettings(UserDetails user);
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Services/DmsWorkerBase.cs
using System;
using System.Collections.Generic;
using Workshare.Integration.Interfaces;
using Workshare.Integration.WsProxies;
using WorksharePlatform;
namespace Workshare.Integration.Processor.Strategies
{
public abstract class DmsWorkerBase
{
public abstract void ImportNewFile(NewFileOptions options);
public abstract void ImportNewVersion(NewVersionsOptions options);
public abstract void RelateDocument(IDMSFile mainFile, IDMSFile relatedFile, string p);
public abstract IDMSFolder CreateSubFolder(IDMSFolder folder, NewFolderOptions options);
/// <summary>
/// Returns the name (with extension) of the file
/// </summary>
/// <param name="iDMSFile"></param>
/// <returns></returns>
public abstract string GetName(IDMSFile iDMSFile);
public abstract string GetFilePath(IDMSFile iDMSFile);
public abstract string GetFilePath(IDmsVersion dmsVersion);
/// <summary>
/// Returns name with extension of version
/// </summary>
/// <param name="dmsVersion"></param>
/// <returns></returns>
public abstract string GetName(IDmsVersion dmsVersion);
/// <summary>
/// Returns the friendly name (without extension) of the version
/// </summary>
/// <param name="dmsVersion"></param>
/// <returns></returns>
public abstract string GetFriendlyName(IDmsVersion dmsVersion);
/// <summary>
/// Returns the friendly name of the folder
/// </summary>
/// <param name="folder"></param>
/// <returns></returns>
public abstract string GetFriendlyName(IDMSFolder folder);
public abstract DateTime GetEditTime(IDmsVersion dmsVersion);
public abstract DateTime GetEditTime(IDMSFile dmsFile);
public abstract IEnumerable<BaseDMSItem> GetParents(BaseDMSFile item);
public abstract string GetFriendlyName(IDMSFile iDMSFile);
public abstract void CanImportNewVersionOrThrow(IDMSFile dmsFile);
public abstract void CanImportNewFileOrThrow(NewFileOptions options);
public abstract void AddHistory(IDmsVersion dmsVersion, string eventName, string comment, Operations operation);
public abstract void CanUpdateFileOrThrow(IDMSFile localFile, Operations operation);
public abstract void CanUpdateFolderOrThrow(IDMSFolder localFolder);
public abstract void CanUpdateVersionOrThrow(IDmsVersion localVersion, Operations operation);
public abstract void UnlockIfNotEdited(IDMSFile localFile);
}
public class NewFolderOptions
{
public WsFolder WsFolder { set; get; }
}
public class NewFileOptions
{
public WsFile WsFile { set; get; }
public IDMSFolder ParentFolder { set; get; }
public IDMSFile BasedOnFile { set; get; }
public IDMSFile CreatedFile { set; get; }
public IEnumerable<CreateversionInfo> VersionList { set; get; }
}
public class NewVersionsOptions
{
public WsFile WsFile { set; get; }
public IDMSFile File { set; get; }
public IEnumerable<CreateversionInfo> VersionList { set; get; }
}
public class CreateversionInfo
{
public string FriendlyName;
public string FilePath;
public IEnumerable<Activity> Activities;
public WsFile file;
public WsVersion version;
public IDmsVersion createdVersion;
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Changes/Visitors/UploadActivityFinder.cs
using System;
using System.Collections.Generic;
using System.Linq;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.Processor.Strategies;
namespace Workshare.Integration.Processor.Changes.Visitors
{
class UploadActivityFinder : ItemMapVisitor
{
public IEnumerable<string> ActivitiesToUpdate = null;
readonly DiscardedService _discardedService;
readonly ChangesDetector _detector;
private readonly DmsWorkerBase _dmsWorker;
public UploadActivityFinder(DiscardedService discardedService, ChangesDetector detector, DmsWorkerBase dmsWorker)
{
this._discardedService = discardedService;
_detector = detector;
_dmsWorker = dmsWorker;
_options=new ActivityFinder.Options();
}
public List<UploadFileActivity> FoundedActivities = new List<UploadFileActivity>();
private ActivityFinder.Options _options;
public ActivityFinder.Options Options
{
get
{
return _options;
}
set
{
_options = value ?? new ActivityFinder.Options();
}
}
public override bool VisitEnter(FileMap fileMap)
{
return true;
}
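// Maps the FileMap processing state (and any recorded error) onto the upload activity state
// that the sync dialog displays.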
public void UpdateState(UploadFileActivity activity, FileMap fileMap)
{
if (fileMap.ProcessState == ProcessState.Error)
{
if (fileMap.Error is FileCheckoutedToAnotherUser)
{
activity.State = UploadFileActivityState.CheckedOutToAnother;
activity.CheckedOutTo = ((FileCheckoutedToAnotherUser)fileMap.Error).UserName;
}
else if (fileMap.Error is CheckedOutOnAnotherMachine)
{
activity.State = UploadFileActivityState.CheckedOutOnAnotherMachine;
activity.CheckOutMachine = ((CheckedOutOnAnotherMachine)fileMap.Error).MachineName;
}
else if (fileMap.Error.IsCloudFolderAccessDenied())
{
activity.State = UploadFileActivityState.NoAccessOnWorkshare;
}
else
{
activity.State = UploadFileActivityState.Error;
activity.Error = fileMap.Error;
}
}
else if (fileMap.ProcessState == ProcessState.Processed)
{
if (activity.State == UploadFileActivityState.Uploading)
{
activity.State = UploadFileActivityState.Uploaded;
}
}
else if (fileMap.ProcessState == ProcessState.Scanned)
{
activity.State = UploadFileActivityState.Scanned;
}
else if (fileMap.ProcessState == ProcessState.Cancelled)
{
activity.State = UploadFileActivityState.Scanned;
}
}
public override void Visit(FileMap fileMap)
{
var existed = FoundedActivities.FirstOrDefault(a => a.MapId == fileMap.Id);
if (existed != null && Options.DoNotUpdate) return;
var uploadFileActivity = existed ?? new UploadFileActivity();
_detector.opt = new DetectorOptions() { UseForceRequest = _options.UseForceRequest };
_discardedService.options = new DiscardedServiceOptions() { UseForceRequest = _options.UseForceRequest };
try
{
if (ActivitiesToUpdate == null || ActivitiesToUpdate.Contains(uploadFileActivity.Id))
{
uploadFileActivity.Refresh(fileMap, _dmsWorker);
uploadFileActivity.ClearChanges();
UpdateState(uploadFileActivity, fileMap);
_detector.AddFolderSharedIfDetected(uploadFileActivity, fileMap);
_detector.AddLastUnSentOrChangedVersionIfDetected(uploadFileActivity, fileMap);
uploadFileActivity.IsDiscarded = _discardedService.IsDiscarded(uploadFileActivity);
this.RefreshAction(uploadFileActivity);
}
if (!FoundedActivities.Contains(uploadFileActivity) && ShouldDetectActivity(uploadFileActivity))
{
FoundedActivities.Add(uploadFileActivity);
}
}
catch (Exception ex)
{
uploadFileActivity.Error = ex;
uploadFileActivity.State = UploadFileActivityState.Error;
}
}
public virtual bool ShouldDetectActivity(UploadFileActivity activity)
{
return true;
}
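// Rebuilds the actions offered for the activity: none while it is in an error, uploading or
// uploaded state (also clearing a stale discarded record after a successful upload), otherwise
// an Upload action for the first unsent or locally changed version.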
private void RefreshAction(UploadFileActivity activity)
{
activity.Actions = new List<ItemMapActivityAction>();
if (activity.State == UploadFileActivityState.Error
|| activity.State == UploadFileActivityState.Uploaded
|| activity.State == UploadFileActivityState.Uploading)
{
//If the file was uploaded from a discarded activity, remove that activity from the list of discarded activities on WDS;
//otherwise, if the user later deletes the file from Workshare, the 'true sync' dialog would still show the upload activity as discarded.
if (activity.IsDiscarded == false && activity.State == UploadFileActivityState.Uploaded)
{
UploadFileActivity oldDiscardedActivity = new UploadFileActivity() { WsId = "-1", DmsFileId = activity.DmsFileId, Type = activity.Type };
if (_discardedService.IsDiscarded(oldDiscardedActivity))
_discardedService.Remove(oldDiscardedActivity);
}
return;
}
var versionToUpload = activity.Changes.FirstOrDefault(a => a.Type == ChangeType.NotSentVersion || a.Type == ChangeType.VersionChangedAfterSend);
if (versionToUpload!=null)
{
var upload = new UploadDocumentAction(activity, "Upload");
upload.VersionIds.Add(versionToUpload.LocalVersionId);
activity.Actions.Add(upload);
}
}
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/Contrete/ManFolder.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Web.Script.Serialization;
using Com.Interwoven.Worksite.iManExt;
using Com.Interwoven.WorkSite.iManage;
using Workshare.Components.Common;
using Workshare.Components.Concrete;
using Workshare.Components.Exceptions;
using Workshare.Components.WSLogger;
using Workshare.Integration.SyncInfo;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.DmsItems;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.WsProxies;
using WorksharePlatform;
using ActionType = Workshare.Integration.Processor.Services.ActionType;
using Permissions = Workshare.Integration.Enums.Permissions;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.IManage.Contrete
{
class ManFolder : BaseDMSFolder, IDMSFolder
{
internal IManFolder m_folder;
public ManFolder(IManFolder folder)
{
m_folder = folder;
InitializeName();
}
private void InitializeName()
{
_name = m_folder.Name ?? null;
}
private ManFile DisplayImportDialog(WorksharePlatform.FileDetails file)
{
var dlg = Application.Instance.Module.Resolve<ImportDialog>();
dlg.Initialize(this, file.FilePath, file.FriendlyName ?? file.Name, m_folder.Database);
dlg.ShowDialog();
return dlg.AddedFile;
}
public IDMSFolder RootFolder()
{
return new ManFolder(m_folder.Database.Root);
}
public IDMSFile AddFile(WorksharePlatform.FileDetails file, List<Activity> actnivities,IEnumerable<string> versionIdsToSkip, bool useProfileDialog = false, ConflictVersionOptions SyncVerOption = ConflictVersionOptions.Latest, int StartVersion = 2, bool updateSyncInfoForVersions = true)
{
var a = true;
return AddFile(file, out a, actnivities, versionIdsToSkip,useProfileDialog, SyncVerOption, StartVersion,updateSyncInfoForVersions);
}
//Avoid using this method where possible: it downloads the selected Workshare file versions and imports them via the import dialog.
public IDMSFile AddFile(WorksharePlatform.FileDetails file, out bool added, List<Activity> actnivities, IEnumerable<string> versionIdsToSkip, bool useProfileDialog = false, ConflictVersionOptions SyncVerOption = ConflictVersionOptions.Latest, int StartVersion = 2, bool updateSyncInfoForVersions = true)
{
Logger.Write("Adding document to folder", Severity.Trace);
added = false;
try
{
//var actnivities = PlatformService.GetFileActivitiesImanage(Application.iManInstance.AuthProvider.GetCurrentWSUser(), file.FolderId, file.Id);
if (SyncVerOption == ConflictVersionOptions.All)
{
file.DeleteFile();
file.FilePath = PlatformService.DownloadFileVersion(Application.iManInstance.AuthProvider.GetCurrentWSUser(), file.Id, StartVersion);
}
else
{
var firstVersionToImport =
file.Versions.OrderBy(a => a.Version)
.LastOrDefault(a => !ManFile.ShouldSkip(versionIdsToSkip, a)) ?? file.CurrentVersion;
file.DeleteFile();
file.FilePath = PlatformService.DownloadFileVersion(Application.iManInstance.AuthProvider.GetCurrentWSUser(), file.Id, firstVersionToImport.Version);
}
var createdFile = DisplayImportDialog(file);
added = createdFile != null;
if (updateSyncInfoForVersions && added)
{
var version = SyncVerOption == ConflictVersionOptions.All ? file.Versions.FirstOrDefault(v => v.Version == StartVersion) : file.Versions.OrderBy(a=>a.Version).LastOrDefault();
if (version != null)
{
GetService().UpdateSyncInfo(createdFile.GetVersions().First(),
new ActionContext(ActionType.Import)
{
WsVersion = new WsVersion(version, null)
});
}
}
if (createdFile != null)
{
if (SyncVerOption == ConflictVersionOptions.All)
{
if (!createdFile.CheckedOut)
{
createdFile.GetFileToCheckout();
}
/// TODO: REMOVE THAT IN FUTURE
if (StartVersion == 2)
{
createdFile.AddHistories(actnivities.Where(x => x.CurrentVersion == 1).ToList());
}
createdFile.AddHistories(actnivities.Where(x => x.CurrentVersion == StartVersion).ToList());
foreach (var ver in file.Versions.Where(x => x.Version > StartVersion && !ManFile.ShouldSkip(versionIdsToSkip,x)).OrderBy(x => x.Version).ToArray())
{
file.DeleteFile();
file.FilePath = PlatformService.DownloadFileVersion(Application.iManInstance.AuthProvider.GetCurrentWSUser(), file.Id, ver.Version);
var veriosn = (ManVersion)createdFile.GetLatest().AddVersion(file.FilePath, file, actnivities.Where(x => x.CurrentVersion == ver.Version).ToList());
if (updateSyncInfoForVersions)
{
GetService().UpdateSyncInfo(veriosn,new ActionContext(ActionType.Import)
{
WsVersion = new WsVersion(ver, null)
});
//veriosn.UpdateSyncInfo2(new WsVersion(ver,null));
}
}
}
else if (SyncVerOption == ConflictVersionOptions.Latest)
{
createdFile.AddHistories(actnivities);
}
return createdFile.GetLatest();
}
else
{
return null;
}
}
catch (FileNotCreatedException)
{
throw;
}
catch (Exception ex)
{
throw new FileNotCreatedException(ex.Message, ex);
}
}
public IDMSFolder AddSubFolder(WorksharePlatform.FolderDetails cloudFolder)
{
try
{
if (cloudFolder == null) throw new ArgumentNullException("cloudFolder");
var objDocFolders = (IManDocumentFolders)m_folder.SubFolders;
var objDocFolder = objDocFolders.AddNewDocumentFolderInheriting(cloudFolder.Name, "");
//Get location of folder in Workspace layout
objDocFolder.Location.Cell = "column0";
objDocFolder.Location.Order = 1;
var createdFolder = new ManFolder(objDocFolder);
// createdFolder.UpdateSyncInfo(cloudFolder, true);
GetService().UpdateSyncInfo(createdFolder,new ActionContext(ActionType.Import)
{
WsFolder = new WsFolder(cloudFolder,null)
});
m_folder.Refresh();
return createdFolder;
}
catch (Exception ex)
{
throw new FolderNotCreatedException(string.Format(RES.STR_CANNOT_ADD_NEW_FOLDER_ERROR_F, cloudFolder.Name, ex.Message));
}
}
public BaseDMSFolder AddSubFolder2(string name)
{
var objDocFolders = (IManDocumentFolders)m_folder.SubFolders;
var objDocFolder = objDocFolders.AddNewDocumentFolderInheriting(name, string.Empty);
return new ManFolder(objDocFolder);
}
public bool IsDeleted
{
get
{
try
{
int id = this.m_folder.FolderID;
return false;
}
catch
{
return true;
}
}
}
public bool WasUpdatedAfterSend(ISyncInfo synhInfo)
{
return false;
}
public bool WasUpdatedAfterSend2(bool useForceRequest = true)
{
return false;
}
public IDMSItemID ID
{
get { return new ManItemID(m_folder); }
}
public int DMSId
{
get
{
return m_folder.FolderID;
}
}
string _name = null;
public string Name
{
get
{
if (string.IsNullOrEmpty(_name))
{
InitializeName();
}
return _name;
}
set
{
}
}
public string Description
{
get
{
return m_folder.Description;
}
set
{
}
}
public bool DoesUserHavePermissions(imAccessRight permissions)
{
return (m_folder.EffectiveAccess & permissions) == permissions;
}
private bool IsSpecialFolder()
{
return m_folder != null && m_folder.Parent == null && (m_folder.ObjectID.ToLower().Contains("rootfavoritesfolder") || m_folder.ObjectID.ToLower().Contains("subscriptionfolder"));
}
public bool DoesUserHavePermissions(Permissions permissions)
{
if (m_folder == null || m_folder.FolderID < 0 || IsSpecialFolder() || (m_folder.Workspace!=null && m_folder.Workspace.FolderID == m_folder.FolderID) )
{
return false;
}
else
{
switch (permissions)
{
case Permissions.EditItem: return (m_folder.EffectiveAccess & imAccessRight.imRightReadWrite) == imAccessRight.imRightReadWrite;
}
return true;
}
}
public bool CheckedOutToUser
{
get { return true; }
}
public bool CheckedOut
{
get { return true; }
}
public string DisplayName
{
get
{
return m_folder.Name;
}
set
{
}
}
public IEnumerable<IDMSFolder> SubFolders
{
get
{
var list = new List<IDMSFolder>();
foreach (IManFolder subFolder in m_folder.SubFolders)
{
list.Add(new ManFolder(subFolder));
}
return list;
}
set
{
}
}
public IEnumerable<IDMSFile> Files
{
get
{
var list = new List<IDMSFile>();
foreach (object content in m_folder.Contents)
{
if (content is IManDocument)
{
list.Add(new ManFile((IManDocument)content, m_folder));
}
}
return list;
}
set
{
}
}
public IDMSFolder ParentFolder
{
get
{
if (m_folder.Parent != null)
{
return new ManFolder(m_folder.Parent);
}
return null;
}
}
public void OnBeforeSending(OperationContext context)
{
}
private SyncInfoService GetService()
{
return WSApplication.Instance.Module.Resolve<SyncInfoService>();
}
public void OnSendError(object args, Exception e)
{
}
public void UpdateDisplayName(string name)
{
}
public void OnAfterAdd(OperationContext args)
{
}
public void OnAfterSending(OperationContext context)
{
}
public void OnBeforeSync(OperationContext args)
{
}
public void AddHistory(string eventName, string eventComment, Workshare.Integration.Operations operation)
{
}
public void AddHistories(List<Activity> activities)
{
}
public override string ToString()
{
return string.Format("Folder with ID={0}, Name={1}", ID, DisplayName);
}
internal bool HasRightToAddNewFiles()
{
return true;
}
public string DMSItemKey
{
get { return m_folder == null ? "" : Application.iManInstance.ServerKey +":!DatabaseName:"+ m_folder.Database.Name + ":!Folder:" + this.DMSId.ToString(); }
}
}
}
<file_sep>/WSComponents/src/WSComponents/Common/WorkUnit.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Text;
using Workshare.Components.Interfaces;
using Workshare.Components.Views.Common;
using Workshare.Components.Views.TrueSyncDialog.VMs;
using Workshare.Components.WSLogger;
using Workshare.Integration;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using WorksharePlatform;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.Components.Common
{
public enum WorkStatus
{
Unknown,
Pending,
Success,
Processing,
Error
}
public class WorkUnit : BasePropertyChanged<WorkUnit>, IWorkUnit
{
public event EventHandler<WorkUnitErrorEventArgs> OnError;
protected WorkUnit(IModuleView view)
{
_view = view;
TargetItems = new TargetItem[0];
this.Name = "Work unit";
}
protected readonly IModuleView _view;
public virtual void Execute()
{
}
private WorkStatus _status;
public virtual WorkStatus Status
{
get { return _status; }
set
{
if (_status != value)
{
_status = value;
PropertyHasChanged(p => p.Status);
PropertyHasChanged(p => p.StatusDescription);
}
}
}
public string Name
{
get; protected set;
}
public TargetItem[] TargetItems
{
get; protected set;
}
private string _statusDescription;
public string StatusDescription
{
get
{
return _statusDescription;
}
set
{
if (_statusDescription != value)
{
_statusDescription = value;
PropertyHasChanged(p => p.StatusDescription);
}
}
}
protected void ExecuteIntegration(Action action)
{
try
{
try
{
action();
}
catch (Exception ex)
{
if (OnError != null)
{
var args = new WorkUnitErrorEventArgs(this, ex);
OnError(this, args);
if (!args.Handled) throw;
}
}
}
catch (MultiItemsException ex)
{
if (ex.errorList.AsQueryble().Any(p => p.Error is CloudUnAuthorized))
{
WSApplication.Instance.AuthProvider.SetCurrentUser(null);
}
var sb = new StringBuilder();
ex.errorList.AsQueryble()
.ToList()
.ForEach(p => sb.AppendLine((p.Item != null ? p.Item.DisplayName + " : " : "") + p.Error.Message));
_view.ShowError(sb.ToString());
}
catch (CloudUnAuthorized ex)
{
WSApplication.Instance.AuthProvider.SetCurrentUser(null);
_view.ShowError(ex.Message);
}
catch (Exception ex)
{
_view.ShowError(ex.Message);
}
}
public virtual void OnAdded()
{
}
public string ItemName
{
get; set;
}
}
public class WorkUnitErrorEventArgs : EventArgs
{
public WorkUnitErrorEventArgs(IWorkUnit unit, Exception error)
{
Error = error;
WorkUnit = unit;
}
public Exception Error { get; private set; }
private IWorkUnit WorkUnit { get; set; }
public bool Handled { set; get; }
}
public class TrueSynhWorkUnit : WorkUnit
{
private readonly IEnumerable<IDMSItem> _items = new List<IDMSItem>();
public TrueSynhWorkUnit(IModuleView view, IEnumerable<SyncItemInformation> items)
: base(view)
{
Name = RES.STR_WORK_STATUS_SYNCHRONIZATION_TEXT;
_items = items.Where(e => e.item != null).Select(a => a.item);
TargetItems = _items.Where(s => s != null).Select(f => new TargetItem(f.ID.ToString())
{
Name = f.Name
}).ToArray();
var itemsText = "Files:";
_items.ToList().ForEach(p => itemsText += Environment.NewLine + p.DisplayName);
StatusDescription = itemsText;
}
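// Only activities with remote additions, remote changes or changes on both sides need processing.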
private bool ShouldBeProcessed(ImportFileActivity activity)
{
return activity.Changes.Any(a => a.Type == ChangeType.BothChanged
|| a.Type == ChangeType.RemoteAdded
|| a.Type == ChangeType.RemoteChanged);
}
public override void Execute()
{
try
{
var processor = WSApplication.Instance.Module.Resolve<Processor>();
SummaryProcessResult scanResult = processor.DirectScanFile(_items.OfType<BaseDMSFile>());
if (scanResult == null) return;
var activityFinder = WSApplication.Instance.Module.Resolve<ActivityFinder>();
var activities = activityFinder.GetImportActivities(scanResult, null);
var errors = activityFinder.GetErrors(scanResult);
if (errors.Any())
{
string errMsg = string.Empty;
List<string> listMsg = new List<string>();
errors.ForEach(e =>
{
string msg;
if (!string.IsNullOrEmpty(errMsg))
errMsg += Environment.NewLine;
if (e.Error is WebException && (e.Error as WebException).IsStatusCode(HttpStatusCode.Forbidden))
msg = RES.STR_YOU_DONT_HAVE_PERMISSION + activities.First(a => a.MapId.Equals(e.Id, StringComparison.OrdinalIgnoreCase)).Filename;
else
msg = e.Error.Message;
if (!listMsg.Contains(msg))
{
errMsg += msg;
listMsg.Add(msg);
}
});
_view.ShowError(new BaseException(RES.STR_UNABLESYNC_CAPTION, errMsg));
}
else
{
var activitiesToProcess =
activities
.Where(ShouldBeProcessed)
.Where(a => a.GetAllAvailableActions().Any())
.ToList();
var deletedFiles =
activities.Where(a => a.Changes.Any(c => c.Type == ChangeType.RemoteDeleted)).ToList();
if (deletedFiles.Any())
{
_view.ShowErrors(deletedFiles.Select(a => new ItemException()
{
Item = a.DmsFile,
Error = new CloudFileNotFound(Integration.Properties.Resources.STR_UNABLE_SYNC)
}));
var syncInfo = WSApplication.Instance.Module.Resolve<SyncInfoService>();
foreach (var deletedFile in deletedFiles)
{
deletedFile.DmsFile.AddHistory("FileDeletedOrMoved", "Synced from Workshare on " + DateTime.Now.ToString(Activity.TimeFormat) + ". File has been deleted or moved", Operations.Sync);
syncInfo.BreakLink(deletedFile.DmsFile);
}
}
if (activitiesToProcess.Any())
{
var actions = _view.SelectActions(activitiesToProcess);
var processOptions = new ProcessOptions();
processOptions.ActionsToApply.AddRange(actions.OfType<ItemMapActivityAction>());
scanResult = processor.Process(scanResult, processOptions);
activityFinder.UpdateImportActivities(scanResult, activitiesToProcess);
var processErrors = activitiesToProcess.Where(a => a.InKindOfErrorState()).ToList();
if (processErrors.Any())
{
_view.ShowErrors(
processErrors.ConvertAll(
e => new ItemException {Error = e.Error ?? new Exception(e.State.ToString()), Item = e.DmsFile}));
}
}
}
}
catch (Exception ex)
{
Logger.WriteError(ex);
_view.ShowError(ex.Message);
}
}
}
public class SendWorkUnit : WorkUnit
{
private readonly IEnumerable<IDMSItem> _mItems = new List<IDMSItem>();
private readonly int _mDestinationFolderId = -1;
private readonly CheckOutOptions _mCheckOutOption;
public SendWorkUnit(IModuleView view, IEnumerable<IDMSItem> items, int destFolderId, CheckOutOptions option)
: base(view)
{
_mItems = items;
_mDestinationFolderId = destFolderId;
_mCheckOutOption = option;
Name = RES.STR_WORK_STATUS_SENDING_TEXT;
TargetItems = items.Select(a => new TargetItem(a.ID.ToString())
{
Name = a.Name
}).ToArray();
var itemsText = "Files:" + Environment.NewLine;
_mItems.ToList().ForEach(p => itemsText += p.DisplayName + Environment.NewLine);
var val = StatusDescription;
this.StatusDescription = val + Environment.NewLine + itemsText;
}
public override void Execute()
{
ExecuteIntegration(() =>
{
var operationContext = new OperationContext(_mCheckOutOption);
try
{
Logger.Write("SendWorkUnit::Execute START", Severity.Information);
WSApplication.Instance.Integration.SendItems(_mItems,
new FolderDetails {Id = _mDestinationFolderId}, operationContext);
}
catch (MultiItemsException)
{
throw;
}
catch (Exception ex)
{
var itemCollection = new ItemsErrorsCollection(operationContext);
_mItems.ToList().ForEach(localItem => itemCollection.Add(localItem, ex));
itemCollection.Check();
throw;
}
});
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Services/DiscardedService.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Web.Script.Serialization;
using Workshare.Components.WSLogger;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.Strategies;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration.Processor.Services
{
public class DiscardedService
{
#region Nested Types
class DataEntry
{
public string Id { set; get; }
public long WsDateTick { set; get; }
public long LocalEditDateTick { set; get; }
}
class DataEnties : IWebDataStorageData
{
List<DataEntry> _entries = new List<DataEntry>();
public string DataType
{
get
{
return "DMSUserCache";
}
set
{
}
}
public List<DataEntry> Entries
{
get { return _entries; }
set { _entries = value; }
}
}
#endregion
private IAuthProvider _auth = null;
private const string Delimiter = "|||";
WebDataStorageService _wdsService;
public DiscardedServiceOptions options;
DataEnties _entries = new DataEnties();
public DiscardedService(IAuthProvider auth_provider, WebDataStorageService wdsService)
{
_auth = auth_provider;
_wdsService = wdsService;
options = new DiscardedServiceOptions() { UseForceRequest = false };
GetCahce();
}
void UpdateCahce()
{
try
{
var s_z = new JavaScriptSerializer();
string _data = s_z.Serialize(_entries);
_wdsService.AddUserData(_entries.DataType, _data, DateTime.MinValue);
}
catch (Exception ex)
{
Logger.WriteError("Error while writing cache data to the Web data storage", ex);
//throw;
}
}
void GetCahce()
{
try
{
var s_z = new JavaScriptSerializer();
string _data = _wdsService.GetUsertData(_entries.DataType, DateTime.MinValue, options.UseForceRequest);
if (!string.IsNullOrEmpty(_data))
{
_data = _data.Replace("/Date(", "\\/Date(").Replace("/\"", "\\/\"");
_entries = s_z.Deserialize<DataEnties>(_data);
}
}
catch (Exception ex)
{
Logger.WriteError("Error while reading cache data from the Web data storage", ex);
//throw;
}
}
void RemoveLocally(FileMapActivity item)
{
var s = new DataEntry
{
Id = GetIdForActivity(item)
};
_entries.Entries.RemoveAll(p => Compare(s, p));
}
bool Compare(DataEntry item1, DataEntry item2)
{
return item1.Id == item2.Id;
}
string CreateCacheItem(DataEntry item)
{
return string.Format("{1}{0}{2}{0}{3}{4}", Delimiter, item.Id, item.WsDateTick, item.LocalEditDateTick, Environment.NewLine);
}
DataEntry AddLocally(FileMapActivity item)
{
var s = new DataEntry
{
Id = GetIdForActivity(item),
WsDateTick = item.WsLastUpdateDate.Ticks,
LocalEditDateTick = item.LocalEditTime.Ticks
};
_entries.Entries.Add(s);
return s;
}
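// An activity counts as discarded only while neither the Workshare copy nor the local file has
// changed since the discard entry was recorded.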
public bool IsDiscarded(FileMapActivity item)
{
var Id = GetIdForActivity(item);
var entry = _entries.Entries.LastOrDefault(a => a.Id == Id);
if (entry == null)
{
return false;
}
return item.WsLastUpdateDate.Ticks <= entry.WsDateTick && item.LocalEditTime.Ticks <= entry.LocalEditDateTick;
}
public void Add(FileMapActivity item)
{
var s = AddLocally(item);
UpdateCahce();
}
public void Remove(FileMapActivity item)
{
RemoveLocally(item);
UpdateCahce();
}
public void Refresh()
{
Logger.WriteTrace("Refreshing Discarded service.");
GetCahce();
}
private string GetIdForActivity(FileMapActivity item)
{
//If you change how the Id is formed, remember to update the removal of discarded activities in UploadActivityFinder.RefreshAction.
return string.Format("{0}---{1}---{2}", item.WsId, item.DmsFileId, item.Type);
}
}
public class DiscardedServiceOptions
{
public bool UseForceRequest { get; set; }
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncDialog/VMs/DocumentActionVm.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes.Activities;
namespace Workshare.Components.Views.TrueSyncDialog.VMs
{
public class DocumentActionVM
{
public string Title { get { return action.Title; } }
public ItemMapActivityAction action;
public IItemMapActivityVm activityVm;
public IEnumerable<DocumentActionVM> Actions
{
get
{
return action.Actions.Select(p => new DocumentActionVM(p, activityVm)).ToList();
}
}
public DocumentActionVM(ItemMapActivityAction a, IItemMapActivityVm vm)
{
this.action = a;
this.activityVm = vm;
}
public bool CanExecute()
{
return ((FileMapActivity) activityVm.data).CanExecute(action);
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Strategies/DmsProcessStrategyBase.cs
using System.Collections.Generic;
using Workshare.Integration.Processor.Maps;
namespace Workshare.Integration.Processor.Strategies
{
public abstract class DmsProcessStrategyBase
{
public abstract ProcessResult Process(FileMap fileMap, DmsProcessOptions dmsProcessOptions);
public abstract ProcessResult Process(FolderMap foldermap, DmsProcessOptions dmsProcessOptions);
}
public class ProcessResult
{
public static ProcessResult Processed = new ProcessResult(ProcessState.Processed);
public static ProcessResult Cancelled = new ProcessResult(ProcessState.Cancelled);
public ProcessResult(ProcessState result)
{
this.Result = result;
}
public ProcessState Result { private set; get; }
}
public class DmsProcessOptions
{
public DmsProcessOptions(IEnumerable<ItemMapActivityAction> actions)
{
Actions = actions ?? new List<ItemMapActivityAction>();
}
public IEnumerable<ItemMapActivityAction> Actions { private set; get; }
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncDialog/WorkUnits/ScanWorkUnit.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Threading;
using System.Windows.Forms.VisualStyles;
using Workshare.Components.Common;
using Workshare.Components.Interfaces;
using Workshare.Components.Views.TrueSyncDialog.VMs;
using Workshare.Components.WSLogger;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.DmsItems;
using Workshare.Integration.Processor.DmsItems.Visitors;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.Components.Views.TrueSyncDialog.WorkUnits
{
public class ScanWorkUnit : WorkUnit
{
private readonly List<IDMSItem> _items;
private readonly TrueSyncDialogViewModel _vm;
private readonly bool _loadingMore;
private readonly Thread _scanThread;
public ScanWorkUnit(IModuleView view, IEnumerable<IDMSItem> items, TrueSyncDialogViewModel vm, Thread scanThread,
bool loadingMore = false)
: base(view)
{
if (view == null) throw new ArgumentNullException("view");
if (items == null) throw new ArgumentNullException("items");
if (vm == null) throw new ArgumentNullException("vm");
_items = items.ToList();
_vm = vm;
_loadingMore = loadingMore;
Name = "Scanning";
TargetItems = _items.Select(a => new TargetItem(a.ID.ToString())
{
Name = a.Name
}).ToArray();
StatusDescription = "Scanning...";
_scanThread = scanThread = new Thread(new ThreadStart(Scan));
}
public override void OnAdded()
{
_vm.State = !_loadingMore
? TrueSyncDialogViewModel.StateEnum.Scanning
: TrueSyncDialogViewModel.StateEnum.LoadingMore;
base.OnAdded();
}
public override void Execute()
{
try
{
_scanThread.Start();
}
catch (Exception ex)
{
_view.ShowError(ex);
}
}
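// Runs on the background scan thread: refreshes the discarded cache, scans the first or the next
// page of the selected folder, converts the result into activities and notifies the view model.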
private void Scan()
{
Logger.WriteTrace(string.Format("ScanWorkUnit.Execute. loadingMode={0}; itemsPerPage={1}", _loadingMore, _vm.ItemsPerPage));
_vm.Module.Resolve<DiscardedService>().Refresh();
var processor = _vm.Module.Resolve<Processor>();
var existedActivities = _vm.AllActivities.Select(a => a.data).ToList();
SummaryProcessResult scanMaps;
var scanOptions = new ScanOptions(_vm.CancellationToken)
{
ItemsPerPage = _vm.ItemsPerPage
};
var itemToScan = _items.OfType<BaseDMSFolder>().First();
if (!_loadingMore)
{
var itemsDetector = new ItemsDetector(_vm.Module.Resolve<ActivityFinder>(), _vm);
scanMaps = processor.ScanFirstPage(itemToScan, scanOptions, itemsDetector);
}
else
{
scanMaps = processor.ScanNextPage(itemToScan, scanOptions, _vm.ScanResult);
}
var adapter = _vm.Module.Resolve<ActivityFinder>();
var activities = adapter.GetAllActivities(scanMaps, existedActivities.OfType<FileMapActivity>().ToList());
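// Keep pulling further pages while every newly added remote file found so far is already
// discarded and more results remain, presumably so the dialog does not open with an empty list.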
while (activities.OfType<ImportFileActivity>().Where(p => p.Changes.Any(x => x.Type == ChangeType.RemoteAdded))
.All(p => p.IsDiscarded) && scanMaps.Iterator != null && scanMaps.Iterator.HasMore)
{
scanMaps = processor.ScanNextPage(itemToScan, scanOptions, scanMaps);
activities = adapter.GetAllActivities(scanMaps, activities);
}
var errors = adapter.GetErrors(scanMaps);
_vm.OnCompleted(errors, activities, scanMaps);
}
}
class ItemsDetector : IItemsDetector
{
private readonly ActivityFinder _finder;
private readonly TrueSyncDialogViewModel _vm;
public ItemsDetector(ActivityFinder finder, TrueSyncDialogViewModel vm)
{
_finder = finder;
_vm = vm;
}
public int GetCurrentItemsCount(DMSVisitor visitor)
{
var v = (ScanVisitor) visitor;
var existedActivities = _vm.AllActivities.Select(a => a.data).ToList();
return _finder.GetAllActivities(v.Result, existedActivities.OfType<FileMapActivity>()).Count(a => a.InKindOfErrorState() || a.InKindOfProceeedState() || TrueSyncDialogViewModel.FilterActivities(a));
}
}
}
<file_sep>/SharePoint/src/WorksharePointCloud/Layouts/WorkshareCloud/Scripts/WorkshareCore.js
////RESOURCES
var RES = {
STR_PROMPT_TO_CHANGE_LOCATION: "Some operations are in progress. Do you really want to leave the page?",
STR_ALERT_TEXT_SENDING: "Sending...",
STR_ALERT_TEXT_SYNHRONIZATION: "Synchronization...",
STR_ALERT_TEXT_UPDATING_LIST: "Updating...",
STR_ALERT_TEXT_COMPLETED: "Completed",
STR_MENUITEM_CAPTION_UPDATELIST: "Update view to work with Workshare",
STR_MENUITEM_CAPTION_SEND_TO_WORKSHARE: "Send to Workshare",
STR_MENUITEM_CAPTION_SYNHRONIZE: "Sync from Workshare",
STR_MESSAGE_ERROR: "Something went wrong: ",
STR_SUCCESSFULLY_LOGINED: "Successfully logged in to Workshare",
STR_MESSAGE_NOT_ENOUGHT_PERMISSIONS: "You don't have permission to perform this action",
INT_MENUOPTION_UPDATE_VIEW: 0,
INT_MENUOPTION_SEND_TO_WS: 1,
INT_MENUOPTION_SYNC_FROM_WS: 2,
ERROR_WORKSHARE_FEILD_NOT_FOUND: "cannot find hidden input",
ERROR_NO_ITEM_SELECTED: "Please select the checkbox to the left of the file/folder and try again.",
ERROR_OTHER_OPERATION_IN_PROGRESS: "Another operation is in progress. Please wait...",
CANNOT_BE_RESENT: "Cannot be sent back to Workshare.\n",
CAN_BE_RESENT: "Can be sent back to Workshare.\n",
PARENT_FOLDER_DELETED: "Parent folder is deleted",
COOKIE_USER_CREDENTIALS: "user_credentials",
COOKIE_SESSION_ID: "_session_id",
COOKIE_AUTH_TOKEN: "tmp_auth_token",
CONFLICT_REPLACE: 1,
CONFLICT_KEEP_BOTH: 2,
CONFLICT_NONE: 0,
STR_CONFLICT_HEADER: "Replace with older file?",
STR_CONFLICT_DESCRIPTION_DESTINATION_NEWER: "The file being synced from Workshare appears to be an older file",
STR_CONFLICT_DESCRIPTION_SOURCE_NEWER: "The file source location appears to contain a newer instance of the file",
STR_CONFLICT_FILE: "File :",
STR_CONFLICT_DATE: "Date :",
STR_CONFLICT_MODIFIER: "Modified by :",
STR_CONFLICT_DLGTITLE: "Resolve conflict",
STR_CONFLICT_REPLACEBUTTON_TEXT: "Replace File",
STR_CONFLICT_KEEPBOTHBUTTON_TEXT: "Keep Both Files",
STR_CONFLICT_CANCELBUTTON_TEXT: "Cancel",
STR_CONFLICT_SHAREPOINT_TITLE: "SharePoint",
STR_CONFLICT_WORKSHARE_TITLE: "Workshare",
STR_ERROR_WORKSHARE_TITLE: "Workshare",
STR_CANNOT_CONNECT_TO_SP_SERVICE: "Cannot establish connection with the Workshare Service on SharePoint"
};
var WSContext =
{
//WS_SERVER: "https://dev.workshare.com/",
//WS_SERVER: "https://my.workshare.com/",
WS_SERVER: "https://qa.workshare.com/",
WS_DIALOG_RELATIVE: "home/dialogs.sdif",
LOGIN_DIALOG_RELATIVE: "login"
}
var ERRORCODE = {
ItemListErrors: 5,
CluudUnAuth: 100,
ItemNotFound: 406,
SyncConflict: 503
};
var HTTP_CODES =
{
SERVICE_UNAVAILABLE: 503
};
//item class
function SPItem(itemID, type) {
this.id = itemID;
this.FSObjType = type;
};
//Data for one-request sending
function SendItemsInfo(folderId, items, listGuid) {
this.items = items;
this.folderId = folderId;
this.listGuid = listGuid;
};
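// Example (hypothetical values): new SendItemsInfo(0, [new SPItem(3, 0)], "{list-guid}") describes a single request sending item 3 to Workshare folder 0.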
function SyncItemInfo(item, conflictOption) {
this.item = item;
this.conflictOption = conflictOption;
}
//Data for one-request synchronization
function SyncItemsInfo(items, listGuid) {
this.items = items;
this.listGuid = listGuid;
};
//SP CLOUD SERVICE
function SendItemToFolder(sendItemsInfos, success, error) {
ExecuteWSPCloudMehod("Send", { infos: sendItemsInfos },
function (return_data, textStatus) {
if (success !== undefined) {
success(return_data, textStatus);
}
},
function (xhr, textStatus, errorThrown) {
if (error !== undefined) {
error(xhr, textStatus, errorThrown);
}
else {
defaultErrorHandler(xhr, textStatus, errorThrown);
}
}, RES.STR_ALERT_TEXT_SENDING, RES.STR_ALERT_TEXT_COMPLETED);
};
function Login(email, pass, success, error) {
ExecuteWSPCloudMehod("Login", { email: email, password: <PASSWORD> },
function (return_data, textStatus) {
if (success !== undefined) {
success();
}
},
function (xhr, textStatus, errorThrown) {
if (error !== undefined) {
error(xhr, textStatus, errorThrown);
}
else {
defaultErrorHandler(xhr, textStatus, errorThrown);
}
}, RES.STR_ALERT_TEXT_SENDING, RES.STR_ALERT_TEXT_COMPLETED);
};
function Logout(success) {
ExecuteWSPCloudMehod("Logout", {},
function (return_data, textStatus) {
success();
},
defaultErrorHandler, RES.STR_ALERT_TEXT_SENDING, RES.STR_ALERT_TEXT_COMPLETED);
};
function SyncItem(syncItemsInfos, success, error) {
ExecuteWSPCloudMehod("Sync", { infos: syncItemsInfos },
function (return_data, textStatus) {
if (success !== undefined) {
success();
}
return;
},
function (xhr, textStatus, errorThrown) {
if (error !== undefined) {
error(xhr, textStatus, errorThrown);
}
else {
defaultErrorHandler(xhr, textStatus, errorThrown);
}
}, RES.STR_ALERT_TEXT_SYNHRONIZATION, RES.STR_ALERT_TEXT_COMPLETED);
};
function updateView(listGuid, success) {
ExecuteWSPCloudMehod("Update", { items: [], folderId: -1, listGuid: listGuid },
function (return_data, textStatus) {
if (success !== undefined) {
success(return_data, textStatus);
}
},
defaultErrorHandler, RES.STR_ALERT_TEXT_UPDATING_LIST, RES.STR_ALERT_TEXT_COMPLETED);
};
var OperationsContext = {
inProgress: 0,
operationName: "",
operationStarted: function (opName) {
this.operationName = opName;
this.inProgress++;
},
operationFinished: function () {
this.operationName = "";
this.inProgress--;
},
anyLiveOperation: function () {
return this.inProgress > 0;
}
};
function DialogsSettings(l_w, l_h, ch_w, ch_h, cr_w, cr_h) {
this.l_w = l_w;
this.l_h = l_h;
this.ch_w = ch_w;
this.ch_h = ch_h;
this.cr_w = cr_w;
this.cr_h = cr_h;
}
var defaultDialogSettings = new DialogsSettings(1026, 728, 596, 514, 596, 514);
var cachedSettings=null;
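// Retrieves the dialog sizes configured on the server, caching the first successful response; falls back to defaultDialogSettings if the request fails.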
function GetDialogSettings(callback) {
if(cachedSettings)
{
return callback(cachedSettings);
}
else
{
ExecuteOrDelayUntilScriptLoaded(function () {
ExecuteOrDelayUntilScriptLoaded(function () {
$.ajax({
type: "POST",
url: getUrl("WorkshareCloudService.aspx") + "/" + "DialogSettings",
contentType: "application/json; charset=utf-8",
dataType: "json",
data: JSON.stringify({}),
success: function (res, textStatus) {
var dlgSettings = new DialogsSettings(res.d.dialog_login_width, res.d.dialog_login_height, res.d.dialog_select_folder_width, res.d.dialog_select_folder_height, res.d.dialog_create_folder_width, res.d.dialog_create_folder_height);
cachedSettings=dlgSettings;
callback(dlgSettings);
},
error: function (xhr, textStatus, errorThrown) {
callback(defaultDialogSettings);
}
});
}, "sp.js");
}, "core.js");
}
}
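// Generic wrapper for the WorkshareCloudService.aspx web methods: tracks the call in OperationsContext, shows SharePoint notifications for progress and result, and routes failures to onError or defaultErrorHandler.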
function ExecuteWSPCloudMehod(method, data, onSuccess, onError, onStartText, onSuccessText) {
ExecuteOrDelayUntilScriptLoaded(function () {
ExecuteOrDelayUntilScriptLoaded(function () {
try {
OperationsContext.operationStarted(method);
var notificationId = SP.UI.Notify.addNotification(onStartText, true);
$.ajax({
type: "POST",
url: getUrl("WorkshareCloudService.aspx") + "/" + method,
contentType: "application/json; charset=utf-8",
dataType: "json",
data: JSON.stringify(data),
success: function (return_data, textStatus) {
OperationsContext.operationFinished();
SP.UI.Notify.removeNotification(notificationId);
SP.UI.Notify.addNotification(onSuccessText, false);
onSuccess(return_data, textStatus);
return;
},
error: function (xhr, textStatus, errorThrown) {
OperationsContext.operationFinished();
SP.UI.Notify.removeNotification(notificationId);
if (xhr.readyState == 4) {
var errorInfo = getErrorInfo(xhr);
if (errorInfo != null) {
if (errorInfo.code == 403 || errorInfo.code == 404 || errorInfo.code == 5) {
//defaultErrorHandler(xhr, textStatus, errorThrown);
//not display error notification
}
else {
SP.UI.Notify.addNotification(errorInfo.description, false);
}
}
else {
SP.UI.Notify.addNotification(errorThrown, false);
}
if (onError) {
onError(xhr, textStatus, errorThrown);
}
else {
defaultErrorHandler(xhr, textStatus, errorThrown);
}
}
return;
}
});
}
catch (err) {
OperationsContext.operationFinished();
alert(err);
}
}, "sp.js");
}, "core.js");
};
function getCurrentItem() {
return new SPItem(currentItemID, 0);
};
/// DIALOGS
function closeDialog(result, data) {
ExecuteOrDelayUntilScriptLoaded(function () {
if (result) {
SP.UI.ModalDialog.commonModalDialogClose(SP.UI.DialogResult.OK, data);
}
else {
SP.UI.ModalDialog.commonModalDialogClose(SP.UI.DialogResult.cancel, data);
}
}, "sp.js");
};
function getDlgSizes(settings) {
if(settings)
{
return {
width: settings.ch_w,
height: settings.ch_h
}
}
else
{
return {
width: 594,
height: 512
};
}
};
function getFrameSize(dlgSize) {
return {
width: dlgSize.width,
height: dlgSize.height
};
};
function setSizes(dlg, iframe, settings) {
var dlgSize = getDlgSizes(settings);
var frameSize = getFrameSize(dlgSize);
if (dlg) {
dlg.css("height", dlgSize.height + "px").css("width", dlgSize.width + "px");
dlg.parent().css("height", dlgSize.height + "px").css("width", dlgSize.width + "px");
}
if (iframe) {
iframe.css("height", frameSize.height + "px").css("width", frameSize.width + "px");
}
}
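// Shows the Workshare dialog either inside a jQuery UI dialog hosting an iframe (jq = true), delivering the postMessage result to callback, or via window.showModalDialog otherwise.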
function showDialog(src, callback, jq) {
GetDialogSettings(function (settings) {
function receiver(e) {
if (window.removeEventListener) {
window.removeEventListener("message", receiver, false);
}
else {
window.detachEvent("onmessage", receiver);
}
dialogClosed(GetDialogResult(e.data));
}
if (jq) {
if (window.addEventListener) {
window.addEventListener("message", receiver, false);
}
else {
window.attachEvent("onmessage", receiver);
}
var dlgSize = getDlgSizes(settings);
var dlg = $("<div></div>");
var frm = $("<iframe scrolling='no'></iframe>").attr("src", src).css("border", "none");
dlg.append(frm);
dlg.dialog({
buttons: {},
resizable: false,
height: dlgSize.height,
width: dlgSize.width,
position: "center",
modal: true,
open: function (event, ui) {
//hide close button.
$(this).css("overflow", "hidden");
$(this).parent().css("overflow", "hidden");
$(this).parent().addClass("selectfolder-dialog");
$(this).parent().find('.ui-dialog-titlebar-close:first').hide();
$(this).parent().find('.ui-dialog-titlebar').hide();
setSizes(dlg, frm, settings);
}
});
function dialogClosed(data) {
dlg.dialog("close");
dlg.remove();
if (callback) {
callback(data);
}
}
}
else {
//IE has problems hosting qa.workshare.com in an iframe
//we can set <meta http-equiv="X-UA-Compatible" content="IE=9"/> on the SharePoint master page, then the dialog will work, but...
var res = window.showModalDialog(src, null, "dialogHeight:700px,dialogWidth:700px,center:yes,status:no");
// var res = window.showModalDialog(src, null, "dialogHeight:1000,dialogWidth:1000,center:yes,status:no");
if (callback) {
callback(GetDialogResult(res));
};
}
});
}
var isIE = (function () {
var div = document.createElement('div');
div.innerHTML = '<!--[if IE]><i></i><![endif]-->';
return (div.getElementsByTagName('i').length === 1);
}());
function GetDialogResult(data) {
try {
var res = $.parseJSON(data);
if (res) {
return res;
}
else {
return data;
}
}
catch (err) {
//alert("Cannot parse dialog result");
return data;
}
}
function showSelectFolderDialog(items, initialFolderId, callback) {
var params = "?caller=sharepoint&extcall=hello";
var src = WSContext.WS_SERVER + WSContext.WS_DIALOG_RELATIVE + params;
showDialog(src, callback, true);
};
function redirectToLogin() {
var params = "?claim_token_url=" + encodeURIComponent(window.location).replace("%2520", "%20") + "&reuse_session=1";
var src = WSContext.WS_SERVER + WSContext.LOGIN_DIALOG_RELATIVE + params;
window.location = src;
}
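// Walks conflictedItems from the last index down to 0, showing the resolve dialog for each conflict; cancelled items are removed and the chosen conflictOption is recorded before callback receives the remaining list.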
function showResolveDlgRec(index, conflictedItems, callback) {
if (index < 0) {
callback(conflictedItems);
}
else {
showResolveDlgInternal(conflictedItems[index], function (result) {
if (result) {
if (result.cancel) {
conflictedItems.splice(index, 1);
}
else if (result.action) {
conflictedItems[index].item.conflictOption = result.action;
}
else {
conflictedItems[index].item.conflictOption = RES.CONFLICT_NONE;
}
}
showResolveDlgRec(index - 1, conflictedItems, callback);
});
}
}
function onDialogOpened(dlg) {
dlg.parent().find('.ui-dialog-titlebar:first').prepend($("<img/>").addClass("ws-worksharelogo").attr("src", getUrl("/Images/Workshare.ico")));
}
function getConflictedVersHtml(title, info) {
return $("<table cellpadding=\"4px\" cellspacing=\"3px\"></table>")
.append($("<tr></tr>")
.append($("<td></td>")
.append($("<img/>").attr("title", name).attr("src", getUrl("FileIcon.ashx?size=40&filename=" + info.name))))
.append($("<td></td>")
.append($("<div></div>").addClass("SourceName")
.append($("<b></b>").html(title)))
.append($("<div></div>").addClass("FileDetail").append($("<span></span>")
.append($("<b></b>").html(RES.STR_CONFLICT_FILE))).append($("<span></span>").html(info.name).attr("title", info.name)))
.append($("<div></div>").addClass("FileDetail").append($("<span></span>")
.append($("<b></b>").html(RES.STR_CONFLICT_DATE))).append($("<span></span>").html(info.modified)))
.append($("<div></div>").addClass("FileDetail").append($("<span></span>")
.append($("<b></b>").html(RES.STR_CONFLICT_MODIFIER))).append($("<span></span>").html(info.modifier).attr("title", info.modifier)))));
}
function getContentHtml(conflictedItem) {
return $("<div></div>").addClass("ui_extend_file_conflict")
.append($('<div></div>').addClass("conflictdlg-headertext")
.append($("<h2></h2>").html(RES.STR_CONFLICT_HEADER)))
.append($('<div></div>').addClass("conflictdlg-descriptiontext")
.append($("<p></p>").html(RES.STR_CONFLICT_DESCRIPTION_DESTINATION_NEWER)))
.append($('<div></div>').addClass("conflictdlg-fileInfo").append(getConflictedVersHtml(RES.STR_CONFLICT_WORKSHARE_TITLE, conflictedItem.wsInfo)))
.append($('<div></div>').addClass("conflictdlg-arrow").append($("<img/>").attr("src", getUrl("Images/conflict-arrow.png"))))
.append($('<div></div>').addClass("conflictdlg-fileInfo").append(getConflictedVersHtml(RES.STR_CONFLICT_SHAREPOINT_TITLE, conflictedItem.spInfo)));
}
function showResolveDlgInternal(item, callback) {
var dlg = getContentHtml(item).attr("title", RES.STR_CONFLICT_WORKSHARE_TITLE);
var buttons = {};
buttons[RES.STR_CONFLICT_REPLACEBUTTON_TEXT] = function () { dlg.result = { action: RES.CONFLICT_REPLACE }; dlg.dialog("close"); };
buttons[RES.STR_CONFLICT_KEEPBOTHBUTTON_TEXT] = function () { dlg.result = { action: RES.CONFLICT_KEEP_BOTH }; dlg.dialog("close"); };
buttons[RES.STR_CONFLICT_CANCELBUTTON_TEXT] = function () { dlg.dialog("close"); };
dlg.dialog({
buttons: buttons, modal: true, width: 590, resizable: false,
close: function () {
if (dlg.result) {
callback(dlg.result);
}
else {
callback({ cancel: "true" });
}
},
open: function () {
$(this).parent().find(".ui-dialog-buttonset button:first-child").addClass("bluebutton");
onDialogOpened($(this));
}
});
}
function showResolveDlg(conflictedItems, callback) {
showResolveDlgRec(conflictedItems.length - 1, conflictedItems, callback);
}
function getErrorCode(xhr) {
try {
return getErrorInfo(xhr).code;
}
catch (err) {
return null;
}
};
function getErrorInfo(xhr) {
try {
var errorInfo = $.parseJSON(xhr.responseText);
if (errorInfo.d.code !== undefined && errorInfo.d.description !== undefined) {
return { code: errorInfo.d.code, description: errorInfo.d.description, data: errorInfo.d.data };
}
else {
return null;
}
}
catch (err) {
return null;
}
}
function defaultErrorHandler(xhr, textStatus, errorThrown) {
try {
var errorInfo = getErrorInfo(xhr);
if (errorInfo) {
switch (errorInfo.code) {
default: showError(errorInfo); break;
}
}
else {
if (xhr.status == HTTP_CODES.SERVICE_UNAVAILABLE) {
alert(RES.STR_CANNOT_CONNECT_TO_SP_SERVICE);
}
else {
alert(textStatus);
}
}
}
catch (err) {
alert('Status: ' + textStatus + '\nDesc: ' + errorThrown + '\nData: ' + xhr.responseText);
}
};
function showError(errorInfo) {
switch (errorInfo.code) {
case 1: alert("ErrorCode : " + errorInfo.code + "\nDescription : " + errorInfo.description); break;
default: alert(errorInfo.description);
}
};
function handleException(err) {
alert(RES.STR_MESSAGE_ERROR + err);
};
function getUrl(worksharelativeurl) {
return SP.Utilities.Utility.getLayoutsPageUrl("WorkshareCloud/" + worksharelativeurl);
};
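// Resumes the operation that was requested before a login redirect: selectedFunctID 0 opens the Workshare folder picker for sending, otherwise a sync from Workshare is started.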
function RunSelectedMethod(runParam) {
$.cookie("last_function", null, { path: '/' });
if (runParam) {
if (runParam.selectedFunctID == 0) {
showSelectFolderDialog(runParam.items, 0, Function.createDelegate({ items: runParam.items, listGuid: runParam.listGUID }, selectFolderCallback));
}
else {
SyncFromWorkshare(runParam.items, runParam.listGUID);
}
}
}
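// Clears cached Workshare auth cookies, stores the requested operation in the "last_function" cookie and redirects to the Workshare login page.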
function EnsureUserLogin(runParam) {
var fst = $.cookie("tmp_auth_token");
var sid = $.cookie(RES.COOKIE_SESSION_ID);
var crd = $.cookie(RES.COOKIE_USER_CREDENTIALS);
$.cookie("tmp_auth_token", null, { path: '/' });
$.cookie(RES.COOKIE_SESSION_ID, null, { path: '/' });
$.cookie(RES.COOKIE_USER_CREDENTIALS, null, { path: '/' });
//if ((fst != null) && (fst.length > 0) || (sid != null) && (crd != null)) {
// return true;
//}
//else {
var funct = JSON.stringify(runParam);
$.cookie("last_function", funct, { path: '/' });
redirectToLogin();
//}
};
var QueryString = function () {
// This function is anonymous, is executed immediately and
// the return value is assigned to QueryString!
var query_string = {};
var query = window.location.search.substring(1);
var vars = query.split("&");
for (var i = 0; i < vars.length; i++) {
var pair = vars[i].split("=");
query_string[pair[0]] = pair[1];
}
return query_string;
}();
function checkSelectedMethod() {
var param = $.cookie("last_function");
if (param != null && $.cookie(RES.COOKIE_AUTH_TOKEN) != null) {
var last_function_param = $.parseJSON(param);
RunSelectedMethod(last_function_param);
}
};
function eraseCookie(name) {
var date = new Date();
//expire the cookie by setting its expiry date in the past so the browser removes it
date.setTime(date.getTime() - (24 * 60 * 60 * 1000));
var expires = "; expires=" + date.toGMTString();
document.cookie = escape(name) + "=" + escape("") + expires + "; path=/";
};
<file_sep>/WSComponents/src/WSCloudService.Tests/PlatformServiceTests.cs
using System;
using System.Net;
using NUnit.Framework;
using WorksharePlatform;
namespace WorkshareCloud.ServiceProxy.Tests
{
[TestFixture]
public class PlatformServiceTests
{
[TestFixtureSetUp]
public void Initialize()
{
ServicePointManager.DefaultConnectionLimit = int.MaxValue;
}
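// Creates a folder as USER1 with the given add-version permission shared with USER2, uploads a test file as USER2, then deletes the folder.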
public void UploadFileToFolderWithDifferentPermissions(bool canAddnewVersion=true)
{
using (var user1 = new UserContext(TestUtils.USER1, TestUtils.USER1_PASS))
{
var newFolder = WS.CreateFolder(user1, new Permisions()
{
CanAddVersions = canAddnewVersion
}, new[] { TestUtils.USER2 });
try
{
using (var user2 = new UserContext(TestUtils.USER2, TestUtils.USER2_PASS))
{
var file = WS.GetTestFile();
file.FolderId = newFolder.Id;
PlatformService.UploadFile(user2, file);
Assert.IsNotNull(file.CurrentVersion);
Assert.IsTrue(file.CurrentVersion.FileId > 0);
}
}
finally
{
PlatformService.DeleteFolder(user1, newFolder.Id);
}
}
}
public void UploadFileTest(string filename)
{
using (var user1 = new UserContext(TestUtils.USER1, TestUtils.USER1_PASS))
{
var file = WS.GetTestFile();
try
{
file.FolderId = 0;
file.Name = filename;
PlatformService.UploadFile(user1, file);
Assert.IsNotNull(file.CurrentVersion);
Assert.IsTrue(file.CurrentVersion.FileId > 0);
}
finally
{
if (file.CurrentVersion != null)
{
//TODO - deletefile
}
}
}
}
[Test]
public void UploadFileToFolderWithDisabledAddNewVersion_Bug11217()
{
UploadFileToFolderWithDifferentPermissions(canAddnewVersion:false);
}
[Test]
public void UploadFileToFolderWithFullPermissions()
{
UploadFileToFolderWithDifferentPermissions(canAddnewVersion: true);
}
// [Test]
public void UploadFileWithLongName()//bug 11223
{
UploadFileTest(new String('a', 254) + ".docx");
}
[Test]
public void UploadNormalFile()
{
UploadFileTest("testdoc1.docx");
}
[Test]
public void TestDialogSetting()
{
PlatformService.Host = "dev.workshare.com";
var res = PlatformService.GetDialogSettings(new UserDetails());
Assert.IsTrue(res.dialog_select_folder_width > 0);
Assert.IsTrue(res.dialog_select_folder_height > 0);
Assert.IsTrue(res.dialog_login_width > 0);
Assert.IsTrue(res.dialog_login_height > 0);
Assert.IsTrue(res.dialog_create_folder_width > 0);
Assert.IsTrue(res.dialog_create_folder_height > 0);
}
}
}
<file_sep>/iManageIntegration/Src/Workshate.HookEvents/HookNewImportCmd.cs
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using Com.Interwoven.Worksite.iManExt;
using Com.Interwoven.Worksite.iManExt2;
using Workshare.Components.Interfaces;
namespace Workshare.HookEvents
{
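/// <summary>
/// Wraps the standard iManage NewImportCmd and raises Workshare import processing after the command completes,
/// including the silent multi-file drag-and-drop case where the profile dialog is shown only for the first file.
/// </summary>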
[ClassInterface(ClassInterfaceType.None)]
[Guid("315C27E3-EEE9-46AE-B37B-37B0B24B19B2")]
[ComVisible(true)]
public class HookNewImportCmd : ICommand, Com.Interwoven.Worksite.iManExt2._ICommandEvents_Event
{
NewImportCmd cmd;
bool isCancel = true;
public HookNewImportCmd()
{
cmd = new NewImportCmd();
cmd.OnCancel += cmd_OnCancel;
cmd.OnInitDialog += cmd_OnInitDialog;
cmd.PostOnOK += cmd_PostOnOK;
cmd.PreOnOK += cmd_PreOnOK;
}
public int Accelerator
{
get
{
return cmd.Accelerator;
}
set
{
cmd.Accelerator = value;
}
}
public object Bitmap
{
get
{
return cmd.Bitmap;
}
set
{
cmd.Bitmap = value;
}
}
public ContextItems Context
{
get
{
return cmd.Context;
}
}
private void ProcessImportEvent()
{
var docs = ProcessIManageEvents.GetImportedDocuments(Context);
if (docs.Count != 0)
{
if (docs.Count == 1)
{
object doc = docs[0];
ProcessIManageEvents.LastImportedDocNumber = ((Com.Interwoven.WorkSite.iManage.IManDocument)doc).Number;
}
//Context.OfType<object>().ToList().ForEach(p => Trace.TraceInformation((p.GetType().InvokeMember("Name", System.Reflection.BindingFlags.GetProperty, null, p, new object[0]) ?? "-").ToString()));
ProcessIManageEvents.ProcessEvent(docs, EventOperation.AddNewDoc, "HookNewImportCmd");
}
else
{
Workshare.Components.WSLogger.Logger.Write("ImportedDocuments list is empty. No document to add", Components.WSLogger.Severity.Information);
}
}
public void Execute()
{
cmd.Execute();
if (!isCancel)
{
try
{
ProcessImportEvent();
}
catch (Exception ex)
{
Workshare.Components.WSLogger.Logger.WriteError(ex);
}
}
else
{
try
{
//When the user drags and drops several files, DeskSite/FileSite with the new updates shows NewProfileDlg only once (for the first file)
object duplicateProfileFromDoc = ProcessIManageEvents.GetCmdContextItemValueByName(cmd.Context, "IManExt.Import.DuplicateProfileFromDoc");
object noCmdUI = ProcessIManageEvents.GetCmdContextItemValueByName(cmd.Context, "IManExt.OpenCmd.NoCmdUI");
if (duplicateProfileFromDoc != null && noCmdUI != null && Convert.ToBoolean(noCmdUI) == true)
{
ProcessImportEvent();
}
}
catch (Exception ex)
{
Workshare.Components.WSLogger.Logger.WriteError(ex);
}
}
}
public string HelpFile
{
get
{
return cmd.HelpFile;
}
set
{
cmd.HelpFile = value;
}
}
public int HelpID
{
get
{
return cmd.HelpID;
}
set
{
cmd.HelpID = value;
}
}
public string HelpText
{
get
{
return cmd.HelpText;
}
set
{
cmd.HelpText = value;
}
}
ContextItems ctx;
public void Initialize(ContextItems Context)
{
cmd.Initialize(Context);
}
public string MenuText
{
get
{
return cmd.MenuText;
}
set
{
cmd.MenuText = value;
}
}
public string Name
{
get
{
return cmd.Name;
}
set
{
cmd.Name = value;
}
}
public int Options
{
get
{
return cmd.Options;
}
set
{
cmd.Options = value;
}
}
public int Status
{
get
{
return cmd.Status;
}
set
{
cmd.Status = value;
}
}
public Commands SubCommands
{
get
{
return cmd.SubCommands;
}
set
{
cmd.SubCommands = value;
}
}
public string Title
{
get
{
return cmd.Title;
}
set
{
cmd.Title = value;
}
}
public CommandType Type
{
get
{
return cmd.Type;
}
set
{
cmd.Type = value;
}
}
public void Update()
{
cmd.Update();
}
void cmd_PreOnOK(object pMyInterface)
{
isCancel = false;
if (PreOnOK != null)
{
PreOnOK(pMyInterface);
}
}
void cmd_PostOnOK(object pMyInterface)
{
if (PostOnOK != null)
{
PostOnOK(pMyInterface);
}
}
void cmd_OnInitDialog(object pMyInterface)
{
if (OnInitDialog != null)
{
OnInitDialog(pMyInterface);
}
}
void cmd_OnCancel(object pMyInterface)
{
isCancel = true;
if (OnCancel != null)
{
OnCancel(pMyInterface);
}
}
public event Com.Interwoven.Worksite.iManExt2._ICommandEvents_OnCancelEventHandler OnCancel;
public event Com.Interwoven.Worksite.iManExt2._ICommandEvents_OnInitDialogEventHandler OnInitDialog;
public event Com.Interwoven.Worksite.iManExt2._ICommandEvents_PostOnOKEventHandler PostOnOK;
public event Com.Interwoven.Worksite.iManExt2._ICommandEvents_PreOnOKEventHandler PreOnOK;
}
}
<file_sep>/WSComponents/src/WSIntegration/WSIntegration.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Runtime.InteropServices;
using Workshare.Components.Helpers;
using Workshare.Components.WSLogger;
using Workshare.Integration.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Extensions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.WsProxies;
using WorksharePlatform;
using RES = Workshare.Integration.Properties.Resources;
namespace Workshare.Integration
{
public enum Operations
{
Send,
Sync,
Deleted
};
public class WSIntegrationBase : IWSIntegration
{
private readonly IAuthProvider _authProvider;
private readonly SyncInfoService _syncService;
protected WSIntegrationBase(IAuthProvider authProvider, SyncInfoService syncService)
{
ServicePointManager.DefaultConnectionLimit = int.MaxValue;//TODO remove this line. fix problem with multiple files uploading
this._syncService = syncService;
_authProvider = authProvider;
}
public void SendItems(IEnumerable<IDMSItem> localItems, FolderDetails folder, OperationContext args)
{
SendItems(localItems.ToList(), folder.Id, args);
}
private void SendItems(List<IDMSItem> localItems, int folderId, OperationContext context)
{
using (var errorlist = new ItemsErrorsCollection(context))
{
try
{
var user = _authProvider.CheckIfAuth();
SafeFuncCall(errorlist, null,
new ExceptionListSignal {IsAll = true, IsList = true, FolderError = true}, Operations.Send,
() =>
{
if (CheckFolderExistsWhenSendOrThrow(user, folderId) && localItems.Any())
{
foreach (IDMSItem item in localItems)
{
if (item is IDMSFolder)
{
SafeFuncCall(errorlist, item,
new ExceptionListSignal() {IsAll = true, IsList = true, FolderError = true},
Operations.Send, () =>
{
SendFolder((IDMSFolder) item, folderId, user, context, null);
});
}
else if (item is IDMSFile)
{
SafeFuncCall(errorlist, item,
new ExceptionListSignal() {IsAll = true, IsList = true, FileError = true},
Operations.Send, () =>
{
SendFile((IDMSFile) item, folderId, user, context);
});
}
}
errorlist.Check();
}
});
}
catch (CloudFolderNotFound ex)
{
SafeFuncCall(errorlist, null, new ExceptionListSignal() {IsAll = true, IsList = true},
Operations.Send, () =>
{
if (localItems.Any())
{
var user = _authProvider.CheckIfAuth();
foreach (IDMSItem item in localItems)
{
SafeFuncCall(errorlist, item,
new ExceptionListSignal() {IsAll = true, IsList = true}, Operations.Send, () =>
{
if (item is IDMSFolder)
{
//TODO remove SyncFolder((IDMSFolder)item, user, new SyncOptions(ConflictVersionOptions.None),context, true);
}
else if (item is IDMSFile)
{
_syncService.BreakLink((IDMSFile) item);
//((IDMSFile)item).UpdateSyncInfo(null, true, null,null);
}
});
}
}
});
errorlist.Clear();
errorlist.Add((IDMSItem) null, ex);
errorlist.Check();
}
}
}
protected bool CheckFolderExistsWhenSendOrThrow(UserDetails user, int itemID, int parentFolderID = -1)
{
try
{
if (!PlatformService.IsFolderExists(user, itemID, parentFolderID))
{
throw new CloudFolderNotFound(RES.STR_UNABLE_SEND);
}
return true;
}
catch (WebException ex)
{
if (ex.IsStatusCode((HttpStatusCode) 403))
{
throw new CloudFolderAccessDenied(RES.STR_UNABLE_SEND, RES.STR_UNABLESENDITEM_TEXT, ex);
}
else if (ex.Status == WebExceptionStatus.ConnectFailure)
{
throw new OfflineException(ex);
}
else
{
throw;
}
}
}
private int UploadFileToWorkshareCloud(IDMSFile file, int folderId, UserDetails user, OperationContext context)
{
var fileDetailes = new FileDetails
{
Name = WsUtils.RemoveInvalidSymbolsFromFileName(file.Name),
FriendlyName =
WsUtils.RemoveInvalidSymbolsFromFileName(((string.IsNullOrEmpty(file.DisplayName))
? FileUtils.GetFileNameWithoutExtension(file.Name)
: file.DisplayName)),
FilePath = file.GetFilePath(),
FolderId = folderId
};
try
{
CheckFolderExistsWhenSendOrThrow(user, folderId);
PlatformService.UploadFile3(user, fileDetailes);
CheckFolderExistsWhenSendOrThrow(user, folderId);
_syncService.UpdateSyncInfo(file.GetVersions().OrderByDescending(a => a.Number).FirstOrDefault(),
new ActionContext(ActionType.Upload)
{
WsVersion = new WsVersion(fileDetailes.CurrentVersion, user)
});
_syncService.UpdateSyncInfo(file, new ActionContext(ActionType.Upload)
{
WsFile = new WsFile(fileDetailes, user)
});
return fileDetailes.CurrentVersion.FileId;
}
catch (WebException ex)
{
if (ex.IsStatusCode((HttpStatusCode) 422))
{
throw new CannotProcessItemException();
}
else if (ex.IsStatusCode((HttpStatusCode) 403))
{
throw new CloudFolderAccessDenied(RES.STR_UNABLE_SEND, RES.STR_UNABLESENDFILE_TEXT, ex);
}
else if (ex.Status == WebExceptionStatus.ConnectFailure || ex.Status == WebExceptionStatus.NameResolutionFailure || ex.Status == WebExceptionStatus.ConnectionClosed)
{
throw new OfflineException(ex);
}
else
{
Logger.Write(ex, Severity.Error);
throw;
}
}
}
private int SendFile(IDMSFile localfile, int folderId, UserDetails user, OperationContext context)
{
localfile.CheckPermissionkOrThrow(Enums.Permissions.EditItem);
localfile.OnBeforeSending(context);
if (context.CheckOutOption == CheckOutOptions.CheckOut)
{
localfile.CheckNotCheckeOutToOtherUserOrThrow(Operations.Send);
}
try
{
var fileId = UploadFileToWorkshareCloud(localfile, folderId, user, context);
if (fileId > 0)
{
localfile.AddHistory(string.Empty,
string.Format(RES.STR_HISTORY_ITEMSEND, DateTime.Now.ToString(Activity.TimeFormat)),
Operations.Send);
}
localfile.OnAfterSending(context);
return fileId;
}
catch (Exception ex)
{
if (!(ex is OfflineException))
localfile.OnSendError(context, ex);
throw;
}
}
private int SendFolder(IDMSFolder folder, int parentFolderId, UserDetails user, OperationContext context,
List<int> parentChildren)
{
int newFolderID = -1;
using (var errorlist = new ItemsErrorsCollection(context))
{
var sentItems = new List<int>();
SafeFuncCall(errorlist, folder, new ExceptionListSignal {IsWeb = true, FolderError = true},
Operations.Send, () =>
{
folder.OnBeforeSending(context);
FolderDetails newFolder;
try
{
newFolder = PlatformService.CreateFolder(user, folder.Name, folder.Name, parentFolderId);
}
catch (WebException ex)
{
if (ex.IsStatusCode((HttpStatusCode) 403))
{
throw new CloudFolderAccessDenied(RES.STR_UNABLE_SEND, RES.STR_UNANBESENDFOLDER_TEXT, ex);
}
throw;
}
_syncService.UpdateSyncInfo(folder, new ActionContext(ActionType.Upload)
{
WsFolder = new WsFolder(newFolder, user)
});
//folder.UpdateSyncInfo(newFolder, true);
folder.AddHistory(string.Empty,
string.Format(RES.STR_HISTORY_ITEMSEND, DateTime.Now.ToString(Activity.TimeFormat)),
Operations.Send);
if (parentChildren != null)
{
parentChildren.Add(newFolder.Id);
}
foreach (IDMSFolder item in folder.SubFolders)
{
SafeFuncCall(errorlist, item,
new ExceptionListSignal {IsAll = true, IsList = true, FolderError = true},
Operations.Send, () =>
{
var si = _syncService.GetSyncInfo(item);
if (si == null)
{
SendFolder(item, newFolder.Id, user, context, sentItems);
}
else
{
sentItems.Add(si.ItemId);
throw new FolderAlreadySentException();
}
});
}
//Task 22006. GroupBy used to select only latest version when worksite shows all versions in Document view.
foreach (IDMSFile file in folder.Files.GroupBy(x => x.DMSId).Select(x => x.First().GetLatest()))
{
SafeFuncCall(errorlist, file,
new ExceptionListSignal()
{
IsAll = true,
IsList = true,
IsWeb = true,
FolderError = true
}, Operations.Send, () =>
{
var si = _syncService.GetSyncInfo(file);
if (si == null)
{
sentItems.Add(SendFile(file, newFolder.Id, user, context));
}
else
{
sentItems.Add(si.ItemId);
throw new FileAlreadySentException();
}
});
}
_syncService.UpdateSendDataInfo(folder, sentItems);
newFolderID = newFolder.Id;
});
}
return newFolderID;
}
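/// <summary>
/// Translates COM, web and authorization failures into the integration's typed exceptions
/// (OfflineException, DMSItemNotFound, CloudUnAuthorized) before rethrowing.
/// </summary>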
private static void ReThrowException(Exception ex, Operations operate)
{
if (ex is COMException)
{
Logger.WriteError("COMException in ReThrowException", ex);
if (ex.IsConnectionError())
throw new OfflineException();
else
{
Logger.WriteError("ReThrowException catch COMException :", ex);
throw new DMSItemNotFound((operate == Operations.Send) ? RES.STR_UNABLESEND_CAPTION : RES.STR_UNABLESYNC_CAPTION, RES.STR_DMSFOLDER_REMOVED);
}
}
if (ex is WebException)
{
if(ex.IsUnauthorized())
{
throw new CloudUnAuthorized(ex);
}
else
{
throw new OfflineException(ex);
}
}
else if (ex is UnauthorizedAccessException)
{
throw new CloudUnAuthorized(ex); // DMSUnAuthorizedException(ex); imanage
}
Logger.WriteError("ReThrowException catch exception : ", ex);
throw ex;
}
protected class ExceptionListSignal
{
public bool IsList = false;
public bool IsWeb = false;
public bool IsAll = false;
public bool FolderError = false;
public bool FileError = false;
}
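/// <summary>
/// Runs func, normalizing low-level failures via ReThrowException, then either records the error against item
/// in errorlist or rethrows, depending on flags.
/// </summary>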
protected void SafeFuncCall(ItemsErrorsCollection errorlist, IDMSItem item, ExceptionListSignal flags,
Operations operate, Action func)
{
try
{
try
{
func();
}
catch (Exception ex)
{
ReThrowException(ex, operate);
}
}
catch (Exception ex)
{
Trace.TraceError(ForTrace(ex));
if (flags.IsList && ex is MultiItemsException)
{
if (errorlist != null)
{
if (!errorlist.Equals(((MultiItemsException) ex).errorList))
{
errorlist.AddRange(((MultiItemsException) ex).errorList);
}
}
return;
}
if (flags.FolderError && ex is CloudFolderNotFound)
{
throw;
}
if (ex is DMSUnAuthorizedException)
{
throw;
}
if (ex is CloudUnAuthorized)
{
throw;
}
if (flags.IsWeb && ex is WebException)
{
if (flags.FileError && ((WebException) ex).IsStatusCode((HttpStatusCode) 403))
{
throw new CloudFolderAccessDenied(RES.STR_UNABLE_SYNC, RES.STR_UNABLESYNCFILE_TEXT, ex);
}
if (errorlist != null && item != null)
{
errorlist.Add(item, ex);
return;
}
else
{
throw;
}
}
if (flags.IsAll)
{
if (errorlist != null && item != null)
{
errorlist.Add(item, ex);
}
else
{
throw;
}
}
else
{
throw;
}
}
}
private string ForTrace(Exception ex)
{
return string.Format("{0}", ex);
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/Common/OwnWindow.cs
using System;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
using System.Windows;
using System.Windows.Input;
using System.Windows.Interop;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Threading;
using Workshare.Components.Interfaces;
using Workshare.Components.WSLogger;
namespace Workshare.Components.Views.Common
{
public class OwnWindow:Window
{
public OwnWindow()
{
DataContextChanged += OwnWindow_DataContextChanged;
this.Loaded += OwnWindow_Loaded;
this.Closing += OwnWindow_Closing;
var ibd = new IconBitmapDecoder(new Uri(@"pack://application:,,/WSComponents;component/Resources/Workshare_48x48.ico", UriKind.RelativeOrAbsolute),
BitmapCreateOptions.None, BitmapCacheOption.Default);
this.Icon = ibd.Frames[0];
}
[DllImport("user32.dll")]
static extern bool EnableWindow(IntPtr hWnd, bool bEnable);
bool _ownerDisabled;
void OwnWindow_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
if (_ownerDisabled)
{
EnableWindow(new WindowInteropHelper(this).Owner, true);
}
}
void OwnWindow_Loaded(object sender, RoutedEventArgs e)
{
if (DisableOwner)
{
_ownerDisabled = !EnableWindow(new WindowInteropHelper(this).Owner, false);
}
}
void OwnWindow_DataContextChanged(object sender, DependencyPropertyChangedEventArgs e)
{
var old = e.OldValue;
if (old is ICloseRequester)
{
((ICloseRequester)old).CloseRequested -= OwnWindow_CloseRequested;
}
var @new = e.NewValue;
if (@new is ICloseRequester)
{
((ICloseRequester)@new).CloseRequested += OwnWindow_CloseRequested;
}
}
public bool? OwnDialogResult { set; get; }
void OwnWindow_CloseRequested(bool? obj)
{
try
{
this.OwnDialogResult = obj;
if (!IsLoaded)
{
return;
}
if (ComponentDispatcher.IsThreadModal)
{
this.DialogResult = obj;
}
this.Close();
}
catch (Exception ex)
{
Logger.WriteError(ex);
}
}
protected bool KeepPositionOnResizing;
protected override void OnRenderSizeChanged(SizeChangedInfo sizeInfo)
{
base.OnRenderSizeChanged(sizeInfo);
if (KeepPositionOnResizing)
{
if (sizeInfo.HeightChanged)
{
var newTop=Top-(sizeInfo.NewSize.Height - sizeInfo.PreviousSize.Height)/2;
Top = newTop > 0 ? newTop : 0;
}
}
}
protected void OnTitleMouseDown(object sender, MouseButtonEventArgs e)
{
if (e.ChangedButton == MouseButton.Left)
{
this.DragMove();
}
}
public bool DisableOwner { get; set; }
}
public class OwnViewModel : OwnViewModel<OwnViewModel>
{
}
public class OwnViewModel<T> : BasePropertyChanged<T>, ICloseRequester
{
public event Action<bool?> CloseRequested;
public Dispatcher Dispatcher { private set; get; }
public OwnViewModel()
{
this.Dispatcher = Dispatcher.CurrentDispatcher;
}
internal void RaiseClose(bool? result)
{
if (CloseRequested != null)
{
CloseRequested(result);
}
}
ImageSource _footerImage;
public ImageSource FooterImage
{
get
{
if (_footerImage == null)
{
_footerImage = Utils.Convert(Properties.Resources.bottombar, ImageFormat.Png);
}
return _footerImage;
}
}
ImageSource _titleImage;
public ImageSource TitleImage
{
get
{
if (_titleImage == null)
{
_titleImage = Utils.Convert(Properties.Resources.toptitlebar, ImageFormat.Png);
}
return _titleImage;
}
}
ImageSource _closeImage;
public ImageSource CloseImage
{
get
{
if (_closeImage == null)
{
_closeImage = Utils.Convert(Properties.Resources.closebutton, ImageFormat.Png);
}
return _closeImage;
}
}
ImageSource _brandImage;
public ImageSource BrandImage
{
get
{
if (_brandImage == null)
{
_brandImage = Utils.ToImageSource(Properties.Resources.Workshare_48x48);
}
return _brandImage;
}
}
ImageSource _closeImageHover;
public ImageSource CloseImageHover
{
get
{
if (_closeImageHover == null)
{
_closeImageHover = Utils.Convert(Properties.Resources.closebutton_mouseover, ImageFormat.Png);
}
return _closeImageHover;
}
}
}
}
<file_sep>/WSComponents/src/WSCloudService/WSServer.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace WorksharePlatform
{
public class WSServer
{
public WSServer()
{ }
public string ServerUrl { get; set; }
public string QuesryString { get; set; }
public string ctype { get; set; }
public string token { get; set; }
}
}
<file_sep>/WSComponents/src/Framework4Adapter/IAsposePdfAdapter.cs
using System.Collections.Generic;
using System.Runtime.InteropServices;
using WorksharePlatform;
namespace Framework4Adapter
{
[ComVisible(true)]
[Guid("CB5612CE-5D3C-4DEA-8E15-261F09F82649")]
public interface IAsposePdfAdapter
{
byte[] AddCommentInData(byte[] data, List<FileComment> comments);
}
}
<file_sep>/WSComponents/src/WSCloudService/DialogSettings.cs
namespace WorksharePlatform
{
public class DialogSettings
{
public int dialog_login_width { get; set; }
public int dialog_login_height { get; set; }
public int dialog_select_folder_width { get; set; }
public int dialog_select_folder_height { get; set; }
public int dialog_create_folder_width { get; set; }
public int dialog_create_folder_height { get; set; }
}
}
<file_sep>/SharePoint/src/WorkshareCloud.Common/Receivers/WorkshareReceiver.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.SharePoint;
using WorksharePlatform;
namespace WorkshareCloud.Common.Receivers
{
public class WorkshareReceiver : SPItemEventReceiver
{
public override void ItemAdding(SPItemEventProperties properties)
{
EventFiringEnabled = false;
try
{
properties.AfterProperties[CloudPathFieldValue.CloudField] = string.Empty;
}
catch (Exception ex)
{
Logging.Logger.WriteTrace(ex, Microsoft.SharePoint.Administration.TraceSeverity.Unexpected, Logging.Category.Default);
}
finally
{
EventFiringEnabled = true;
}
//base.ItemAdding(properties);
}
public override void ItemUncheckedOut(SPItemEventProperties properties)
{
EventFiringEnabled = false;
try
{
WorkshareIntegration.Instance.ClearFolderSyncDataIfNeed(properties.ListItem.File.ParentFolder.Item);
}
catch (Exception ex)
{
Logging.Logger.WriteTrace(ex, Microsoft.SharePoint.Administration.TraceSeverity.Unexpected, Logging.Category.Default);
}
finally
{
EventFiringEnabled = true;
}
}
public override void ItemCheckedIn(SPItemEventProperties properties)
{
EventFiringEnabled = false;
try
{
WorkshareIntegration.Instance.ClearFolderSyncDataIfNeed(properties.ListItem.File.ParentFolder.Item);
}
catch (Exception ex)
{
Logging.Logger.WriteTrace(ex, Microsoft.SharePoint.Administration.TraceSeverity.Unexpected, Logging.Category.Default);
}
finally
{
EventFiringEnabled = true;
}
}
public override void ItemCheckingIn(SPItemEventProperties properties)
{
EventFiringEnabled = false;
try
{
properties.AfterProperties.ChangedProperties.ClearCloudValue();
}
catch (Exception ex)
{
Logging.Logger.WriteTrace(ex, Microsoft.SharePoint.Administration.TraceSeverity.Unexpected, Logging.Category.Default);
}
finally
{
EventFiringEnabled = true;
}
}
}
}
<file_sep>/WSComponents/src/WSCloudService/FileDetails.cs
using System;
using System.Collections.Generic;
using System.IO;
using Workshare.Components.Helpers;
namespace WorksharePlatform
{
public class FileDetails
{
public const long ChunkSize = 5242880; //5MB
public int Id { get; set; }
public string Name { get; set; }
public string FriendlyName { get; set; }
public string FriendlyNameWithExtension
{
get
{
return FileUtils.ChangeExtension(FriendlyName, FileUtils.GetExtension(Name));
}
}
public int FolderId { get; set; }
public static long GetFileSizeByFilePath(string filePath)
{
FileInfo fi = new FileInfo(filePath);
return fi.Length;
}
public string FilePath { get; set; }
public string RemoteUrl { get; set; }
public string DownloadPassword { get; set; }
public bool IsChunkingRequired
{
get
{
if (CurrentVersion != null && CurrentVersion.Size > 0)
{
//this case is used during downloading the file
return (CurrentVersion.Size > FileDetails.ChunkSize);
}
else
{
//this case is used during uploading the file
return (GetFileSizeByFilePath(FilePath) > FileDetails.ChunkSize);
}
}
}
public bool IsChunkingRequiredForUpload
{
get
{
return (GetFileSizeByFilePath(FilePath) > FileDetails.ChunkSize);
}
}
public bool IsDeleted { get; set; }
public FileVersionDetails CurrentVersion { get; set; }
public MultiPartDetails CurrentPart { get; set; }
public UserDetails Creator { get; set; }
public UserDetails Updater { get; set; }
public List<FileVersionDetails> Versions { get; set; }
public override string ToString()
{
return string.Format("WsFile ID={0}, Name={1}", Id, Name);
}
public DateTime UpdateDate { get; set; }
public DateTime CreatedAt { get; set; }
public int VersionNumber { get; set; }
public void DeleteFile()
{
if (!string.IsNullOrEmpty(FilePath))
File.Delete(FilePath);
}
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/Contrete/ManItemID.cs
using System;
using System.Collections.Generic;
using System.Text;
using Com.Interwoven.WorkSite.iManage;
using Workshare.Integration.Interfaces;
namespace Workshare.IManage.Contrete
{
class ManItemID : IDMSItemID
{
string obj_ID;
public ManItemID(IManObject doc)
{
if (doc.HasObjectID)
{
obj_ID = doc.ObjectID;
}
else
{
throw new ArgumentException("doc");
}
}
public bool EqualTo(IDMSItemID obj)
{
if (obj is ManItemID)
{
return ((ManItemID)obj).obj_ID == this.obj_ID;
}
return false;
}
public override string ToString()
{
return obj_ID;
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/Common/RelayCommand.cs
using System;
using System.Windows.Input;
namespace Workshare.Components.Views.Common
{
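/// <summary>
/// Minimal ICommand implementation that delegates Execute/CanExecute to the supplied callbacks.
/// Example (hypothetical usage): new RelayCommand(p => CanSave(), p => Save()).
/// </summary>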
public class RelayCommand:ICommand
{
Action<object> _execute;
Predicate<object> _canExecute;
public RelayCommand(Action<object> execute)
{
_execute = execute;
}
public RelayCommand(Predicate<object> canExecute, Action<object> execute)
{
_execute = execute;
_canExecute = canExecute;
}
public bool CanExecute(object parameter)
{
if (_canExecute != null)
{
return _canExecute(parameter);
}
else
{
return true;
}
}
public event EventHandler CanExecuteChanged;
public void Execute(object parameter)
{
if (_execute != null)
{
_execute(parameter);
}
}
public void RaiseCanExecutechanged()
{
if (CanExecuteChanged != null)
{
CanExecuteChanged(this, EventArgs.Empty);
}
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Common/SyncItemInformation.cs
using Workshare.Integration.Enums;
using Workshare.Integration.Interfaces;
using WorksharePlatform;
namespace Workshare.Integration.Common
{
public class SyncItemInformation
{
public IDMSItem item;
public FileDetails wsItem;
public ConflictOptions syncOption;
public ConflictVersionOptions verOptions;
public bool AddOnlyNewFiles; //if syncing a folder, only new files should be added
public SyncItemInformation(IDMSItem _item, ConflictOptions _option, ConflictVersionOptions _verOptions)
{
item = _item;
syncOption = _option;
verOptions = _verOptions;
wsItem = null;
}
public SyncItemInformation(FileDetails _wsitem, IDMSItem _item, ConflictOptions _option, ConflictVersionOptions _verOptions)
{
wsItem = _wsitem;
syncOption = _option;
verOptions = _verOptions;
item = _item;
}
public SyncItemInformation CopyFor(IDMSItem item)
{
return new SyncItemInformation(item, syncOption, verOptions);
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Maps/VersionMap.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Workshare.Integration.Interfaces;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration.Processor.Maps
{
public class VersionMap : ItemMap
{
public IDmsVersion LocalVersion { set; get; }
public WsVersion WsVersion { set; get; }
public override void Apply(ItemMapVisitor visitor)
{
visitor.Visit(this);
}
public override string GetId()
{
return GetMapId(this);
}
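/// <summary>
/// Builds a stable key of the form "localversion:{id};wsversion:{id}", substituting -1 for whichever side is missing.
/// </summary>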
public static string GetMapId(VersionMap map)
{
if (map.LocalVersion == null)
{
return map.WsVersion == null ? string.Format("localversion:{0};wsversion:{1}", -1, -1) : string.Format("localversion:{0};wsversion:{1}", -1, map.WsVersion.Id);
}
if (map.WsVersion == null)
{
return string.Format("localversion:{0};wsversion:{1}", map.LocalVersion.Id, -1);
}
return string.Format("localversion:{0};wsversion:{1}", map.LocalVersion.Id, map.WsVersion.Id);
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Common/OperationContext.cs
using Workshare.Integration.Enums;
namespace Workshare.Integration.Common
{
public class OperationContext
{
public CheckOutOptions CheckOutOption { get; private set; }
private OperationContext()
{
}
public OperationContext(CheckOutOptions option)
: this()
{
CheckOutOption = option;
}
}
}
<file_sep>/WSComponents/src/WSComponents/Helpers/ProcessUtils.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using Workshare.Components.WSLogger;
namespace Workshare.Components.Helpers
{
public static class ProcessUtils
{
public static void StartProcess(string str)
{
if (!string.IsNullOrEmpty(str))
{
try
{
Process.Start(str);
}
catch (Exception ex)
{
Logger.Write(ex, Severity.Error);
Debug.Assert(false, "Cannot navigate");
}
}
}
}
}
<file_sep>/SharePoint/src/WorksharePointCloud/CloudPathField.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.SharePoint;
namespace WorkshareCloud
{
public class CloudPathField : SPFieldMultiLineText
{
public CloudPathField(SPFieldCollection fields, string fName)
: base(fields, fName) { }
public CloudPathField(SPFieldCollection fields,
string tName, string dName)
: base(fields, tName, dName) { }
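/// <summary>
/// Renders the field as a hidden input whose value indicates whether the item has already been sent to Workshare.
/// </summary>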
public override string GetFieldValueAsHtml(object value)
{
var val = value as string;
var onWorkshare = !string.IsNullOrEmpty(val);
return string.Format("<input type=\"hidden\" value=\"{0}\" name=\"onWorkshare\" itemId=\"{1}\"/>", onWorkshare ? "true" : "false", "");
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Changes/Activities/FileMapActivity.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Workshare.Components.Helpers;
using System.Security;
using Microsoft.Win32;
using Workshare.Components.WSLogger;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Strategies;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration.Processor.Changes.Activities
{
public abstract class FileMapActivity : ItemMapActivity
{
public static void Update(FileMapActivity activity, FileMap map)
{
}
protected bool AnyAvailableActions()
{
return Actions != null && Actions.Any() && Actions.All(p => p.Actions.Any());
}
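// Re-reads identifiers, display name, online URL and timestamps from the latest map so the activity reflects the current DMS/Workshare state.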
public void Refresh(FileMap map, DmsWorkerBase dmsWorker) //TODO review using of DMS Worker
{
base.Refresh(map);
WsFile = map.WsFile;
DmsFile = map.LocalFile;
var wsFilename = (WsFile != null) ? WsFile.FriendlyName + FileUtils.GetExtension(WsFile.Name) : null;
var localFilename = (DmsFile != null) ? DmsFile.Name : null;
Filename = localFilename ?? wsFilename;
ViewOnlineUrl = GetUrlOnWorkshare(map);
WsId = (WsFile == null) ? "-1" : WsFile.Id.ToString();
DmsId = (DmsFile == null) ? "-1" : DmsFile.ID.ToString();
DmsFileId = (DmsFile == null) ? "-1" : DmsFile.DMSId.ToString();
WsLastUpdateDate = (WsFile != null) ? WsFile.UpdateDate : DateTime.MinValue;
LocalEditTime = (DmsFile != null) ? dmsWorker.GetEditTime(DmsFile) : DateTime.MinValue;
}
public string Filename { set; get; }
public string ViewOnlineUrl { set; get; }
public string WsId { set; get; }
public string DmsId { set; get; }
/// <summary>
/// Dms file ID without the version information
/// </summary>
public string DmsFileId { get; set; }
public WsFile WsFile { set; get; }
public IDMSFile DmsFile { set; get; }
public bool IsLinked { set; get; } //TODO: Think about better solution here.
public string CheckedOutTo { get; set; }
public string CheckOutMachine { get; set; }
public DateTime WsLastUpdateDate { get; set; }
public DateTime LocalEditTime { get; set; }
/// <summary>
/// Determine if default browser is Internet Explorer 8
/// </summary>
/// <param name="uriScheme">Uri scheme (http, https etc.)</param>
/// <returns>true - default browser is Internet Explorer 8, otherwise - false</returns>
static bool DefaultBrowserIsIE8(string uriScheme)
{
bool ret = false;
try
{
using (RegistryKey defaultBrowserRegKey = Registry.CurrentUser.OpenSubKey(String.Format(@"Software\Microsoft\Windows\Shell\Associations\UrlAssociations\{0}\UserChoice", uriScheme)))
{
bool defaultBrowserIsIE = false;
if (defaultBrowserRegKey != null)
{
if (Convert.ToString(defaultBrowserRegKey.GetValue("Progid")).ToUpper().Equals(String.Format("IE.{0}", uriScheme.ToUpper()), StringComparison.Ordinal))
defaultBrowserIsIE = true;
}
else
{
using (RegistryKey defaultBrowserPathRegKey = Registry.ClassesRoot.OpenSubKey(String.Format(@"{0}\shell\open\command", uriScheme)))
{
if (defaultBrowserPathRegKey != null)
{
if (Convert.ToString(defaultBrowserPathRegKey.GetValue(null)).ToLower().Contains("iexplore.exe"))
defaultBrowserIsIE = true;
}
}
}
if (defaultBrowserIsIE)
{
using (RegistryKey ieRegKey = Registry.LocalMachine.OpenSubKey(@"Software\Microsoft\Internet Explorer"))
{
if (ieRegKey != null)
{
if (Convert.ToString(ieRegKey.GetValue("Version")).StartsWith("8.", StringComparison.Ordinal))
ret = true;
}
}
}
}
}
catch (Exception exc)
{
Logger.Write(exc, Severity.Warning);
}
return ret;
}
static string GetUrlOnWorkshare(FileMap filemap)
{
string wsUrl = string.Empty;
try
{
if (filemap.WsFile != null)
{
if (!string.IsNullOrEmpty(filemap.WsFile.RemoteUrl))
{
var uri = new Uri(filemap.WsFile.RemoteUrl);
//If IE is used as default browser and we use Process.Start(url) with url like http(s)://.../d/...
//it sends HTTP-request with Accept header */* and service sends HTTP-response with 401 status code
//in case of IE8 we should insert # before d/ to avoid this problem
//Note: We use this 'fix' because we don't have access to service source code
string urlPartD = (DefaultBrowserIsIE8(uri.Scheme) ? @"/#d/" : @"/d/");
wsUrl = string.Format("{0}://{1}{2}{3}", uri.Scheme, uri.Host, urlPartD, filemap.WsFile.DownloadPassword);
}
}
return wsUrl;
}
catch (Exception)
{
return string.Empty;
}
}
public List<ItemMapActivityAction> Actions { get; set; }
readonly List<FileActivityChange> _changes = new List<FileActivityChange>();
public IEnumerable<FileActivityChange> Changes { get { return _changes; } }
public bool HasChange(ChangeType changeType)
{
return Changes.Any(a => a.Type == changeType);
}
public void AddChange(FileActivityChange change)
{
change.Parent = this;
_changes.Add(change);
}
public Exception Error { get; set; }
public void ClearChanges()
{
_changes.Clear();
}
public virtual bool CanExecute(ItemMapActivityAction action)
{
return true;
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncUploadFilesDialog/VMs/TrueSyncFilesScanDialogVm.cs
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Linq;
using System.Windows.Input;
using Workshare.Components.Helpers;
using Workshare.Components.Interfaces;
using Workshare.Components.Views.Common;
using Workshare.Components.Views.TrueSyncDialog;
using Workshare.Components.Views.TrueSyncDialog.VMs;
using Workshare.Components.Views.TrueSyncUploadFilesDialog.WorkUnits;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.Processor.Strategies;
namespace Workshare.Components.Views.TrueSyncUploadFilesDialog.VMs
{
public class TrueSyncFilesScanDialogVm : OwnViewModel<TrueSyncFilesScanDialogVm>
{
public enum StateEnum
{
Default,
Loading,
Error,
NoInSyncedFolder,
NoChangesDetected,
NoAccessOnWorkshare
}
public bool ActivityToDisplay(UploadFileActivity activity)
{
return activity.Changes
.Any(a => a.Type == ChangeType.VersionChangedAfterSend
|| a.Type == ChangeType.NotSentVersion);
}
private StateEnum _state;
public StateEnum State
{
get
{
return _state;
}
set
{
if (_state != value)
{
_state = value;
this.PropertyHasChanged(a => a.State);
}
}
}
public ModuleBase Module { get; set; }
public ObservableCollection<ParentScan> ParentScans { set; get; }
#region Commands
public RelayCommand ProcessActivityCommand { get; set; }
public ICommand DiscardChangesCommand { get; set; }
public ICommand CloseCommand { set; get; }
public ICommand RescanCommand { get; set; }
public ICommand UploadAllCommand { get; set; }
public ICommand ViewOnlineCommand { get; set; }
#endregion
public string WorkspaceName { get; set; }
string _fileName;
public string FileName
{
get
{
return _fileName;
}
set
{
if (_fileName != value)
{
_fileName = value;
this.PropertyHasChanged(p => p.FileName);
}
}
}
internal IModuleView View;
private readonly DmsWorkerBase _dmsWorker;
private readonly CommandInvoker _invoker;
public IDMSFile File { set; get; }
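// Parameterless constructor that seeds a dummy errored upload activity (presumably for design-time preview of the dialog).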
public TrueSyncFilesScanDialogVm()
{
ParentScans = new ObservableCollection<ParentScan>();
var parentScan = new ParentScan
{
Activities = new ObservableCollection<FileUploadActivityVm>()
};
var upDumme = new UploadFileActivity
{
State = UploadFileActivityState.Error,
Error = new Integration.Exceptions.CheckedOutOnAnotherMachine("Us4er1"),
Filename = "dummy3.doc"
};
parentScan.Activities.Add(new FileUploadActivityVm(upDumme));
ParentScans.Add(parentScan);
}
public TrueSyncFilesScanDialogVm(ModuleBase module, CommandInvoker invoker)
{
ParentScans=new ObservableCollection<ParentScan>();
Module = module;
View = module.Resolve<IModuleView>();
_dmsWorker = module.Resolve<DmsWorkerBase>();
this._invoker = invoker;
CloseCommand = new RelayCommand(c => RaiseClose(false));
ProcessActivityCommand = new RelayCommand( p =>
{
var action = p as DocumentActionVM;
if (action != null && !action.Actions.Any())
{
ExecuteDocumentAction(new[] {action});
}
});
RescanCommand = new RelayCommand( p =>
{
var unit = new ScanWorkUnit(this, _dmsWorker);
invoker.AddToQueue(unit);
});
DiscardChangesCommand = new RelayCommand(p =>
{
var service = module.Resolve<DiscardedService>();
var item = p as IItemMapActivityVm;
if (item != null && ((UploadFileActivityVm)p)._data.State != UploadFileActivityState.Uploading)
{
service.Add(item.data as FileMapActivity);
item.IsDiscarded = true;
RaiseClose(true);
}
});
this.ViewOnlineCommand = new RelayCommand(p =>
{
var item = p as IFileActivityBaseVm;
if (item != null)
{
ProcessUtils.StartProcess(item.ViewOnlineUrl);
}
});
}
public void Initialize(IDMSFile file)
{
File = file;
}
internal void ExecuteDocumentAction(IEnumerable<DocumentActionVM> actions)
{
var actionsCanBeExecuted = actions.Where(a => a.CanExecute()).ToList();
if (actionsCanBeExecuted.Any())
{
var processWorkUnit = new ProcessWorkUnit(this, actionsCanBeExecuted);
_invoker.AddToQueue(processWorkUnit);
}
}
private Exception _error;
public Exception Error {
get { return _error; }
set
{
if (_error != value)
{
_error = value;
this.PropertyHasChanged(a=>a.Error);
}
}
}
private bool _isSharedExternally;
public bool IsSharedExternally
{
set
{
if (_isSharedExternally != value)
{
_isSharedExternally = value;
PropertyHasChanged(a => a.IsSharedExternally);
}
}
get
{
return _isSharedExternally;
}
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/DmsItems/BaseDmsVersion.cs
namespace Workshare.Integration.Processor.DmsItems
{
public class BaseDmsVersion : BaseDMSItem
{
//public override void Apply(DMSVisitor visitor)
//{
// visitor.Visit(this);
//}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/TrueSyncDialog/VMs/TrueSyncDialogViewModel.cs
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Linq;
using System.Windows.Data;
using System.Windows.Input;
using System.Windows.Threading;
using Workshare.Components.Helpers;
using Workshare.Components.Interfaces;
using Workshare.Components.Views.Common;
using Workshare.Components.Views.TrueSyncDialog.WorkUnits;
using Workshare.Integration.Common;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Modularity;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes;
using Workshare.Integration.Processor.Changes.Activities;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.Processor.Services;
using Workshare.Integration.SettingsStorage;
using Workshare.Integration.WsProxies;
using WorksharePlatform;
namespace Workshare.Components.Views.TrueSyncDialog.VMs
{
public class TrueSyncDialogViewModel : OwnViewModel<TrueSyncDialogViewModel>
{
readonly Dispatcher _dispatcher;
readonly IEnumerable<IDMSItem> _items;
readonly IModuleView _view;
private readonly SyncInfoService _syncInfoService;
private System.Threading.Thread _scanThread;
internal SummaryProcessResult ScanResult;
public Guid Id { get; private set; }
public ModuleBase Module { get; set; }
// public enum StateEnum { Default, Scanning, NetError, NoFilesToSync, AllFilesAreSynced, RootFolderDeleted, Forbidden }
public enum StateEnum { Default, Scanning, NetError, NoFilesToSync, AllFilesAreSynced, RootFolderDeleted, Forbidden,
LoadingMore
}
private const int DefaultItemsPerPage = 50;
public int ItemsPerPage = DefaultItemsPerPage;
public event EventHandler ScanCompleted;
internal OwnCancellationTokenSource CancellationToken = new OwnCancellationTokenSource();
StateEnum _state;
public StateEnum State
{
get { return _state; }
set
{
if (_state != value)
{
_state = value;
PropertyHasChanged(p => p.State);
}
}
}
private bool _IsShowImportDiscarded = false;
public bool IsShowImportDiscarded
{
get { return _IsShowImportDiscarded; }
set
{
if (_IsShowImportDiscarded != value)
{
_IsShowImportDiscarded = value;
UpdateButtonsState();
}
}
}
private bool _IsShowUploadDiscarded = false;
public bool IsShowUploadDiscarded
{
get { return _IsShowUploadDiscarded; }
set
{
if (_IsShowUploadDiscarded != value)
{
_IsShowUploadDiscarded = value;
UpdateButtonsState();
}
}
}
public string WorkspaceName
{
get;set;
}
public bool IsViewClosed { get; private set; }
public void OnViewClosing()
{
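// Remember every import/upload file activity that has already finished or failed so the next scan can skip it,
// then cancel the scan that is still running.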
IsViewClosed = true;
var import =
AllActivities.OfType<ImportFileActivityVm>()
.Select(a => a._data)
.Where(d => (d.InKindOfErrorState() || d.InKindOfProceeedState()))
.OfType<FileMapActivity>().ToList();
var upload =
AllActivities.OfType<UploadFileActivityVm>()
.Select(a => a._data)
.Where(d => (d.InKindOfErrorState() || d.InKindOfProceeedState()))
.OfType<FileMapActivity>()
.ToList();
import.AddRange(upload);
ItemsToSkip = import;
CancellationToken.Cancel();
if (_scanThread != null) _scanThread.Abort();
}
public TrueSyncDialogViewModel()
{
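// Parameterless constructor that fills the view model with hard-coded dummy activities (sample data, e.g. for design-time preview).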
Id = Guid.NewGuid();
var dummy = new ImportFileActivity {Filename = "dummy1.doc"};
new[]{
new FileActivityChange
{
Type=ChangeType.LocalChanged
},
new FileActivityChange
{
Type=ChangeType.NamesDiffer
},
new FileActivityChange
{
Type=ChangeType.Uploaded,
},
new FileActivityChange
{
Type=ChangeType.Uploaded,
},
new FileActivityChange
{
Type=ChangeType.Uploaded,
},
new FileActivityChange
{
Type=ChangeType.FolderSharedWithOthers,
OtherMemebersCount = 12,
FolderSharedBy = new WsUser(new UserDetails
{
UserName = "Vitaly"
})
},
new FileActivityChange
{
Type=ChangeType.Uploaded,
},
new FileActivityChange
{
Type=ChangeType.Uploaded,
}
}.ToList().ForEach(dummy.AddChange);
var dummy2 = new ImportFileActivity
{
Filename = "dummy2asdjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjj.doc"
};
new[]{
new FileActivityChange
{
Type=ChangeType.BothChanged
}
}.ToList().ForEach(dummy2.AddChange);
var dummy3 = new ImportFileActivity();
dummy2.State = ImportFileActivityState.Error;
dummy2.Error = new CheckedOutOnAnotherMachine("Us4er1");
dummy3.Filename = "dummy3.doc";
new[]{
new FileActivityChange
{
Type=ChangeType.Uploaded,
},
new FileActivityChange
{
Type=ChangeType.BothChanged,
},
new FileActivityChange
{
Type=ChangeType.Uploaded,
},
new FileActivityChange
{
Type=ChangeType.Uploaded,
},
new FileActivityChange
{
Type=ChangeType.Uploaded,
},
new FileActivityChange
{
Type=ChangeType.Uploaded,
}
}.ToList().ForEach(dummy3.AddChange);
var upDumme = new UploadFileActivity
{
Error = new CheckedOutOnAnotherMachine("Us4er1"),
State = UploadFileActivityState.Error,
Filename = "dummy3.doc"
};
AllActivities = new ObservableCollection<IItemMapActivityVm>
{
new ImportFileActivityVm(dummy),
new ImportFileActivityVm(dummy2),
new ImportFileActivityVm(dummy3),
new ImportFileActivityVm(dummy),
new UploadFileActivityVm(upDumme),
};
ImportActiveItems = new CollectionViewSource { Source = AllActivities }.View;
ImportActiveItems.Filter = a => (a is ImportFileActivityVm) && FilterActivities(a) && !((ImportFileActivityVm)a)._data.IsDiscarded;
UploadActiveItems = new CollectionViewSource { Source = AllActivities }.View;
UploadActiveItems.Filter = a => (a is UploadFileActivityVm) && FilterActivities(a) && !((UploadFileActivityVm)a)._data.IsDiscarded;
ItemsToSkip = new List<FileMapActivity>();
}
public TrueSyncDialogViewModel(ModuleBase module, CommandInvoker invoker, IEnumerable<IDMSItem> items,IModuleView view)
{
Id = Guid.NewGuid();
Module = module;
_items = items.ToList();
_view = view;
_dispatcher = Dispatcher.CurrentDispatcher;
_syncInfoService = module.Resolve<SyncInfoService>();
ItemsToSkip = new LinkedList<FileMapActivity>();
var settingsStorage = module.Resolve<SettingsStorage>();
ItemsPerPage = settingsStorage.GetInt(SettingsOptions.PagingSizeInTrueSync, DefaultItemsPerPage);
ItemsPerPage = (ItemsPerPage > 0) ? ItemsPerPage : DefaultItemsPerPage;
AllActivities = new ObservableCollection<IItemMapActivityVm>();
ImportActiveItems = new CollectionViewSource {Source = AllActivities}.View;
ImportActiveItems.Filter =
a => (a is ImportFileActivityVm) && FilterActivities(a) && !((ImportFileActivityVm) a)._data.IsDiscarded;
ImportDiscardedItems = new CollectionViewSource {Source = AllActivities}.View;
ImportDiscardedItems.Filter =
a => (a is ImportFileActivityVm) && FilterActivities(a) && ((ImportFileActivityVm) a)._data.IsDiscarded;
UploadActiveItems = new CollectionViewSource {Source = AllActivities}.View;
UploadActiveItems.Filter =
a => (a is UploadFileActivityVm) && FilterActivities(a) && !((UploadFileActivityVm)a)._data.IsDiscarded && !((UploadFileActivityVm)a)._data.DmsFile.CheckedOut;
UploadDiscardedItems = new CollectionViewSource {Source = AllActivities}.View;
UploadDiscardedItems.Filter =
a => (a is UploadFileActivityVm) && FilterActivities(a) && ((UploadFileActivityVm) a)._data.IsDiscarded;
CloseCommand = new RelayCommand(c => RaiseClose(false));
this.WorkspaceName = _items.Select(p => p.Name).FirstOrDefault() ?? "Unknown";
this.ProcessActivityCommand = new RelayCommand(p =>
{
var action = p as DocumentActionVM;
if (action != null && action.CanExecute())
{
var u = new ProcessWorkUnit(Module, this._view, new[] {action}, this);
invoker.AddToQueue(u);
}
});
this.RescanCommand = new RelayCommand(p =>
{
var unit = new ScanWorkUnit(this._view, this._items, this, _scanThread);
invoker.AddToQueue(unit);
});
LoadMoreCommand = new RelayCommand(p =>
{
var unit = new ScanWorkUnit(this._view, this._items, this, _scanThread, true);
invoker.AddToQueue(unit);
});
this.ImportAllCommand = new RelayCommand(p =>
{
RaiseClose(false);
var activities = ImportActiveItems.OfType<ImportFileActivityVm>().ToList();
var actionsToExecute =
GetAcitionsToImportAll(
activities.Select(a => a._data)
.Where(
a =>
!a.InKindOfErrorState() && !a.InKindOfProceeedState() &&
!a.InKindOfProcessingState())
.ToList());
foreach (var item in actionsToExecute)
{
ProcessActivityCommand.Execute(new DocumentActionVM(item,
activities.First(a => a.Id == item.ActivityId)));
}
});
this.BreakLinksForRootCommand = new RelayCommand(p =>
{
var breakWorkUnit = new BreakFolderLinkWorkUnit(this, view);
breakWorkUnit.Execute();
this.RaiseClose(true); //start old sync process
});
this.UploadAllCommand = new RelayCommand(p =>
{
foreach (var item in UploadActiveItems.OfType<UploadFileActivityVm>())
{
if (item.State == UploadFileActivityState.Scanned)
{
ProcessActivityCommand.Execute(item.Actions.FirstOrDefault());
}
}
RaiseClose(false);
});
this.DiscardChangesCommand = new RelayCommand(p =>
{
var service = module.Resolve<DiscardedService>();
var item = p as IItemMapActivityVm;
if (item != null)
{
service.Add(item.data as FileMapActivity);
item.IsDiscarded = true;
if (item is ImportFileActivityVm)
{
ImportActiveItems.Refresh();
ImportDiscardedItems.Refresh();
}
else
{
UploadActiveItems.Refresh();
UploadDiscardedItems.Refresh();
}
if (!HasActiveImportItems && !HasActiveUploadItems)
{
State = StateEnum.AllFilesAreSynced;
}
UpdateButtonsState();
}
});
this.ViewOnlineCommand = new RelayCommand(p =>
{
var item = p as IFileActivityBaseVm;
if (item != null)
{
ProcessUtils.StartProcess(item.ViewOnlineUrl);
}
});
this.ViewDiscardedCommand = new RelayCommand(p =>
{
this.State = StateEnum.Default;
});
this.ViewOnlineWorkspaceCommand = new RelayCommand(param =>
{
if (ScanResult != null)
{
var url =
ScanResult.Maps.OfType<FolderMap>()
.Where(p => p.WsFolder != null)
.Select(p => p.WsFolder.Url)
.FirstOrDefault();
return !string.IsNullOrEmpty(url);
}
return false;
},
param =>
{
if (ScanResult != null)
{
var url =
ScanResult.Maps.OfType<FolderMap>()
.Where(p => p.WsFolder != null)
.Select(p => p.WsFolder.Url)
.FirstOrDefault();
ProcessUtils.StartProcess(url);
}
});
}
private IEnumerable<ImportDocumentAction> GetAcitionsToImportAll(List<ImportFileActivity> importActivities)
{
return _view.SelectActions(importActivities);
}
private void UpdateButtonsState()
{
PropertyHasChanged(a => a.ImportAllButtonVisible);
PropertyHasChanged(a => a.UploadAllButtonVisible);
}
public bool ImportAllButtonVisible
{
get
{
if (IsShowImportDiscarded) return false;
else return HasActiveImportItems;
}
}
public bool UploadAllButtonVisible
{
get
{
if (IsShowUploadDiscarded) return false;
else return HasActiveUploadItems;
}
}
public bool HasActiveImportItems
{
get
{
return ImportActiveItems.OfType<object>().Any(); //TODO look for better line
}
}
public bool HasActiveUploadItems
{
get
{
return UploadActiveItems.OfType<object>().Any(); //TODO look for better line
}
}
bool OnlyNotSentImportItems()
{
var importActivities = AllActivities.OfType<ImportFileActivityVm>().ToList();
var uploadActivities = AllActivities.OfType<UploadFileActivityVm>().ToList();
return (importActivities.All(a => a._data.Changes.Any(p => p.Type == ChangeType.NotSent)) || !importActivities.Any())
&& (uploadActivities.All(a => !a._data.Changes.Any()) || !uploadActivities.Any());
}
public ObservableCollection<IItemMapActivityVm> AllActivities { private set; get; }
public IEnumerable<FileMapActivity> ItemsToSkip { get; set; }
public ICollectionView ImportActiveItems { set; get; }
public ICollectionView UploadActiveItems { set; get; }
public ICollectionView ImportDiscardedItems { set; get; }
public ICollectionView UploadDiscardedItems { set; get; }
public IItemMapActivityVm CreateVm(object a)
{
var data = a as ImportFileActivity;
if (data != null)
{
return new ImportFileActivityVm(data);
}
var activity = a as UploadFileActivity;
return activity != null ? new UploadFileActivityVm(activity) : null;
}
public bool HasMore
{
get
{
var res = ScanResult;
var iterator = (res == null) ? null : res.Iterator;
return iterator != null && iterator.HasMore;
}
}
private void RemoveDeletedActivitiesIfNeed(List<FileMapActivity> activities)
{
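// When a file has just been uploaded and the same map still carries a "remote deleted" import activity,
// hide that stale activity and drop any discard record that was kept for it.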
var deletedActivities = activities.Where(a => a is ImportFileActivity && (a as ImportFileActivity).State == ImportFileActivityState.Scanned && a.Changes.Any(c => c.Type == ChangeType.RemoteDeleted)).ToList();
if (deletedActivities.Count > 0)
{
var uploadedActivities = activities.Where(a => a is UploadFileActivity && (a as UploadFileActivity).State == UploadFileActivityState.Uploaded).ToList();
if (uploadedActivities.Count > 0)
{
var deletedActivitiesToHide = uploadedActivities.Join(deletedActivities, u => u.MapId, d => d.MapId, (u, d) => d).ToList();
deletedActivitiesToHide.ForEach(d => activities.Remove(d));
var deletedDiscardedActivities = deletedActivitiesToHide.Where(a => a.IsDiscarded).ToList();
deletedDiscardedActivities.ForEach(a =>
{
var fileMapActivity = a as FileMapActivity;
if (fileMapActivity != null)
{
var service = Module.Resolve<DiscardedService>();
service.Remove(fileMapActivity);
}
});
}
}
}
public void OnCompleted(List<ItemMap> errors, IEnumerable<FileMapActivity> newActivities, SummaryProcessResult scanresult)
{
// pass processedIds here; afterwards, get the errors from newActivities and save them to a (global?) list
var newScanCompleted = this.ScanResult != scanresult;
ScanResult = scanresult;
var newActivitiesList = newActivities.ToList();
RemoveDeletedActivitiesIfNeed(newActivitiesList);
var itemsToUpdate = AllActivities.Where(a => newActivitiesList.Any(c => c == a.data)).ToList();
var pairsToUpdate = itemsToUpdate.Select(p => new KeyValuePair<IItemMapActivityVm, ItemMapActivity>(p, newActivitiesList.First(a => a == p.data))).ToList();
var itemsToDelete = AllActivities.Except(itemsToUpdate).ToList();
var itemsToAdd = newActivitiesList.Where(c => AllActivities.All(p => p.data != c)).Select(CreateVm).Where(a => a != null).ToList();
System.Net.WebException ex;
this._dispatcher.Invoke(new Action(() =>
{
foreach (var i in itemsToAdd)
{
AllActivities.Add(i);
}
foreach (var i in itemsToDelete)
{
AllActivities.Remove(i);
}
foreach (var i in pairsToUpdate)
{
i.Key.PropertyChangedAll();
}
Exception exception = null;
if (errors.Any(p => p.Error.IsConnectionError()))
{
this.State = StateEnum.NetError;
}
else if (errors.Any(p => p.Error.IsUnauthorized()))
{
this.State = StateEnum.Forbidden;
}
/*else if (this.ScanResult.Maps.First<ItemMap>().Maps.Any(x => { exception = x.Error; return x.ProcessState == ProcessState.Error; }) &&
(ex = exception as System.Net.WebException) != null && ex.IsStatusCode((System.Net.HttpStatusCode)403))*/
else if (this.ScanResult.Maps.All(x => x.ProcessState == ProcessState.Error && x.Error != null &&
x.Error is System.Net.WebException && (x.Error as System.Net.WebException).IsStatusCode((System.Net.HttpStatusCode)403)) ||
this.ScanResult.Maps.First<ItemMap>().Maps.All(x => x.ProcessState == ProcessState.Error && x.Error != null &&
x.Error is System.Net.WebException && (x.Error as System.Net.WebException).IsStatusCode((System.Net.HttpStatusCode)403)))
{
this.State = StateEnum.Forbidden;
}
else if (RootFolderDeletedOnWs())
{
this.State = StateEnum.RootFolderDeleted;
}
else if (OnlyNotSentImportItems())
{
this.State = StateEnum.NoFilesToSync;
}
else if (!HasActiveImportItems && !HasActiveUploadItems && !HasMore)
{
this.State = StateEnum.AllFilesAreSynced;
}
else
{
this.State = StateEnum.Default;
}
this.PropertyHasChanged(a => a.HasMore);
if (newScanCompleted && ScanCompleted != null)
{
ScanCompleted(this, EventArgs.Empty);
}
this.ViewOnlineWorkspaceCommand.RaiseCanExecutechanged();
UpdateButtonsState();
}));
}
private bool RootFolderDeletedOnWs()
{
var folder = ScanResult.Maps.OfType<FolderMap>();
return folder.All(f => !f.HasRemote()&&f.ProcessState==ProcessState.Scanned && _syncInfoService.IsLinked(f.LocalFolder));
}
public static bool FilterActivities(object activity)
{
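// Controls which activities are listed: import items that are already processed/errored stay visible only while
// linked, other import items need a remote-side change; upload items stay visible when processed/errored or
// when they have a version that was not sent (or changed after sending).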
var importActivity = activity as ImportFileActivity;
var vm = activity as ImportFileActivityVm;
if (vm != null)
{
importActivity = vm._data;
}
if (importActivity != null)
{
if (importActivity.InKindOfProceeedState() || importActivity.InKindOfErrorState())
{
return importActivity.IsLinked;
}
var allowedTypesToShow = new[] {
ChangeType.BothChanged,
ChangeType.RemoteAdded,
ChangeType.RemoteChanged,
ChangeType.RemoteDeleted
};
return importActivity.Changes.Any(a => allowedTypesToShow.Contains(a.Type));
}
var uploadActivity = activity as UploadFileActivity;
var activityVm = activity as UploadFileActivityVm;
if (activityVm != null)
{
uploadActivity = activityVm._data;
}
if (uploadActivity != null)
{
if (uploadActivity.InKindOfProceeedState() || uploadActivity.InKindOfErrorState())
{
return true;
}
var allowedTypesToShow = new[] {
ChangeType.NotSentVersion,
ChangeType.VersionChangedAfterSend
};
return uploadActivity.Changes.Any(a => allowedTypesToShow.Contains(a.Type));
}
return true;
}
#region Commands
public ICommand ImportAllCommand { get; set; }
public ICommand BreakLinksForRootCommand { get; set; }
public RelayCommand ProcessActivityCommand { get; set; }
public ICommand DiscardChangesCommand { get; set; }
public ICommand CloseCommand { set; get; }
public ICommand ViewOnlineCommand { get; set; }
public ICommand ViewDiscardedCommand { get; set; }
public ICommand RescanCommand { get; set; }
public RelayCommand ViewOnlineWorkspaceCommand { set; get; }
public RelayCommand LoadMoreCommand { set; get; }
public ICommand UploadAllCommand { get; set; }
#endregion
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/Command.cs
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Windows.Forms;
using Com.Interwoven.Worksite.iManExt;
using Com.Interwoven.WorkSite.iManage;
using Workshare.Components.Common;
using Workshare.Components.WSLogger;
using Workshare.IManage.Contrete;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Services;
namespace Workshare.IManage
{
public enum iManType {FileSite, DeskSite};
public abstract class Command : ICommand, ICommand2
{
readonly string GETSELECTEDITEMS = "SelectedIManObjects";
readonly string GETSELECTEDFOLDER = "SelectedFolderObject";
readonly string GETDMS = "IManDMS";
readonly string GETDESTINATIONOBJECT = "IManDestinationObject";
internal SyncInfoService syncService = Application.iManInstance.Module.Resolve<SyncInfoService>();
public virtual int Accelerator
{
get
{
return 0;
}
set
{
}
}
void SafeCOMCall(Action call)
{
try
{
call();
}
catch (System.Runtime.InteropServices.COMException comException)
{
Logger.WriteError((Exception)comException);
if (comException.ErrorCode == -2147221399 || comException.ErrorCode == -2147221402 || comException.ErrorCode == -2147221503)
{
WSApplication.Instance.View.ShowError((Workshare.Integration.Exceptions.BaseException)new Workshare.Integration.Exceptions.OfflineException());
}
else
{
Logger.WriteError(comException);
#if DEBUG
MessageBox.Show(comException.ToString(), "FIX ME PLEASE!!!!..");
#else
MessageBox.Show(string.Format("An unexpected error occurred during command execution:\n{0}\nPlease try again and contact support if the error persists.", comException.Message), "Unexpected error");
#endif
}
}
catch (Exception ex)
{
Logger.WriteError(ex);
#if DEBUG
MessageBox.Show(ex.ToString(), "FIX ME PLEASE!!!!...");
#else
if (ex is BaseException)
{
WSApplication.Instance.View.ShowError(ex);
}
else
{
MessageBox.Show(string.Format("An unexpected error occurred during command execution:\n{0}\nPlease try again and contact support if the error persists.", ex.Message), "Unexpected error");
}
#endif
}
}
public virtual object Bitmap
{
get
{
return null;
}
set
{
}
}
ContextItems m_Context = null;
public virtual ContextItems Context
{
get
{
return m_Context;
}
}
public virtual void ExecuteAction()
{
if (!Application.iManInstance.Presenter.LoginIfNeeded())
{
return;
}
}
public void Execute()
{
SafeCOMCall(() =>
{
ExecuteAction();
});
}
public virtual string HelpFile
{
get
{
return string.Empty;
}
set
{
}
}
public virtual int HelpID
{
get
{
return 0;
}
set
{
}
}
public virtual string HelpText
{
get
{
return "HelpText";
}
set
{
}
}
public virtual void InitializeAction(ContextItems Context)
{
}
public void Initialize(ContextItems Context)
{
SafeCOMCall(() =>
{
m_Context = Context;
InitializeAction(m_Context);
});
}
public virtual string MenuText
{
get
{
return "MenuText";
}
set
{
}
}
public virtual string Name
{
get
{
return "Name";
}
set
{
}
}
public virtual int Options
{
get
{
return 0;
}
set
{
}
}
public virtual int Status
{
get
{
//Context.OfType<object>().ToList().ForEach(p => System.Diagnostics.Trace.TraceInformation((p.GetType().InvokeMember("Name", System.Reflection.BindingFlags.GetProperty, null, p, new object[0]) ?? "-").ToString()));
return (int)((!Application.iManInstance.Presenter.IsAnyItemInProgress() && IsAnyServerAvailable()) ? CommandStatus.nrActiveCommand : CommandStatus.nrGrayedCommand);
}
set
{
}
}
public virtual Commands SubCommands
{
get
{
return null;
}
set
{
}
}
public virtual string Title
{
get
{
return "Title";
}
set
{
}
}
public virtual CommandType Type
{
get
{
return CommandType.nrStandardCommand;
}
set
{
}
}
public virtual void UpdateAction()
{
}
public void Update()
{
SafeCOMCall(() =>
{
UpdateAction();
});
}
#region privates
protected bool IsAnyServerAvailable()
{
try
{
var dms = Context.Item(GETDMS) as IManDMS;
return dms != null && dms.Sessions != null && dms.Sessions.Count > 0;
}
catch
{ }
//var ns = Context.Item("NRTSessions") as NRTSessions;
return false;
}
protected virtual List<IDMSItem> GetContextItems()
{
var selectedItems = new List<IDMSItem>();
var items = GetSelectedItemsInContext();
if (items != null)
{
var contextFolder = GetContextFolder();
if (string.IsNullOrEmpty(WSApplication.Instance.ServerKey))
{
WSApplication.Instance.ServerKey = SetServerKey(contextFolder);
}
foreach (object item in items)
{
if (item is IManFolder)
{
var manFolder = new ManFolder((IManFolder)item);
selectedItems.Add(manFolder);
}
else if (item is IManDocument)
{
if (contextFolder != null)
{
selectedItems.Add(new ManFile((IManDocument) item, contextFolder).GetLatest());
}
else
{
selectedItems.Add(new ManFile((IManDocument)item).GetLatest());
}
}
}
}
return selectedItems;
}
private string SetServerKey(IManFolder _folder)
{
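// Builds the per-server key "<DMS object id>!server:<address>", where the address is the session's server name
// (if it is already an IP) or the resolved host IP, with dots replaced by underscores.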
try
{
var session = (_folder == null)? Context.Item("SelectedIManSession") as IManSession:_folder.Database.Session;
var nrtdms = session.DMS.ObjectID;
IPAddress server_ip;
var ipAddres = "!server:";
if (IPAddress.TryParse(session.ServerName, out server_ip))
{
ipAddres += session.ServerName.Replace(".", "_");
}
else
{
ipAddres += GetServerIp(session.ServerName);
}
return nrtdms + ipAddres;
}
catch
{
return "";
}
}
private string GetServerIp(string hostname)
{
IPHostEntry entry = Dns.GetHostEntry(hostname);
return entry.AddressList.FirstOrDefault().ToString().Replace(".","_");
}
protected iManType GetClientType()
{
string applicationName = "DeskSite";
try
{
IManDMS _dms = Context.Item(GETDMS) as IManDMS;
applicationName = _dms.ApplicationName;
}
catch (Exception ex)
{
Logger.Write(ex, Severity.Warning);
}
if (string.Compare(applicationName, "FileSite", StringComparison.InvariantCultureIgnoreCase) == 0)
{
return iManType.FileSite;
}
else
{
return iManType.DeskSite;
}
}
protected IManFolder GetContextFolder()
{
try
{
return Context.Item(GETSELECTEDFOLDER) as IManFolder;
}
catch (Exception ex)
{
return null;
}
}
protected List<IDMSFolder> GetSelectedFolders()
{
try
{
var folder = Context.Item(GETSELECTEDFOLDER) as IManFolder;
if (folder != null)
{
if (string.IsNullOrEmpty(WSApplication.Instance.ServerKey))
{
WSApplication.Instance.ServerKey = SetServerKey(folder);
}
return new List<IDMSFolder> { new ManFolder(folder) };
}
}
catch (ArgumentException ex)
{
Logger.WriteWarning(ex.Message);
}
return new List<IDMSFolder>();
}
protected IEnumerable GetSelectedItemsInContext()
{
if (GetClientType() == iManType.DeskSite)
{
try
{
//Context.OfType<object>().ToList().ForEach(p => Trace.TraceInformation((p.GetType().InvokeMember("Name", System.Reflection.BindingFlags.GetProperty, null, p, new object[0]) ?? "-").ToString()));
//Trace.TraceInformation("Current folder is = " + ((IManFolder)Context.Item("SelectedFolderObject")).Name);
return Context.Item(GETSELECTEDITEMS) as IEnumerable ?? new object[0];
}
catch (ArgumentException ex)
{
try
{
Update();
var folder = Context.Item(GETSELECTEDFOLDER) as IManFolder;
if (folder != null)
{
return new object[] { folder };
}
}
catch (ArgumentException)
{
}
} // not selected items in the context
}
else // if it's filesite
{
bool folderObject = false;
try
{
if (Context != null)
{
IManFolder obj = Context.Item(GETDESTINATIONOBJECT) as IManFolder;
if (obj != null)
{
folderObject = true;
}
}
}
catch
{
folderObject = false;
}
if (folderObject)
{
var folder = Context.Item(GETDESTINATIONOBJECT) as IManFolder;
return new object[] { folder };
}
else
{
IEnumerable obj = null;
try
{
obj = Context.Item(GETSELECTEDITEMS) as IEnumerable;
}
catch
{
/*Application.iManInstance.View.ShowErrors(new List<Workshare.Integration.Exceptions.ItemException>() { new Workshare.Integration.Exceptions.ItemException()
{ Item = null, Error = new Workshare.Integration.Exceptions.DMSUnAuthorizedException() } });*/
}
return obj;
}
}
return new object[0];
}
#endregion
public virtual object LargeBitmap
{
get { return Bitmap; }
set { }
}
public string TextLabel
{
get { return Title; }
set { }
}
public void ShowError(BaseException ex, IDMSItem item)
{
Logger.WriteError(ex);
Application.iManInstance.View.ShowErrors(new List<ItemException>()
{
new ItemException()
{
Item = item,
Error = ex
}
});
}
public void ShowErrors(BaseException ex, List<IDMSItem> items)
{
Logger.WriteError(ex);
Application.iManInstance.View.ShowErrors(items.ConvertAll(i => new ItemException() { Item = i, Error = ex }));
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/Common/BasePropertyChanged.cs
using System;
using System.ComponentModel;
using System.Linq.Expressions;
namespace Workshare.Components.Views.Common
{
public abstract class BasePropertyChanged<T>
: INotifyPropertyChanged
{
#region Property Change Event
public event PropertyChangedEventHandler PropertyChanged;
public void PropertyHasChanged(string propertyName, string method = "Blank")
{
var handler = PropertyChanged;
if (handler != null)
{
handler(this, new PropertyChangedEventArgs(propertyName));
}
}
public void PropertyChangedAll()
{
PropertyHasChanged(string.Empty);
}
public virtual void PropertyHasChanged(Expression<Func<T, object>> property)
{
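// Resolves the member name from the property lambda (unwrapping the boxing Convert node that value-type
// properties produce) and raises PropertyChanged for that name.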
if (property == null)
{
return;
}
var memberExp = property.Body as MemberExpression;
if (memberExp == null)
{
var ur = property.Body as UnaryExpression;
if (ur != null)
{
memberExp = ur.Operand as MemberExpression;
if (memberExp == null)
{
return;
}
}
else
{
return;
}
}
PropertyHasChanged(memberExp.Member.Name, memberExp.Member.ReflectedType.Name);
}
#endregion
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/Contrete/ImportDialog.cs
using System;
using System.IO;
using Com.Interwoven.Worksite.iManExt;
using Com.Interwoven.WorkSite.iManage;
using Workshare.Components.WSLogger;
namespace Workshare.IManage.Contrete
{
class ImportDialog
{
ManFolder _folder;
string _filePath = string.Empty;
IManDatabase _datebase;
string _name;
public void Initialize(ManFolder parentFolder, string filePath, string name, IManDatabase datebase)
{
this._folder = parentFolder;
this._filePath = filePath;
this._name = name;
this._datebase = datebase;
}
public ManFile AddedFile { set; get; }
public void ShowDialog()
{
Logger.WriteTrace("Displaying import dialog.");
Components.Presenter.ModulePresenterBase.InvokeInSTAThreadIfNeed(() =>
{
try
{
var added = false;
var importCmd = new ImportCmdClass();
importCmd._ICommandEvents_Event_OnInitDialog += objDlg =>
{
var dlg = objDlg as NewProfileDlg;
if (dlg != null)
{
// commited through the task 16976
//var oDocClasses = m_folder.Database.SearchDocumentClasses("", imSearchAttributeType.imSearchBoth, true);
//var oDocClass = (oDocClasses.Contains("DOC")) ? oDocClasses.ItemByName("DOC") : null;
dlg.SetAttributeValueByID(AttributeID.nrDescription, _name, true);
//dlg.SetAttributeValueByID(AttributeID.nrClass, oDocClass, true);
dlg.SetAttributeValueByID(AttributeID.nrAuthor, _datebase.Session.UserID, true);
}
};
importCmd.PostOnOK += o =>
{
Logger.Write("PostOk event", Severity.Trace);
added = true;
};
var contextItems = new ContextItems
{
{"ParentWindow", (int) Application.iManInstance.View.ActivieWindowHandle},
{"DestinationObject", (_folder != null) ? (object) _folder.m_folder : _datebase},
{"IManExt.Import.FileName", _filePath},
{"IManExt.OpenCmd.NoCmdUI", false},
{"IManExt.NewProfile.ProfileNoUI", false},
{"IManExt.Import.KeepCheckedOut", CheckinOptions.nrDontKeepCheckedOut}
};
importCmd.Initialize(contextItems);
importCmd.Update();
if (importCmd.Status == (int)CommandStatus.nrActiveCommand)
{
Logger.Write("Executing import cmd", Severity.Trace);
IManDocument doc = null;
using (HooksDisabler.Create())
{
importCmd.Execute();
}
var brefresh = (bool)contextItems.Item("IManExt.Refresh");
if (brefresh)
{
//Succeeded in importing a document to WorkSite
doc = (IManDocument)contextItems.Item("ImportedDocument");
}
if (added)
{
AddedFile = new ManFile(doc, _folder == null ? null : _folder.m_folder);
}
}
else
{
throw new Exception("You cannot import the file.");
}
}
finally
{
try
{
File.Delete(_filePath);
}
catch (Exception e)
{
Logger.Write(e, Severity.Error);
}
}
});
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Changes/Activities/ActivityExts.cs
using System.Collections.Generic;
using System.Linq;
using Workshare.Integration.Exceptions;
namespace Workshare.Integration.Processor.Changes.Activities
{
public static class ActivityExts
{
public static string AsString(this IEnumerable<FileActivityChange> change)
{
var res = string.Empty;
change.ToList().ForEach(a => res += a.Type + ",");
return res;
}
public static IEnumerable<ItemMapActivityAction> GetAllActions(IEnumerable<ItemMapActivityAction> activityVm)
{
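// Recursively flattens nested action groups and returns only the distinct leaf actions.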
var res = new List<ItemMapActivityAction>();
foreach (var documentAction in activityVm.OfType<ItemMapActivityAction>())
{
if (documentAction.Actions != null && documentAction.Actions.Any())
{
res.AddRange(GetAllActions(documentAction.Actions.ToList()));
}
else if(!(documentAction is ItemMapActivityActionGroup))
{
res.Add(documentAction);
}
}
return res.Distinct().ToList();
}
public static IEnumerable<ItemMapActivityAction> GetAllAvailableActions(this FileMapActivity activity)
{
if(activity.Actions==null)return new List<ItemMapActivityAction>();
return GetAllActions(activity.Actions);
}
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Services/WebDataStorageService.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Text;
using Workshare.Components.WSLogger;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.WsProxies;
namespace Workshare.Integration.Processor.Services
{
class CacheDataObject
{
public string Data { get; set; }
public DateTime AddDate { get; set; }
public DateTime ObjectDate { get; set; }
}
public class WebDataStorageService
{
private static int AllowedTimeSpan = 3000;
Dictionary<string, object> _Acc_cache = new Dictionary<string, object>();
Dictionary<string, object> _User_cache = new Dictionary<string, object>();
IAuthProvider _auth;
WsProxy _proxy;
WsUser User
{
get
{
return (_auth == null) ? null : _auth.GetCurrentWSUser2();
}
}
public WebDataStorageService(IAuthProvider auth, WsProxy proxy)
{
_auth = auth;
_proxy = proxy;
}
#region Work with account data
void UpdateAccountCache(string _key, CacheDataObject _data)
{
if (_Acc_cache.ContainsKey(_key))
{
_Acc_cache[_key] = _data;
}
else
{
_Acc_cache.Add(_key, _data);
}
}
public void AddAccountData(string _key, string _data, DateTime objectUpdateDate)
{
var cdo = new CacheDataObject()
{
Data = _data,
AddDate = DateTime.Now,
ObjectDate = objectUpdateDate
};
_proxy.AddAccountDataToWDS(User, _key, _data);
UpdateAccountCache(_key, cdo);
}
public void DeleteAccountData(string _key)
{
_proxy.DeleteAccountDataOnWDS(User, _key);
UpdateAccountCache(_key, null);
}
public string GetAccountData(string _key, DateTime objectUpdateDate, bool ForceRequest = true)
{
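// Returns the cached value when it is fresh enough (AllowedTimeSpan) and the object's update date is unchanged;
// otherwise fetches the data from the Workshare data store and refreshes the cache.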
if (_Acc_cache.ContainsKey(_key))
{
var cdo = _Acc_cache[_key] as CacheDataObject;
if (cdo == null && !ForceRequest)
{
return null;
}
else
{
if (cdo != null)
{
TimeSpan ts = DateTime.Now - cdo.AddDate;
if ((ts.TotalMilliseconds <= AllowedTimeSpan || !ForceRequest) && (objectUpdateDate != DateTime.MinValue && objectUpdateDate == cdo.ObjectDate))
{
return (cdo == null) ? null : cdo.Data;
}
}
}
}
string _data = GetDataFromWDS(_proxy.GetAccountDataFromWDS, User, _key);
var ncdo = new CacheDataObject()
{
Data = _data,
AddDate = DateTime.Now,
ObjectDate = objectUpdateDate
};
UpdateAccountCache(_key, ncdo);
return _data;
}
#endregion
#region Work with user data
void UpdateUserCache(string _key, CacheDataObject _data)
{
if (_User_cache.ContainsKey(_key))
{
_User_cache[_key] = _data;
}
else
{
_User_cache.Add(_key, _data);
}
}
public void AddUserData(string _key, string _data, DateTime objectUpdateDate)
{
var cdo = new CacheDataObject()
{
Data = _data,
AddDate = DateTime.Now,
ObjectDate = objectUpdateDate
};
_proxy.AddUserDataToWDS(User, _key, _data);
UpdateUserCache(_key, cdo);
}
public void DeleteUserData(string _key)
{
_proxy.DeleteUserDataOnWDS(User, _key);
UpdateUserCache(_key, null);
}
public string GetUsertData(string _key, DateTime objectUpdateDate, bool ForceRequest = true)
{
if (_User_cache.ContainsKey(_key) && !ForceRequest)
{
var cdo = _User_cache[_key] as CacheDataObject;
if (cdo == null && !ForceRequest)
{
return null;
}
else
{
if (cdo != null)
{
TimeSpan ts = DateTime.Now - cdo.AddDate;
if (ts.TotalMilliseconds <= AllowedTimeSpan || !ForceRequest)
{
return (cdo == null) ? null : cdo.Data;
}
}
}
}
var _data = GetDataFromWDS(_proxy.GetUserDataFromWDS, User, _key);//_proxy.GetUserDataFromWDS(User, _key);
var ncdo = new CacheDataObject()
{
Data = _data,
AddDate = DateTime.Now,
ObjectDate = objectUpdateDate
};
UpdateUserCache(_key, ncdo);
return _data;
}
#endregion
#region Start scan for all items
#endregion
#region Support methods
string GetDataFromWDS(Func<WsUser, string, string> funct, WsUser _user, string _key)
{
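// Calls the given WDS accessor with basic retry handling: 404 means no data (null), timeouts are retried
// (at most three attempts), connection failures become OfflineException and anything else CannotGetDataFromServer.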
byte counter = 0;
while (counter < 3)
{
try
{
var _data = funct(_user, _key);
return _data;
}
catch (WebException e)
{
if (e.Response != null && e.Response is HttpWebResponse &&
((HttpWebResponse)e.Response).StatusCode == HttpStatusCode.NotFound)
{
return null;
}
else if (e.Status == WebExceptionStatus.Timeout ||
(e.Response != null && e.Response is HttpWebResponse &&
(((HttpWebResponse)e.Response).StatusCode == HttpStatusCode.RequestTimeout || (((HttpWebResponse)e.Response).StatusCode == HttpStatusCode.GatewayTimeout))))
{
counter++;
Logger.Write("Timeout while getting data from WDS, retrying (attempt " + (counter + 1).ToString() + ")", e, Severity.Error);
}
else
{
if (e.IsConnectionError())
throw new OfflineException();
else
throw new CannotGetDataFromServer();
}
}
catch (UnauthorizedAccessException)
{
throw;
}
catch (Exception ex)
{
Logger.Write("Critical exception while receiving data from WDS", ex, Severity.Error);
throw new CannotGetDataFromServer();
}
}
if (counter > 2)
{
throw new CannotGetDataFromServer();
}
return null;
}
#endregion
}
}
<file_sep>/WSComponents/src/WSIntegration/Processor/Maps/MapAnylizer.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Workshare.Components.WSLogger;
using Workshare.Integration;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor.Strategies;
namespace Workshare.Integration.Processor.Maps
{
public class MapAnylizer
{
public IAuthProvider _auth;
private readonly DmsWorkerBase _dmsWorker;
public MapAnylizer(IAuthProvider auth, DmsWorkerBase dmsWorker)
{
_auth = auth;
_dmsWorker = dmsWorker;
}
public bool IsFolderSharedWithExternalUsers(FolderMap map)
{
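// A folder counts as externally shared when it has members other than the current user who are outside the
// user's account (for users in a private domain) or when any other member exists at all (public domain).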
if (map == null) return false;
var wsFolder = map.WsFolder;
if (wsFolder == null) return false;
var user = _auth.GetCurrentWSUser2();
if (user == null) return false;
var members = wsFolder.Members;
var otherUsers = members.GetWithoutCurrentUser(user);
if (user.InPrivateDomain)
{
var currentUserDomain = user.Domain;
Logger.WriteTrace(string.Format("User in private domain. Domain={0}; Email={1}; UserName={2}", currentUserDomain, user.Email, user.UserName));
var accountUsers = WorksharePlatform.PlatformService.GetAccountUsersData(user._user);
return otherUsers.Any(m => !accountUsers.Any(u => m.Email.Equals(u.Email, StringComparison.OrdinalIgnoreCase)));
}
else
{
Logger.WriteTrace(string.Format("User in public domain. Domain={0}; Email={1}; UserName={2}", user.Domain, user.Email, user.UserName));
return otherUsers.Any();
}
}
public string GetName(ItemMap itemMap)
{
if (itemMap is FolderMap)
{
var foldermap = (FolderMap) itemMap;
if (foldermap.LocalFolder != null)
{
return _dmsWorker.GetFriendlyName(foldermap.LocalFolder);
}
else if (foldermap.WsFolder != null)
{
return foldermap.WsFolder.Name;
}
}
else if (itemMap is FileMap)
{
var fileMap = (FileMap)itemMap;
if (fileMap.LocalFile != null)
{
return _dmsWorker.GetFriendlyName(fileMap.LocalFile);
}
else if (fileMap.WsFile != null)
{
return fileMap.WsFile.Name;
}
}
return itemMap.Id;
}
}
}
<file_sep>/WSComponents/src/WSComponents/Interfaces/IModuleView.cs
using System;
using System.Collections.Generic;
using Workshare.Components.Presenter;
using Workshare.Components.Views.Common;
using Workshare.Integration.Enums;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Changes.Activities;
using WorksharePlatform;
namespace Workshare.Components.Interfaces
{
public enum EventOperation { UpdateCurrent, AddNewVersion, AddNewDoc, DeclareAsRecord, MoveFile, MoveFolder }
public interface IModuleView
{
IModulePresenter m_presenter { get; set; }
IntPtr ActivieWindowHandle { get; set; }
void ShowErrors(IEnumerable<ItemException> errors);
void ShowError(Exception ex);
void ShowError(string message);
void ShowError(BaseException ex);
void OnSendItemsClicked(SendItemsClickedArgs args);
void OnSynchItemsClicked(SyncItemsClickedArgs args);
void OnCollaborationItemsClicked(CollaborationItemsClickedArgs args);
void ShowProgressWindow(CommandInvoker invoker);
bool ShowLogin(out UserDetails user, DialogSettings settings);
int ShowSelectFolder(DialogSettings settings);
CheckOutOptions ShowCheckOutDialogIfNeed(SendItemsClickedArgs args);
ModulePresenterBase CreatePresenter(IWSIntegration integration);
IntPtr TopWindow { set; get; }
void ShowFileSyncDialog(object file, EventOperation operation, string command);
void SuppressProgressDialog(bool p);
List<ImportDocumentAction> SelectActions(List<ImportFileActivity> importActivities);
}
}
<file_sep>/WSComponents/src/WSIntegration/Enums/Permissions.cs
namespace Workshare.Integration.Enums
{
public enum Permissions
{
EditItem=1
}
}
<file_sep>/iManageIntegration/Src/Workshare.IManage/HooksDisabler.cs
using System;
namespace Workshare.IManage
{
public class HooksDisabler:IDisposable
{
public static bool DisableHooks;
public static HooksDisabler Create()
{
return new HooksDisabler();
}
private readonly bool _previousValue;
protected HooksDisabler()
{
_previousValue =DisableHooks;
DisableHooks = true;
}
public void Dispose()
{
DisableHooks = _previousValue;
}
}
}
<file_sep>/WSComponents/src/WSComponents/Views/CeaseCollaborationDialog/WorkUnits/ProcessWorkUnit.cs
using System;
using System.Collections.Generic;
using System.Linq;
using Workshare.Components.Common;
using Workshare.Components.Interfaces;
using Workshare.Integration.Exceptions;
using Workshare.Integration.Interfaces;
using Workshare.Integration.Processor;
using Workshare.Integration.Processor.Maps;
using Workshare.Integration.WsProxies;
using RES_CMP = Workshare.Components.Properties.Resources;
using RES_INT = Workshare.Integration.Properties.Resources;
namespace Workshare.Components.Views.CeaseCollaborationDialog.WorkUnits
{
class ProcessWorkUnit : WorkUnit
{
private IEnumerable<IDMSItem> _items;
private CeaseCollaborationDialogViewModel _vm;
private CeaseCollaborationImportType _importAction;
private CeaseCollaborationType _fileAction;
private List<int> DeletedWsFolder;
public ProcessWorkUnit(IModuleView view, IEnumerable<IDMSItem> items, CeaseCollaborationDialogViewModel vm)
: base(view)
{
_vm = vm;
_items = items;
this.Name = "Closing collaboration";
TargetItems = _items.Select(f => new TargetItem(f.ID.ToString()) { Name = f.Name }).ToArray();
_importAction = _vm.ImportAction;
_fileAction = _vm.FileAction;
}
public override void Execute()
{
try
{
var scanResult = _vm.ScanResult;
var processor = _vm.Module.Resolve<Processor>();
var processOptions = new ProcessOptions();
var visitor = _vm.Module.Resolve<GetCeaseCollaborationActionsVisitor>();
visitor.Initialize(_importAction, _fileAction);
scanResult.Apply(visitor);
processOptions.ActionsToApply.AddRange(visitor.Result);
scanResult = processor.Process(scanResult, processOptions);
var adapter = _vm.Module.Resolve<Workshare.Integration.Processor.Changes.ActivityFinder>();
var errors = adapter.GetErrors(scanResult);
if (errors.Any(p => p.Error.IsConnectionError()))
{
throw new OfflineException();
}
else if(errors.Any(p => p.Error.IsConnectionTimeOutError()))
{
throw new OfflineException(RES_INT.STR_CONNECTION_ERROR_TEXT_TIMEOUT, errors.Where(p => p.Error.IsConnectionTimeOutError()).Select(p => p.Error).FirstOrDefault());
}
else
{
if (_vm.CeaseCollaborationCloseSpaceErrorFiles[CeaseCollaborationCloseSpaceError.RemoteDeleted].Count > 0)
{
List<ItemException> itemExceptions = new List<ItemException>();
_vm.CeaseCollaborationCloseSpaceErrorFiles[CeaseCollaborationCloseSpaceError.RemoteDeleted].ForEach(i =>
{
itemExceptions.Add(new ItemException() { Item = i.LocalFile, Error = new BaseException(RES_CMP.STR_UNABLE_TO_CLOSE_COLLABORATION_SPACE, RES_INT.STR_CLOUDFILENOTFOUND) });
});
_view.ShowErrors(itemExceptions);
}
}
FinalizeCeaseCollaboration();
}
catch (Exception ex)
{
_view.ShowError(ex);
}
}
private void FinalizeCeaseCollaboration()
{
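// When the chosen action was to delete the files, walk up the linked parent folders and delete any Workshare
// folders that are now empty, breaking the local sync links as we go; stop at the first ancestor that still has content.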
var scanResult = _vm.ScanResult;
DeletedWsFolder = new List<int>();
var fileMaps = scanResult.Maps.OfType<FileMap>();
if (_fileAction == CeaseCollaborationType.Delete && fileMaps.Any())
{
var _wsProxy = _vm.Module.Resolve<Workshare.Integration.WsProxies.WsProxy>();
var _auth = _vm.Module.Resolve<IAuthProvider>();
WsUser user = _auth.GetCurrentWSUser2();
var dmsFolders = _items.Select(f => f.ParentFolder).GroupBy(f => f.DMSId).Select(f => f.First());
foreach (var dmsFolder in dmsFolders)
{
var dmsParent = dmsFolder;
var usingFolder = dmsParent;
var syncInfoService = _vm.Module.Resolve<Workshare.Integration.Processor.Services.SyncInfoService>();
while (dmsParent != null)
{
var syncInfo = syncInfoService.GetSyncInfo(dmsParent);
if (syncInfo == null) break;
var wsFolder = _wsProxy.TryGetFolder(user, syncInfo.ItemId);
if (wsFolder != null && !wsFolder.Files.Any() && FinalizeWsSubFolders(wsFolder.SubFolders))
{
_wsProxy.DeleteFolder(user, wsFolder.Id);
FinalizeLocalSubFolders(dmsParent.SubFolders, usingFolder);
syncInfoService.BreakLink(dmsParent);
}
else break;
usingFolder = dmsParent;
dmsParent = dmsParent.ParentFolder;
}
}
}
}
private bool FinalizeWsSubFolders(IEnumerable<WsFolder> wsSubFolders)
{
var isEmpty = true;
var user = _vm.Module.Resolve<IAuthProvider>().GetCurrentWSUser2();
var wsProxy = _vm.Module.Resolve<WsProxy>();
foreach (var subFolder in wsSubFolders)
{
if (subFolder.Files.Any()) return false;
isEmpty = FinalizeWsSubFolders(subFolder.SubFolders);
if (isEmpty)
{
wsProxy.DeleteFolder(user, subFolder.Id);
DeletedWsFolder.Add(subFolder.Id);
}
}
return isEmpty;
}
private void FinalizeLocalSubFolders(IEnumerable<IDMSFolder> subFolders, IDMSFolder usingFolder)
{
var syncInfoService = _vm.Module.Resolve<Workshare.Integration.Processor.Services.SyncInfoService>();
foreach (var subFolder in subFolders.Where(f => !f.ID.EqualTo(usingFolder.ID)))
{
FinalizeLocalSubFolders(subFolder.SubFolders, usingFolder);
var syncInfo = syncInfoService.GetSyncInfo(subFolder);
if (syncInfo != null && DeletedWsFolder.Contains(syncInfo.ItemId))
{
syncInfoService.BreakLink(subFolder);
}
}
}
}
}
| 56f9f405556eb56398649be31d9e3678585a22f2 | [
"Markdown",
"C#",
"JavaScript"
] | 237 | C# | Szaharov/DMS2CloudIntegration | 6c4ff58f83044b6cc2da95bd2db0134b7fab102d | e110d8773f184a60810c9d302e0d8dcd3d54c246 | |
refs/heads/main | <file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using TMPro;
public class Events : MonoBehaviour
{
[SerializeField] GameObject[] objectList; // pauseMenu, player1, player2, vicMenuJ1, vicMenuJ2, menu, message, timer
private Player lifePlayer;
private PlayerController playerController;
private BallMovement ballVel;
private TextMeshProUGUI messageText;
private TextMeshProUGUI timerText;
private GameObject[] ball;
public bool isPaused;
public bool isInGame;
private float timers;
public static bool isAI;
private int i = 0;
public void Start() {
BallMovement.isPlaying = false;
BallMovement.isPlayingSimu = false;
InvokeRepeating("EventXSeconds", 15.0f, 15.0f);
Time.timeScale = 0f;
isAI = false;
messageText = objectList[6].GetComponent<TextMeshProUGUI>();
timerText = objectList[7].GetComponent<TextMeshProUGUI>();
}
void Update()
{
ChangeGameMode();
if(Input.GetKeyDown(KeyCode.Escape)) { //check if game is launched to use pause menu
if(isPaused) {
Pause(false, 1f);
} else {
if(isInGame) {
Pause(true, 0f);
}
}
}
if(isInGame) {
timerText = objectList[7].GetComponent<TextMeshProUGUI>();
timers += Time.deltaTime;
int minutes = Mathf.FloorToInt(timers / 60F);
int seconds = Mathf.FloorToInt(timers % 60F);
int milliseconds = Mathf.FloorToInt((timers * 100F) % 100F);
timerText.SetText(minutes.ToString("00") + ":" + seconds.ToString("00") + ":" + milliseconds.ToString("00"));
}
}
public void EventXSeconds() { //all possible events, +- ball velocity, +- players velocity & + life players
if(isInGame == true) {
i++;
int chance = Random.Range(1, 100);
if(chance <= 20) {
messageText.SetText("MALUS : SPEEDBALL");
ChangeBallVelocity(10f);
Invoke("ResetBonusMalus", 10.0f);
} else if(chance > 20 && chance <= 40) { // 20%
messageText.SetText("BONUS : SLOWBALL");
ChangeBallVelocity(1f);
Invoke("ResetBonusMalus", 10.0f);
} else if(chance > 40 && chance <= 60) {
messageText.SetText("BONUS : SPEEDBOOST");
ChangePlayerVelocity(objectList[1], 8f);
ChangePlayerVelocity(objectList[2], 8f);
Invoke("ResetBonusMalus", 10.0f);
} else if(chance > 60 && chance <= 80) {
messageText.SetText("MALUS : SLOWBOOST");
ChangePlayerVelocity(objectList[1], 3f);
ChangePlayerVelocity(objectList[2], 3f);
Invoke("ResetBonusMalus", 10.0f);
} else if(chance > 96 && chance <= 100) { // 5%
messageText.SetText("BONUS : LIFE");
ChangeLifePlayer(objectList[1], 1);
ChangeLifePlayer(objectList[2], 1);
Invoke("ResetBonusMalus", 10.0f);
}
}
}
private void ResetBonusMalus() {
messageText.SetText("");
ChangeBallVelocity(5f);
ChangePlayerVelocity(objectList[1], 5.5f);
ChangePlayerVelocity(objectList[2], 5.5f);
}
private void ChangeBallVelocity(float vel) {
ball = GameObject.FindGameObjectsWithTag("ball");
ballVel = ball[0].GetComponent<BallMovement>();
ballVel.ballSpeed = vel;
}
private void ChangePlayerVelocity(GameObject obj, float vel) {
playerController = obj.GetComponent<PlayerController>();
playerController.playerSpeed = vel;
}
private void ChangeLifePlayer(GameObject obj, int health) {
lifePlayer = obj.GetComponent<Player>();
lifePlayer.life += health;
}
public void Pause(bool b, float t) {
objectList[0].SetActive(b);
Time.timeScale = t;
isPaused = b;
}
public void Restart() { //restart a party
objectList[1].transform.position = new Vector3(-5, 0, 0); //respawn
objectList[2].transform.position = new Vector3(5, 0, 0);
ChangeLifePlayer(objectList[1], 3); //reset lifes
ChangeLifePlayer(objectList[2], 3);
objectList[3].SetActive(false);
objectList[4].SetActive(false);
objectList[5].SetActive(false);
messageText.SetText("");
Time.timeScale = 1f;
isInGame = true;
timers = 0;
i = 0;
ball = GameObject.FindGameObjectsWithTag("ball");
ball[0].transform.position = new Vector3(0, -3, 0);
ballVel = ball[0].GetComponent<BallMovement>();
ballVel.ballSpeed = 5f;
}
public void ChangeGameMode()
{
if (Input.GetKeyDown(KeyCode.F1))
{
if (isAI)
{
isAI = false;
}
else
{
isAI = true;
}
}
}
}
<file_sep># Disc_Battle
Repository for the course Prog video games advanced
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class Player : MonoBehaviour
{
public int life;
[SerializeField] GameObject[] objectList; // victoryDeathMenu, Life1, Life2, Life3
[SerializeField] Events ev;
[SerializeField] PlayerController playerC;
void Start() {
this.life = 3;
}
void Update() {
if(life >= 3) {
life = 3;
objectList[1].SetActive(true);
objectList[2].SetActive(true);
objectList[3].SetActive(true);
}
if(life == 2) {
objectList[1].SetActive(false);
}
if(life == 1) {
objectList[2].SetActive(false);
}
if(life <= 0) {
life = 0;
objectList[3].SetActive(false);
Time.timeScale = 0f;
ev.isInGame = false;
objectList[0].SetActive(true);
}
}
void OnCollisionEnter(Collision other) {
if (other.gameObject.CompareTag("ball")) // if ball touch a player, life-- & respawn
{
life--;
if(playerC.isPlayer) {
this.transform.position = new Vector3(-5, 0, 0);
} else {
this.transform.position = new Vector3(5, 0, 0);
}
}
}
}
<file_sep>using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class BallEvents : MonoBehaviour
{
//Ball value
[SerializeField] private GameObject ball;
public void SpawnBall() {
Instantiate(ball, ball.transform.position, Quaternion.identity);
}
}
<file_sep>using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class PlayerController : MonoBehaviour
{
//Smash values
[SerializeField] private GameObject smashAreaPlayer;
[SerializeField] private Transform smashPointPlayer;
//Movement values
private Vector3 lastDirectionIntent;
private Vector3 lastDirectionIntent2;
public float playerSpeed = 5.5f;
public bool isPlayer;
void Update() {
GetMovement(isPlayer);
SmashDaBall();
lastDirectionIntent2.Normalize();
lastDirectionIntent.Normalize();
}
private void FixedUpdate() {
//Move Player1 or Player2
if(isPlayer) {
gameObject.transform.localPosition += lastDirectionIntent * (Time.deltaTime * playerSpeed);
} else {
gameObject.transform.localPosition += lastDirectionIntent2 * (Time.deltaTime * playerSpeed);
}
}
private void SmashDaBall()
{
if (isPlayer)
{
if (Input.GetKeyDown(KeyCode.Space))
{
Smash();
}
}
else
{
if (!Events.isAI)
{
if (Input.GetKeyDown(KeyCode.Return))
{
Smash();
}
}
}
}
private void GetMovement(bool player) {
if (player) {
if (Input.GetKey(KeyCode.Z)) {
WalkPlayer(player, Vector3.up);
}
if (Input.GetKey(KeyCode.Q)) {
WalkPlayer(player, Vector3.left);
}
if (Input.GetKey(KeyCode.S)) {
WalkPlayer(player, Vector3.down);
}
if (Input.GetKey(KeyCode.D)) {
WalkPlayer(player, Vector3.right);
}
if (!Input.GetKey(KeyCode.Z) && !Input.GetKey(KeyCode.Q) && !Input.GetKey(KeyCode.S) && !Input.GetKey(KeyCode.D)) {
lastDirectionIntent = Vector3.zero;
}
} else {
if (!Events.isAI)
{
if (Input.GetKey(KeyCode.UpArrow)) {
WalkPlayer(player, Vector3.up);
}
if (Input.GetKey(KeyCode.LeftArrow)) {
WalkPlayer(player, Vector3.left);
}
if (Input.GetKey(KeyCode.DownArrow)) {
WalkPlayer(player, Vector3.down);
}
if (Input.GetKey(KeyCode.RightArrow)) {
WalkPlayer(player, Vector3.right);
}
}
if (!Input.GetKey(KeyCode.UpArrow) && !Input.GetKey(KeyCode.LeftArrow) && !Input.GetKey(KeyCode.DownArrow) && !Input.GetKey(KeyCode.RightArrow)) {
lastDirectionIntent2 = Vector3.zero;
}
}
}
public Vector3 WalkPlayer(bool player, Vector3 direction, float timeCall = 0)
{
if (player)
{
if (timeCall == 0)
{
lastDirectionIntent += direction * playerSpeed * Time.deltaTime;
}
else
{
for (int i = 0; i < timeCall; i++)
{
lastDirectionIntent += direction * playerSpeed * Time.deltaTime;
}
}
}
else
{
if (timeCall == 0)
{
lastDirectionIntent2 += direction * playerSpeed * Time.deltaTime;
}
else
{
for (int i = 0; i < timeCall; i++)
{
lastDirectionIntent2 += direction * playerSpeed * Time.deltaTime;
}
}
}
return gameObject.transform.position;
}
public void Smash()
{
//Instantiate prefab to make the ball bounce on it
GameObject smash = Instantiate(smashAreaPlayer, smashPointPlayer.position, Quaternion.identity);
//Destroy it soon after
Destroy(smash, 0.3f);
}
}
<file_sep>using UnityEngine;
using Random = UnityEngine.Random;
public class BallMovement : MonoBehaviour
{
//Ball movement values
public static Vector3 direction;
public static float speedBall;
public float ballSpeed = 5f;
//Game State value (When the ball spawn the game start)
public static bool isPlaying;
public static bool isPlayingSimu;
void Start()
{
//Determine a random direction for the ball to start
direction = new Vector3(Random.Range(-1f, 1f), Random.Range(-1f, 1f), 0);
isPlaying = true;
isPlayingSimu = true;
}
// Update is called once per frame
void Update()
{
speedBall = ballSpeed;
transform.position += direction * (Time.deltaTime * speedBall);
}
public void OnCollisionEnter(Collision other)
{
Bounce(other);
}
public void Bounce(Collision collision)
{
//If the ball collide with a horizontal wall -> invert the y axis value
if (collision.gameObject.CompareTag("horWall"))
{
direction.y *= -1;
}
//If the ball collide with a vertical wall -> invert the x axis value
if (collision.gameObject.CompareTag("verWall"))
{
direction.x *= -1;
}
//Same behaviour as the vertical wall but speed up the ball
if (collision.gameObject.CompareTag("smash"))
{
direction.x *= -1;
ballSpeed += 1f;
}
}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityEngine.Serialization;
public class MCTSIA : MonoBehaviour
{
[SerializeField] private PlayerController p1;
[SerializeField] private PlayerController p2;
[SerializeField] private Collider[] wallsCollider;
private GameObject ballPos;
private Collider ballCollider;
private Vector3 direction;
private float speedBall;
//Up, Down, Left, Right
private Vector3[] actionsList = {
new Vector3(0, 5, 0),
new Vector3(0, -5, 0),
new Vector3(-5, 0, 0),
new Vector3(5, 0, 0),
};
private Vector3 p1PosSimuEnemy;
private Vector3 p2PosSimu;
private Vector3 BallPosSimu;
// Start is called before the first frame update
void Start()
{
InitActions();
}
// Update is called once per frame
void Update()
{
if (BallMovement.isPlayingSimu)
{
ballPos = GameObject.FindGameObjectWithTag("ball");
ballCollider = ballPos.GetComponent<Collider>();
Simulate();
Debug.Log(p1PosSimuEnemy);
}
}
private int Simulate()
{
p1PosSimuEnemy += ChooseRandomAction();
p2PosSimu += ChooseRandomAction();
BallPosSimu = BallAction();
Debug.Log(BallPosSimu);
return 0;
}
//Initiate game values
private void InitActions()
{
p1PosSimuEnemy = p1.transform.position;
p2PosSimu = p2.transform.position;
direction = new Vector3(Random.Range(-1f, 1f), Random.Range(-1f, 1f), 0);
speedBall = 5f;
}
//Simulate the ball collision with a wall
private Vector3 CheckBallCollision(Vector3 ballPos)
{
Vector3 newDirection = new Vector3(1, 1, 1);
// In order walls : North, West, South, East
if (ballPos.y + ballCollider.bounds.extents.y + 0.3 >= wallsCollider[0].bounds.min.y)
{
newDirection.y *= -1;
}
if (ballPos.x - ballCollider.bounds.extents.x <= wallsCollider[1].bounds.max.x)
{
newDirection.x *= -1;
}
if (ballPos.y - ballCollider.bounds.extents.y <= wallsCollider[2].bounds.max.y)
{
newDirection.y *= -1;
}
if (ballPos.x + ballCollider.bounds.extents.x + 0.3 >= wallsCollider[3].bounds.min.x)
{
newDirection.x *= -1;
}
return newDirection;
}
//Simulate the ball action
private Vector3 BallAction()
{
Vector3 positionBall = new Vector3();
positionBall += CheckBallCollision(positionBall) * (Time.deltaTime * speedBall);
return positionBall;
}
//TODO Add the smash to possible moves
private Vector3 ChooseRandomAction()
{
Vector3 positionPlayer = new Vector3();
var actionId = Random.Range(0, 4);
//0 = Up, 1 = Down, 2 = Left, 3 = Right
switch (actionId)
{
case 0:
positionPlayer = actionsList[0];
break;
case 1:
positionPlayer = actionsList[1];
break;
case 2:
positionPlayer = actionsList[2];
break;
case 3:
positionPlayer = actionsList[3];
break;
}
return positionPlayer;
}
}
<file_sep>using System;
using System.Collections;
using Microsoft.Win32.SafeHandles;
using UnityEngine;
using Random = UnityEngine.Random;
public class RandomIAInit : MonoBehaviour
{
//Player to be controlled values
[SerializeField] private PlayerController playerAI;
//Random action values
private int randomAction;
private float actionCooldown;
private float randomActionTime;
private float smashCooldown;
private void Update()
{
if (Events.isAI)
{
//Can do an action after the timer is 0
actionCooldown -= Time.deltaTime;
if (actionCooldown <= 0)
{
actionCooldown = 0.001f;
RandomAction();
}
}
}
//Do a random action
private void RandomAction()
{
randomAction = Random.Range(0, 5);
randomActionTime = Random.Range(10, 30);
switch (randomAction)
{
case 0:
playerAI.WalkPlayer(false, Vector3.up, randomActionTime);
break;
case 1:
playerAI.WalkPlayer(false, Vector3.down, randomActionTime);
break;
case 2:
playerAI.WalkPlayer(false, Vector3.left, randomActionTime);
break;
case 3:
playerAI.WalkPlayer(false, Vector3.right, randomActionTime);
break;
case 4:
smashCooldown -= Time.deltaTime;
if (smashCooldown <= 0)
{
smashCooldown = 0.2f;
playerAI.Smash();
}
break;
}
}
}
| 8a25be67e0503b69408492852254f85d6dcda4b2 | [
"Markdown",
"C#"
] | 8 | C# | Talyj/Groupe3_Prog_JV | 1dfab0be92ef530fd8fd0ec11a1348abc9ac11f6 | bbc5773776b42903789c3645d543e9d73b425650 | |
refs/heads/main | <file_sep># springcloud-7
springcloud 1.0 setup
Main modules:
Service registry: eureka
API service: server
Web service: web
Gateway: zuul
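
For reference, the registry module in a setup like this is typically just a Spring Boot application marked with @EnableEurekaServer; the other modules (server, web, zuul) then register with it. The sketch below only illustrates that entry point — the package name, class name and configuration are assumptions, not code from this repository.

```
// Hypothetical entry point for the eureka registry module (all names here are illustrative).
package com.example.eureka;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.netflix.eureka.server.EnableEurekaServer;

@SpringBootApplication
@EnableEurekaServer // makes this Spring Boot app the registry that the server/web/zuul modules register with
public class EurekaServerApplication {
    public static void main(String[] args) {
        SpringApplication.run(EurekaServerApplication.class, args);
    }
}
```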
<file_sep>package com.example.wyyserver.apis;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
@FeignClient(name = "wyyserver-service", contextId = "TestServicesApis")
public interface TestServicesApis {
@RequestMapping("/test/get")
public String get(@RequestParam("name") String name);
}
| 9a8441df29fd46ccada68c5e66a305fd9fc53044 | [
"Markdown",
"Java"
] | 2 | Markdown | wyychuxia/springcloud-7 | d517ba30d66b9528ecb0633b72244b38e7650b38 | 264a05b42006c9db334312228c6bf1834dccb78a | |
refs/heads/master | <file_sep># occ-template-construction
<file_sep><?php
if ($_POST["submit"]) {
if (!$_POST['name']) {
$error = "<br />Please enter your name";
}
if (!$_POST['email']) {
$error .= "<br />Please enter your email address";
}
if (!$_POST['phone']) {
$error .= "<br /> Please enter your phone number";
}
if (!$_POST['city-name']) {
$error .= "<br /> Please select your city";
}
if ($_POST['email'] != "" and !filter_var(
$_POST['email'],
FILTER_VALIDATE_EMAIL
)) {
$error .= "<br />Please enter a valid email address";
}
if ($error) {
$result = '<div class="alert alert-danger"><strong>There were error(s)
in your form:</strong>' . $error . '</div>';
} else {
/* THE EMAIL WHERE YOU WANT TO RECIEVE THE CONTACT MESSAGES */
if (mail(
"<EMAIL>",
"Message from Hardwood Decking Page",
"Name: " . $_POST['name'] . "
Email: " . $_POST['email'] . "
Phone: " . $_POST['phone'] . "
City: " . $_POST['city-name']
)) {
$result = '<div class="alert alert-success"> <strong> Thank
you!</strong> We\'ll get back to you shortly.</div>';
} else {
$result = '<div class="alert alert-danger">Sorry, there was
an error sending your message. Please try again later.</div>';
}
}
}
?>
| 181baa3194fb970e383752c2aedccc9f2914404a | [
"Markdown",
"PHP"
] | 2 | Markdown | orangeCircleCode/occ-template-construction | 795acab1866711dcaef5e372dec06872a57163bc | ad7ede8f6ada0fc8748b5894138974e4951ba66e | |
refs/heads/master | <file_sep>def my_collect(x)
if block_given?
    i = 0
new_array = []
while i < x.length
new_array << yield(x[i])
i += 1
end
new_array
end
end
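# Example usage (illustrative values): my_collect([1, 2, 3]) { |n| n * 2 } #=> [2, 4, 6]
# Without a block, my_collect returns nil because the block_given? branch is skipped.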
| caea62c8b8d48a56cb9a8ea4ccddde12ffa69a5f | [
"Ruby"
] | 1 | Ruby | hengstrom25/my-collect-v-000 | 952dbbdf762f9bf6eb3239b85cce8023d19df0de | 889580b177cffa9fe868dfd0cfe75d25f4bc61fe | |
refs/heads/master | <file_sep>#!/bin/bash
TYPE_IGNORE=1
TYPE_LINK=2
TYPE_SPECIAL=3
ignore=( el utils backups )
special=( terminalrc sshconfig )
specialpath=( ~/.config/xfce4/terminal/terminalrc ~/.ssh/config )
DIR=~/.dotfiles
cd $DIR
for filename in *
do
installpath=~/.${filename}
filetype=TYPE_LINK
specialindex=0
# check if the file has a special installation path
for specialname in ${special[@]}
do
if [[ $filename == $specialname ]]
then
filetype=TYPE_SPECIAL
installpath=${specialpath[specialindex]}
break
fi
true $((specialindex++))
done
# check if the file should be ignored
if ! [[ $filetype == TYPE_SPECIAL ]]
then
for ignorename in ${ignore[@]}
do
if [[ $filename == $ignorename ]]
then
filetype=TYPE_IGNORE
fi
done
fi
# if you shouldn't ignore, and it's not already linked
if [ $filetype != TYPE_IGNORE -a ! -L $installpath ]
then
# move old versions moved to backup dir
if [ -e $installpath ]
then
echo ${filename} moved to ${DIR}/backups/${filename}
mv $installpath $DIR/backups/$filename
fi
# create the link
        echo new link $installpath to ${DIR}/${filename}
ln -s $DIR/$filename $installpath
fi
done
# update the submodules
git submodule update --init
# source bashrc
source ~/.bashrc
# to add new submodules use git submodule add [repo] [path to folder]
<file_sep>#!/bin/bash
# Linux startup script for disabling touchpad accel
# find devices with xinput list
# Allow computer to settle down
sleep 5
xinput set-prop 'SynPS/2 Synaptics TouchPad' 'Device Accel Profile' -1
xinput set-prop 'SynPS/2 Synaptics TouchPad' 'Device Accel Constant Deceleration' 1
## Yoga 2 Pro
xinput set-prop "SynPS/2 Synaptics TouchPad" "Device Accel Velocity Scaling" 8
xinput set-prop "SynPS/2 Synaptics TouchPad" "Synaptics Finger" 35 45 0
xinput set-prop "SynPS/2 Synaptics TouchPad" "Synaptics Coasting Speed" 5 15
xinput set-prop "SynPS/2 Synaptics TouchPad" "Synaptics Tap Time" 120
xinput set-prop "SynPS/2 Synaptics TouchPad" "Synaptics Tap Move" 300
xinput set-prop "SynPS/2 Synaptics TouchPad" "Synaptics Noise Cancellation" 20 20
synclient FingerHigh=46
synclient FingerLow=46
<file_sep>utils
=====
Various scripts for good times.<file_sep>#!/bin/sh
pulseaudio &
echo ****pw**** | sudo -S redshift -l 30.2848:-97.7460 &<file_sep>export JAVA_HOME=/home/kyeh/Programs/jdk1.8.0_40
alias java='/usr/lib/jvm/jdk1.8.0_40/bin/java'
shopt -s nocaseglob # Case-insensitive globbing (pathname expansion)
shopt -s cdspell # Auto-correct when using cd
shopt -s dotglob
shopt -s expand_aliases
shopt -s extglob
# Enable some Bash 4 features when possible:
# * `autocd`, e.g. `**/qux` will enter `./foo/bar/baz/qux`
# * Recursive globbing, e.g. `echo **/*.txt`
for option in autocd globstar; do
shopt -s "$option" 2> /dev/null
done
# Add tab completion for SSH hostnames based on ~/.ssh/config, ignoring wildcards
[ -e "$HOME/.ssh/config" ] && complete -o "default" -o "nospace" -W "$(grep "^Host" ~/.ssh/config | grep -v "[?*]" | cut -d " " -f2 | tr ' ' '\n')" scp sftp ssh
# Better color schemes
export PS1='\[\033[0;35m\]\h\[\033[0;33m\] \W\[\033[00m\]: '
export TERM=xterm-256color
export LSCOLORS="BaGxcxdxCxegedabagacad"
export GREP_OPTIONS='--color=auto -n'
export editor=emacs
export LD_LIBRARY_PATH=:/opt/OGRE-1.8/lib:$HOME/cs/git/Fractal-Evolution/C-Genetics/libs/AntTweakBar/lib
export PKG_CONFIG_PATH=:/opt/OGRE-1.8/lib/pkgconfig
export PATH=$PATH:$HOME/bin:$HOME/Programs/spark-1.2.0-bin-hadoop2.4/bin:$HOME/.rvm/bin:$HOME/bin/gibo:$HOME/Programs/android-studio/bin:$HOME/Programs/genymotion:$HOME/Programs/spark-1.2.0-bin-hadoop2.4/bin:$HOME/Programs/idea-IC-139.224.1/bin
export GOPATH=$HOME/gocode
export SPARK_HOME=$HOME/Programs/spark-1.2.0-bin-hadoop2.4
export GEOTRELLIS_HOME=$HOME/cs/git/geotrellis/spark/target/scala-2.10
# Add RVM (Ruby Virt Machine) to PATH for scripting
# Add gibo path
# Add Android-Studio, Genymotion to path
alias ls='ls --group-directories-first --time-style=+"[%m/%d/%Y %H:%M]" --color'
alias la='ls -a --group-directories-first --time-style=+"[%m/%d/%Y %H:%M]" --color'
alias ll='ls -lh --group-directories-first --time-style=+"[%m/%d/%Y %H:%M]" --color'
alias lla='ls -alh --group-directories-first --time-style=+"[%m/%d/%Y %H:%M]" --color'
alias lal='lla'
alias emacs='emacs -nw'
alias ..='cd ..'
alias ...='cd ../..'
alias ....='cd ../../..'
alias pwd="pwda"
alias psa="ps aux"
alias fgls='jobs'
alias fgka='for x in `jobs -p`; do kill -9 $x; done' # KILL ALL JOBS!!!!
alias xcopy='xclip -sel clip < '
alias df='df -h' # human-readable filesizes
alias free='free -m' # MB filesies
#######################
# _________________
# (__) / \
# (uu) | Keep on moving... |
# /-------\/ --'\_________________/
# / | ||
# * ||----||
# ^^ ^^
#######################
alias yay="echo 'yayyyyyyyy :)'"
alias yayy="echo 'yayyyyyyyyyyyyyy :)'"
alias yayyy="echo 'yayyyyyyyyyyyyyyyyyyyyyy :)'"
alias yayyyy="echo 'yayyyyyyyyyyyyyyyyyyyyyyyy!!!!!!!! :)'"
alias yayyyyy="echo 'yayyyyyyyyyyyyyyyyyyyyyyyyyyyy!!!!!!!!! :)'"
alias yayyyyyy="echo 'YAYYYYYYYYYYYYYYYYYYYYYYYYYYYY!!!!!!!!!!!! :)'"
#######################
### Deb-based Linux ###
#######################
alias apt-get='sudo apt-get'
alias apt-install='apt-get install'
alias apt-update='apt-get update'
alias apt-remove='apt-get --purge remove'
# apt-find is defined as a function below
alias apt-installed="dpkg -l | grep" # Find installed packages relating to query
alias apt-files="dpkg -L" # Find files provided by installed package
alias apt-ownedby="dpkg -S" # Find the package that owns the given file
########################
### Arch-based Linux ###
########################
alias pacs="sudo pacman -S" # Install
alias pacsy="sudo pacman -Sy" # Install with fresh database
alias pacsyu="sudo pacman -Syu" # Update / Upgrade
alias pacrm="sudo pacman -R" # Remove
alias pacf="sudo pacman -Ss" # Search for package
alias pac-get="pacsy"
alias pac-update="pacsyu"
alias pac-remove="pacrm"
alias pac-find="pacf"
alias ya="yaourt -Sy" # Install AUR package with fresh database
alias yas="yaourt -Ss" # Search AUR packages
############################
# Directory History
DIRSTACKSIZE=8
alias dirs='dirs -v'
alias dh='dirs -v'
alias bd='cd $OLDPWD'
alias back='bd'
alias push='pushd'
alias pop='popd'
function cd() {
if [ $# -eq 0 ]; then
DIR="${HOME}"
else
DIR="$1"
fi
builtin pushd "${DIR}" > /dev/null
}
shopt -s cmdhist # Combine multiline commands into one in history
shopt -s histappend
shopt -s hostcomplete
export HISTSIZE=10000
export HISTCONTROL=ignoredups # Ignore dups, bare ls and builtin cmds
export HISTCONTROL=ignoreboth
export HISTIGNORE="&:ls:[bf]g:exit"
alias h="history | grep" # Run desired command with !<history num>
alias f="find . | grep"
alias dopen='xdg-open'
alias odoc='dopen'
alias gitlog="git log --graph --pretty=format:'%C(yellow)%h%Creset -%Cred%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an>%Creset' --abbrev-commit --date=relative"
alias gitrma="!sh -c \"git diff -z --name-only --diff-filter=D | xargs -0 git rm\""
alias gitunstage="git reset HEAD"
alias gitinfo="git remote show origin"
alias gitbranch="git branch 2> /dev/null | sed -e \"/^[^*]/d\" -e \"s/* \(.*\)/(git:\1)/\""
alias git-forcetohead="git fetch --all; git reset --hard origin/master"
alias git-unchanged="git update-index --assume-unchanged"
# Allow tab completion to propagate through sudo commands
complete -cf sudo
#################
### FUNCTIONS ###
#################
# Trim animations. If each frame occupies a different-sized portion, this will ruin offsets.
trimanim () {
    bgcolor="null"
    # Only use a background color if a third argument was actually given
    if [[ -n "$3" ]]; then
        bgcolor=${3}
    fi
    if [ $# -gt 1 ]; then
        convert $1 -trim +repage -layers TrimBounds -set dispose background -coalesce -scene 1 $2
        if [ "$bgcolor" != "null" ]; then
            convert $2 -alpha off -background ${bgcolor} $2
        fi
    fi
}
# kill job by job #
fgkill () {
jobfindcmd="sed -n -e "$1"p"
echo $jobfindcmd
job=`jobs -p | ${jobfindcmd}`
kill -9 $job
}
# open nautilus in given dir (default working dir)
opendir () {
if [ $# -gt 0 ]; then
for dir in $@
do
if [ -d "$dir" ]; then
thunar $dir >/dev/null 2>/dev/null &
# nautilus $dir >/dev/null 2>/dev/null & # GNOME default file manager
fi
done
else
thunar "$PWD" & > /dev/null
# nautilus "$PWD" & > /dev/null
fi
}
# show diff from nth last HEAD
gitdiff () {
distance=0
if [ $# -gt 0 ]; then
distance=$1
fi
git diff HEAD~$distance
}
# find and delete all matching regex
nuke () {
if [ $# -gt 0 ]; then
for reg in $@
do
find -name "$reg" -exec rm -f {} +
done
fi
}
# pwd enhanced function to show linked vs. physical dir differences.
pwda () {
link=$(\pwd -L)
physical=$(\pwd -P)
if [ $link == $physical ]; then
echo ${link}
else
echo "Linked: "${link}
echo "Physical: "${physical}
fi
}
# up - goes up n directory levels
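# e.g. `up 3` is equivalent to `cd ../../..`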
up(){
local d=""
limit=$1
for ((i=1 ; i <= limit ; i++))
do
d=$d/..
done
d=$(echo $d | sed 's/^\///')
if [ -z "$d" ]; then
d=..
fi
cd $d
}
# Make all lines in a file unique
mkuniq () {
if [ $# -eq 0 ]; then
echo "Please provide a file to uniqify."
else
for var in $@
do
if [ -f $var ]; then
                perl -ne 'print unless $seen{$_}++' $var >> $var.tmpmkuniq
rm $var
cat $var.tmpmkuniq >> $var
rm $var.tmpmkuniq
else
echo $var is not a file.
fi
done
fi
}
# Create .gitignore file for given languages
mkgibo () {
for lang in $@
do
gibo $lang TextMate >> .gitignore
done
echo "*~" >> .gitignore
echo "*#" >> .gitignore
mkuniq .gitignore
}
# Self-explanatory, dude
flac2mp3 () {
find "$1" -type f -name "*.flac" -print0 | while read -d $'\0' song
do
output=${song%.flac}.mp3
avconv -i "$song" -metadata album="$1" -b 192k "$output"
done
}
convert-anim-skip () {
# This script will take an animated GIF and delete every other frame
# Accepts three parameters: skip step, input file and output file
# Usage: convert-anim-skip <skipStep> input.gif output.gif
# To check current delay, run identify -verbose output.gif | grep Delay
if [ $# -eq 3 ]; then
cp $2 $3
numframes=`convert $3 -format "%[scenes]" info: | tail -n 1`
# delete frames
gifsicle "$2" --unoptimize $(seq -f "#%g" 0 $1 $numframes) -O2 -o "$3"
fi
}
# All-in-one extractor
extract () {
if [ -f $1 ] ; then
case $1 in
*.tar.bz2)tar xvjf $1 && cd $(basename "$1" .tar.bz2) ;;
*.tar.gz)tar xvzf $1 && cd $(basename "$1" .tar.gz) ;;
*.tar.xz)tar Jxvf $1 && cd $(basename "$1" .tar.xz) ;;
        *.bz2)bunzip2 $1 && cd $(basename "$1" .bz2) ;;
*.rar)unrar x $1 && cd $(basename "$1" .rar) ;;
*.gz)gunzip $1 && cd $(basename "$1" .gz) ;;
*.tar)tar xvf $1 && cd $(basename "$1" .tar) ;;
*.tbz2)tar xvjf $1 && cd $(basename "$1" .tbz2) ;;
*.tgz)tar xvzf $1 && cd $(basename "$1" .tgz) ;;
*.zip)unzip $1 && cd $(basename "$1" .zip) ;;
*.Z)uncompress $1 && cd $(basename "$1" .Z) ;;
*.7z)7z x $1 && cd $(basename "$1" .7z) ;;
*)echo "don't know how to extract '$1'..." ;;
esac
else
echo "'$1' is not a valid file!"
fi
}
# Run command detached from terminal
detach () {
if [ $# -gt 0 ]; then
$@ </dev/null &>/dev/null &
fi
}
# Mega-fancy package-finder
apt-find () {
tmp_list=/tmp/apt_tmp_list.txt
tmp_alist=/tmp/apt_tmp_alist.txt
if [ -w ${tmp_list} ]; then
rm -f ${tmp_list}
fi
if [ -w ${tmp_alist} ]; then
rm -f ${tmp_alist}
fi
if [ -z ${1} ]; then
echo "Please provide a package to search for."
return
fi
for i in `apt-cache search ${1} | awk -F " - " '{ print $1 }'`
do
list=("${list[@]}" "${i}")
done
dpkg-query -W -f='${Package}\t${Version}\n${Description}\n\n' ${list[@]} >${tmp_list} 2>${tmp_alist}
clear
echo "############# Installed #################"
echo
grep -v "^ " ${tmp_list} | awk -F: '{printf "\033[1;32m"$1"\033[0m: "$2"\n"}'
echo
echo "############# Available #################"
echo
cat ${tmp_alist} | sed "s/dpkg-query:\ no\ packages\ found\ matching\ //g" | grep -v "No packages" | awk -F: '{printf "\033[1;31m"$1"\033[0m: "$2"\n"}'
echo
}
export -f fgkill
export -f opendir
export -f gitdiff
export -f nuke
export -f pwda
export -f up
export -f mkuniq
export -f mkgibo
export -f flac2mp3
export -f extract
export -f apt-find
export -f convert-anim-skip
export -f cd
export -f detach
### Added by the Heroku Toolbelt
export PATH="/usr/local/heroku/bin:$PATH"
<file_sep>Note: minor changes have been made to solarized-definitions.el, specifically:
(base03 "#141414" "#141414" "#141414" "brightblack" "black") ;; Darker background
(blue "#268bd2" "#2075c7" "#0087ff" "pink" "pink") ;; 8, 16-bit is now pink<file_sep>ssh-keygen -t rsa -C "<EMAIL>"
ssh-add id_rsa
# Afterwards, copy id_rsa.pub contents and add to your github account.
# Test with ssh -T [email protected]
<file_sep>sudo apt-get install pip git subversion emacs-24.3 redshift imagemagick ruby rubygems perl node libglu1-mesa-dev freeglut3-dev mesa-common-dev libglew-dev libpng-dev xclip haskell-platform dropbox autojump #multi-pipelight rails
sudo pip install howdoi
sudo gem install bropages
sudo npm -g install jade html2jade
# Android Studio / GenyMotion
# intellij
# CCCP or Similar Codec Pack
# gibo
# jdk (1.7+)
# rust
# golang
<file_sep>export PS1='\[\033[0;35m\]\h\[\033[0;33m\] \W\[\033[00m\]: '
export LSCOLORS="BaGxcxdxCxegedabagacad"
alias ls='ls --color'
export editor=emacs
alias emacs='emacs -nw'
alias ..='cd ..'
alias ...='cd ../..'
alias fgls='jobs'
alias gitlog="git log --graph --pretty=format:'%C(yellow)%h%Creset -%Cred%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an>%Creset' --abbrev-commit --date=relative"
alias gitrma="!sh -c \"git diff -z --name-only --diff-filter=D | xargs -0 git rm\""
alias gitunstage="git reset HEAD"
PATH=$PATH:$HOME/.rvm/bin:$HOME/UserProgs/android-studio/bin:$HOME/UserProgs/genymotion
# Add RVM (Ruby Virt Machine) to PATH for scripting
# Add Android-Studio, Genymotion to path
extract () {
if [ -f $1 ] ; then
case $1 in
*.tar.bz2)tar xvjf $1 && cd $(basename "$1" .tar.bz2) ;;
*.tar.gz)tar xvzf $1 && cd $(basename "$1" .tar.gz) ;;
*.tar.xz)tar Jxvf $1 && cd $(basename "$1" .tar.xz) ;;
        *.bz2)bunzip2 $1 && cd $(basename "$1" .bz2) ;;
*.rar)unrar x $1 && cd $(basename "$1" .rar) ;;
*.gz)gunzip $1 && cd $(basename "$1" .gz) ;;
*.tar)tar xvf $1 && cd $(basename "$1" .tar) ;;
*.tbz2)tar xvjf $1 && cd $(basename "$1" .tbz2) ;;
*.tgz)tar xvzf $1 && cd $(basename "$1" .tgz) ;;
*.zip)unzip $1 && cd $(basename "$1" .zip) ;;
*.Z)uncompress $1 && cd $(basename "$1" .Z) ;;
*.7z)7z x $1 && cd $(basename "$1" .7z) ;;
*)echo "don't know how to extract '$1'..." ;;
esac
else
echo "'$1' is not a valid file!"
fi
}
export -f extract<file_sep>#!/bin/bash
# linux to windows
#sed -i 's/\.\.\/\.\.\/\.\.\/\.\.\/\.\.\/\.\.\/media\/kyeh\/FCA08950A0891276/C:/g' *.m3u
# windows to linux
#sed -i 's/C:/\.\.\/\.\.\/\.\.\/\.\.\/\.\.\/\.\.\/media\/kyeh\/FCA08950A0891276/g' *.m3u
# itunes removes periods from filenames, so fix that.
# itunes and banshee hate each other, so I recommend
# 1) Open itunes and allow it to delete periods by re-adding all songs and deleting dups
# 2) Use the following sed command to fix the playlists
# 3) clear banshee and re-add all files.
# 4) Make sure banshee doesn't start adding hazardous periods in filenames.
sed -i 's/\([0-9]\{2\}\)\.\ /\1\ /g' *.m3u
| f59bdf570eeda7e21676dd562e73fc64bba828e0 | [
"Markdown",
"Shell"
] | 10 | Shell | kyeah/.dotfiles | 834e4808008914aa83c106d8204f83b85ad7b0c5 | 0148d13b4d7ad73f24572e6c50f87189df7b515f | |
refs/heads/master | <repo_name>kornik1977/ProgrammingAssignment2<file_sep>/cachematrix.R
## Put comments here that give an overall description of what your
## functions do
#makeCacheMatrix() function stores getters and setters (as a list) of a "matrix" object"
#...so that its inverse can be cached
#cacheSolve() function calculates and returns inverse of a matrix created by first function and stores it in a cache
#... or gets it from a cache if inverse was already calculated and stored
## Write a short comment describing this function
#1. The function takes one default argument of class matrix
#2. variable i = NULL (empty variable) is defined
#...(it will show if the right inverse is stored in cache or not)
#3. there are four nested functions in the list:
#a. set(y) sets new value of a function argument, and resets the "i" value
#... the <<- operator changes values initialized in the parent environment
#b. get() gets us the matrix which was set just before
#c. setinverse() calculates inverse of that matrix
#d. and getinverse() returns the value of the calculated inverse
makeCacheMatrix <- function(x = matrix()) {
i <- NULL
set <- function(y) {
x <<- y
i <<- NULL
}
get <- function() x
setInverse <- function(inverse) i <<- inverse
getInverse <- function() i
list(set=set, get=get, setInverse=setInverse, getInverse=getInverse)
}
## Write a short comment describing this function
#1. looks into cache if inverse (i) of a matrix created by makeCacheMatrix() is already calculated
#a. if (!is.null(i)) it writes a message and returns the value of i from the cache
#b. (else) creates a new inverse for the matrix (using solve() function) and returns it
cacheSolve <- function(x, ...) {
i <- x$getInverse()
if(!is.null(i)) {
message("getting cached data")
return(i)
}
data <- x$get()
i <- solve(data,...)
x$setInverse(i)
i
}
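## Quick usage sketch (illustrative values; any invertible matrix works):
## the first cacheSolve() call computes the inverse, the second returns the cached copy.
m <- makeCacheMatrix(matrix(c(2, 0, 0, 2), nrow = 2, ncol = 2))
cacheSolve(m)   # computes the inverse via solve() and stores it
cacheSolve(m)   # prints "getting cached data" and returns the cached inverse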
| 6248b7721af551e1709e0fd48803e9d6bd86912e | [
"R"
] | 1 | R | kornik1977/ProgrammingAssignment2 | c017fad936fa05a67d0ddd16765e86e8d2b55474 | 8f7b541c03be1c3f20722f5c52be9c151767340b | |
refs/heads/master | <file_sep># ChordPro2HTML
## Basic Usage
Convert ChordPro lyrics into HTML-based songsheets.
## Syntax for writing songsheets
Example: [C]Twinkle, twinkle [F]little [C]star.
Translates to:
C F C
Twinkle, twinkle little star.
```python
from chordpro2html import Parser

p = Parser()
p.to_html("""[C]Twinkle, twinkle [F]little [C]star.
[F]How I [C]wonder [G7]what you [C]are.
[C]Up [F]above the [C]world so [G7]high,
[C]Like a [F]diamond [C]in the [G7]sky.
[C]Twinkle, twinkle [F]little [C]star.
[F]How I [C]wonder [G7]what you [C]are.""")
>>> HTML
```
## Limitations
Currently only the chord notation is supported when importing ChordPro text. It is advisable to remove any other directives before processing with ChordPro2HTML.
## Planned
- export as HTML file
- export as PDF file
## Motivation
I'm planning on creating a public website where users can submit their own songsheets and add various translations. Why? Because no good website exists for songsheets for J-POP/K-POP songs that supports a smooth transition between hiragana, kanji, romaji and hangul. ChordPro2HTML is the base parser needed for that.<file_sep>TWINKLE = """[C]Twinkle, twinkle [F]little [C]star.
[F]How I [C]wonder [G7]what you [C]are.
[C]Up [F]above the [C]world so [G7]high,
[C]Like a [F]diamond [C]in the [G7]sky.
[C]Twinkle, twinkle [F]little [C]star.
[F]How I [C]wonder [G7]what you [C]are.
"""
LEAVES = """The falling [Gm7 C7]leaves
Drift by the [Fmaj7 Bbmaj7]window
The autumn [Em7b5 A7]leaves
Of red and [Dm7 Dm6]gold
I see your [Gm7 C7]lips
The summer [Fmaj7 Bbmaj7]kisses
The sunburned [Em7b5 A7]hands
I used to [Dm7 Dm6]hold
Since you [Em7b5]went [A7]away
The days grow [Dm7 Dm6]long
And soon I'll [Gm7 C7]hear
Old winter's [Fmaj7 Bbmaj7]song
But I [Em7b5]miss you most of [A7]all my [Dm7]darling
When [Em7b5]autumn [A7]leaves
Start to [Dm7]fall
"""
TENSHI = """
shiroi [Ab]hane ga hora [Bb]mieru desho?
[Gm]anata wo mitsu[Cm]mete'ru me [Ab]mo
amai koe mo yasashii te [Bb]mo [Eb Eb Eb Eb]
kimi ga [Ab]ai de [Gm]nayamu [Fm]koto ga aru [Eb]nara
[Ab]Pinchi [Gm]sain [Fm]dashite [Bb]matte mite [Eb]yo
koi wa [Ab]DOKI DOKI [Bb]suru kedo
ai ga [Gm]LOVE LOVE [Cm]suru nara
motto [Ab]GAN GAN [Bb]ikou yo
kitto [Gm]CHANSU wa [Cm]aru kara
ai [Ab]no tenshi [Bb]wa hohoende'ru [Eb Eb Eb Eb]yo
hon no sukoshi yuuki dashitara
anata wa kawareru hazu yo
jishin motte ganbatte yo
tsurai koto datte norikoete
anata ni wa dekiru hazu yo
hitori ja nai no yo
hito to aishiaeru koto ga dekitara
sore wa kitto tsuyoku nareru koto yo
koi ni UKI UKI shite'ru ne?
HATO RUN RUN shite'ru yo
anata KIRA KIRA shite kita
kare ni ATAKKU shiyou yo
ai no tenshi mo mimamotte'ru yo
kimi ga ai de nayamu koto ga aru nara
Pinchi sain dashite matte mite yo
koi wa DOKI DOKI suru kedo
ai ga LOVE LOVE suru nara
motto GAN GAN ikou yo
kitto CHANSU wa aru kara
ai no tenshi wa hohoende'ru yo
"""<file_sep>class Parser:
def __init__(self):
pass
def to_html(self, input):
# TODO: handle songs that start with lyrics but no chords (e. g. "I love you. do you love me to?[Am7]")
lines = input.splitlines()
result = ''
for line in lines:
n = 2
tokens = line.split('[')
pairs = []
for token in tokens:
temp = token.split(']')
pairs.append(tuple(temp))
print(pairs[1:])
html = ''
html += '<ruby>'
for pair in pairs[1:]:
html += f'<ruby style="margin-right: 4px;">{pair[1][:1]}<rt>{pair[0]}</rt>{pair[1][1:]}</ruby>'
            html += '</ruby><br>'  # close the line-level <ruby> wrapper opened above
result += (html)
print(result)
return result
# <ruby>FIRST CHARACTER<rt>CHORD</rt>REST OF TOKEN
if __name__ == "__main__":
parser = Parser()
parser.to_html('[G7]hello [C]world. [G7]hello [C]world.')<file_sep>from flask import Flask
import sys
sys.path.append('../chordpro2html')
from chordpro2html import Parser
from songs import TWINKLE, LEAVES, TENSHI
p = Parser()
song = TWINKLE
from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello():
return p.to_html(LEAVES) + p.to_html(TWINKLE) + p.to_html(TENSHI)
if __name__ == '__main__':
app.run(debug=True)<file_sep>from setuptools import setup
with open('README.md', 'r') as file:
long_description = file.read()
setup(name='ChordPro2HTML',
version='0.1.0',
description='Add chords to lyrics and export an HTML document out of it!',
url='https://github.com/Zenahr/ChordPro2HTML',
author='<NAME>',
author_email='<EMAIL>',
license='MIT',
packages=['chordpro2html'],
keywords=['chordpro',
'lyrics',
'chords',
'music'
],
python_requires='>=3.8.6',
py_modules=["quicksample"],
install_requires=[],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.8',
'Topic :: Text Processing',
'Topic :: Utilities'
],
long_description=long_description,
long_description_content_type='text/markdown',
zip_safe=False)
<file_sep>import sys
sys.path.append('../chordpro2html')
from chordpro2html import Parser
from songs import TWINKLE
p = Parser()
html = p.to_html(TWINKLE)
print(html) | f675d8b548a4eaa6f1b4f7695bed587e8198287b | [
"Markdown",
"Python"
] | 6 | Markdown | Zenahr/ChordPro2HTML | 432ed0bd67c562074824165cdabe7301c347837e | e09d6354417ea331c3ef76c7c40c2c9478cf6231 | |
refs/heads/master | <repo_name>patriciallull/Flask_NLP<file_sep>/app.py
import flask
import sys
import argparse
import sklearn
from flask import Flask, request, abort
from ie_nlp_utils.tokenisation import tokenise as tokenise_sentence
app = Flask(__name__)
# FOURTH FUNCTION - using the tokenise function that was created for the exercise
# @app.route("/tokenise")
# def tokenise():
# args = dict(request.args) # store the Arguments
# if sentence := args.get("sentence"):
# return {
# "tokens": tokenise_sentence(sentence),
# "sentence": sentence,
# }
# else:
# abort(400)
# THIRD function - example:
@app.route("/greet/<name>")
def greet(name):
return f"Howdy, {name}!"
# SECOND FUNCTION - return dict; listens on the /api url
@app.route("/api/<int:version>")
def api(version):
args = dict(request.args)
return {
"python-version": sys.version[0:5],
"status": "OK",
"name": args.get("name", "<NOT GIVEN>"),
"version": version,
"scikit-learn-version": sklearn.__version__,
"flask-version": flask.__version__,
}
# FIRST FUNCTION - returns text
@app.route("/")
def hello():
args = dict(request.args)
if name := args.get("name"):
return f"Hello, {name}!"
# to introduce an error for the debugger: 'name_' instead of 'name'
else:
return f"Hello World!"
# to run from command line & add a parameter to turn debugger on:
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Sample Flask application")
parser.add_argument("-v", "--debug", action="store_true", help="Enable debug mode")
args = parser.parse_args()
app.run(debug=args.debug)
| ac465ed9cad2f86712ffe76af666f145da9cc440 | [
"Python"
] | 1 | Python | patriciallull/Flask_NLP | c38149b74113ab3610a6c7067bda4c47a33f9c28 | 69f794d65564b571eeb2b3f6d2e10df5d42c729c | |
refs/heads/main | <file_sep>/* =======================================================================================
Template : Agroly
Create : Sept. 5th 2020
========================================================================================== */
/* ===============================================
Function Call - Call Function Ones
=============================================== */
jQuery(document).ready(function () {
"use strict";
// here all ready functions
loader();
scroll_top();
magnific_popup();
accordion();
});
/* ===============================================
1. PRELOADER
=============================================== */
function loader() {
"use strict";
setTimeout(function () {
$('#loader-wrapper').fadeOut();
}, 1500);
};
/* ===============================================
2. SCROLL TOP
=============================================== */
function scroll_top() {
"use strict";
var offset = 300,
offset_opacity = 1200,
scroll_top_duration = 700,
$back_to_top = $('.cd-top');
$(window).scroll(function () {
($(this).scrollTop() > offset) ? $back_to_top.addClass('cd-is-visible'): $back_to_top.removeClass('cd-is-visible cd-fade-out');
if ($(this).scrollTop() > offset_opacity) {
$back_to_top.addClass('cd-fade-out');
}
});
$back_to_top.on('click', function (event) {
event.preventDefault();
$('body,html').animate({
scrollTop: 0,
}, scroll_top_duration);
});
};
/* ===============================================
3. COUNTER
=============================================== */
$('.counter').each(function () {
var $this = $(this),
countTo = $this.attr('data-count');
$({
countNum: $this.text()
}).animate({
countNum: countTo
},
{
duration: 8000,
easing: 'linear',
step: function () {
$this.text(Math.floor(this.countNum));
},
complete: function () {
$this.text(this.countNum);
//alert('finished');
}
});
});
/* ===============================================
4. MAGNIFIC POPUP GALLERY
=============================================== */
function magnific_popup() {
$('.image-popup-vertical-fit').magnificPopup({
type: 'image',
mainClass: 'mfp-with-zoom',
gallery: {
enabled: true
},
zoom: {
enabled: true,
duration: 300, // duration of the effect, in milliseconds
easing: 'ease-in-out', // CSS transition easing function
opener: function (openerElement) {
return openerElement.is('img') ? openerElement : openerElement.find('img');
}
}
});
};
/* ===============================================
5. YOUTUBE POPUP
=============================================== */
function video_popup() {
var $btnLoadMore = $(
'<div class="btn-wrapper text-center"><a href="#" class="btn load-more">Load More</a></div>'
);
var items = $(".youtube-popup[data-listnum]");
var count = items.length;
var slice = 2;
var current = 0;
if (items.length > slice) {
//bind load more event
$btnLoadMore.on("click", function (e) {
e.preventDefault();
loadMoreNews();
});
//append load more button
items.closest(".salvattore-grid").after($btnLoadMore);
}
function getItem(listnum) {
return items
.filter(function (index) {
if ($(this).attr("data-listnum") == listnum) {
return true;
}
});
}
function loadMoreNews() {
var end = current + slice;
if (end >= count) {
end = count;
$btnLoadMore.hide();
}
while (current < end) {
var listnum = current + 1; //data-listnum : 1-based
var item = getItem(listnum);
if (item) {
item.fadeIn();
}
current++;
}
}
//youtube popup
$(".popup-youtube").magnificPopup({
type: "iframe",
removalDelay: 160,
preloader: false,
fixedContentPos: false,
iframe: {
markup: '<div class="mfp-iframe-scaler">' +
'<div class="mfp-close"></div>' +
'<iframe class="mfp-iframe" frameborder="0" allowfullscreen></iframe>' +
"</div>",
patterns: {
youtube: {
index: "youtube.com/",
id: "v=",
src: "//www.youtube.com/embed/%id%?autoplay=1&rel=0&showinfo=0"
}
},
srcAction: "iframe_src"
}
});
//init load
loadMoreNews();
};
/* ===============================================
6. FILTER GALLERY
=============================================== */
$(function () {
var $margin = $("#kehl-grid").isotope({
itemSelector: ".grid-box",
// Different transition duration
transitionDuration: "0.5s"
});
// on filter button click
$(".filter-container li").click(function (e) {
var $this = $(this);
// Prevent default behaviour
e.preventDefault();
$('.filter li').removeClass('active');
$this.addClass('active');
// Get the filter data attribute from the button
var $filter = $this.attr("data-filter");
// filter
$margin.isotope({
filter: $filter
});
});
});
/* ===============================================
7. MASONRY GALLERY
=============================================== */
var $grid = $('.grid').imagesLoaded( function() {
$grid.masonry({
itemSelector: '.grid-box',
percentPosition: true,
columnWidth: '.grid-sizer'
});
});
/* ===============================================
8. FAQ ACCORDION
=============================================== */
function accordion() {};
$('.accordion > li:eq(0) a').addClass('active').next().slideDown();
$('.accordion a').click(function (j) {
var dropDown = $(this).closest('li').find('p');
$(this).closest('.accordion').find('p').not(dropDown).slideUp();
if ($(this).hasClass('active')) {
$(this).removeClass('active');
} else {
$(this).closest('.accordion').find('a.active').removeClass('active');
$(this).addClass('active');
}
dropDown.stop(false, true).slideToggle();
j.preventDefault();
});
(jQuery)
/* ===============================================
9. ANIMATION
=============================================== */
AOS.init({
duration: 1200,
})
/* ===============================================
10. VIDEO POPUP
=============================================== */
$('.popup-youtube, .popup-vimeo').magnificPopup({
type: 'iframe',
disableOn: 700,
mainClass: 'mfp-fade',
removalDelay: 160,
preloader: false,
fixedContentPos: false,
markup: '<div class="mfp-iframe-scaler">' +
'<div class="mfp-close"></div>' +
'<iframe class="mfp-iframe" frameborder="0" allowfullscreen></iframe>' +
'</div>', // HTML markup of popup, `mfp-close` will be replaced by the close button
iframe: {
patterns: {
youtube: {
index: 'youtube.com/',
id: 'v=',
src: 'https://www.youtube.com/embed/%id%?autoplay=1'
}
}
}
});
/* ===============================================
11. Imager hover Movement
=============================================== */
$(document).on('mousemove', function (e) {
$('.light').css({
left: e.pageX - 300,
top: e.pageY - 300 });
});
var el = $('.js-tilt-container');
el.on('mousemove', function (e) {var _$$offset =
$(this).offset(),left = _$$offset.left,top = _$$offset.top;
var cursPosX = e.pageX - left;
var cursPosY = e.pageY - top;
var cursFromCenterX = $(this).width() / 2 - cursPosX;
var cursFromCenterY = $(this).height() / 2 - cursPosY;
$(this).css('transform', 'perspective(500px) rotateX(' + cursFromCenterY / 40 + 'deg) rotateY(' + -(cursFromCenterX / 40) + 'deg) translateZ(10px)');
var invertedX = Math.sign(cursFromCenterX) > 0 ? -Math.abs(cursFromCenterX) : Math.abs(cursFromCenterX);
//Parallax transform on image
$(this).find('.js-perspective-neg').css('transform', 'translateY(' + cursFromCenterY / 10 + 'px) translateX(' + -(invertedX / 10) + 'px) scale(1.15)');
$(this).removeClass('leave');
});
el.on('mouseleave', function () {
$(this).addClass('leave');
});
/* ===============================================
12. NEWS CAROUSEL
=============================================== */
$('.news-carousel .owl-carousel').owlCarousel({
stagePadding: 0,
autoplay:true,
autoplayTimeout: 2500,
loop: true,
dots: false,
margin: 10,
nav: true,
navText: [
'<i class="fa fa-angle-left" aria-hidden="true"></i>',
'<i class="fa fa-angle-right" aria-hidden="true"></i>'
],
navContainer: '.news-carousel .custom-nav',
responsive: {
0: {
items: 1
},
700: {
items: 2
},
1050: {
items: 3
}
}
});
/* ===============================================
13. NAVBAR
=============================================== */
$(() => {
//On Scroll Functionality
$(window).scroll(() => {
var windowTop = $(window).scrollTop();
windowTop > 100 ? $('.navbar').addClass('navShadow') : $('.navbar').removeClass(
'navShadow');
windowTop > 100 ? $('ul').css('top', '100px') : $('ul').css('top', '160px');
});
//Click Logo To Scroll To Top
$('#logo').on('click', () => {
$('html,body').animate({
scrollTop: 0
}, 500);
});
//Smooth Scrolling Using Navigation Menu
$('a[href*="#"]').on('click', function (e) {
$('html,body').animate({
scrollTop: $($(this).attr('href')).offset().top - 100
}, 500);
e.preventDefault();
});
//Toggle Menu
$('#menu-toggle').on('click', () => {
$('#menu-toggle').toggleClass('closeMenu');
$('ul').toggleClass('showMenu');
$('li').on('click', () => {
$('ul').removeClass('showMenu');
$('#menu-toggle').removeClass('closeMenu');
});
});
});
/* ===============================================
14. TYPING
=============================================== */
let wrapper;
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));
async function writingAll(stringTarget, container) {
wrapper = document.querySelector('[' + container + ']');
const stringsContainer = document.getElementsByClassName(stringTarget);
while (wrapper) {
for (i = 0; i < stringsContainer.length; i++) {
const string = stringsContainer[i].textContent;
await write(string);
await sleep(1000);
await erase();
await sleep(1000);
};
}
};
async function write(text) {
let index = 0;
while (index < text.length) {
const timeout = 100;
await sleep(timeout);
index++;
wrapper.innerHTML = text.substring(0, index);
}
};
async function erase() {
while (wrapper.textContent.length) {
const timeout = 100;
await sleep(timeout);
wrapper.textContent = wrapper.textContent.substring(0, wrapper.textContent.length - 2);
}
};
writingAll('text-layer', 'data-text');
/* ===============================================
15. SKILLS
=============================================== */
"use strict"; // Start of use strict
$('.circular-progress').each(function () {
var Self = $(this);
var getID = Self.attr('id');
const QUARTER_R = Math.PI / 2;
const otherBall = document.getElementById(getID + '-other-ball');
const containerWidth = document.getElementById(getID).offsetWidth;
const strokeWidth = 3;
const strokeColor = '#e6a100';
const lineWidthInPixels = strokeWidth / 100 * containerWidth;
// Radius is from circle's exact center to the middle of the line
const radius = (containerWidth - lineWidthInPixels) / 2
const center = (containerWidth / 2);
var progressCount = Self.data('progress-count');
const circle = new ProgressBar.Circle('#' + getID, {
color: strokeColor,
easing: 'easeInOut',
duration: 1200,
strokeWidth: strokeWidth,
text: {
style: null, // manually style text
},
step: function (state, bar) {
const angleR = bar.value() * 2 * Math.PI - QUARTER_R;
const x = radius * Math.cos(angleR) + center;
const y = radius * Math.sin(angleR) + center;
otherBall.style.left = x + 'px';
otherBall.style.top = y + 'px';
}
});
circle.animate(progressCount); // Number from 0.0 to 1.0
});
<file_sep># Ariel Holman Portfolio
Hey there! Welcome to my portfolio! Take a look around and let me know if you have any questions.
## Built With
- Bootstrap
- HTML
- CSS
## Authors
<NAME>: <a href="https://www.linkedin.com/in/ariel-holman/">LinkedIn</a> - <a href="https://github.com/ArielHolman">GitHub</a>
| dce798f1a18ab9610ab58f8cfa20fb5c2e72d292 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | ArielHolman/Ariel-Holman-Portfolio | 48dbe8775543dfdb98c85b79cbcded1130d65395 | 735b8de4504a48e7afc2229b4b537cb5f1445aa2 |