// Copyright (c) ppy Pty Ltd <[email protected]>. Licensed under the MIT Licence. // See the LICENCE file in the repository root for full licence text. using System.Linq; using NUnit.Framework; using osu.Framework.Allocation; using osu.Framework.Graphics; using osu.Framework.Graphics.Containers; using osu.Framework.Graphics.Sprites; using osu.Framework.Testing; using osu.Game.Beatmaps; using osu.Game.Beatmaps.ControlPoints; using osu.Game.Rulesets.Catch.Objects; using osu.Game.Rulesets.Catch.Objects.Drawables; using osu.Game.Rulesets.Catch.Skinning; using osu.Game.Rulesets.Catch.Skinning.Legacy; using osu.Game.Rulesets.Catch.UI; using osu.Game.Skinning; using osu.Game.Tests.Visual; using osuTK; using osuTK.Graphics; namespace osu.Game.Rulesets.Catch.Tests { public class TestSceneHyperDashColouring : OsuTestScene { [Resolved] private SkinManager skins { get; set; } [Test] public void TestDefaultCatcherColour() { var skin = new TestSkin(); checkHyperDashCatcherColour(skin, Catcher.DEFAULT_HYPER_DASH_COLOUR); } [Test] public void TestCustomCatcherColour() { var skin = new TestSkin { HyperDashColour = Color4.Goldenrod }; checkHyperDashCatcherColour(skin, skin.HyperDashColour); } [Test] public void TestCustomEndGlowColour() { var skin = new TestSkin { HyperDashAfterImageColour = Color4.Lime }; checkHyperDashCatcherColour(skin, Catcher.DEFAULT_HYPER_DASH_COLOUR, skin.HyperDashAfterImageColour); } [Test] public void TestCustomEndGlowColourPriority() { var skin = new TestSkin { HyperDashColour = Color4.Goldenrod, HyperDashAfterImageColour = Color4.Lime }; checkHyperDashCatcherColour(skin, skin.HyperDashColour, skin.HyperDashAfterImageColour); } [Test] public void TestDefaultFruitColour() { var skin = new TestSkin(); checkHyperDashFruitColour(skin, Catcher.DEFAULT_HYPER_DASH_COLOUR); } [Test] public void TestCustomFruitColour() { var skin = new TestSkin { HyperDashFruitColour = Color4.Cyan }; checkHyperDashFruitColour(skin, skin.HyperDashFruitColour); } [Test] public void TestCustomFruitColourPriority() { var skin = new TestSkin { HyperDashColour = Color4.Goldenrod, HyperDashFruitColour = Color4.Cyan }; checkHyperDashFruitColour(skin, skin.HyperDashFruitColour); } [Test] public void TestFruitColourFallback() { var skin = new TestSkin { HyperDashColour = Color4.Goldenrod }; checkHyperDashFruitColour(skin, skin.HyperDashColour); } private void checkHyperDashCatcherColour(ISkin skin, Color4 expectedCatcherColour, Color4? expectedEndGlowColour = null) { CatcherArea catcherArea = null; CatcherTrailDisplay trails = null; AddStep("create hyper-dashing catcher", () => { Child = setupSkinHierarchy(catcherArea = new TestCatcherArea { Anchor = Anchor.Centre, Origin = Anchor.Centre }, skin); }); AddStep("get trails container", () => { trails = catcherArea.OfType<CatcherTrailDisplay>().Single(); catcherArea.MovableCatcher.SetHyperDashState(2); }); AddUntilStep("catcher colour is correct", () => catcherArea.MovableCatcher.Colour == expectedCatcherColour); AddAssert("catcher trails colours are correct", () => trails.HyperDashTrailsColour == expectedCatcherColour); AddAssert("catcher end-glow colours are correct", () => trails.EndGlowSpritesColour == (expectedEndGlowColour ?? 
expectedCatcherColour)); AddStep("finish hyper-dashing", () => { catcherArea.MovableCatcher.SetHyperDashState(1); catcherArea.MovableCatcher.FinishTransforms(); }); AddAssert("catcher colour returned to white", () => catcherArea.MovableCatcher.Colour == Color4.White); } private void checkHyperDashFruitColour(ISkin skin, Color4 expectedColour) { DrawableFruit drawableFruit = null; AddStep("create hyper-dash fruit", () => { var fruit = new Fruit { HyperDashTarget = new Banana() }; fruit.ApplyDefaults(new ControlPointInfo(), new BeatmapDifficulty()); Child = setupSkinHierarchy(drawableFruit = new DrawableFruit(fruit) { Anchor = Anchor.Centre, Origin = Anchor.Centre, Scale = new Vector2(4f), }, skin); }); AddAssert("hyper-dash colour is correct", () => checkLegacyFruitHyperDashColour(drawableFruit, expectedColour)); } private Drawable setupSkinHierarchy(Drawable child, ISkin skin) { var legacySkinProvider = new SkinProvidingContainer(skins.GetSkin(DefaultLegacySkin.Info)); var testSkinProvider = new SkinProvidingContainer(skin); var legacySkinTransformer = new SkinProvidingContainer(new CatchLegacySkinTransformer(testSkinProvider)); return legacySkinProvider .WithChild(testSkinProvider .WithChild(legacySkinTransformer .WithChild(child))); } private bool checkLegacyFruitHyperDashColour(DrawableFruit fruit, Color4 expectedColour) => fruit.ChildrenOfType<SkinnableDrawable>().First().Drawable.ChildrenOfType<Sprite>().Any(c => c.Colour == expectedColour); private class TestSkin : LegacySkin { public Color4 HyperDashColour { get => Configuration.CustomColours[CatchSkinColour.HyperDash.ToString()]; set => Configuration.CustomColours[CatchSkinColour.HyperDash.ToString()] = value; } public Color4 HyperDashAfterImageColour { get => Configuration.CustomColours[CatchSkinColour.HyperDashAfterImage.ToString()]; set => Configuration.CustomColours[CatchSkinColour.HyperDashAfterImage.ToString()] = value; } public Color4 HyperDashFruitColour { get => Configuration.CustomColours[CatchSkinColour.HyperDashFruit.ToString()]; set => Configuration.CustomColours[CatchSkinColour.HyperDashFruit.ToString()] = value; } public TestSkin() : base(new SkinInfo(), null, null, string.Empty) { } } private class TestCatcherArea : CatcherArea { [Cached] private readonly DroppedObjectContainer droppedObjectContainer; public TestCatcherArea() { Scale = new Vector2(4f); AddInternal(droppedObjectContainer = new DroppedObjectContainer()); } } } }
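Illustrative aside (not part of the test scene above): the tests exercise a colour-priority chain in which HyperDashFruit falls back to HyperDash, which falls back to Catcher.DEFAULT_HYPER_DASH_COLOUR, while the end glow prefers HyperDashAfterImage over whatever the catcher uses. A minimal, self-contained C# sketch of that fallback, assuming plain strings in place of osuTK's Color4 and using hypothetical Resolve* helpers rather than any osu! API:

// Illustrative only: mirrors the priority rules checked by the tests above.
// ResolveFruitColour / ResolveEndGlowColour are hypothetical helpers.
using System;

static class HyperDashColourFallbackSketch
{
    // Stand-in for Catcher.DEFAULT_HYPER_DASH_COLOUR (the real value lives in the Catcher class).
    static readonly string DefaultColour = "DefaultHyperDashColour";

    // Fruit colour: HyperDashFruit wins, then HyperDash, then the default.
    static string ResolveFruitColour(string hyperDashFruit, string hyperDash)
        => hyperDashFruit ?? hyperDash ?? DefaultColour;

    // End glow: HyperDashAfterImage wins, otherwise the catcher colour is reused.
    static string ResolveEndGlowColour(string hyperDashAfterImage, string catcherColour)
        => hyperDashAfterImage ?? catcherColour;

    static void Main()
    {
        Console.WriteLine(ResolveFruitColour(null, null));            // default (TestDefaultFruitColour)
        Console.WriteLine(ResolveFruitColour(null, "Goldenrod"));     // Goldenrod (TestFruitColourFallback)
        Console.WriteLine(ResolveFruitColour("Cyan", "Goldenrod"));   // Cyan (TestCustomFruitColourPriority)
        Console.WriteLine(ResolveEndGlowColour("Lime", "Goldenrod")); // Lime (TestCustomEndGlowColourPriority)
    }
}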
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ //#define SPAM using System; using System.Collections.Generic; using OpenSim.Framework; using OpenSim.Region.Physics.Manager; using OpenMetaverse; using OpenMetaverse.StructuredData; using System.Drawing; using System.Drawing.Imaging; using System.IO.Compression; using PrimMesher; using log4net; using Nini.Config; using System.Reflection; using System.IO; using ComponentAce.Compression.Libs.zlib; namespace OpenSim.Region.Physics.Meshing { public class MeshmerizerPlugin : IMeshingPlugin { public MeshmerizerPlugin() { } public string GetName() { return "Meshmerizer"; } public IMesher GetMesher(IConfigSource config) { return new Meshmerizer(config); } } public class Meshmerizer : IMesher { private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); // Setting baseDir to a path will enable the dumping of raw files // raw files can be imported by blender so a visual inspection of the results can be done #if SPAM const string baseDir = "rawFiles"; #else private const string baseDir = null; //"rawFiles"; #endif private bool cacheSculptMaps = true; private string decodedSculptMapPath = null; private bool useMeshiesPhysicsMesh = false; private float minSizeForComplexMesh = 0.2f; // prims with all dimensions smaller than this will have a bounding box mesh private Dictionary<ulong, Mesh> m_uniqueMeshes = new Dictionary<ulong, Mesh>(); public Meshmerizer(IConfigSource config) { IConfig start_config = config.Configs["Startup"]; decodedSculptMapPath = start_config.GetString("DecodedSculptMapPath","j2kDecodeCache"); cacheSculptMaps = start_config.GetBoolean("CacheSculptMaps", cacheSculptMaps); useMeshiesPhysicsMesh = start_config.GetBoolean("UseMeshiesPhysicsMesh", useMeshiesPhysicsMesh); try { if (!Directory.Exists(decodedSculptMapPath)) Directory.CreateDirectory(decodedSculptMapPath); } catch (Exception e) { m_log.WarnFormat("[SCULPT]: Unable to create {0} directory: ", decodedSculptMapPath, e.Message); } } /// <summary> /// creates a 
simple box mesh of the specified size. This mesh is of very low vertex count and may /// be useful as a backup proxy when level of detail is not needed or when more complex meshes fail /// for some reason /// </summary> /// <param name="minX"></param> /// <param name="maxX"></param> /// <param name="minY"></param> /// <param name="maxY"></param> /// <param name="minZ"></param> /// <param name="maxZ"></param> /// <returns></returns> private static Mesh CreateSimpleBoxMesh(float minX, float maxX, float minY, float maxY, float minZ, float maxZ) { Mesh box = new Mesh(); List<Vertex> vertices = new List<Vertex>(); // bottom vertices.Add(new Vertex(minX, maxY, minZ)); vertices.Add(new Vertex(maxX, maxY, minZ)); vertices.Add(new Vertex(maxX, minY, minZ)); vertices.Add(new Vertex(minX, minY, minZ)); box.Add(new Triangle(vertices[0], vertices[1], vertices[2])); box.Add(new Triangle(vertices[0], vertices[2], vertices[3])); // top vertices.Add(new Vertex(maxX, maxY, maxZ)); vertices.Add(new Vertex(minX, maxY, maxZ)); vertices.Add(new Vertex(minX, minY, maxZ)); vertices.Add(new Vertex(maxX, minY, maxZ)); box.Add(new Triangle(vertices[4], vertices[5], vertices[6])); box.Add(new Triangle(vertices[4], vertices[6], vertices[7])); // sides box.Add(new Triangle(vertices[5], vertices[0], vertices[3])); box.Add(new Triangle(vertices[5], vertices[3], vertices[6])); box.Add(new Triangle(vertices[1], vertices[0], vertices[5])); box.Add(new Triangle(vertices[1], vertices[5], vertices[4])); box.Add(new Triangle(vertices[7], vertices[1], vertices[4])); box.Add(new Triangle(vertices[7], vertices[2], vertices[1])); box.Add(new Triangle(vertices[3], vertices[2], vertices[7])); box.Add(new Triangle(vertices[3], vertices[7], vertices[6])); return box; } /// <summary> /// Creates a simple bounding box mesh for a complex input mesh /// </summary> /// <param name="meshIn"></param> /// <returns></returns> private static Mesh CreateBoundingBoxMesh(Mesh meshIn) { float minX = float.MaxValue; float maxX = float.MinValue; float minY = float.MaxValue; float maxY = float.MinValue; float minZ = float.MaxValue; float maxZ = float.MinValue; foreach (Vector3 v in meshIn.getVertexList()) { if (v != null) { if (v.X < minX) minX = v.X; if (v.Y < minY) minY = v.Y; if (v.Z < minZ) minZ = v.Z; if (v.X > maxX) maxX = v.X; if (v.Y > maxY) maxY = v.Y; if (v.Z > maxZ) maxZ = v.Z; } } return CreateSimpleBoxMesh(minX, maxX, minY, maxY, minZ, maxZ); } private void ReportPrimError(string message, string primName, PrimMesh primMesh) { m_log.Error(message); m_log.Error("\nPrim Name: " + primName); m_log.Error("****** PrimMesh Parameters ******\n" + primMesh.ParamsToDisplayString()); } private ulong GetMeshKey(PrimitiveBaseShape pbs, Vector3 size, float lod) { ulong hash = 5381; hash = djb2(hash, pbs.PathCurve); hash = djb2(hash, (byte)((byte)pbs.HollowShape | (byte)pbs.ProfileShape)); hash = djb2(hash, pbs.PathBegin); hash = djb2(hash, pbs.PathEnd); hash = djb2(hash, pbs.PathScaleX); hash = djb2(hash, pbs.PathScaleY); hash = djb2(hash, pbs.PathShearX); hash = djb2(hash, pbs.PathShearY); hash = djb2(hash, (byte)pbs.PathTwist); hash = djb2(hash, (byte)pbs.PathTwistBegin); hash = djb2(hash, (byte)pbs.PathRadiusOffset); hash = djb2(hash, (byte)pbs.PathTaperX); hash = djb2(hash, (byte)pbs.PathTaperY); hash = djb2(hash, pbs.PathRevolutions); hash = djb2(hash, (byte)pbs.PathSkew); hash = djb2(hash, pbs.ProfileBegin); hash = djb2(hash, pbs.ProfileEnd); hash = djb2(hash, pbs.ProfileHollow); // TODO: Separate scale out from the primitive shape data (after // 
scaling is supported at the physics engine level) byte[] scaleBytes = size.GetBytes(); for (int i = 0; i < scaleBytes.Length; i++) hash = djb2(hash, scaleBytes[i]); // Include LOD in hash, accounting for endianness byte[] lodBytes = new byte[4]; Buffer.BlockCopy(BitConverter.GetBytes(lod), 0, lodBytes, 0, 4); if (!BitConverter.IsLittleEndian) { Array.Reverse(lodBytes, 0, 4); } for (int i = 0; i < lodBytes.Length; i++) hash = djb2(hash, lodBytes[i]); // include sculpt UUID if (pbs.SculptEntry) { scaleBytes = pbs.SculptTexture.GetBytes(); for (int i = 0; i < scaleBytes.Length; i++) hash = djb2(hash, scaleBytes[i]); } return hash; } private ulong djb2(ulong hash, byte c) { return ((hash << 5) + hash) + (ulong)c; } private ulong djb2(ulong hash, ushort c) { hash = ((hash << 5) + hash) + (ulong)((byte)c); return ((hash << 5) + hash) + (ulong)(c >> 8); } private Mesh CreateMeshFromPrimMesher(string primName, PrimitiveBaseShape primShape, Vector3 size, float lod) { PrimMesh primMesh; PrimMesher.SculptMesh sculptMesh; List<Coord> coords = new List<Coord>(); List<Face> faces = new List<Face>(); Image idata = null; string decodedSculptFileName = ""; if (primShape.SculptEntry) { if (((OpenMetaverse.SculptType)primShape.SculptType) == SculptType.Mesh) { if (!useMeshiesPhysicsMesh) return null; m_log.Debug("[MESH]: experimental mesh proxy generation"); OSD meshOsd; if (primShape.SculptData.Length <= 0) { m_log.Error("[MESH]: asset data is zero length"); return null; } long start = 0; using (MemoryStream data = new MemoryStream(primShape.SculptData)) { meshOsd = (OSDMap)OSDParser.DeserializeLLSDBinary(data); start = data.Position; } if (meshOsd is OSDMap) { OSDMap map = (OSDMap)meshOsd; OSDMap physicsParms = (OSDMap)map["physics_shape"]; int physOffset = physicsParms["offset"].AsInteger() + (int)start; int physSize = physicsParms["size"].AsInteger(); if (physOffset < 0 || physSize == 0) return null; // no mesh data in asset OSD decodedMeshOsd = new OSD(); byte[] meshBytes = new byte[physSize]; System.Buffer.BlockCopy(primShape.SculptData, physOffset, meshBytes, 0, physSize); byte[] decompressed = new byte[physSize * 5]; try { using (MemoryStream inMs = new MemoryStream(meshBytes)) { using (MemoryStream outMs = new MemoryStream()) { using (ZOutputStream zOut = new ZOutputStream(outMs)) { byte[] readBuffer = new byte[2048]; int readLen = 0; while ((readLen = inMs.Read(readBuffer, 0, readBuffer.Length)) > 0) { zOut.Write(readBuffer, 0, readLen); } zOut.Flush(); outMs.Seek(0, SeekOrigin.Begin); byte[] decompressedBuf = outMs.GetBuffer(); decodedMeshOsd = OSDParser.DeserializeLLSDBinary(decompressedBuf); } } } } catch (Exception e) { m_log.Error("[MESH]: exception decoding physical mesh: " + e.ToString()); return null; } OSDArray decodedMeshOsdArray = null; // physics_shape is an array of OSDMaps, one for each submesh if (decodedMeshOsd is OSDArray) { decodedMeshOsdArray = (OSDArray)decodedMeshOsd; foreach (OSD subMeshOsd in decodedMeshOsdArray) { if (subMeshOsd is OSDMap) { OSDMap subMeshMap = (OSDMap)subMeshOsd; OpenMetaverse.Vector3 posMax = ((OSDMap)subMeshMap["PositionDomain"])["Max"].AsVector3(); OpenMetaverse.Vector3 posMin = ((OSDMap)subMeshMap["PositionDomain"])["Min"].AsVector3(); ushort faceIndexOffset = (ushort)coords.Count; byte[] posBytes = subMeshMap["Position"].AsBinary(); for (int i = 0; i < posBytes.Length; i += 6) { ushort uX = Utils.BytesToUInt16(posBytes, i); ushort uY = Utils.BytesToUInt16(posBytes, i + 2); ushort uZ = Utils.BytesToUInt16(posBytes, i + 4); Coord c = new Coord( 
Utils.UInt16ToFloat(uX, posMin.X, posMax.X) * size.X, Utils.UInt16ToFloat(uY, posMin.Y, posMax.Y) * size.Y, Utils.UInt16ToFloat(uZ, posMin.Z, posMax.Z) * size.Z); coords.Add(c); } byte[] triangleBytes = subMeshMap["TriangleList"].AsBinary(); for (int i = 0; i < triangleBytes.Length; i += 6) { ushort v1 = (ushort)(Utils.BytesToUInt16(triangleBytes, i) + faceIndexOffset); ushort v2 = (ushort)(Utils.BytesToUInt16(triangleBytes, i + 2) + faceIndexOffset); ushort v3 = (ushort)(Utils.BytesToUInt16(triangleBytes, i + 4) + faceIndexOffset); Face f = new Face(v1, v2, v3); faces.Add(f); } } } } } } else { if (cacheSculptMaps && primShape.SculptTexture != UUID.Zero) { decodedSculptFileName = System.IO.Path.Combine(decodedSculptMapPath, "smap_" + primShape.SculptTexture.ToString()); try { if (File.Exists(decodedSculptFileName)) { idata = Image.FromFile(decodedSculptFileName); } } catch (Exception e) { m_log.Error("[SCULPT]: unable to load cached sculpt map " + decodedSculptFileName + " " + e.Message); } //if (idata != null) // m_log.Debug("[SCULPT]: loaded cached map asset for map ID: " + primShape.SculptTexture.ToString()); } if (idata == null) { if (primShape.SculptData == null || primShape.SculptData.Length == 0) return null; try { OpenMetaverse.Imaging.ManagedImage unusedData; OpenMetaverse.Imaging.OpenJPEG.DecodeToImage(primShape.SculptData, out unusedData, out idata); unusedData = null; //idata = CSJ2K.J2kImage.FromBytes(primShape.SculptData); if (cacheSculptMaps && idata != null) { try { idata.Save(decodedSculptFileName, ImageFormat.MemoryBmp); } catch (Exception e) { m_log.Error("[SCULPT]: unable to cache sculpt map " + decodedSculptFileName + " " + e.Message); } } } catch (DllNotFoundException) { m_log.Error("[PHYSICS]: OpenJpeg is not installed correctly on this system. Physics Proxy generation failed. Often times this is because of an old version of GLIBC. You must have version 2.4 or above!"); return null; } catch (IndexOutOfRangeException) { m_log.Error("[PHYSICS]: OpenJpeg was unable to decode this. Physics Proxy generation failed"); return null; } catch (Exception ex) { m_log.Error("[PHYSICS]: Unable to generate a Sculpty physics proxy. Sculpty texture decode failed: " + ex.Message); return null; } } PrimMesher.SculptMesh.SculptType sculptType; switch ((OpenMetaverse.SculptType)primShape.SculptType) { case OpenMetaverse.SculptType.Cylinder: sculptType = PrimMesher.SculptMesh.SculptType.cylinder; break; case OpenMetaverse.SculptType.Plane: sculptType = PrimMesher.SculptMesh.SculptType.plane; break; case OpenMetaverse.SculptType.Torus: sculptType = PrimMesher.SculptMesh.SculptType.torus; break; case OpenMetaverse.SculptType.Sphere: sculptType = PrimMesher.SculptMesh.SculptType.sphere; break; default: sculptType = PrimMesher.SculptMesh.SculptType.plane; break; } bool mirror = ((primShape.SculptType & 128) != 0); bool invert = ((primShape.SculptType & 64) != 0); sculptMesh = new PrimMesher.SculptMesh((Bitmap)idata, sculptType, (int)lod, false, mirror, invert); idata.Dispose(); sculptMesh.DumpRaw(baseDir, primName, "primMesh"); sculptMesh.Scale(size.X, size.Y, size.Z); coords = sculptMesh.coords; faces = sculptMesh.faces; } } else { float pathShearX = primShape.PathShearX < 128 ? (float)primShape.PathShearX * 0.01f : (float)(primShape.PathShearX - 256) * 0.01f; float pathShearY = primShape.PathShearY < 128 ? 
(float)primShape.PathShearY * 0.01f : (float)(primShape.PathShearY - 256) * 0.01f; float pathBegin = (float)primShape.PathBegin * 2.0e-5f; float pathEnd = 1.0f - (float)primShape.PathEnd * 2.0e-5f; float pathScaleX = (float)(primShape.PathScaleX - 100) * 0.01f; float pathScaleY = (float)(primShape.PathScaleY - 100) * 0.01f; float profileBegin = (float)primShape.ProfileBegin * 2.0e-5f; float profileEnd = 1.0f - (float)primShape.ProfileEnd * 2.0e-5f; float profileHollow = (float)primShape.ProfileHollow * 2.0e-5f; if (profileHollow > 0.95f) profileHollow = 0.95f; int sides = 4; if ((primShape.ProfileCurve & 0x07) == (byte)ProfileShape.EquilateralTriangle) sides = 3; else if ((primShape.ProfileCurve & 0x07) == (byte)ProfileShape.Circle) sides = 24; else if ((primShape.ProfileCurve & 0x07) == (byte)ProfileShape.HalfCircle) { // half circle, prim is a sphere sides = 24; profileBegin = 0.5f * profileBegin + 0.5f; profileEnd = 0.5f * profileEnd + 0.5f; } int hollowSides = sides; if (primShape.HollowShape == HollowShape.Circle) hollowSides = 24; else if (primShape.HollowShape == HollowShape.Square) hollowSides = 4; else if (primShape.HollowShape == HollowShape.Triangle) hollowSides = 3; primMesh = new PrimMesh(sides, profileBegin, profileEnd, profileHollow, hollowSides); if (primMesh.errorMessage != null) if (primMesh.errorMessage.Length > 0) m_log.Error("[ERROR] " + primMesh.errorMessage); primMesh.topShearX = pathShearX; primMesh.topShearY = pathShearY; primMesh.pathCutBegin = pathBegin; primMesh.pathCutEnd = pathEnd; if (primShape.PathCurve == (byte)Extrusion.Straight || primShape.PathCurve == (byte) Extrusion.Flexible) { primMesh.twistBegin = primShape.PathTwistBegin * 18 / 10; primMesh.twistEnd = primShape.PathTwist * 18 / 10; primMesh.taperX = pathScaleX; primMesh.taperY = pathScaleY; if (profileBegin < 0.0f || profileBegin >= profileEnd || profileEnd > 1.0f) { ReportPrimError("*** CORRUPT PRIM!! ***", primName, primMesh); if (profileBegin < 0.0f) profileBegin = 0.0f; if (profileEnd > 1.0f) profileEnd = 1.0f; } #if SPAM m_log.Debug("****** PrimMesh Parameters (Linear) ******\n" + primMesh.ParamsToDisplayString()); #endif try { primMesh.ExtrudeLinear(); } catch (Exception ex) { ReportPrimError("Extrusion failure: exception: " + ex.ToString(), primName, primMesh); return null; } } else { primMesh.holeSizeX = (200 - primShape.PathScaleX) * 0.01f; primMesh.holeSizeY = (200 - primShape.PathScaleY) * 0.01f; primMesh.radius = 0.01f * primShape.PathRadiusOffset; primMesh.revolutions = 1.0f + 0.015f * primShape.PathRevolutions; primMesh.skew = 0.01f * primShape.PathSkew; primMesh.twistBegin = primShape.PathTwistBegin * 36 / 10; primMesh.twistEnd = primShape.PathTwist * 36 / 10; primMesh.taperX = primShape.PathTaperX * 0.01f; primMesh.taperY = primShape.PathTaperY * 0.01f; if (profileBegin < 0.0f || profileBegin >= profileEnd || profileEnd > 1.0f) { ReportPrimError("*** CORRUPT PRIM!! 
***", primName, primMesh); if (profileBegin < 0.0f) profileBegin = 0.0f; if (profileEnd > 1.0f) profileEnd = 1.0f; } #if SPAM m_log.Debug("****** PrimMesh Parameters (Circular) ******\n" + primMesh.ParamsToDisplayString()); #endif try { primMesh.ExtrudeCircular(); } catch (Exception ex) { ReportPrimError("Extrusion failure: exception: " + ex.ToString(), primName, primMesh); return null; } } primMesh.DumpRaw(baseDir, primName, "primMesh"); primMesh.Scale(size.X, size.Y, size.Z); coords = primMesh.coords; faces = primMesh.faces; } // Remove the reference to any JPEG2000 sculpt data so it can be GCed primShape.SculptData = Utils.EmptyBytes; int numCoords = coords.Count; int numFaces = faces.Count; // Create the list of vertices List<Vertex> vertices = new List<Vertex>(); for (int i = 0; i < numCoords; i++) { Coord c = coords[i]; vertices.Add(new Vertex(c.X, c.Y, c.Z)); } Mesh mesh = new Mesh(); // Add the corresponding triangles to the mesh for (int i = 0; i < numFaces; i++) { Face f = faces[i]; mesh.Add(new Triangle(vertices[f.v1], vertices[f.v2], vertices[f.v3])); } return mesh; } public IMesh CreateMesh(String primName, PrimitiveBaseShape primShape, Vector3 size, float lod) { return CreateMesh(primName, primShape, size, lod, false); } public IMesh CreateMesh(String primName, PrimitiveBaseShape primShape, Vector3 size, float lod, bool isPhysical) { Mesh mesh = null; ulong key = 0; // If this mesh has been created already, return it instead of creating another copy // For large regions with 100k+ prims and hundreds of copies of each, this can save a GB or more of memory key = GetMeshKey(primShape, size, lod); if (m_uniqueMeshes.TryGetValue(key, out mesh)) return mesh; if (size.X < 0.01f) size.X = 0.01f; if (size.Y < 0.01f) size.Y = 0.01f; if (size.Z < 0.01f) size.Z = 0.01f; mesh = CreateMeshFromPrimMesher(primName, primShape, size, lod); if (mesh != null) { if ((!isPhysical) && size.X < minSizeForComplexMesh && size.Y < minSizeForComplexMesh && size.Z < minSizeForComplexMesh) { #if SPAM m_log.Debug("Meshmerizer: prim " + primName + " has a size of " + size.ToString() + " which is below threshold of " + minSizeForComplexMesh.ToString() + " - creating simple bounding box"); #endif mesh = CreateBoundingBoxMesh(mesh); mesh.DumpRaw(baseDir, primName, "Z extruded"); } // trim the vertex and triangle lists to free up memory mesh.TrimExcess(); m_uniqueMeshes.Add(key, mesh); } return mesh; } } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Collections; using System.Diagnostics.CodeAnalysis; using System.Runtime.InteropServices; using System.Security; using System.Text; using System.Threading; namespace System.Runtime.Caching { internal struct UsageEntryRef { static internal readonly UsageEntryRef INVALID = new UsageEntryRef(0, 0); private const uint ENTRY_MASK = 0x000000ffu; private const uint PAGE_MASK = 0xffffff00u; private const int PAGE_SHIFT = 8; private uint _ref; internal UsageEntryRef(int pageIndex, int entryIndex) { Dbg.Assert((pageIndex & 0x00ffffff) == pageIndex, "(pageIndex & 0x00ffffff) == pageIndex"); Dbg.Assert((Math.Abs(entryIndex) & ENTRY_MASK) == (Math.Abs(entryIndex)), "(Math.Abs(entryIndex) & ENTRY_MASK) == Math.Abs(entryIndex)"); Dbg.Assert(entryIndex != 0 || pageIndex == 0, "entryIndex != 0 || pageIndex == 0"); _ref = ((((uint)pageIndex) << PAGE_SHIFT) | (((uint)(entryIndex)) & ENTRY_MASK)); } public override bool Equals(object value) { if (value is UsageEntryRef) { return _ref == ((UsageEntryRef)value)._ref; } return false; } public static bool operator ==(UsageEntryRef r1, UsageEntryRef r2) { return r1._ref == r2._ref; } public static bool operator !=(UsageEntryRef r1, UsageEntryRef r2) { return r1._ref != r2._ref; } public override int GetHashCode() { return (int)_ref; } internal int PageIndex { get { int result = (int)(_ref >> PAGE_SHIFT); return result; } } internal int Ref1Index { get { int result = (int)(sbyte)(_ref & ENTRY_MASK); Dbg.Assert(result > 0, "result > 0"); return result; } } internal int Ref2Index { get { int result = (int)(sbyte)(_ref & ENTRY_MASK); Dbg.Assert(result < 0, "result < 0"); return -result; } } internal bool IsRef1 { get { return ((int)(sbyte)(_ref & ENTRY_MASK)) > 0; } } internal bool IsRef2 { get { return ((int)(sbyte)(_ref & ENTRY_MASK)) < 0; } } internal bool IsInvalid { get { return _ref == 0; } } } internal struct UsageEntryLink { internal UsageEntryRef _next; internal UsageEntryRef _prev; } [SuppressMessage("Microsoft.Portability", "CA1900:ValueTypeFieldsShouldBePortable", Justification = "Grandfathered suppression from original caching code checkin")] [StructLayout(LayoutKind.Explicit)] internal struct UsageEntry { [FieldOffset(0)] internal UsageEntryLink _ref1; [FieldOffset(4)] internal int _cFree; [FieldOffset(8)] internal UsageEntryLink _ref2; [FieldOffset(16)] internal DateTime _utcDate; [FieldOffset(24)] internal MemoryCacheEntry _cacheEntry; } internal struct UsagePage { internal UsageEntry[] _entries; internal int _pageNext; internal int _pagePrev; } internal struct UsagePageList { internal int _head; internal int _tail; } internal sealed class UsageBucket { private const int NUM_ENTRIES = 127; private const int LENGTH_ENTRIES = 128; private const int MIN_PAGES_INCREMENT = 10; private const int MAX_PAGES_INCREMENT = 340; private const double MIN_LOAD_FACTOR = 0.5; private CacheUsage _cacheUsage; private byte _bucket; private UsagePage[] _pages; private int _cEntriesInUse; private int _cPagesInUse; private int _cEntriesInFlush; private int _minEntriesInUse; private UsagePageList _freePageList; private UsagePageList _freeEntryList; private UsageEntryRef _lastRefHead; private UsageEntryRef _lastRefTail; private UsageEntryRef _addRef2Head; private bool _blockReduce; internal UsageBucket(CacheUsage cacheUsage, byte bucket) { _cacheUsage 
= cacheUsage; _bucket = bucket; InitZeroPages(); } private void InitZeroPages() { Dbg.Assert(_cPagesInUse == 0, "_cPagesInUse == 0"); Dbg.Assert(_cEntriesInUse == 0, "_cEntriesInUse == 0"); Dbg.Assert(_cEntriesInFlush == 0, "_cEntriesInFlush == 0"); Dbg.Assert(_lastRefHead.IsInvalid, "_lastRefHead.IsInvalid"); Dbg.Assert(_lastRefTail.IsInvalid, "_lastRefTail.IsInvalid"); Dbg.Assert(_addRef2Head.IsInvalid, "_addRef2Head.IsInvalid"); _pages = null; _minEntriesInUse = -1; _freePageList._head = -1; _freePageList._tail = -1; _freeEntryList._head = -1; _freeEntryList._tail = -1; } private void AddToListHead(int pageIndex, ref UsagePageList list) { Dbg.Assert((list._head == -1) == (list._tail == -1), "(list._head == -1) == (list._tail == -1)"); (_pages[(pageIndex)]._pagePrev) = -1; (_pages[(pageIndex)]._pageNext) = list._head; if (list._head != -1) { Dbg.Assert((_pages[(list._head)]._pagePrev) == -1, "PagePrev(list._head) == -1"); (_pages[(list._head)]._pagePrev) = pageIndex; } else { list._tail = pageIndex; } list._head = pageIndex; } private void AddToListTail(int pageIndex, ref UsagePageList list) { Dbg.Assert((list._head == -1) == (list._tail == -1), "(list._head == -1) == (list._tail == -1)"); (_pages[(pageIndex)]._pageNext) = -1; (_pages[(pageIndex)]._pagePrev) = list._tail; if (list._tail != -1) { Dbg.Assert((_pages[(list._tail)]._pageNext) == -1, "PageNext(list._tail) == -1"); (_pages[(list._tail)]._pageNext) = pageIndex; } else { list._head = pageIndex; } list._tail = pageIndex; } private int RemoveFromListHead(ref UsagePageList list) { Dbg.Assert(list._head != -1, "list._head != -1"); int oldHead = list._head; RemoveFromList(oldHead, ref list); return oldHead; } private void RemoveFromList(int pageIndex, ref UsagePageList list) { Dbg.Assert((list._head == -1) == (list._tail == -1), "(list._head == -1) == (list._tail == -1)"); if ((_pages[(pageIndex)]._pagePrev) != -1) { Dbg.Assert((_pages[((_pages[(pageIndex)]._pagePrev))]._pageNext) == pageIndex, "PageNext(PagePrev(pageIndex)) == pageIndex"); (_pages[((_pages[(pageIndex)]._pagePrev))]._pageNext) = (_pages[(pageIndex)]._pageNext); } else { Dbg.Assert(list._head == pageIndex, "list._head == pageIndex"); list._head = (_pages[(pageIndex)]._pageNext); } if ((_pages[(pageIndex)]._pageNext) != -1) { Dbg.Assert((_pages[((_pages[(pageIndex)]._pageNext))]._pagePrev) == pageIndex, "PagePrev(PageNext(pageIndex)) == pageIndex"); (_pages[((_pages[(pageIndex)]._pageNext))]._pagePrev) = (_pages[(pageIndex)]._pagePrev); } else { Dbg.Assert(list._tail == pageIndex, "list._tail == pageIndex"); list._tail = (_pages[(pageIndex)]._pagePrev); } (_pages[(pageIndex)]._pagePrev) = -1; (_pages[(pageIndex)]._pageNext) = -1; } private void MoveToListHead(int pageIndex, ref UsagePageList list) { Dbg.Assert(list._head != -1, "list._head != -1"); Dbg.Assert(list._tail != -1, "list._tail != -1"); if (list._head == pageIndex) return; RemoveFromList(pageIndex, ref list); AddToListHead(pageIndex, ref list); } private void MoveToListTail(int pageIndex, ref UsagePageList list) { Dbg.Assert(list._head != -1, "list._head != -1"); Dbg.Assert(list._tail != -1, "list._tail != -1"); if (list._tail == pageIndex) return; RemoveFromList(pageIndex, ref list); AddToListTail(pageIndex, ref list); } private void UpdateMinEntries() { if (_cPagesInUse <= 1) { _minEntriesInUse = -1; } else { int capacity = _cPagesInUse * NUM_ENTRIES; Dbg.Assert(capacity > 0, "capacity > 0"); Dbg.Assert(MIN_LOAD_FACTOR < 1.0, "MIN_LOAD_FACTOR < 1.0"); _minEntriesInUse = (int)(capacity * MIN_LOAD_FACTOR); 
if ((_minEntriesInUse - 1) > ((_cPagesInUse - 1) * NUM_ENTRIES)) { _minEntriesInUse = -1; } } } private void RemovePage(int pageIndex) { Dbg.Assert((((_pages[(pageIndex)]._entries))[0]._cFree) == NUM_ENTRIES, "FreeEntryCount(EntriesI(pageIndex)) == NUM_ENTRIES"); RemoveFromList(pageIndex, ref _freeEntryList); AddToListHead(pageIndex, ref _freePageList); Dbg.Assert((_pages[(pageIndex)]._entries) != null, "EntriesI(pageIndex) != null"); (_pages[(pageIndex)]._entries) = null; _cPagesInUse--; if (_cPagesInUse == 0) { InitZeroPages(); } else { UpdateMinEntries(); } } private UsageEntryRef GetFreeUsageEntry() { Dbg.Assert(_freeEntryList._head >= 0, "_freeEntryList._head >= 0"); int pageIndex = _freeEntryList._head; UsageEntry[] entries = (_pages[(pageIndex)]._entries); int entryIndex = ((entries)[0]._ref1._next).Ref1Index; ((entries)[0]._ref1._next) = entries[entryIndex]._ref1._next; ((entries)[0]._cFree)--; if (((entries)[0]._cFree) == 0) { Dbg.Assert(((entries)[0]._ref1._next).IsInvalid, "FreeEntryHead(entries).IsInvalid"); RemoveFromList(pageIndex, ref _freeEntryList); } return new UsageEntryRef(pageIndex, entryIndex); } private void AddUsageEntryToFreeList(UsageEntryRef entryRef) { Dbg.Assert(entryRef.IsRef1, "entryRef.IsRef1"); UsageEntry[] entries = (_pages[(entryRef.PageIndex)]._entries); int entryIndex = entryRef.Ref1Index; Dbg.Assert(entries[entryIndex]._cacheEntry == null, "entries[entryIndex]._cacheEntry == null"); entries[entryIndex]._utcDate = DateTime.MinValue; entries[entryIndex]._ref1._prev = UsageEntryRef.INVALID; entries[entryIndex]._ref2._next = UsageEntryRef.INVALID; entries[entryIndex]._ref2._prev = UsageEntryRef.INVALID; entries[entryIndex]._ref1._next = ((entries)[0]._ref1._next); ((entries)[0]._ref1._next) = entryRef; _cEntriesInUse--; int pageIndex = entryRef.PageIndex; ((entries)[0]._cFree)++; if (((entries)[0]._cFree) == 1) { AddToListHead(pageIndex, ref _freeEntryList); } else if (((entries)[0]._cFree) == NUM_ENTRIES) { RemovePage(pageIndex); } } private void Expand() { Dbg.Assert(_cPagesInUse * NUM_ENTRIES == _cEntriesInUse, "_cPagesInUse * NUM_ENTRIES == _cEntriesInUse"); Dbg.Assert(_freeEntryList._head == -1, "_freeEntryList._head == -1"); Dbg.Assert(_freeEntryList._tail == -1, "_freeEntryList._tail == -1"); if (_freePageList._head == -1) { int oldLength; if (_pages == null) { oldLength = 0; } else { oldLength = _pages.Length; } Dbg.Assert(_cPagesInUse == oldLength, "_cPagesInUse == oldLength"); Dbg.Assert(_cEntriesInUse == oldLength * NUM_ENTRIES, "_cEntriesInUse == oldLength * ExpiresEntryRef.NUM_ENTRIES"); int newLength = oldLength * 2; newLength = Math.Max(oldLength + MIN_PAGES_INCREMENT, newLength); newLength = Math.Min(newLength, oldLength + MAX_PAGES_INCREMENT); Dbg.Assert(newLength > oldLength, "newLength > oldLength"); UsagePage[] newPages = new UsagePage[newLength]; for (int i = 0; i < oldLength; i++) { newPages[i] = _pages[i]; } for (int i = oldLength; i < newPages.Length; i++) { newPages[i]._pagePrev = i - 1; newPages[i]._pageNext = i + 1; } newPages[oldLength]._pagePrev = -1; newPages[newPages.Length - 1]._pageNext = -1; _freePageList._head = oldLength; _freePageList._tail = newPages.Length - 1; _pages = newPages; } int pageIndex = RemoveFromListHead(ref _freePageList); AddToListHead(pageIndex, ref _freeEntryList); UsageEntry[] entries = new UsageEntry[LENGTH_ENTRIES]; ((entries)[0]._cFree) = NUM_ENTRIES; for (int i = 0; i < entries.Length - 1; i++) { entries[i]._ref1._next = new UsageEntryRef(pageIndex, i + 1); } entries[entries.Length - 
1]._ref1._next = UsageEntryRef.INVALID; (_pages[(pageIndex)]._entries) = entries; _cPagesInUse++; UpdateMinEntries(); } private void Reduce() { if (_cEntriesInUse >= _minEntriesInUse || _blockReduce) return; Dbg.Assert(_freeEntryList._head != -1, "_freeEntryList._head != -1"); Dbg.Assert(_freeEntryList._tail != -1, "_freeEntryList._tail != -1"); Dbg.Assert(_freeEntryList._head != _freeEntryList._tail, "_freeEntryList._head != _freeEntryList._tail"); int meanFree = (int)(NUM_ENTRIES - (NUM_ENTRIES * MIN_LOAD_FACTOR)); int pageIndexLast = _freeEntryList._tail; int pageIndexCurrent = _freeEntryList._head; int pageIndexNext; UsageEntry[] entries; for (; ;) { pageIndexNext = (_pages[(pageIndexCurrent)]._pageNext); if ((((_pages[(pageIndexCurrent)]._entries))[0]._cFree) > meanFree) { MoveToListTail(pageIndexCurrent, ref _freeEntryList); } else { MoveToListHead(pageIndexCurrent, ref _freeEntryList); } if (pageIndexCurrent == pageIndexLast) break; pageIndexCurrent = pageIndexNext; } for (; ;) { if (_freeEntryList._tail == -1) break; entries = (_pages[(_freeEntryList._tail)]._entries); Dbg.Assert(((entries)[0]._cFree) > 0, "FreeEntryCount(entries) > 0"); int availableFreeEntries = (_cPagesInUse * NUM_ENTRIES) - ((entries)[0]._cFree) - _cEntriesInUse; if (availableFreeEntries < (NUM_ENTRIES - ((entries)[0]._cFree))) break; for (int i = 1; i < entries.Length; i++) { if (entries[i]._cacheEntry == null) continue; Dbg.Assert(_freeEntryList._head != _freeEntryList._tail, "_freeEntryList._head != _freeEntryList._tail"); UsageEntryRef newRef1 = GetFreeUsageEntry(); UsageEntryRef newRef2 = (new UsageEntryRef((newRef1).PageIndex, -(newRef1).Ref1Index)); Dbg.Assert(newRef1.PageIndex != _freeEntryList._tail, "newRef1.PageIndex != _freeEntryList._tail"); UsageEntryRef oldRef1 = new UsageEntryRef(_freeEntryList._tail, i); UsageEntryRef oldRef2 = (new UsageEntryRef((oldRef1).PageIndex, -(oldRef1).Ref1Index)); MemoryCacheEntry cacheEntry = entries[i]._cacheEntry; Dbg.Assert(cacheEntry.UsageEntryRef == oldRef1, "cacheEntry.UsageEntryRef == oldRef1"); cacheEntry.UsageEntryRef = newRef1; UsageEntry[] newEntries = (_pages[(newRef1.PageIndex)]._entries); newEntries[newRef1.Ref1Index] = entries[i]; ((entries)[0]._cFree)++; UsageEntryRef prev = newEntries[newRef1.Ref1Index]._ref1._prev; Dbg.Assert(prev != oldRef2, "prev != oldRef2"); UsageEntryRef next = newEntries[newRef1.Ref1Index]._ref1._next; if (next == oldRef2) { next = newRef2; } { if ((prev).IsRef1) { (_pages[((prev).PageIndex)]._entries)[(prev).Ref1Index]._ref1._next = (newRef1); } else if ((prev).IsRef2) { (_pages[((prev).PageIndex)]._entries)[(prev).Ref2Index]._ref2._next = (newRef1); } else { _lastRefHead = (newRef1); } }; { if ((next).IsRef1) { (_pages[((next).PageIndex)]._entries)[(next).Ref1Index]._ref1._prev = (newRef1); } else if ((next).IsRef2) { (_pages[((next).PageIndex)]._entries)[(next).Ref2Index]._ref2._prev = (newRef1); } else { _lastRefTail = (newRef1); } }; prev = newEntries[newRef1.Ref1Index]._ref2._prev; if (prev == oldRef1) { prev = newRef1; } next = newEntries[newRef1.Ref1Index]._ref2._next; Dbg.Assert(next != oldRef1, "next != oldRef1"); { if ((prev).IsRef1) { (_pages[((prev).PageIndex)]._entries)[(prev).Ref1Index]._ref1._next = (newRef2); } else if ((prev).IsRef2) { (_pages[((prev).PageIndex)]._entries)[(prev).Ref2Index]._ref2._next = (newRef2); } else { _lastRefHead = (newRef2); } }; { if ((next).IsRef1) { (_pages[((next).PageIndex)]._entries)[(next).Ref1Index]._ref1._prev = (newRef2); } else if ((next).IsRef2) { 
(_pages[((next).PageIndex)]._entries)[(next).Ref2Index]._ref2._prev = (newRef2); } else { _lastRefTail = (newRef2); } }; if (_addRef2Head == oldRef2) { _addRef2Head = newRef2; } } RemovePage(_freeEntryList._tail); Dbg.Validate("CacheValidateUsage", this); } } internal void AddCacheEntry(MemoryCacheEntry cacheEntry) { lock (this) { if (_freeEntryList._head == -1) { Expand(); } UsageEntryRef freeRef1 = GetFreeUsageEntry(); UsageEntryRef freeRef2 = (new UsageEntryRef((freeRef1).PageIndex, -(freeRef1).Ref1Index)); Dbg.Assert(cacheEntry.UsageEntryRef.IsInvalid, "cacheEntry.UsageEntryRef.IsInvalid"); cacheEntry.UsageEntryRef = freeRef1; UsageEntry[] entries = (_pages[(freeRef1.PageIndex)]._entries); int entryIndex = freeRef1.Ref1Index; entries[entryIndex]._cacheEntry = cacheEntry; entries[entryIndex]._utcDate = DateTime.UtcNow; entries[entryIndex]._ref1._prev = UsageEntryRef.INVALID; entries[entryIndex]._ref2._next = _addRef2Head; if (_lastRefHead.IsInvalid) { entries[entryIndex]._ref1._next = freeRef2; entries[entryIndex]._ref2._prev = freeRef1; _lastRefTail = freeRef2; } else { entries[entryIndex]._ref1._next = _lastRefHead; { if ((_lastRefHead).IsRef1) { (_pages[((_lastRefHead).PageIndex)]._entries)[(_lastRefHead).Ref1Index]._ref1._prev = (freeRef1); } else if ((_lastRefHead).IsRef2) { (_pages[((_lastRefHead).PageIndex)]._entries)[(_lastRefHead).Ref2Index]._ref2._prev = (freeRef1); } else { _lastRefTail = (freeRef1); } }; UsageEntryRef next, prev; if (_addRef2Head.IsInvalid) { prev = _lastRefTail; next = UsageEntryRef.INVALID; } else { prev = (_pages[(_addRef2Head.PageIndex)]._entries)[_addRef2Head.Ref2Index]._ref2._prev; next = _addRef2Head; } entries[entryIndex]._ref2._prev = prev; { if ((prev).IsRef1) { (_pages[((prev).PageIndex)]._entries)[(prev).Ref1Index]._ref1._next = (freeRef2); } else if ((prev).IsRef2) { (_pages[((prev).PageIndex)]._entries)[(prev).Ref2Index]._ref2._next = (freeRef2); } else { _lastRefHead = (freeRef2); } }; { if ((next).IsRef1) { (_pages[((next).PageIndex)]._entries)[(next).Ref1Index]._ref1._prev = (freeRef2); } else if ((next).IsRef2) { (_pages[((next).PageIndex)]._entries)[(next).Ref2Index]._ref2._prev = (freeRef2); } else { _lastRefTail = (freeRef2); } }; } _lastRefHead = freeRef1; _addRef2Head = freeRef2; _cEntriesInUse++; Dbg.Trace("CacheUsageAdd", "Added item=" + cacheEntry.Key + ",_bucket=" + _bucket + ",ref=" + freeRef1); Dbg.Validate("CacheValidateUsage", this); Dbg.Dump("CacheUsageAdd", this); } } private void RemoveEntryFromLastRefList(UsageEntryRef entryRef) { Dbg.Assert(entryRef.IsRef1, "entryRef.IsRef1"); UsageEntry[] entries = (_pages[(entryRef.PageIndex)]._entries); int entryIndex = entryRef.Ref1Index; UsageEntryRef prev = entries[entryIndex]._ref1._prev; UsageEntryRef next = entries[entryIndex]._ref1._next; { if ((prev).IsRef1) { (_pages[((prev).PageIndex)]._entries)[(prev).Ref1Index]._ref1._next = (next); } else if ((prev).IsRef2) { (_pages[((prev).PageIndex)]._entries)[(prev).Ref2Index]._ref2._next = (next); } else { _lastRefHead = (next); } }; { if ((next).IsRef1) { (_pages[((next).PageIndex)]._entries)[(next).Ref1Index]._ref1._prev = (prev); } else if ((next).IsRef2) { (_pages[((next).PageIndex)]._entries)[(next).Ref2Index]._ref2._prev = (prev); } else { _lastRefTail = (prev); } }; prev = entries[entryIndex]._ref2._prev; next = entries[entryIndex]._ref2._next; UsageEntryRef entryRef2 = (new UsageEntryRef((entryRef).PageIndex, -(entryRef).Ref1Index)); { if ((prev).IsRef1) { (_pages[((prev).PageIndex)]._entries)[(prev).Ref1Index]._ref1._next = 
(next); } else if ((prev).IsRef2) { (_pages[((prev).PageIndex)]._entries)[(prev).Ref2Index]._ref2._next = (next); } else { _lastRefHead = (next); } }; { if ((next).IsRef1) { (_pages[((next).PageIndex)]._entries)[(next).Ref1Index]._ref1._prev = (prev); } else if ((next).IsRef2) { (_pages[((next).PageIndex)]._entries)[(next).Ref2Index]._ref2._prev = (prev); } else { _lastRefTail = (prev); } }; if (_addRef2Head == entryRef2) { _addRef2Head = next; } } internal void RemoveCacheEntry(MemoryCacheEntry cacheEntry) { lock (this) { UsageEntryRef entryRef = cacheEntry.UsageEntryRef; if (entryRef.IsInvalid) return; UsageEntry[] entries = (_pages[(entryRef.PageIndex)]._entries); int entryIndex = entryRef.Ref1Index; cacheEntry.UsageEntryRef = UsageEntryRef.INVALID; entries[entryIndex]._cacheEntry = null; RemoveEntryFromLastRefList(entryRef); AddUsageEntryToFreeList(entryRef); Reduce(); Dbg.Trace("CacheUsageRemove", "Removed item=" + cacheEntry.Key + ",_bucket=" + _bucket + ",ref=" + entryRef); Dbg.Validate("CacheValidateUsage", this); Dbg.Dump("CacheUsageRemove", this); } } internal void UpdateCacheEntry(MemoryCacheEntry cacheEntry) { lock (this) { UsageEntryRef entryRef = cacheEntry.UsageEntryRef; if (entryRef.IsInvalid) return; UsageEntry[] entries = (_pages[(entryRef.PageIndex)]._entries); int entryIndex = entryRef.Ref1Index; UsageEntryRef entryRef2 = (new UsageEntryRef((entryRef).PageIndex, -(entryRef).Ref1Index)); UsageEntryRef prev = entries[entryIndex]._ref2._prev; UsageEntryRef next = entries[entryIndex]._ref2._next; { if ((prev).IsRef1) { (_pages[((prev).PageIndex)]._entries)[(prev).Ref1Index]._ref1._next = (next); } else if ((prev).IsRef2) { (_pages[((prev).PageIndex)]._entries)[(prev).Ref2Index]._ref2._next = (next); } else { _lastRefHead = (next); } }; { if ((next).IsRef1) { (_pages[((next).PageIndex)]._entries)[(next).Ref1Index]._ref1._prev = (prev); } else if ((next).IsRef2) { (_pages[((next).PageIndex)]._entries)[(next).Ref2Index]._ref2._prev = (prev); } else { _lastRefTail = (prev); } }; if (_addRef2Head == entryRef2) { _addRef2Head = next; } entries[entryIndex]._ref2 = entries[entryIndex]._ref1; prev = entries[entryIndex]._ref2._prev; next = entries[entryIndex]._ref2._next; { if ((prev).IsRef1) { (_pages[((prev).PageIndex)]._entries)[(prev).Ref1Index]._ref1._next = (entryRef2); } else if ((prev).IsRef2) { (_pages[((prev).PageIndex)]._entries)[(prev).Ref2Index]._ref2._next = (entryRef2); } else { _lastRefHead = (entryRef2); } }; { if ((next).IsRef1) { (_pages[((next).PageIndex)]._entries)[(next).Ref1Index]._ref1._prev = (entryRef2); } else if ((next).IsRef2) { (_pages[((next).PageIndex)]._entries)[(next).Ref2Index]._ref2._prev = (entryRef2); } else { _lastRefTail = (entryRef2); } }; entries[entryIndex]._ref1._prev = UsageEntryRef.INVALID; entries[entryIndex]._ref1._next = _lastRefHead; { if ((_lastRefHead).IsRef1) { (_pages[((_lastRefHead).PageIndex)]._entries)[(_lastRefHead).Ref1Index]._ref1._prev = (entryRef); } else if ((_lastRefHead).IsRef2) { (_pages[((_lastRefHead).PageIndex)]._entries)[(_lastRefHead).Ref2Index]._ref2._prev = (entryRef); } else { _lastRefTail = (entryRef); } }; _lastRefHead = entryRef; Dbg.Trace("CacheUsageUpdate", "Updated item=" + cacheEntry.Key + ",_bucket=" + _bucket + ",ref=" + entryRef); Dbg.Validate("CacheValidateUsage", this); Dbg.Dump("CacheUsageUpdate", this); } } internal int FlushUnderUsedItems(int maxFlush, bool force) { if (_cEntriesInUse == 0) return 0; Dbg.Assert(maxFlush > 0, "maxFlush is not greater than 0, instead is " + maxFlush); 
Dbg.Assert(_cEntriesInFlush == 0, "_cEntriesInFlush == 0"); UsageEntryRef inFlushHead = UsageEntryRef.INVALID; UsageEntryRef prev, prevNext; DateTime utcDate; UsageEntry[] entries; int entryIndex; MemoryCacheEntry cacheEntry; int flushed = 0; try { _cacheUsage.MemoryCacheStore.BlockInsert(); lock (this) { Dbg.Assert(_blockReduce == false, "_blockReduce == false"); if (_cEntriesInUse == 0) return 0; DateTime utcNow = DateTime.UtcNow; for (prev = _lastRefTail; _cEntriesInFlush < maxFlush && !prev.IsInvalid; prev = prevNext) { Dbg.Assert(_cEntriesInUse > 0, "_cEntriesInUse > 0"); prevNext = (_pages[(prev.PageIndex)]._entries)[prev.Ref2Index]._ref2._prev; while (prevNext.IsRef1) { prevNext = (_pages[(prevNext.PageIndex)]._entries)[prevNext.Ref1Index]._ref1._prev; } entries = (_pages[(prev.PageIndex)]._entries); entryIndex = prev.Ref2Index; if (!force) { utcDate = entries[entryIndex]._utcDate; Dbg.Assert(utcDate != DateTime.MinValue, "utcDate != DateTime.MinValue"); if (utcNow - utcDate <= CacheUsage.NEWADD_INTERVAL && utcNow >= utcDate) continue; } UsageEntryRef prev1 = (new UsageEntryRef((prev).PageIndex, (prev).Ref2Index)); cacheEntry = entries[entryIndex]._cacheEntry; Dbg.Assert(cacheEntry.UsageEntryRef == prev1, "cacheEntry.UsageEntryRef == prev1"); Dbg.Trace("CacheUsageFlushUnderUsedItem", "Flushing underused items, item=" + cacheEntry.Key + ", bucket=" + _bucket); cacheEntry.UsageEntryRef = UsageEntryRef.INVALID; RemoveEntryFromLastRefList(prev1); entries[entryIndex]._ref1._next = inFlushHead; inFlushHead = prev1; flushed++; _cEntriesInFlush++; } if (flushed == 0) { Dbg.Trace("CacheUsageFlushTotal", "Flush(" + maxFlush + "," + force + ") removed " + flushed + " underused items; Time=" + Dbg.FormatLocalDate(DateTime.Now)); return 0; } _blockReduce = true; } } finally { _cacheUsage.MemoryCacheStore.UnblockInsert(); } Dbg.Assert(!inFlushHead.IsInvalid, "!inFlushHead.IsInvalid"); MemoryCacheStore cacheStore = _cacheUsage.MemoryCacheStore; UsageEntryRef current = inFlushHead; UsageEntryRef next; while (!current.IsInvalid) { entries = (_pages[(current.PageIndex)]._entries); entryIndex = current.Ref1Index; next = entries[entryIndex]._ref1._next; cacheEntry = entries[entryIndex]._cacheEntry; entries[entryIndex]._cacheEntry = null; Dbg.Assert(cacheEntry.UsageEntryRef.IsInvalid, "cacheEntry.UsageEntryRef.IsInvalid"); cacheStore.Remove(cacheEntry, cacheEntry, CacheEntryRemovedReason.Evicted); current = next; } try { _cacheUsage.MemoryCacheStore.BlockInsert(); lock (this) { current = inFlushHead; while (!current.IsInvalid) { entries = (_pages[(current.PageIndex)]._entries); entryIndex = current.Ref1Index; next = entries[entryIndex]._ref1._next; _cEntriesInFlush--; AddUsageEntryToFreeList(current); current = next; } Dbg.Assert(_cEntriesInFlush == 0, "_cEntriesInFlush == 0"); _blockReduce = false; Reduce(); Dbg.Trace("CacheUsageFlushTotal", "Flush(" + maxFlush + "," + force + ") removed " + flushed + " underused items; Time=" + Dbg.FormatLocalDate(DateTime.Now)); Dbg.Validate("CacheValidateUsage", this); Dbg.Dump("CacheUsageFlush", this); } } finally { _cacheUsage.MemoryCacheStore.UnblockInsert(); } return flushed; } } internal class CacheUsage { internal static readonly TimeSpan NEWADD_INTERVAL = new TimeSpan(0, 0, 10); internal static readonly TimeSpan CORRELATED_REQUEST_TIMEOUT = new TimeSpan(0, 0, 1); internal static readonly TimeSpan MIN_LIFETIME_FOR_USAGE = NEWADD_INTERVAL; private const byte NUMBUCKETS = 1; private const int MAX_REMOVE = 1024; private readonly MemoryCacheStore _cacheStore; 
internal readonly UsageBucket[] _buckets; private int _inFlush; internal CacheUsage(MemoryCacheStore cacheStore) { _cacheStore = cacheStore; _buckets = new UsageBucket[NUMBUCKETS]; for (byte b = 0; b < _buckets.Length; b++) { _buckets[b] = new UsageBucket(this, b); } } internal MemoryCacheStore MemoryCacheStore { get { return _cacheStore; } } internal void Add(MemoryCacheEntry cacheEntry) { byte bucket = cacheEntry.UsageBucket; Dbg.Assert(bucket != 0xff, "bucket != 0xff"); _buckets[bucket].AddCacheEntry(cacheEntry); } internal void Remove(MemoryCacheEntry cacheEntry) { byte bucket = cacheEntry.UsageBucket; if (bucket != 0xff) { _buckets[bucket].RemoveCacheEntry(cacheEntry); } } [SuppressMessage("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode", Justification = "Grandfathered suppression from original caching code checkin")] internal void Update(MemoryCacheEntry cacheEntry) { byte bucket = cacheEntry.UsageBucket; if (bucket != 0xff) { _buckets[bucket].UpdateCacheEntry(cacheEntry); } } internal int FlushUnderUsedItems(int toFlush) { int flushed = 0; if (Interlocked.Exchange(ref _inFlush, 1) == 0) { try { foreach (UsageBucket usageBucket in _buckets) { int flushedOne = usageBucket.FlushUnderUsedItems(toFlush - flushed, false); flushed += flushedOne; if (flushed >= toFlush) break; } if (flushed < toFlush) { foreach (UsageBucket usageBucket in _buckets) { int flushedOne = usageBucket.FlushUnderUsedItems(toFlush - flushed, true); flushed += flushedOne; if (flushed >= toFlush) break; } } } finally { Interlocked.Exchange(ref _inFlush, 0); } } return flushed; } } }
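Illustrative aside: UsageEntryRef above packs a 24-bit page index and a signed 8-bit entry index into one uint; a positive entry index addresses the entry's "ref1" link, a negative one its "ref2" link, and zero marks the invalid ref. A minimal sketch of that packing, with type and member names local to the sketch rather than the System.Runtime.Caching internals:

// Illustrative only: same bit layout as UsageEntryRef above (8-bit entry, 24-bit page).
using System;

readonly struct PackedRef
{
    const uint EntryMask = 0x000000ffu;
    const int PageShift = 8;

    readonly uint _ref;

    // entryIndex > 0 selects the ref1 slot, < 0 the ref2 slot; 0 is reserved for the invalid ref.
    public PackedRef(int pageIndex, int entryIndex)
        => _ref = ((uint)pageIndex << PageShift) | ((uint)entryIndex & EntryMask);

    public int PageIndex => (int)(_ref >> PageShift);
    public int Ref1Index => (sbyte)(_ref & EntryMask);   // positive when this is a ref1
    public int Ref2Index => -(sbyte)(_ref & EntryMask);  // positive when this is a ref2
    public bool IsRef1 => (sbyte)(_ref & EntryMask) > 0;
    public bool IsRef2 => (sbyte)(_ref & EntryMask) < 0;
    public bool IsInvalid => _ref == 0;
}

static class PackedRefDemo
{
    static void Main()
    {
        var ref1 = new PackedRef(pageIndex: 3, entryIndex: 5);
        var ref2 = new PackedRef(pageIndex: 3, entryIndex: -5); // same slot, ref2 side
        Console.WriteLine($"{ref1.PageIndex} {ref1.Ref1Index} {ref1.IsRef1}"); // 3 5 True
        Console.WriteLine($"{ref2.PageIndex} {ref2.Ref2Index} {ref2.IsRef2}"); // 3 5 True
    }
}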
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ namespace NPOI.HSSF.Record.Aggregates { using System; using System.Text; using System.Collections; using NPOI.HSSF.Model; using NPOI.HSSF.Record; using NPOI.HSSF.Util; using NPOI.SS.UserModel; using NPOI.Util; using System.Collections.Generic; /** * Groups the page settings records for a worksheet.<p/> * * See OOO excelfileformat.pdf sec 4.4 'Page Settings Block' * * @author Josh Micich */ internal class PageSettingsBlock : RecordAggregate { // Every one of these component records is optional // (The whole PageSettingsBlock may not be present) private PageBreakRecord _rowBreaksRecord; private PageBreakRecord _columnBreaksRecord; private HeaderRecord header; private FooterRecord footer; private HCenterRecord _hCenter; private VCenterRecord _vCenter; private LeftMarginRecord _leftMargin; private RightMarginRecord _rightMargin; private TopMarginRecord _topMargin; private BottomMarginRecord _bottomMargin; // fix warning CS0169 "never used": private Record _pls; private PrintSetupRecord printSetup; private Record _bitmap; private HeaderFooterRecord _headerFooter; private List<HeaderFooterRecord> _sviewHeaderFooters = new List<HeaderFooterRecord>(); private List<PLSAggregate> _plsRecords; private Record _printSize; public PageSettingsBlock(RecordStream rs) { _plsRecords = new List<PLSAggregate>(); while (ReadARecord(rs)) ; } /** * Creates a PageSettingsBlock with default settings */ public PageSettingsBlock() { _plsRecords = new List<PLSAggregate>(); _rowBreaksRecord = new HorizontalPageBreakRecord(); _columnBreaksRecord = new VerticalPageBreakRecord(); header = new HeaderRecord(string.Empty); footer = new FooterRecord(string.Empty); _hCenter = CreateHCenter(); _vCenter = CreateVCenter(); printSetup = CreatePrintSetup(); } /** * @return <c>true</c> if the specified Record sid is one belonging to the * 'Page Settings Block'. 
*/ public static bool IsComponentRecord(int sid) { switch (sid) { case HorizontalPageBreakRecord.sid: case VerticalPageBreakRecord.sid: case HeaderRecord.sid: case FooterRecord.sid: case HCenterRecord.sid: case VCenterRecord.sid: case LeftMarginRecord.sid: case RightMarginRecord.sid: case TopMarginRecord.sid: case BottomMarginRecord.sid: case UnknownRecord.PLS_004D: case PrintSetupRecord.sid: case UnknownRecord.BITMAP_00E9: case UnknownRecord.PRINTSIZE_0033: case HeaderFooterRecord.sid: // extra header/footer settings supported by Excel 2007 return true; } return false; } private bool ReadARecord(RecordStream rs) { switch (rs.PeekNextSid()) { case HorizontalPageBreakRecord.sid: CheckNotPresent(_rowBreaksRecord); _rowBreaksRecord = (PageBreakRecord)rs.GetNext(); break; case VerticalPageBreakRecord.sid: CheckNotPresent(_columnBreaksRecord); _columnBreaksRecord = (PageBreakRecord)rs.GetNext(); break; case HeaderRecord.sid: CheckNotPresent(header); header = (HeaderRecord)rs.GetNext(); break; case FooterRecord.sid: CheckNotPresent(footer); footer = (FooterRecord)rs.GetNext(); break; case HCenterRecord.sid: CheckNotPresent(_hCenter); _hCenter = (HCenterRecord)rs.GetNext(); break; case VCenterRecord.sid: CheckNotPresent(_vCenter); _vCenter = (VCenterRecord)rs.GetNext(); break; case LeftMarginRecord.sid: CheckNotPresent(_leftMargin); _leftMargin = (LeftMarginRecord)rs.GetNext(); break; case RightMarginRecord.sid: CheckNotPresent(_rightMargin); _rightMargin = (RightMarginRecord)rs.GetNext(); break; case TopMarginRecord.sid: CheckNotPresent(_topMargin); _topMargin = (TopMarginRecord)rs.GetNext(); break; case BottomMarginRecord.sid: CheckNotPresent(_bottomMargin); _bottomMargin = (BottomMarginRecord)rs.GetNext(); break; case UnknownRecord.PLS_004D: // PLS _plsRecords.Add(new PLSAggregate(rs)); break; case PrintSetupRecord.sid: CheckNotPresent(printSetup); printSetup = (PrintSetupRecord)rs.GetNext(); break; case UnknownRecord.BITMAP_00E9: // BITMAP CheckNotPresent(_bitmap); _bitmap = rs.GetNext(); break; case UnknownRecord.PRINTSIZE_0033: CheckNotPresent(_printSize); _printSize = rs.GetNext(); break; case HeaderFooterRecord.sid: HeaderFooterRecord hf = (HeaderFooterRecord)rs.GetNext(); if (hf.IsCurrentSheet) _headerFooter = hf; else _sviewHeaderFooters.Add(hf); break; default: // all other record types are not part of the PageSettingsBlock return false; } return true; } private void CheckNotPresent(Record rec) { if (rec != null) { throw new RecordFormatException("Duplicate PageSettingsBlock record (sid=0x" + StringUtil.ToHexString(rec.Sid) + ")"); } } private PageBreakRecord RowBreaksRecord { get { if (_rowBreaksRecord == null) { _rowBreaksRecord = new HorizontalPageBreakRecord(); } return _rowBreaksRecord; } } private PageBreakRecord ColumnBreaksRecord { get { if (_columnBreaksRecord == null) { _columnBreaksRecord = new VerticalPageBreakRecord(); } return _columnBreaksRecord; } } public IEnumerator GetEnumerator() { return _plsRecords.GetEnumerator(); } /** * Sets a page break at the indicated column * */ public void SetColumnBreak(int column, int fromRow, int toRow) { this.ColumnBreaksRecord.AddBreak(column, fromRow, toRow); } /** * Removes a page break at the indicated column * */ public void RemoveColumnBreak(int column) { this.ColumnBreaksRecord.RemoveBreak(column); } public override void VisitContainedRecords(RecordVisitor rv) { VisitIfPresent(_rowBreaksRecord, rv); VisitIfPresent(_columnBreaksRecord, rv); // Write out empty header / footer records if these are missing if (header == null) { 
rv.VisitRecord(new HeaderRecord("")); } else { rv.VisitRecord(header); } if (footer == null) { rv.VisitRecord(new FooterRecord("")); } else { rv.VisitRecord(footer); } VisitIfPresent(_hCenter, rv); VisitIfPresent(_vCenter, rv); VisitIfPresent(_leftMargin, rv); VisitIfPresent(_rightMargin, rv); VisitIfPresent(_topMargin, rv); VisitIfPresent(_bottomMargin, rv); foreach (RecordAggregate pls in _plsRecords) { pls.VisitContainedRecords(rv); } VisitIfPresent(printSetup, rv); VisitIfPresent(_bitmap, rv); VisitIfPresent(_printSize, rv); VisitIfPresent(_headerFooter, rv); } private static void VisitIfPresent(Record r, RecordVisitor rv) { if (r != null) { rv.VisitRecord(r); } } private static void VisitIfPresent(PageBreakRecord r, RecordVisitor rv) { if (r != null) { if (r.IsEmpty) { // its OK to not serialize empty page break records return; } rv.VisitRecord(r); } } /** * Creates the HCenter Record and sets it to false (don't horizontally center) */ private static HCenterRecord CreateHCenter() { HCenterRecord retval = new HCenterRecord(); retval.HCenter = (false); return retval; } /** * Creates the VCenter Record and sets it to false (don't horizontally center) */ private static VCenterRecord CreateVCenter() { VCenterRecord retval = new VCenterRecord(); retval.VCenter = (false); return retval; } /** * Creates the PrintSetup Record and sets it to defaults and marks it invalid * @see org.apache.poi.hssf.record.PrintSetupRecord * @see org.apache.poi.hssf.record.Record * @return record containing a PrintSetupRecord */ private static PrintSetupRecord CreatePrintSetup() { PrintSetupRecord retval = new PrintSetupRecord(); retval.PaperSize = ((short)1); retval.Scale = ((short)100); retval.PageStart = ((short)1); retval.FitWidth = ((short)1); retval.FitHeight = ((short)1); retval.Options = ((short)2); retval.HResolution = ((short)300); retval.VResolution = ((short)300); retval.HeaderMargin = (0.5); retval.FooterMargin = (0.5); retval.Copies = ((short)1); return retval; } /** * Returns the HeaderRecord. * @return HeaderRecord for the sheet. */ public HeaderRecord Header { get { return header; } set { header = value; } } /** * Returns the FooterRecord. * @return FooterRecord for the sheet. */ public FooterRecord Footer { get { return footer; } set { footer = value; } } /** * Returns the PrintSetupRecord. * @return PrintSetupRecord for the sheet. */ public PrintSetupRecord PrintSetup { get { return printSetup; } set { printSetup = value; } } private Margin GetMarginRec(MarginType margin) { switch (margin) { case MarginType.LeftMargin: return _leftMargin; case MarginType.RightMargin: return _rightMargin; case MarginType.TopMargin: return _topMargin; case MarginType.BottomMargin: return _bottomMargin; default: throw new InvalidOperationException("Unknown margin constant: " + (short)margin); } } /** * Gets the size of the margin in inches. * @param margin which margin to Get * @return the size of the margin */ public double GetMargin(MarginType margin) { Margin m = GetMarginRec(margin); if (m != null) { return m.Margin; } else { switch (margin) { case MarginType.LeftMargin: return .75; case MarginType.RightMargin: return .75; case MarginType.TopMargin: return 1.0; case MarginType.BottomMargin: return 1.0; } throw new InvalidOperationException("Unknown margin constant: " + margin); } } /** * Sets the size of the margin in inches. 
* @param margin which margin to Get * @param size the size of the margin */ public void SetMargin(MarginType margin, double size) { Margin m = GetMarginRec(margin); if (m == null) { switch (margin) { case MarginType.LeftMargin: _leftMargin = new LeftMarginRecord(); m = _leftMargin; break; case MarginType.RightMargin: _rightMargin = new RightMarginRecord(); m = _rightMargin; break; case MarginType.TopMargin: _topMargin = new TopMarginRecord(); m = _topMargin; break; case MarginType.BottomMargin: _bottomMargin = new BottomMarginRecord(); m = _bottomMargin; break; default: throw new InvalidOperationException("Unknown margin constant: " + margin); } } m.Margin= size; } /** * Shifts all the page breaks in the range "count" number of rows/columns * @param breaks The page record to be shifted * @param start Starting "main" value to shift breaks * @param stop Ending "main" value to shift breaks * @param count number of units (rows/columns) to shift by */ private static void ShiftBreaks(PageBreakRecord breaks, int start, int stop, int count) { IEnumerator iterator = breaks.GetBreaksEnumerator(); IList shiftedBreak = new ArrayList(); while(iterator.MoveNext()) { PageBreakRecord.Break breakItem = (PageBreakRecord.Break)iterator.Current; int breakLocation = breakItem.main; bool inStart = (breakLocation >= start); bool inEnd = (breakLocation <= stop); if(inStart && inEnd) shiftedBreak.Add(breakItem); } iterator = shiftedBreak.GetEnumerator(); while (iterator.MoveNext()) { PageBreakRecord.Break breakItem = (PageBreakRecord.Break)iterator.Current; breaks.RemoveBreak(breakItem.main); breaks.AddBreak((short)(breakItem.main+count), breakItem.subFrom, breakItem.subTo); } } /** * Sets a page break at the indicated row * @param row */ public void SetRowBreak(int row, short fromCol, short toCol) { this.RowBreaksRecord.AddBreak((short)row, fromCol, toCol); } /** * Removes a page break at the indicated row * @param row */ public void RemoveRowBreak(int row) { if (this.RowBreaksRecord.GetBreaks().Length < 1) throw new ArgumentException("Sheet does not define any row breaks"); this.RowBreaksRecord.RemoveBreak((short)row); } /** * Queries if the specified row has a page break * @param row * @return true if the specified row has a page break */ public bool IsRowBroken(int row) { return this.RowBreaksRecord.GetBreak(row) != null; } /** * Queries if the specified column has a page break * * @return <c>true</c> if the specified column has a page break */ public bool IsColumnBroken(int column) { return this.ColumnBreaksRecord.GetBreak(column) != null; } /** * Shifts the horizontal page breaks for the indicated count * @param startingRow * @param endingRow * @param count */ public void ShiftRowBreaks(int startingRow, int endingRow, int count) { ShiftBreaks(this.RowBreaksRecord, startingRow, endingRow, count); } /** * Shifts the vertical page breaks for the indicated count * @param startingCol * @param endingCol * @param count */ public void ShiftColumnBreaks(short startingCol, short endingCol, short count) { ShiftBreaks(this.ColumnBreaksRecord, startingCol, endingCol, count); } /** * @return all the horizontal page breaks, never <c>null</c> */ public int[] RowBreaks { get { return this.RowBreaksRecord.GetBreaks(); } } /** * @return the number of row page breaks */ public int NumRowBreaks { get { return this.RowBreaksRecord.NumBreaks; } } /** * @return all the column page breaks, never <c>null</c> */ public int[] ColumnBreaks { get { return this.ColumnBreaksRecord.GetBreaks(); } } /** * @return the number of column page 
breaks
         */
        public int NumColumnBreaks
        {
            get { return this.ColumnBreaksRecord.NumBreaks; }
        }
        public VCenterRecord VCenter
        {
            get { return _vCenter; }
        }
        public HCenterRecord HCenter
        {
            get { return _hCenter; }
        }
        /// <summary>
        /// HEADERFOOTER is new in 2007. Some apps seem to have scattered this record long after
        /// the PageSettingsBlock where it belongs.
        /// </summary>
        /// <param name="rec"></param>
        public void AddLateHeaderFooter(HeaderFooterRecord rec)
        {
            if (_headerFooter != null)
            {
                // A duplicate HEADERFOOTER record is an invalid-state condition, not a null argument.
                throw new InvalidOperationException("This page settings block already has a header/footer record");
            }
            if (rec.Sid != UnknownRecord.HEADER_FOOTER_089C)
            {
                throw new RecordFormatException("Unexpected header-footer record sid: 0x" + StringUtil.ToHexString(rec.Sid));
            }
            _headerFooter = rec;
        }
        /// <summary>
        /// Reads PageSettingsBlock records from the supplied RecordStream until the first
        /// non-PageSettingsBlock record is encountered. As each record is read, it is
        /// incorporated into this PageSettingsBlock.
        /// </summary>
        /// <param name="rs"></param>
        public void AddLateRecords(RecordStream rs)
        {
            while (true)
            {
                if (!ReadARecord(rs))
                {
                    break;
                }
            }
        }
        public void PositionRecords(List<RecordBase> sheetRecords)
        {
            // Take a copy to loop over, so we can update the real one
            // without concurrency issues
            List<HeaderFooterRecord> hfRecordsToIterate = new List<HeaderFooterRecord>(_sviewHeaderFooters);

            // loop through HeaderFooterRecord records having not-empty GUID and match them with
            // CustomViewSettingsRecordAggregate blocks having UserSViewBegin with the same GUID
            foreach (HeaderFooterRecord hf in hfRecordsToIterate)
            {
                foreach (RecordBase rb in sheetRecords)
                {
                    if (rb is CustomViewSettingsRecordAggregate)
                    {
                        CustomViewSettingsRecordAggregate cv = (CustomViewSettingsRecordAggregate)rb;
                        cv.VisitContainedRecords(new CustomRecordVisitor1(cv, hf, _sviewHeaderFooters));
                    }
                }
            }
        }
        private class CustomRecordVisitor1 : RecordVisitor
        {
            CustomViewSettingsRecordAggregate _cv;
            HeaderFooterRecord _hf;
            List<HeaderFooterRecord> _sviewHeaderFooters;

            public CustomRecordVisitor1(CustomViewSettingsRecordAggregate cv, HeaderFooterRecord hf, List<HeaderFooterRecord> sviewHeaderFooter)
            {
                this._cv = cv;
                this._hf = hf;
                this._sviewHeaderFooters = sviewHeaderFooter;
            }

            #region RecordVisitor Members

            public void VisitRecord(Record r)
            {
                if (r.Sid == UserSViewBegin.sid)
                {
                    byte[] guid1 = ((UserSViewBegin)r).Guid;
                    byte[] guid2 = _hf.Guid;
                    if (Arrays.Equals(guid1, guid2))
                    {
                        _cv.Append(_hf);
                        _sviewHeaderFooters.Remove(_hf);
                    }
                }
            }

            #endregion
        }
    }
}
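// --------------------------------------------------------------------
// Editor's illustrative sketch, not part of the NPOI source above. It
// only exercises the margin and page-break members declared in
// PageSettingsBlock; because the class is internal to NPOI.HSSF, real
// callers reach it indirectly through HSSFSheet. The namespace and the
// NPOI.SS.UserModel using (for MarginType) mirror the file above and
// are assumptions of this sketch.
// --------------------------------------------------------------------
namespace NPOI.HSSF.Record.Aggregates
{
    using NPOI.SS.UserModel;

    internal static class PageSettingsBlockUsageSketch
    {
        internal static void Demonstrate(PageSettingsBlock psb)
        {
            // Margins fall back to defaults (0.75" left/right, 1.0" top/bottom)
            // until SetMargin lazily creates the corresponding margin record.
            double leftBefore = psb.GetMargin(MarginType.LeftMargin);  // 0.75
            psb.SetMargin(MarginType.LeftMargin, 1.25);
            double leftAfter = psb.GetMargin(MarginType.LeftMargin);   // 1.25

            // Row breaks live in the lazily created HorizontalPageBreakRecord.
            psb.SetRowBreak(9, fromCol: 0, toCol: 255);
            bool broken = psb.IsRowBroken(9);                          // true
            psb.RemoveRowBreak(9);
        }
    }
}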
using Microsoft.IdentityModel; using Microsoft.IdentityModel.S2S.Protocols.OAuth2; using Microsoft.IdentityModel.S2S.Tokens; using Microsoft.SharePoint.Client; using Microsoft.SharePoint.Client.EventReceivers; using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Globalization; using System.IdentityModel.Selectors; using System.IdentityModel.Tokens; using System.IO; using System.Linq; using System.Net; using System.Security.Cryptography.X509Certificates; using System.Security.Principal; using System.ServiceModel; using System.Text; using System.Web; using System.Web.Configuration; using System.Web.Script.Serialization; using AudienceRestriction = Microsoft.IdentityModel.Tokens.AudienceRestriction; using AudienceUriValidationFailedException = Microsoft.IdentityModel.Tokens.AudienceUriValidationFailedException; using SecurityTokenHandlerConfiguration = Microsoft.IdentityModel.Tokens.SecurityTokenHandlerConfiguration; using X509SigningCredentials = Microsoft.IdentityModel.SecurityTokenService.X509SigningCredentials; namespace RESTMVCWeb { public static class TokenHelper { #region public fields /// <summary> /// SharePoint principal. /// </summary> public const string SharePointPrincipal = "00000003-0000-0ff1-ce00-000000000000"; /// <summary> /// Lifetime of HighTrust access token, 12 hours. /// </summary> public static readonly TimeSpan HighTrustAccessTokenLifetime = TimeSpan.FromHours(12.0); #endregion public fields #region public methods /// <summary> /// Retrieves the context token string from the specified request by looking for well-known parameter names in the /// POSTed form parameters and the querystring. Returns null if no context token is found. /// </summary> /// <param name="request">HttpRequest in which to look for a context token</param> /// <returns>The context token string</returns> public static string GetContextTokenFromRequest(HttpRequest request) { return GetContextTokenFromRequest(new HttpRequestWrapper(request)); } /// <summary> /// Retrieves the context token string from the specified request by looking for well-known parameter names in the /// POSTed form parameters and the querystring. Returns null if no context token is found. /// </summary> /// <param name="request">HttpRequest in which to look for a context token</param> /// <returns>The context token string</returns> public static string GetContextTokenFromRequest(HttpRequestBase request) { string[] paramNames = { "AppContext", "AppContextToken", "AccessToken", "SPAppToken" }; foreach (string paramName in paramNames) { if (!string.IsNullOrEmpty(request.Form[paramName])) { return request.Form[paramName]; } if (!string.IsNullOrEmpty(request.QueryString[paramName])) { return request.QueryString[paramName]; } } return null; } /// <summary> /// Validate that a specified context token string is intended for this application based on the parameters /// specified in web.config. Parameters used from web.config used for validation include ClientId, /// HostedAppHostNameOverride, HostedAppHostName, ClientSecret, and Realm (if it is specified). If HostedAppHostNameOverride is present, /// it will be used for validation. Otherwise, if the <paramref name="appHostName"/> is not /// null, it is used for validation instead of the web.config's HostedAppHostName. If the token is invalid, an /// exception is thrown. If the token is valid, TokenHelper's static STS metadata url is updated based on the token contents /// and a JsonWebSecurityToken based on the context token is returned. 
/// </summary> /// <param name="contextTokenString">The context token to validate</param> /// <param name="appHostName">The URL authority, consisting of Domain Name System (DNS) host name or IP address and the port number, to use for token audience validation. /// If null, HostedAppHostName web.config setting is used instead. HostedAppHostNameOverride web.config setting, if present, will be used /// for validation instead of <paramref name="appHostName"/> .</param> /// <returns>A JsonWebSecurityToken based on the context token.</returns> public static SharePointContextToken ReadAndValidateContextToken(string contextTokenString, string appHostName = null) { JsonWebSecurityTokenHandler tokenHandler = CreateJsonWebSecurityTokenHandler(); SecurityToken securityToken = tokenHandler.ReadToken(contextTokenString); JsonWebSecurityToken jsonToken = securityToken as JsonWebSecurityToken; SharePointContextToken token = SharePointContextToken.Create(jsonToken); string stsAuthority = (new Uri(token.SecurityTokenServiceUri)).Authority; int firstDot = stsAuthority.IndexOf('.'); GlobalEndPointPrefix = stsAuthority.Substring(0, firstDot); AcsHostUrl = stsAuthority.Substring(firstDot + 1); tokenHandler.ValidateToken(jsonToken); string[] acceptableAudiences; if (!String.IsNullOrEmpty(HostedAppHostNameOverride)) { acceptableAudiences = HostedAppHostNameOverride.Split(';'); } else if (appHostName == null) { acceptableAudiences = new[] { HostedAppHostName }; } else { acceptableAudiences = new[] { appHostName }; } bool validationSuccessful = false; string realm = Realm ?? token.Realm; foreach (var audience in acceptableAudiences) { string principal = GetFormattedPrincipal(ClientId, audience, realm); if (StringComparer.OrdinalIgnoreCase.Equals(token.Audience, principal)) { validationSuccessful = true; break; } } if (!validationSuccessful) { throw new AudienceUriValidationFailedException( String.Format(CultureInfo.CurrentCulture, "\"{0}\" is not the intended audience \"{1}\"", String.Join(";", acceptableAudiences), token.Audience)); } return token; } /// <summary> /// Retrieves an access token from ACS to call the source of the specified context token at the specified /// targetHost. The targetHost must be registered for the principal that sent the context token. /// </summary> /// <param name="contextToken">Context token issued by the intended access token audience</param> /// <param name="targetHost">Url authority of the target principal</param> /// <returns>An access token with an audience matching the context token's source</returns> public static OAuth2AccessTokenResponse GetAccessToken(SharePointContextToken contextToken, string targetHost) { string targetPrincipalName = contextToken.TargetPrincipalName; // Extract the refreshToken from the context token string refreshToken = contextToken.RefreshToken; if (String.IsNullOrEmpty(refreshToken)) { return null; } string targetRealm = Realm ?? contextToken.Realm; return GetAccessToken(refreshToken, targetPrincipalName, targetHost, targetRealm); } /// <summary> /// Uses the specified authorization code to retrieve an access token from ACS to call the specified principal /// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is /// null, the "Realm" setting in web.config will be used instead. 
/// </summary> /// <param name="authorizationCode">Authorization code to exchange for access token</param> /// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param> /// <param name="targetHost">Url authority of the target principal</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <param name="redirectUri">Redirect URI registerd for this app</param> /// <returns>An access token with an audience of the target principal</returns> public static OAuth2AccessTokenResponse GetAccessToken( string authorizationCode, string targetPrincipalName, string targetHost, string targetRealm, Uri redirectUri) { if (targetRealm == null) { targetRealm = Realm; } string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm); string clientId = GetFormattedPrincipal(ClientId, null, targetRealm); // Create request for token. The RedirectUri is null here. This will fail if redirect uri is registered OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithAuthorizationCode( clientId, ClientSecret, authorizationCode, redirectUri, resource); // Get token OAuth2S2SClient client = new OAuth2S2SClient(); OAuth2AccessTokenResponse oauth2Response; try { oauth2Response = client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse; } catch (WebException wex) { using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream())) { string responseText = sr.ReadToEnd(); throw new WebException(wex.Message + " - " + responseText, wex); } } return oauth2Response; } /// <summary> /// Uses the specified refresh token to retrieve an access token from ACS to call the specified principal /// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is /// null, the "Realm" setting in web.config will be used instead. /// </summary> /// <param name="refreshToken">Refresh token to exchange for access token</param> /// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param> /// <param name="targetHost">Url authority of the target principal</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <returns>An access token with an audience of the target principal</returns> public static OAuth2AccessTokenResponse GetAccessToken( string refreshToken, string targetPrincipalName, string targetHost, string targetRealm) { if (targetRealm == null) { targetRealm = Realm; } string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm); string clientId = GetFormattedPrincipal(ClientId, null, targetRealm); OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithRefreshToken(clientId, ClientSecret, refreshToken, resource); // Get token OAuth2S2SClient client = new OAuth2S2SClient(); OAuth2AccessTokenResponse oauth2Response; try { oauth2Response = client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse; } catch (WebException wex) { using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream())) { string responseText = sr.ReadToEnd(); throw new WebException(wex.Message + " - " + responseText, wex); } } return oauth2Response; } /// <summary> /// Retrieves an app-only access token from ACS to call the specified principal /// at the specified targetHost. 
The targetHost must be registered for target principal. If specified realm is /// null, the "Realm" setting in web.config will be used instead. /// </summary> /// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param> /// <param name="targetHost">Url authority of the target principal</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <returns>An access token with an audience of the target principal</returns> public static OAuth2AccessTokenResponse GetAppOnlyAccessToken( string targetPrincipalName, string targetHost, string targetRealm) { if (targetRealm == null) { targetRealm = Realm; } string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm); string clientId = GetFormattedPrincipal(ClientId, HostedAppHostName, targetRealm); OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithClientCredentials(clientId, ClientSecret, resource); oauth2Request.Resource = resource; // Get token OAuth2S2SClient client = new OAuth2S2SClient(); OAuth2AccessTokenResponse oauth2Response; try { oauth2Response = client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse; } catch (WebException wex) { using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream())) { string responseText = sr.ReadToEnd(); throw new WebException(wex.Message + " - " + responseText, wex); } } return oauth2Response; } /// <summary> /// Creates a client context based on the properties of a remote event receiver /// </summary> /// <param name="properties">Properties of a remote event receiver</param> /// <returns>A ClientContext ready to call the web where the event originated</returns> public static ClientContext CreateRemoteEventReceiverClientContext(SPRemoteEventProperties properties) { Uri sharepointUrl; if (properties.ListEventProperties != null) { sharepointUrl = new Uri(properties.ListEventProperties.WebUrl); } else if (properties.ItemEventProperties != null) { sharepointUrl = new Uri(properties.ItemEventProperties.WebUrl); } else if (properties.WebEventProperties != null) { sharepointUrl = new Uri(properties.WebEventProperties.FullUrl); } else { return null; } if (IsHighTrustApp()) { return GetS2SClientContextWithWindowsIdentity(sharepointUrl, null); } return CreateAcsClientContextForUrl(properties, sharepointUrl); } /// <summary> /// Creates a client context based on the properties of an app event /// </summary> /// <param name="properties">Properties of an app event</param> /// <param name="useAppWeb">True to target the app web, false to target the host web</param> /// <returns>A ClientContext ready to call the app web or the parent web</returns> public static ClientContext CreateAppEventClientContext(SPRemoteEventProperties properties, bool useAppWeb) { if (properties.AppEventProperties == null) { return null; } Uri sharepointUrl = useAppWeb ? 
properties.AppEventProperties.AppWebFullUrl : properties.AppEventProperties.HostWebFullUrl; if (IsHighTrustApp()) { return GetS2SClientContextWithWindowsIdentity(sharepointUrl, null); } return CreateAcsClientContextForUrl(properties, sharepointUrl); } /// <summary> /// Retrieves an access token from ACS using the specified authorization code, and uses that access token to /// create a client context /// </summary> /// <param name="targetUrl">Url of the target SharePoint site</param> /// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param> /// <param name="redirectUri">Redirect URI registerd for this app</param> /// <returns>A ClientContext ready to call targetUrl with a valid access token</returns> public static ClientContext GetClientContextWithAuthorizationCode( string targetUrl, string authorizationCode, Uri redirectUri) { return GetClientContextWithAuthorizationCode(targetUrl, SharePointPrincipal, authorizationCode, GetRealmFromTargetUrl(new Uri(targetUrl)), redirectUri); } /// <summary> /// Retrieves an access token from ACS using the specified authorization code, and uses that access token to /// create a client context /// </summary> /// <param name="targetUrl">Url of the target SharePoint site</param> /// <param name="targetPrincipalName">Name of the target SharePoint principal</param> /// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <param name="redirectUri">Redirect URI registerd for this app</param> /// <returns>A ClientContext ready to call targetUrl with a valid access token</returns> public static ClientContext GetClientContextWithAuthorizationCode( string targetUrl, string targetPrincipalName, string authorizationCode, string targetRealm, Uri redirectUri) { Uri targetUri = new Uri(targetUrl); string accessToken = GetAccessToken(authorizationCode, targetPrincipalName, targetUri.Authority, targetRealm, redirectUri).AccessToken; return GetClientContextWithAccessToken(targetUrl, accessToken); } /// <summary> /// Uses the specified access token to create a client context /// </summary> /// <param name="targetUrl">Url of the target SharePoint site</param> /// <param name="accessToken">Access token to be used when calling the specified targetUrl</param> /// <returns>A ClientContext ready to call targetUrl with the specified access token</returns> public static ClientContext GetClientContextWithAccessToken(string targetUrl, string accessToken) { ClientContext clientContext = new ClientContext(targetUrl); clientContext.AuthenticationMode = ClientAuthenticationMode.Anonymous; clientContext.FormDigestHandlingEnabled = false; clientContext.ExecutingWebRequest += delegate(object oSender, WebRequestEventArgs webRequestEventArgs) { webRequestEventArgs.WebRequestExecutor.RequestHeaders["Authorization"] = "Bearer " + accessToken; }; return clientContext; } /// <summary> /// Retrieves an access token from ACS using the specified context token, and uses that access token to create /// a client context /// </summary> /// <param name="targetUrl">Url of the target SharePoint site</param> /// <param name="contextTokenString">Context token received from the target SharePoint site</param> /// <param name="appHostUrl">Url authority of the hosted app. 
If this is null, the value in the HostedAppHostName /// of web.config will be used instead</param> /// <returns>A ClientContext ready to call targetUrl with a valid access token</returns> public static ClientContext GetClientContextWithContextToken( string targetUrl, string contextTokenString, string appHostUrl) { SharePointContextToken contextToken = ReadAndValidateContextToken(contextTokenString, appHostUrl); Uri targetUri = new Uri(targetUrl); string accessToken = GetAccessToken(contextToken, targetUri.Authority).AccessToken; return GetClientContextWithAccessToken(targetUrl, accessToken); } /// <summary> /// Returns the SharePoint url to which the app should redirect the browser to request consent and get back /// an authorization code. /// </summary> /// <param name="contextUrl">Absolute Url of the SharePoint site</param> /// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format /// (e.g. "Web.Read Site.Write")</param> /// <returns>Url of the SharePoint site's OAuth authorization page</returns> public static string GetAuthorizationUrl(string contextUrl, string scope) { return string.Format( "{0}{1}?IsDlg=1&client_id={2}&scope={3}&response_type=code", EnsureTrailingSlash(contextUrl), AuthorizationPage, ClientId, scope); } /// <summary> /// Returns the SharePoint url to which the app should redirect the browser to request consent and get back /// an authorization code. /// </summary> /// <param name="contextUrl">Absolute Url of the SharePoint site</param> /// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format /// (e.g. "Web.Read Site.Write")</param> /// <param name="redirectUri">Uri to which SharePoint should redirect the browser to after consent is /// granted</param> /// <returns>Url of the SharePoint site's OAuth authorization page</returns> public static string GetAuthorizationUrl(string contextUrl, string scope, string redirectUri) { return string.Format( "{0}{1}?IsDlg=1&client_id={2}&scope={3}&response_type=code&redirect_uri={4}", EnsureTrailingSlash(contextUrl), AuthorizationPage, ClientId, scope, redirectUri); } /// <summary> /// Returns the SharePoint url to which the app should redirect the browser to request a new context token. /// </summary> /// <param name="contextUrl">Absolute Url of the SharePoint site</param> /// <param name="redirectUri">Uri to which SharePoint should redirect the browser to with a context token</param> /// <returns>Url of the SharePoint site's context token redirect page</returns> public static string GetAppContextTokenRequestUrl(string contextUrl, string redirectUri) { return string.Format( "{0}{1}?client_id={2}&redirect_uri={3}", EnsureTrailingSlash(contextUrl), RedirectPage, ClientId, redirectUri); } /// <summary> /// Retrieves an S2S access token signed by the application's private certificate on behalf of the specified /// WindowsIdentity and intended for the SharePoint at the targetApplicationUri. If no Realm is specified in /// web.config, an auth challenge will be issued to the targetApplicationUri to discover it. 
/// </summary> /// <param name="targetApplicationUri">Url of the target SharePoint site</param> /// <param name="identity">Windows identity of the user on whose behalf to create the access token</param> /// <returns>An access token with an audience of the target principal</returns> public static string GetS2SAccessTokenWithWindowsIdentity( Uri targetApplicationUri, WindowsIdentity identity) { string realm = string.IsNullOrEmpty(Realm) ? GetRealmFromTargetUrl(targetApplicationUri) : Realm; JsonWebTokenClaim[] claims = identity != null ? GetClaimsWithWindowsIdentity(identity) : null; return GetS2SAccessTokenWithClaims(targetApplicationUri.Authority, realm, claims); } /// <summary> /// Retrieves an S2S client context with an access token signed by the application's private certificate on /// behalf of the specified WindowsIdentity and intended for application at the targetApplicationUri using the /// targetRealm. If no Realm is specified in web.config, an auth challenge will be issued to the /// targetApplicationUri to discover it. /// </summary> /// <param name="targetApplicationUri">Url of the target SharePoint site</param> /// <param name="identity">Windows identity of the user on whose behalf to create the access token</param> /// <returns>A ClientContext using an access token with an audience of the target application</returns> public static ClientContext GetS2SClientContextWithWindowsIdentity( Uri targetApplicationUri, WindowsIdentity identity) { string realm = string.IsNullOrEmpty(Realm) ? GetRealmFromTargetUrl(targetApplicationUri) : Realm; JsonWebTokenClaim[] claims = identity != null ? GetClaimsWithWindowsIdentity(identity) : null; string accessToken = GetS2SAccessTokenWithClaims(targetApplicationUri.Authority, realm, claims); return GetClientContextWithAccessToken(targetApplicationUri.ToString(), accessToken); } /// <summary> /// Get authentication realm from SharePoint /// </summary> /// <param name="targetApplicationUri">Url of the target SharePoint site</param> /// <returns>String representation of the realm GUID</returns> public static string GetRealmFromTargetUrl(Uri targetApplicationUri) { WebRequest request = WebRequest.Create(targetApplicationUri + "/_vti_bin/client.svc"); request.Headers.Add("Authorization: Bearer "); try { using (request.GetResponse()) { } } catch (WebException e) { if (e.Response == null) { return null; } string bearerResponseHeader = e.Response.Headers["WWW-Authenticate"]; if (string.IsNullOrEmpty(bearerResponseHeader)) { return null; } const string bearer = "Bearer realm=\""; int bearerIndex = bearerResponseHeader.IndexOf(bearer, StringComparison.Ordinal); if (bearerIndex < 0) { return null; } int realmIndex = bearerIndex + bearer.Length; if (bearerResponseHeader.Length >= realmIndex + 36) { string targetRealm = bearerResponseHeader.Substring(realmIndex, 36); Guid realmGuid; if (Guid.TryParse(targetRealm, out realmGuid)) { return targetRealm; } } } return null; } /// <summary> /// Determines if this is a high trust app. /// </summary> /// <returns>True if this is a high trust app.</returns> public static bool IsHighTrustApp() { return SigningCredentials != null; } /// <summary> /// Ensures that the specified URL ends with '/' if it is not null or empty. 
/// </summary> /// <param name="url">The url.</param> /// <returns>The url ending with '/' if it is not null or empty.</returns> public static string EnsureTrailingSlash(string url) { if (!string.IsNullOrEmpty(url) && url[url.Length - 1] != '/') { return url + "/"; } return url; } #endregion #region private fields // // Configuration Constants // private const string AuthorizationPage = "_layouts/15/OAuthAuthorize.aspx"; private const string RedirectPage = "_layouts/15/AppRedirect.aspx"; private const string AcsPrincipalName = "00000001-0000-0000-c000-000000000000"; private const string AcsMetadataEndPointRelativeUrl = "metadata/json/1"; private const string S2SProtocol = "OAuth2"; private const string DelegationIssuance = "DelegationIssuance1.0"; private const string NameIdentifierClaimType = JsonWebTokenConstants.ReservedClaims.NameIdentifier; private const string TrustedForImpersonationClaimType = "trustedfordelegation"; private const string ActorTokenClaimType = JsonWebTokenConstants.ReservedClaims.ActorToken; // // Environment Constants // private static string GlobalEndPointPrefix = "accounts"; private static string AcsHostUrl = "accesscontrol.windows.net"; // // Hosted app configuration // private static readonly string ClientId = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientId")) ? WebConfigurationManager.AppSettings.Get("HostedAppName") : WebConfigurationManager.AppSettings.Get("ClientId"); private static readonly string IssuerId = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("IssuerId")) ? ClientId : WebConfigurationManager.AppSettings.Get("IssuerId"); private static readonly string HostedAppHostNameOverride = WebConfigurationManager.AppSettings.Get("HostedAppHostNameOverride"); private static readonly string HostedAppHostName = WebConfigurationManager.AppSettings.Get("HostedAppHostName"); private static readonly string ClientSecret = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientSecret")) ? WebConfigurationManager.AppSettings.Get("HostedAppSigningKey") : WebConfigurationManager.AppSettings.Get("ClientSecret"); private static readonly string SecondaryClientSecret = WebConfigurationManager.AppSettings.Get("SecondaryClientSecret"); private static readonly string Realm = WebConfigurationManager.AppSettings.Get("Realm"); private static readonly string ServiceNamespace = WebConfigurationManager.AppSettings.Get("Realm"); private static readonly string ClientSigningCertificatePath = WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePath"); private static readonly string ClientSigningCertificatePassword = WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePassword"); private static readonly X509Certificate2 ClientCertificate = (string.IsNullOrEmpty(ClientSigningCertificatePath) || string.IsNullOrEmpty(ClientSigningCertificatePassword)) ? null : new X509Certificate2(ClientSigningCertificatePath, ClientSigningCertificatePassword); private static readonly X509SigningCredentials SigningCredentials = (ClientCertificate == null) ? 
null : new X509SigningCredentials(ClientCertificate, SecurityAlgorithms.RsaSha256Signature, SecurityAlgorithms.Sha256Digest); #endregion #region private methods private static ClientContext CreateAcsClientContextForUrl(SPRemoteEventProperties properties, Uri sharepointUrl) { string contextTokenString = properties.ContextToken; if (String.IsNullOrEmpty(contextTokenString)) { return null; } SharePointContextToken contextToken = ReadAndValidateContextToken(contextTokenString, OperationContext.Current.IncomingMessageHeaders.To.Host); string accessToken = GetAccessToken(contextToken, sharepointUrl.Authority).AccessToken; return GetClientContextWithAccessToken(sharepointUrl.ToString(), accessToken); } private static string GetAcsMetadataEndpointUrl() { return Path.Combine(GetAcsGlobalEndpointUrl(), AcsMetadataEndPointRelativeUrl); } private static string GetFormattedPrincipal(string principalName, string hostName, string realm) { if (!String.IsNullOrEmpty(hostName)) { return String.Format(CultureInfo.InvariantCulture, "{0}/{1}@{2}", principalName, hostName, realm); } return String.Format(CultureInfo.InvariantCulture, "{0}@{1}", principalName, realm); } private static string GetAcsPrincipalName(string realm) { return GetFormattedPrincipal(AcsPrincipalName, new Uri(GetAcsGlobalEndpointUrl()).Host, realm); } private static string GetAcsGlobalEndpointUrl() { return String.Format(CultureInfo.InvariantCulture, "https://{0}.{1}/", GlobalEndPointPrefix, AcsHostUrl); } private static JsonWebSecurityTokenHandler CreateJsonWebSecurityTokenHandler() { JsonWebSecurityTokenHandler handler = new JsonWebSecurityTokenHandler(); handler.Configuration = new SecurityTokenHandlerConfiguration(); handler.Configuration.AudienceRestriction = new AudienceRestriction(AudienceUriMode.Never); handler.Configuration.CertificateValidator = X509CertificateValidator.None; List<byte[]> securityKeys = new List<byte[]>(); securityKeys.Add(Convert.FromBase64String(ClientSecret)); if (!string.IsNullOrEmpty(SecondaryClientSecret)) { securityKeys.Add(Convert.FromBase64String(SecondaryClientSecret)); } List<SecurityToken> securityTokens = new List<SecurityToken>(); securityTokens.Add(new MultipleSymmetricKeySecurityToken(securityKeys)); handler.Configuration.IssuerTokenResolver = SecurityTokenResolver.CreateDefaultSecurityTokenResolver( new ReadOnlyCollection<SecurityToken>(securityTokens), false); SymmetricKeyIssuerNameRegistry issuerNameRegistry = new SymmetricKeyIssuerNameRegistry(); foreach (byte[] securitykey in securityKeys) { issuerNameRegistry.AddTrustedIssuer(securitykey, GetAcsPrincipalName(ServiceNamespace)); } handler.Configuration.IssuerNameRegistry = issuerNameRegistry; return handler; } private static string GetS2SAccessTokenWithClaims( string targetApplicationHostName, string targetRealm, IEnumerable<JsonWebTokenClaim> claims) { return IssueToken( ClientId, IssuerId, targetRealm, SharePointPrincipal, targetRealm, targetApplicationHostName, true, claims, claims == null); } private static JsonWebTokenClaim[] GetClaimsWithWindowsIdentity(WindowsIdentity identity) { JsonWebTokenClaim[] claims = new JsonWebTokenClaim[] { new JsonWebTokenClaim(NameIdentifierClaimType, identity.User.Value.ToLower()), new JsonWebTokenClaim("nii", "urn:office:idp:activedirectory") }; return claims; } private static string IssueToken( string sourceApplication, string issuerApplication, string sourceRealm, string targetApplication, string targetRealm, string targetApplicationHostName, bool trustedForDelegation, IEnumerable<JsonWebTokenClaim> 
claims, bool appOnly = false) { if (null == SigningCredentials) { throw new InvalidOperationException("SigningCredentials was not initialized"); } #region Actor token string issuer = string.IsNullOrEmpty(sourceRealm) ? issuerApplication : string.Format("{0}@{1}", issuerApplication, sourceRealm); string nameid = string.IsNullOrEmpty(sourceRealm) ? sourceApplication : string.Format("{0}@{1}", sourceApplication, sourceRealm); string audience = string.Format("{0}/{1}@{2}", targetApplication, targetApplicationHostName, targetRealm); List<JsonWebTokenClaim> actorClaims = new List<JsonWebTokenClaim>(); actorClaims.Add(new JsonWebTokenClaim(JsonWebTokenConstants.ReservedClaims.NameIdentifier, nameid)); if (trustedForDelegation && !appOnly) { actorClaims.Add(new JsonWebTokenClaim(TrustedForImpersonationClaimType, "true")); } // Create token JsonWebSecurityToken actorToken = new JsonWebSecurityToken( issuer: issuer, audience: audience, validFrom: DateTime.UtcNow, validTo: DateTime.UtcNow.Add(HighTrustAccessTokenLifetime), signingCredentials: SigningCredentials, claims: actorClaims); string actorTokenString = new JsonWebSecurityTokenHandler().WriteTokenAsString(actorToken); if (appOnly) { // App-only token is the same as actor token for delegated case return actorTokenString; } #endregion Actor token #region Outer token List<JsonWebTokenClaim> outerClaims = null == claims ? new List<JsonWebTokenClaim>() : new List<JsonWebTokenClaim>(claims); outerClaims.Add(new JsonWebTokenClaim(ActorTokenClaimType, actorTokenString)); JsonWebSecurityToken jsonToken = new JsonWebSecurityToken( nameid, // outer token issuer should match actor token nameid audience, DateTime.UtcNow, DateTime.UtcNow.Add(HighTrustAccessTokenLifetime), outerClaims); string accessToken = new JsonWebSecurityTokenHandler().WriteTokenAsString(jsonToken); #endregion Outer token return accessToken; } #endregion #region AcsMetadataParser // This class is used to get MetaData document from the global STS endpoint. It contains // methods to parse the MetaData document and get endpoints and STS certificate. 
public static class AcsMetadataParser { public static X509Certificate2 GetAcsSigningCert(string realm) { JsonMetadataDocument document = GetMetadataDocument(realm); if (null != document.keys && document.keys.Count > 0) { JsonKey signingKey = document.keys[0]; if (null != signingKey && null != signingKey.keyValue) { return new X509Certificate2(Encoding.UTF8.GetBytes(signingKey.keyValue.value)); } } throw new Exception("Metadata document does not contain ACS signing certificate."); } public static string GetDelegationServiceUrl(string realm) { JsonMetadataDocument document = GetMetadataDocument(realm); JsonEndpoint delegationEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == DelegationIssuance); if (null != delegationEndpoint) { return delegationEndpoint.location; } throw new Exception("Metadata document does not contain Delegation Service endpoint Url"); } private static JsonMetadataDocument GetMetadataDocument(string realm) { string acsMetadataEndpointUrlWithRealm = String.Format(CultureInfo.InvariantCulture, "{0}?realm={1}", GetAcsMetadataEndpointUrl(), realm); byte[] acsMetadata; using (WebClient webClient = new WebClient()) { acsMetadata = webClient.DownloadData(acsMetadataEndpointUrlWithRealm); } string jsonResponseString = Encoding.UTF8.GetString(acsMetadata); JavaScriptSerializer serializer = new JavaScriptSerializer(); JsonMetadataDocument document = serializer.Deserialize<JsonMetadataDocument>(jsonResponseString); if (null == document) { throw new Exception("No metadata document found at the global endpoint " + acsMetadataEndpointUrlWithRealm); } return document; } public static string GetStsUrl(string realm) { JsonMetadataDocument document = GetMetadataDocument(realm); JsonEndpoint s2sEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == S2SProtocol); if (null != s2sEndpoint) { return s2sEndpoint.location; } throw new Exception("Metadata document does not contain STS endpoint url"); } private class JsonMetadataDocument { public string serviceName { get; set; } public List<JsonEndpoint> endpoints { get; set; } public List<JsonKey> keys { get; set; } } private class JsonEndpoint { public string location { get; set; } public string protocol { get; set; } public string usage { get; set; } } private class JsonKeyValue { public string type { get; set; } public string value { get; set; } } private class JsonKey { public string usage { get; set; } public JsonKeyValue keyValue { get; set; } } } #endregion } /// <summary> /// A JsonWebSecurityToken generated by SharePoint to authenticate to a 3rd party application and allow callbacks using a refresh token /// </summary> public class SharePointContextToken : JsonWebSecurityToken { public static SharePointContextToken Create(JsonWebSecurityToken contextToken) { return new SharePointContextToken(contextToken.Issuer, contextToken.Audience, contextToken.ValidFrom, contextToken.ValidTo, contextToken.Claims); } public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims) : base(issuer, audience, validFrom, validTo, claims) { } public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims, SecurityToken issuerToken, JsonWebSecurityToken actorToken) : base(issuer, audience, validFrom, validTo, claims, issuerToken, actorToken) { } public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims, 
SigningCredentials signingCredentials) : base(issuer, audience, validFrom, validTo, claims, signingCredentials) { } public string NameId { get { return GetClaimValue(this, "nameid"); } } /// <summary> /// The principal name portion of the context token's "appctxsender" claim /// </summary> public string TargetPrincipalName { get { string appctxsender = GetClaimValue(this, "appctxsender"); if (appctxsender == null) { return null; } return appctxsender.Split('@')[0]; } } /// <summary> /// The context token's "refreshtoken" claim /// </summary> public string RefreshToken { get { return GetClaimValue(this, "refreshtoken"); } } /// <summary> /// The context token's "CacheKey" claim /// </summary> public string CacheKey { get { string appctx = GetClaimValue(this, "appctx"); if (appctx == null) { return null; } ClientContext ctx = new ClientContext("http://tempuri.org"); Dictionary<string, object> dict = (Dictionary<string, object>)ctx.ParseObjectFromJsonString(appctx); string cacheKey = (string)dict["CacheKey"]; return cacheKey; } } /// <summary> /// The context token's "SecurityTokenServiceUri" claim /// </summary> public string SecurityTokenServiceUri { get { string appctx = GetClaimValue(this, "appctx"); if (appctx == null) { return null; } ClientContext ctx = new ClientContext("http://tempuri.org"); Dictionary<string, object> dict = (Dictionary<string, object>)ctx.ParseObjectFromJsonString(appctx); string securityTokenServiceUri = (string)dict["SecurityTokenServiceUri"]; return securityTokenServiceUri; } } /// <summary> /// The realm portion of the context token's "audience" claim /// </summary> public string Realm { get { string aud = Audience; if (aud == null) { return null; } string tokenRealm = aud.Substring(aud.IndexOf('@') + 1); return tokenRealm; } } private static string GetClaimValue(JsonWebSecurityToken token, string claimType) { if (token == null) { throw new ArgumentNullException("token"); } foreach (JsonWebTokenClaim claim in token.Claims) { if (StringComparer.Ordinal.Equals(claim.ClaimType, claimType)) { return claim.Value; } } return null; } } /// <summary> /// Represents a security token which contains multiple security keys that are generated using symmetric algorithms. /// </summary> public class MultipleSymmetricKeySecurityToken : SecurityToken { /// <summary> /// Initializes a new instance of the MultipleSymmetricKeySecurityToken class. /// </summary> /// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param> public MultipleSymmetricKeySecurityToken(IEnumerable<byte[]> keys) : this(UniqueId.CreateUniqueId(), keys) { } /// <summary> /// Initializes a new instance of the MultipleSymmetricKeySecurityToken class. /// </summary> /// <param name="tokenId">The unique identifier of the security token.</param> /// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param> public MultipleSymmetricKeySecurityToken(string tokenId, IEnumerable<byte[]> keys) { if (keys == null) { throw new ArgumentNullException("keys"); } if (String.IsNullOrEmpty(tokenId)) { throw new ArgumentException("Value cannot be a null or empty string.", "tokenId"); } foreach (byte[] key in keys) { if (key.Length <= 0) { throw new ArgumentException("The key length must be greater then zero.", "keys"); } } id = tokenId; effectiveTime = DateTime.UtcNow; securityKeys = CreateSymmetricSecurityKeys(keys); } /// <summary> /// Gets the unique identifier of the security token. 
/// </summary> public override string Id { get { return id; } } /// <summary> /// Gets the cryptographic keys associated with the security token. /// </summary> public override ReadOnlyCollection<SecurityKey> SecurityKeys { get { return securityKeys.AsReadOnly(); } } /// <summary> /// Gets the first instant in time at which this security token is valid. /// </summary> public override DateTime ValidFrom { get { return effectiveTime; } } /// <summary> /// Gets the last instant in time at which this security token is valid. /// </summary> public override DateTime ValidTo { get { // Never expire return DateTime.MaxValue; } } /// <summary> /// Returns a value that indicates whether the key identifier for this instance can be resolved to the specified key identifier. /// </summary> /// <param name="keyIdentifierClause">A SecurityKeyIdentifierClause to compare to this instance</param> /// <returns>true if keyIdentifierClause is a SecurityKeyIdentifierClause and it has the same unique identifier as the Id property; otherwise, false.</returns> public override bool MatchesKeyIdentifierClause(SecurityKeyIdentifierClause keyIdentifierClause) { if (keyIdentifierClause == null) { throw new ArgumentNullException("keyIdentifierClause"); } // Since this is a symmetric token and we do not have IDs to distinguish tokens, we just check for the // presence of a SymmetricIssuerKeyIdentifier. The actual mapping to the issuer takes place later // when the key is matched to the issuer. if (keyIdentifierClause is SymmetricIssuerKeyIdentifierClause) { return true; } return base.MatchesKeyIdentifierClause(keyIdentifierClause); } #region private members private List<SecurityKey> CreateSymmetricSecurityKeys(IEnumerable<byte[]> keys) { List<SecurityKey> symmetricKeys = new List<SecurityKey>(); foreach (byte[] key in keys) { symmetricKeys.Add(new InMemorySymmetricSecurityKey(key)); } return symmetricKeys; } private string id; private DateTime effectiveTime; private List<SecurityKey> securityKeys; #endregion } }
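// --------------------------------------------------------------------
// Editor's illustrative sketch, not part of the generated TokenHelper
// file above. It shows the usual low-trust (ACS) flow using only
// members defined above: read the context token from the request,
// validate it, exchange it for an access token, and create a CSOM
// ClientContext. "hostWebUrl" is a hypothetical variable holding the
// SPHostUrl value SharePoint passes on the query string.
// --------------------------------------------------------------------
namespace RESTMVCWeb.Examples
{
    using System;
    using System.Web;
    using Microsoft.SharePoint.Client;

    public static class TokenHelperUsageSketch
    {
        public static string ReadWebTitle(HttpRequestBase request, string hostWebUrl)
        {
            // 1. Pull the context token SharePoint POSTed to the app.
            string contextTokenString = TokenHelper.GetContextTokenFromRequest(request);

            // 2. Validate it and exchange its refresh token for an access token.
            SharePointContextToken contextToken =
                TokenHelper.ReadAndValidateContextToken(contextTokenString, request.Url.Authority);
            string accessToken =
                TokenHelper.GetAccessToken(contextToken, new Uri(hostWebUrl).Authority).AccessToken;

            // 3. Use the access token with the client object model.
            using (ClientContext clientContext =
                TokenHelper.GetClientContextWithAccessToken(hostWebUrl, accessToken))
            {
                clientContext.Load(clientContext.Web, w => w.Title);
                clientContext.ExecuteQuery();
                return clientContext.Web.Title;
            }
        }
    }
}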
using System; using System.Data; using System.Data.OleDb; using System.Collections; using System.Configuration; using PCSComUtils.DataAccess; using PCSComUtils.PCSExc; using PCSComUtils.Common; namespace PCSComProduction.DCP.DS { public class PRO_DCOptionDetailDS { public PRO_DCOptionDetailDS() { } private const string THIS = "PCSComProduction.DCP.DS.PRO_DCOptionDetailDS"; /// <summary> /// This method uses to add data to PRO_DCOptionDetail /// </summary> /// <Inputs> /// PRO_DCOptionDetailVO /// </Inputs> /// <Returns> /// void /// </Returns> /// <History> /// Tuesday, August 02, 2005 /// </History> public void Add(object pobjObjectVO) { const string METHOD_NAME = THIS + ".Add()"; OleDbConnection oconPCS =null; OleDbCommand ocmdPCS =null; try { PRO_DCOptionDetailVO objObject = (PRO_DCOptionDetailVO) pobjObjectVO; string strSql = String.Empty; Utils utils = new Utils(); oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); ocmdPCS = new OleDbCommand("", oconPCS); strSql= "INSERT INTO PRO_DCOptionDetail(" + PRO_DCOptionDetailTable.MASTERLOCATIONID_FLD + "," + PRO_DCOptionDetailTable.WORKORDER_FLD + "," + PRO_DCOptionDetailTable.DCOPTIONMASTERID_FLD + ")" + "VALUES(?,?,?)"; ocmdPCS.Parameters.Add(new OleDbParameter(PRO_DCOptionDetailTable.MASTERLOCATIONID_FLD, OleDbType.Integer)); ocmdPCS.Parameters[PRO_DCOptionDetailTable.MASTERLOCATIONID_FLD].Value = objObject.MasterLocationID; ocmdPCS.Parameters.Add(new OleDbParameter(PRO_DCOptionDetailTable.WORKORDER_FLD, OleDbType.Boolean)); ocmdPCS.Parameters[PRO_DCOptionDetailTable.WORKORDER_FLD].Value = objObject.WorkOrder; ocmdPCS.Parameters.Add(new OleDbParameter(PRO_DCOptionDetailTable.DCOPTIONMASTERID_FLD, OleDbType.Integer)); ocmdPCS.Parameters[PRO_DCOptionDetailTable.DCOPTIONMASTERID_FLD].Value = objObject.DCOptionMasterID; ocmdPCS.CommandText = strSql; ocmdPCS.Connection.Open(); ocmdPCS.ExecuteNonQuery(); } catch(OleDbException ex) { if (ex.Errors[1].NativeError == ErrorCode.SQLDUPLICATE_KEYCODE) { throw new PCSDBException(ErrorCode.DUPLICATE_KEY, METHOD_NAME, ex); } else { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } } catch(InvalidOperationException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS!=null) { if (oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } } /// <summary> /// This method uses to add data to PRO_DCOptionDetail /// </summary> /// <Inputs> /// PRO_DCOptionDetailVO /// </Inputs> /// <Returns> /// void /// </Returns> /// <History> /// Tuesday, August 02, 2005 /// </History> public void Delete(int pintID) { const string METHOD_NAME = THIS + ".Delete()"; string strSql = String.Empty; strSql= "DELETE " + PRO_DCOptionDetailTable.TABLE_NAME + " WHERE " + "DCOptionDetailID" + "=" + pintID.ToString(); OleDbConnection oconPCS=null; OleDbCommand ocmdPCS =null; try { Utils utils = new Utils(); oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); ocmdPCS = new OleDbCommand(strSql, oconPCS); ocmdPCS.Connection.Open(); ocmdPCS.ExecuteNonQuery(); ocmdPCS = null; } catch(OleDbException ex) { if (ex.Errors[1].NativeError == ErrorCode.SQLCASCADE_PREVENT_KEYCODE) { throw new PCSDBException(ErrorCode.CASCADE_DELETE_PREVENT, METHOD_NAME, ex); } else { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS!=null) { if 
(oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } } /// <summary> /// This method uses to add data to PRO_DCOptionDetail /// </summary> /// <Inputs> /// PRO_DCOptionDetailVO /// </Inputs> /// <Returns> /// void /// </Returns> /// <History> /// Tuesday, August 02, 2005 /// </History> public object GetObjectVO(int pintID) { const string METHOD_NAME = THIS + ".GetObjectVO()"; DataSet dstPCS = new DataSet(); OleDbDataReader odrPCS = null; OleDbConnection oconPCS = null; OleDbCommand ocmdPCS = null; try { string strSql = String.Empty; strSql= "SELECT " + PRO_DCOptionDetailTable.DCOPTIONDETAILID_FLD + "," + PRO_DCOptionDetailTable.MASTERLOCATIONID_FLD + "," + PRO_DCOptionDetailTable.WORKORDER_FLD + "," + PRO_DCOptionDetailTable.DCOPTIONMASTERID_FLD + " FROM " + PRO_DCOptionDetailTable.TABLE_NAME + " WHERE " + PRO_DCOptionDetailTable.DCOPTIONDETAILID_FLD + "=" + pintID; Utils utils = new Utils(); oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); ocmdPCS = new OleDbCommand(strSql, oconPCS); ocmdPCS.Connection.Open(); odrPCS = ocmdPCS.ExecuteReader(); PRO_DCOptionDetailVO objObject = new PRO_DCOptionDetailVO(); while (odrPCS.Read()) { objObject.DCOptionDetailID = int.Parse(odrPCS[PRO_DCOptionDetailTable.DCOPTIONDETAILID_FLD].ToString().Trim()); objObject.MasterLocationID = int.Parse(odrPCS[PRO_DCOptionDetailTable.MASTERLOCATIONID_FLD].ToString().Trim()); objObject.WorkOrder = bool.Parse(odrPCS[PRO_DCOptionDetailTable.WORKORDER_FLD].ToString().Trim()); objObject.DCOptionMasterID = int.Parse(odrPCS[PRO_DCOptionDetailTable.DCOPTIONMASTERID_FLD].ToString().Trim()); } return objObject; } catch(OleDbException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS!=null) { if (oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } } /// <summary> /// This method uses to add data to PRO_DCOptionDetail /// </summary> /// <Inputs> /// PRO_DCOptionDetailVO /// </Inputs> /// <Returns> /// void /// </Returns> /// <History> /// Tuesday, August 02, 2005 /// </History> public void Update(object pobjObjecVO) { const string METHOD_NAME = THIS + ".Update()"; PRO_DCOptionDetailVO objObject = (PRO_DCOptionDetailVO) pobjObjecVO; //prepare value for parameters OleDbConnection oconPCS =null; OleDbCommand ocmdPCS = null; try { string strSql = String.Empty; Utils utils = new Utils(); oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); ocmdPCS = new OleDbCommand(strSql, oconPCS); strSql= "UPDATE PRO_DCOptionDetail SET " + PRO_DCOptionDetailTable.MASTERLOCATIONID_FLD + "= ?" + "," + PRO_DCOptionDetailTable.WORKORDER_FLD + "= ?" + "," + PRO_DCOptionDetailTable.DCOPTIONMASTERID_FLD + "= ?" 
+" WHERE " + PRO_DCOptionDetailTable.DCOPTIONDETAILID_FLD + "= ?"; ocmdPCS.Parameters.Add(new OleDbParameter(PRO_DCOptionDetailTable.MASTERLOCATIONID_FLD, OleDbType.Integer)); ocmdPCS.Parameters[PRO_DCOptionDetailTable.MASTERLOCATIONID_FLD].Value = objObject.MasterLocationID; ocmdPCS.Parameters.Add(new OleDbParameter(PRO_DCOptionDetailTable.WORKORDER_FLD, OleDbType.Boolean)); ocmdPCS.Parameters[PRO_DCOptionDetailTable.WORKORDER_FLD].Value = objObject.WorkOrder; ocmdPCS.Parameters.Add(new OleDbParameter(PRO_DCOptionDetailTable.DCOPTIONMASTERID_FLD, OleDbType.Integer)); ocmdPCS.Parameters[PRO_DCOptionDetailTable.DCOPTIONMASTERID_FLD].Value = objObject.DCOptionMasterID; ocmdPCS.Parameters.Add(new OleDbParameter(PRO_DCOptionDetailTable.DCOPTIONDETAILID_FLD, OleDbType.Integer)); ocmdPCS.Parameters[PRO_DCOptionDetailTable.DCOPTIONDETAILID_FLD].Value = objObject.DCOptionDetailID; ocmdPCS.CommandText = strSql; ocmdPCS.Connection.Open(); ocmdPCS.ExecuteNonQuery(); } catch(OleDbException ex) { if (ex.Errors[1].NativeError == ErrorCode.SQLDUPLICATE_KEYCODE) { throw new PCSDBException(ErrorCode.DUPLICATE_KEY, METHOD_NAME, ex); } else { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } } catch(InvalidOperationException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS!=null) { if (oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } } /// <summary> /// This method uses to add data to PRO_DCOptionDetail /// </summary> /// <Inputs> /// PRO_DCOptionDetailVO /// </Inputs> /// <Returns> /// void /// </Returns> /// <History> /// Tuesday, August 02, 2005 /// </History> public DataSet List() { const string METHOD_NAME = THIS + ".List()"; DataSet dstPCS = new DataSet(); OleDbConnection oconPCS =null; OleDbCommand ocmdPCS = null; try { string strSql = String.Empty; strSql= "SELECT " + PRO_DCOptionDetailTable.DCOPTIONDETAILID_FLD + "," + PRO_DCOptionDetailTable.MASTERLOCATIONID_FLD + "," + PRO_DCOptionDetailTable.WORKORDER_FLD + "," + PRO_DCOptionDetailTable.DCOPTIONMASTERID_FLD + " FROM " + PRO_DCOptionDetailTable.TABLE_NAME; Utils utils = new Utils(); oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); ocmdPCS = new OleDbCommand(strSql, oconPCS); ocmdPCS.Connection.Open(); OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS); odadPCS.Fill(dstPCS,PRO_DCOptionDetailTable.TABLE_NAME); return dstPCS; } catch(OleDbException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS!=null) { if (oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } } public DataSet GetDetailByMaster(int pintDCOptionMaster) { const string METHOD_NAME = THIS + ".GetDetailByMaster()"; DataSet dstPCS = new DataSet(); OleDbConnection oconPCS = null; OleDbCommand ocmdPCS = null; try { string strSql = String.Empty; strSql= "SELECT " + PRO_DCOptionDetailTable.TABLE_NAME + "." + PRO_DCOptionDetailTable.DCOPTIONDETAILID_FLD + ", " + PRO_DCOptionDetailTable.TABLE_NAME + "." + PRO_DCOptionDetailTable.MASTERLOCATIONID_FLD + ", " + MST_MasterLocationTable.TABLE_NAME + "." + MST_MasterLocationTable.CODE_FLD + " as " + MST_MasterLocationTable.TABLE_NAME + MST_MasterLocationTable.CODE_FLD + ", " + PRO_DCOptionDetailTable.TABLE_NAME + "." + PRO_DCOptionDetailTable.WORKORDER_FLD + ", " + PRO_DCOptionDetailTable.TABLE_NAME + "." 
+ PRO_DCOptionDetailTable.DCOPTIONMASTERID_FLD + " FROM " + PRO_DCOptionDetailTable.TABLE_NAME + " LEFT JOIN " + MST_MasterLocationTable.TABLE_NAME + " ON " + MST_MasterLocationTable.TABLE_NAME + "." + MST_MasterLocationTable.MASTERLOCATIONID_FLD + "=" + PRO_DCOptionDetailTable.TABLE_NAME + "." + PRO_DCOptionDetailTable.MASTERLOCATIONID_FLD + " WHERE " + PRO_DCOptionDetailTable.TABLE_NAME + "." + PRO_DCOptionDetailTable.DCOPTIONMASTERID_FLD + "=" + pintDCOptionMaster; Utils utils = new Utils(); oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); ocmdPCS = new OleDbCommand(strSql, oconPCS); ocmdPCS.Connection.Open(); OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS); odadPCS.Fill(dstPCS,PRO_DCOptionDetailTable.TABLE_NAME); return dstPCS; } catch(OleDbException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS!=null) { if (oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } } /// <summary> /// This method uses to add data to PRO_DCOptionDetail /// </summary> /// <Inputs> /// PRO_DCOptionDetailVO /// </Inputs> /// <Returns> /// void /// </Returns> /// <History> /// Tuesday, August 02, 2005 /// </History> public void UpdateDataSet(DataSet pdstData) { const string METHOD_NAME = THIS + ".UpdateDataSet()"; string strSql; OleDbConnection oconPCS =null; OleDbCommandBuilder odcbPCS ; OleDbDataAdapter odadPCS = new OleDbDataAdapter(); try { strSql= "SELECT " + PRO_DCOptionDetailTable.DCOPTIONDETAILID_FLD + "," + PRO_DCOptionDetailTable.MASTERLOCATIONID_FLD + "," + PRO_DCOptionDetailTable.WORKORDER_FLD + "," + PRO_DCOptionDetailTable.DCOPTIONMASTERID_FLD + " FROM " + PRO_DCOptionDetailTable.TABLE_NAME; Utils utils = new Utils(); oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); odadPCS.SelectCommand = new OleDbCommand(strSql, oconPCS); odcbPCS = new OleDbCommandBuilder(odadPCS); pdstData.EnforceConstraints = false; odadPCS.Update(pdstData, PRO_DCOptionDetailTable.TABLE_NAME); pdstData.AcceptChanges(); } catch(OleDbException ex) { if (ex.Errors[1].NativeError == ErrorCode.SQLDUPLICATE_KEYCODE) { throw new PCSDBException(ErrorCode.DUPLICATE_KEY, METHOD_NAME, ex); } else if (ex.Errors[1].NativeError == ErrorCode.SQLCASCADE_PREVENT_KEYCODE) { throw new PCSDBException(ErrorCode.CASCADE_DELETE_PREVENT, METHOD_NAME, ex); } else { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } } catch(InvalidOperationException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS!=null) { if (oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } } } }
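The Update() method above binds its values through OleDb's positional "?" placeholders, so the order in which parameters are added must match the order they appear in the SQL text. Below is a minimal, self-contained sketch of that pattern (not part of the original sources); the connection string and the literal column names are illustrative placeholders rather than the values of the PCS *_FLD constants.

using System.Data;
using System.Data.OleDb;

class PositionalParameterSketch
{
    // OleDb binds "?" placeholders strictly by the order parameters are added,
    // not by name, so the Add() calls below must follow the order in the SQL.
    static void UpdateRow(string connectionString, int masterLocationId, bool workOrder, int masterId, int detailId)
    {
        const string sql =
            "UPDATE PRO_DCOptionDetail SET MasterLocationID = ?, WorkOrder = ?, DCOptionMasterID = ? " +
            "WHERE DCOptionDetailID = ?";

        using (var connection = new OleDbConnection(connectionString))
        using (var command = new OleDbCommand(sql, connection))
        {
            command.Parameters.Add("MasterLocationID", OleDbType.Integer).Value = masterLocationId;
            command.Parameters.Add("WorkOrder", OleDbType.Boolean).Value = workOrder;
            command.Parameters.Add("DCOptionMasterID", OleDbType.Integer).Value = masterId;
            command.Parameters.Add("DCOptionDetailID", OleDbType.Integer).Value = detailId;

            connection.Open();
            command.ExecuteNonQuery();
        } // Dispose() closes the connection even when ExecuteNonQuery throws,
          // which is what the explicit finally blocks above do by hand.
    }
}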
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.Collections.Generic; using System.Diagnostics; using System.Linq; using Xunit; namespace System.Collections.Immutable.Test { public class ImmutableHashSetBuilderTest : ImmutablesTestBase { [Fact] public void CreateBuilder() { var builder = ImmutableHashSet.CreateBuilder<string>(); Assert.Same(EqualityComparer<string>.Default, builder.KeyComparer); builder = ImmutableHashSet.CreateBuilder<string>(StringComparer.OrdinalIgnoreCase); Assert.Same(StringComparer.OrdinalIgnoreCase, builder.KeyComparer); } [Fact] public void ToBuilder() { var builder = ImmutableHashSet<int>.Empty.ToBuilder(); Assert.True(builder.Add(3)); Assert.True(builder.Add(5)); Assert.False(builder.Add(5)); Assert.Equal(2, builder.Count); Assert.True(builder.Contains(3)); Assert.True(builder.Contains(5)); Assert.False(builder.Contains(7)); var set = builder.ToImmutable(); Assert.Equal(builder.Count, set.Count); Assert.True(builder.Add(8)); Assert.Equal(3, builder.Count); Assert.Equal(2, set.Count); Assert.True(builder.Contains(8)); Assert.False(set.Contains(8)); } [Fact] public void BuilderFromSet() { var set = ImmutableHashSet<int>.Empty.Add(1); var builder = set.ToBuilder(); Assert.True(builder.Contains(1)); Assert.True(builder.Add(3)); Assert.True(builder.Add(5)); Assert.False(builder.Add(5)); Assert.Equal(3, builder.Count); Assert.True(builder.Contains(3)); Assert.True(builder.Contains(5)); Assert.False(builder.Contains(7)); var set2 = builder.ToImmutable(); Assert.Equal(builder.Count, set2.Count); Assert.True(set2.Contains(1)); Assert.True(builder.Add(8)); Assert.Equal(4, builder.Count); Assert.Equal(3, set2.Count); Assert.True(builder.Contains(8)); Assert.False(set.Contains(8)); Assert.False(set2.Contains(8)); } [Fact] public void EnumerateBuilderWhileMutating() { var builder = ImmutableHashSet<int>.Empty.Union(Enumerable.Range(1, 10)).ToBuilder(); CollectionAssertAreEquivalent(Enumerable.Range(1, 10).ToArray(), builder.ToArray()); var enumerator = builder.GetEnumerator(); Assert.True(enumerator.MoveNext()); builder.Add(11); // Verify that a new enumerator will succeed. CollectionAssertAreEquivalent(Enumerable.Range(1, 11).ToArray(), builder.ToArray()); // Try enumerating further with the previous enumerable now that we've changed the collection. Assert.Throws<InvalidOperationException>(() => enumerator.MoveNext()); enumerator.Reset(); enumerator.MoveNext(); // resetting should fix the problem. // Verify that by obtaining a new enumerator, we can enumerate all the contents. CollectionAssertAreEquivalent(Enumerable.Range(1, 11).ToArray(), builder.ToArray()); } [Fact] public void BuilderReusesUnchangedImmutableInstances() { var collection = ImmutableHashSet<int>.Empty.Add(1); var builder = collection.ToBuilder(); Assert.Same(collection, builder.ToImmutable()); // no changes at all. builder.Add(2); var newImmutable = builder.ToImmutable(); Assert.NotSame(collection, newImmutable); // first ToImmutable with changes should be a new instance. Assert.Same(newImmutable, builder.ToImmutable()); // second ToImmutable without changes should be the same instance. 
} [Fact] public void EnumeratorTest() { var builder = ImmutableHashSet.Create(1).ToBuilder(); ManuallyEnumerateTest(new[] { 1 }, ((IEnumerable<int>)builder).GetEnumerator()); } [Fact] public void Clear() { var set = ImmutableHashSet.Create(1); var builder = set.ToBuilder(); builder.Clear(); Assert.Equal(0, builder.Count); } [Fact] public void KeyComparer() { var builder = ImmutableHashSet.Create("a", "B").ToBuilder(); Assert.Same(EqualityComparer<string>.Default, builder.KeyComparer); Assert.True(builder.Contains("a")); Assert.False(builder.Contains("A")); builder.KeyComparer = StringComparer.OrdinalIgnoreCase; Assert.Same(StringComparer.OrdinalIgnoreCase, builder.KeyComparer); Assert.Equal(2, builder.Count); Assert.True(builder.Contains("a")); Assert.True(builder.Contains("A")); var set = builder.ToImmutable(); Assert.Same(StringComparer.OrdinalIgnoreCase, set.KeyComparer); } [Fact] public void KeyComparerCollisions() { var builder = ImmutableHashSet.Create("a", "A").ToBuilder(); builder.KeyComparer = StringComparer.OrdinalIgnoreCase; Assert.Equal(1, builder.Count); Assert.True(builder.Contains("a")); var set = builder.ToImmutable(); Assert.Same(StringComparer.OrdinalIgnoreCase, set.KeyComparer); Assert.Equal(1, set.Count); Assert.True(set.Contains("a")); } [Fact] public void KeyComparerEmptyCollection() { var builder = ImmutableHashSet.Create<string>().ToBuilder(); Assert.Same(EqualityComparer<string>.Default, builder.KeyComparer); builder.KeyComparer = StringComparer.OrdinalIgnoreCase; Assert.Same(StringComparer.OrdinalIgnoreCase, builder.KeyComparer); var set = builder.ToImmutable(); Assert.Same(StringComparer.OrdinalIgnoreCase, set.KeyComparer); } [Fact] public void UnionWith() { var builder = ImmutableHashSet.Create(1, 2, 3).ToBuilder(); Assert.Throws<ArgumentNullException>(() => builder.UnionWith(null)); builder.UnionWith(new[] { 2, 3, 4 }); Assert.Equal(new[] { 1, 2, 3, 4 }, builder); } [Fact] public void ExceptWith() { var builder = ImmutableHashSet.Create(1, 2, 3).ToBuilder(); Assert.Throws<ArgumentNullException>(() => builder.ExceptWith(null)); builder.ExceptWith(new[] { 2, 3, 4 }); Assert.Equal(new[] { 1 }, builder); } [Fact] public void SymmetricExceptWith() { var builder = ImmutableHashSet.Create(1, 2, 3).ToBuilder(); Assert.Throws<ArgumentNullException>(() => builder.SymmetricExceptWith(null)); builder.SymmetricExceptWith(new[] { 2, 3, 4 }); Assert.Equal(new[] { 1, 4 }, builder); } [Fact] public void IntersectWith() { var builder = ImmutableHashSet.Create(1, 2, 3).ToBuilder(); Assert.Throws<ArgumentNullException>(() => builder.IntersectWith(null)); builder.IntersectWith(new[] { 2, 3, 4 }); Assert.Equal(new[] { 2, 3 }, builder); } [Fact] public void IsProperSubsetOf() { var builder = ImmutableHashSet.CreateRange(Enumerable.Range(1, 3)).ToBuilder(); Assert.Throws<ArgumentNullException>(() => builder.IsProperSubsetOf(null)); Assert.False(builder.IsProperSubsetOf(Enumerable.Range(1, 3))); Assert.True(builder.IsProperSubsetOf(Enumerable.Range(1, 5))); } [Fact] public void IsProperSupersetOf() { var builder = ImmutableHashSet.CreateRange(Enumerable.Range(1, 3)).ToBuilder(); Assert.Throws<ArgumentNullException>(() => builder.IsProperSupersetOf(null)); Assert.False(builder.IsProperSupersetOf(Enumerable.Range(1, 3))); Assert.True(builder.IsProperSupersetOf(Enumerable.Range(1, 2))); } [Fact] public void IsSubsetOf() { var builder = ImmutableHashSet.CreateRange(Enumerable.Range(1, 3)).ToBuilder(); Assert.Throws<ArgumentNullException>(() => builder.IsSubsetOf(null)); 
Assert.False(builder.IsSubsetOf(Enumerable.Range(1, 2))); Assert.True(builder.IsSubsetOf(Enumerable.Range(1, 3))); Assert.True(builder.IsSubsetOf(Enumerable.Range(1, 5))); } [Fact] public void IsSupersetOf() { var builder = ImmutableHashSet.CreateRange(Enumerable.Range(1, 3)).ToBuilder(); Assert.Throws<ArgumentNullException>(() => builder.IsSupersetOf(null)); Assert.False(builder.IsSupersetOf(Enumerable.Range(1, 4))); Assert.True(builder.IsSupersetOf(Enumerable.Range(1, 3))); Assert.True(builder.IsSupersetOf(Enumerable.Range(1, 2))); } [Fact] public void Overlaps() { var builder = ImmutableHashSet.CreateRange(Enumerable.Range(1, 3)).ToBuilder(); Assert.Throws<ArgumentNullException>(() => builder.Overlaps(null)); Assert.True(builder.Overlaps(Enumerable.Range(3, 2))); Assert.False(builder.Overlaps(Enumerable.Range(4, 3))); } [Fact] public void Remove() { var builder = ImmutableHashSet.Create("a").ToBuilder(); Assert.Throws<ArgumentNullException>(() => builder.Remove(null)); Assert.False(builder.Remove("b")); Assert.True(builder.Remove("a")); } [Fact] public void SetEquals() { var builder = ImmutableHashSet.Create("a").ToBuilder(); Assert.Throws<ArgumentNullException>(() => builder.SetEquals(null)); Assert.False(builder.SetEquals(new[] { "b" })); Assert.True(builder.SetEquals(new[] { "a" })); Assert.True(builder.SetEquals(builder)); } [Fact] public void ICollectionOfTMethods() { ICollection<string> builder = ImmutableHashSet.Create("a").ToBuilder(); builder.Add("b"); Assert.True(builder.Contains("b")); var array = new string[3]; builder.CopyTo(array, 1); Assert.Null(array[0]); CollectionAssertAreEquivalent(new[] { null, "a", "b" }, array); Assert.False(builder.IsReadOnly); CollectionAssertAreEquivalent(new[] { "a", "b" }, builder.ToArray()); // tests enumerator } [Fact] public void DebuggerAttributesValid() { DebuggerAttributes.ValidateDebuggerDisplayReferences(ImmutableHashSet.CreateBuilder<int>()); } } }
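The tests above exercise the builder round trip: ToBuilder() gives a cheaply mutable view of an immutable set, and ToImmutable() freezes the result without touching the original instance. A minimal usage sketch (not part of the original test suite):

using System;
using System.Collections.Immutable;

class BuilderRoundTripSketch
{
    static void Main()
    {
        var original = ImmutableHashSet.Create(1, 2, 3);

        // Mutate through the builder; the original set is never modified.
        var builder = original.ToBuilder();
        builder.Add(4);
        builder.Remove(1);

        var updated = builder.ToImmutable();

        Console.WriteLine(original.Contains(4)); // False - original is untouched
        Console.WriteLine(updated.Contains(4));  // True
        Console.WriteLine(updated.Contains(1));  // False
    }
}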
using System; using System.CodeDom.Compiler; using System.Collections.Generic; using System.Data; using System.Data.SqlClient; using System.Globalization; using System.Linq; using System.Text; namespace EduHub.Data.Entities { /// <summary> /// Fees - Billing Templates Data Set /// </summary> [GeneratedCode("EduHub Data", "0.9")] public sealed partial class SABDataSet : EduHubDataSet<SAB> { /// <inheritdoc /> public override string Name { get { return "SAB"; } } /// <inheritdoc /> public override bool SupportsEntityLastModified { get { return true; } } internal SABDataSet(EduHubContext Context) : base(Context) { Index_FEE_CODE_1ST = new Lazy<NullDictionary<string, IReadOnlyList<SAB>>>(() => this.ToGroupedNullDictionary(i => i.FEE_CODE_1ST)); Index_FEE_CODE_2ND = new Lazy<NullDictionary<string, IReadOnlyList<SAB>>>(() => this.ToGroupedNullDictionary(i => i.FEE_CODE_2ND)); Index_FEE_CODE_3RD = new Lazy<NullDictionary<string, IReadOnlyList<SAB>>>(() => this.ToGroupedNullDictionary(i => i.FEE_CODE_3RD)); Index_FEE_CODE_4TH = new Lazy<NullDictionary<string, IReadOnlyList<SAB>>>(() => this.ToGroupedNullDictionary(i => i.FEE_CODE_4TH)); Index_FEE_CODE_KG = new Lazy<NullDictionary<string, IReadOnlyList<SAB>>>(() => this.ToGroupedNullDictionary(i => i.FEE_CODE_KG)); Index_SABKEY = new Lazy<Dictionary<string, SAB>>(() => this.ToDictionary(i => i.SABKEY)); } /// <summary> /// Matches CSV file headers to actions, used to deserialize <see cref="SAB" /> /// </summary> /// <param name="Headers">The CSV column headers</param> /// <returns>An array of actions which deserialize <see cref="SAB" /> fields for each CSV column header</returns> internal override Action<SAB, string>[] BuildMapper(IReadOnlyList<string> Headers) { var mapper = new Action<SAB, string>[Headers.Count]; for (var i = 0; i < Headers.Count; i++) { switch (Headers[i]) { case "SABKEY": mapper[i] = (e, v) => e.SABKEY = v; break; case "DESCRIPTION": mapper[i] = (e, v) => e.DESCRIPTION = v; break; case "BILL_TYPE": mapper[i] = (e, v) => e.BILL_TYPE = v; break; case "FROM_CLASS": mapper[i] = (e, v) => e.FROM_CLASS = v; break; case "TO_CLASS": mapper[i] = (e, v) => e.TO_CLASS = v; break; case "FROM_YEAR": mapper[i] = (e, v) => e.FROM_YEAR = v; break; case "TO_YEAR": mapper[i] = (e, v) => e.TO_YEAR = v; break; case "RES_STATUS": mapper[i] = (e, v) => e.RES_STATUS = v; break; case "FEE_CODE_1ST": mapper[i] = (e, v) => e.FEE_CODE_1ST = v; break; case "FEE_CODE_2ND": mapper[i] = (e, v) => e.FEE_CODE_2ND = v; break; case "FEE_CODE_3RD": mapper[i] = (e, v) => e.FEE_CODE_3RD = v; break; case "FEE_CODE_4TH": mapper[i] = (e, v) => e.FEE_CODE_4TH = v; break; case "FEE_CODE_KG": mapper[i] = (e, v) => e.FEE_CODE_KG = v; break; case "LW_DATE": mapper[i] = (e, v) => e.LW_DATE = v == null ? (DateTime?)null : DateTime.ParseExact(v, "d/MM/yyyy h:mm:ss tt", CultureInfo.InvariantCulture); break; case "LW_TIME": mapper[i] = (e, v) => e.LW_TIME = v == null ? 
(short?)null : short.Parse(v); break; case "LW_USER": mapper[i] = (e, v) => e.LW_USER = v; break; default: mapper[i] = MapperNoOp; break; } } return mapper; } /// <summary> /// Merges <see cref="SAB" /> delta entities /// </summary> /// <param name="Entities">Iterator for base <see cref="SAB" /> entities</param> /// <param name="DeltaEntities">List of delta <see cref="SAB" /> entities</param> /// <returns>A merged <see cref="IEnumerable{SAB}"/> of entities</returns> internal override IEnumerable<SAB> ApplyDeltaEntities(IEnumerable<SAB> Entities, List<SAB> DeltaEntities) { HashSet<string> Index_SABKEY = new HashSet<string>(DeltaEntities.Select(i => i.SABKEY)); using (var deltaIterator = DeltaEntities.GetEnumerator()) { using (var entityIterator = Entities.GetEnumerator()) { while (deltaIterator.MoveNext()) { var deltaClusteredKey = deltaIterator.Current.SABKEY; bool yieldEntity = false; while (entityIterator.MoveNext()) { var entity = entityIterator.Current; bool overwritten = Index_SABKEY.Remove(entity.SABKEY); if (entity.SABKEY.CompareTo(deltaClusteredKey) <= 0) { if (!overwritten) { yield return entity; } } else { yieldEntity = !overwritten; break; } } yield return deltaIterator.Current; if (yieldEntity) { yield return entityIterator.Current; } } while (entityIterator.MoveNext()) { yield return entityIterator.Current; } } } } #region Index Fields private Lazy<NullDictionary<string, IReadOnlyList<SAB>>> Index_FEE_CODE_1ST; private Lazy<NullDictionary<string, IReadOnlyList<SAB>>> Index_FEE_CODE_2ND; private Lazy<NullDictionary<string, IReadOnlyList<SAB>>> Index_FEE_CODE_3RD; private Lazy<NullDictionary<string, IReadOnlyList<SAB>>> Index_FEE_CODE_4TH; private Lazy<NullDictionary<string, IReadOnlyList<SAB>>> Index_FEE_CODE_KG; private Lazy<Dictionary<string, SAB>> Index_SABKEY; #endregion #region Index Methods /// <summary> /// Find SAB by FEE_CODE_1ST field /// </summary> /// <param name="FEE_CODE_1ST">FEE_CODE_1ST value used to find SAB</param> /// <returns>List of related SAB entities</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public IReadOnlyList<SAB> FindByFEE_CODE_1ST(string FEE_CODE_1ST) { return Index_FEE_CODE_1ST.Value[FEE_CODE_1ST]; } /// <summary> /// Attempt to find SAB by FEE_CODE_1ST field /// </summary> /// <param name="FEE_CODE_1ST">FEE_CODE_1ST value used to find SAB</param> /// <param name="Value">List of related SAB entities</param> /// <returns>True if the list of related SAB entities is found</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public bool TryFindByFEE_CODE_1ST(string FEE_CODE_1ST, out IReadOnlyList<SAB> Value) { return Index_FEE_CODE_1ST.Value.TryGetValue(FEE_CODE_1ST, out Value); } /// <summary> /// Attempt to find SAB by FEE_CODE_1ST field /// </summary> /// <param name="FEE_CODE_1ST">FEE_CODE_1ST value used to find SAB</param> /// <returns>List of related SAB entities, or null if not found</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public IReadOnlyList<SAB> TryFindByFEE_CODE_1ST(string FEE_CODE_1ST) { IReadOnlyList<SAB> value; if (Index_FEE_CODE_1ST.Value.TryGetValue(FEE_CODE_1ST, out value)) { return value; } else { return null; } } /// <summary> /// Find SAB by FEE_CODE_2ND field /// </summary> /// <param name="FEE_CODE_2ND">FEE_CODE_2ND value used to find SAB</param> /// <returns>List of related SAB entities</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public 
IReadOnlyList<SAB> FindByFEE_CODE_2ND(string FEE_CODE_2ND) { return Index_FEE_CODE_2ND.Value[FEE_CODE_2ND]; } /// <summary> /// Attempt to find SAB by FEE_CODE_2ND field /// </summary> /// <param name="FEE_CODE_2ND">FEE_CODE_2ND value used to find SAB</param> /// <param name="Value">List of related SAB entities</param> /// <returns>True if the list of related SAB entities is found</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public bool TryFindByFEE_CODE_2ND(string FEE_CODE_2ND, out IReadOnlyList<SAB> Value) { return Index_FEE_CODE_2ND.Value.TryGetValue(FEE_CODE_2ND, out Value); } /// <summary> /// Attempt to find SAB by FEE_CODE_2ND field /// </summary> /// <param name="FEE_CODE_2ND">FEE_CODE_2ND value used to find SAB</param> /// <returns>List of related SAB entities, or null if not found</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public IReadOnlyList<SAB> TryFindByFEE_CODE_2ND(string FEE_CODE_2ND) { IReadOnlyList<SAB> value; if (Index_FEE_CODE_2ND.Value.TryGetValue(FEE_CODE_2ND, out value)) { return value; } else { return null; } } /// <summary> /// Find SAB by FEE_CODE_3RD field /// </summary> /// <param name="FEE_CODE_3RD">FEE_CODE_3RD value used to find SAB</param> /// <returns>List of related SAB entities</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public IReadOnlyList<SAB> FindByFEE_CODE_3RD(string FEE_CODE_3RD) { return Index_FEE_CODE_3RD.Value[FEE_CODE_3RD]; } /// <summary> /// Attempt to find SAB by FEE_CODE_3RD field /// </summary> /// <param name="FEE_CODE_3RD">FEE_CODE_3RD value used to find SAB</param> /// <param name="Value">List of related SAB entities</param> /// <returns>True if the list of related SAB entities is found</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public bool TryFindByFEE_CODE_3RD(string FEE_CODE_3RD, out IReadOnlyList<SAB> Value) { return Index_FEE_CODE_3RD.Value.TryGetValue(FEE_CODE_3RD, out Value); } /// <summary> /// Attempt to find SAB by FEE_CODE_3RD field /// </summary> /// <param name="FEE_CODE_3RD">FEE_CODE_3RD value used to find SAB</param> /// <returns>List of related SAB entities, or null if not found</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public IReadOnlyList<SAB> TryFindByFEE_CODE_3RD(string FEE_CODE_3RD) { IReadOnlyList<SAB> value; if (Index_FEE_CODE_3RD.Value.TryGetValue(FEE_CODE_3RD, out value)) { return value; } else { return null; } } /// <summary> /// Find SAB by FEE_CODE_4TH field /// </summary> /// <param name="FEE_CODE_4TH">FEE_CODE_4TH value used to find SAB</param> /// <returns>List of related SAB entities</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public IReadOnlyList<SAB> FindByFEE_CODE_4TH(string FEE_CODE_4TH) { return Index_FEE_CODE_4TH.Value[FEE_CODE_4TH]; } /// <summary> /// Attempt to find SAB by FEE_CODE_4TH field /// </summary> /// <param name="FEE_CODE_4TH">FEE_CODE_4TH value used to find SAB</param> /// <param name="Value">List of related SAB entities</param> /// <returns>True if the list of related SAB entities is found</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public bool TryFindByFEE_CODE_4TH(string FEE_CODE_4TH, out IReadOnlyList<SAB> Value) { return Index_FEE_CODE_4TH.Value.TryGetValue(FEE_CODE_4TH, out Value); } /// <summary> /// Attempt to find SAB by FEE_CODE_4TH field /// 
</summary> /// <param name="FEE_CODE_4TH">FEE_CODE_4TH value used to find SAB</param> /// <returns>List of related SAB entities, or null if not found</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public IReadOnlyList<SAB> TryFindByFEE_CODE_4TH(string FEE_CODE_4TH) { IReadOnlyList<SAB> value; if (Index_FEE_CODE_4TH.Value.TryGetValue(FEE_CODE_4TH, out value)) { return value; } else { return null; } } /// <summary> /// Find SAB by FEE_CODE_KG field /// </summary> /// <param name="FEE_CODE_KG">FEE_CODE_KG value used to find SAB</param> /// <returns>List of related SAB entities</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public IReadOnlyList<SAB> FindByFEE_CODE_KG(string FEE_CODE_KG) { return Index_FEE_CODE_KG.Value[FEE_CODE_KG]; } /// <summary> /// Attempt to find SAB by FEE_CODE_KG field /// </summary> /// <param name="FEE_CODE_KG">FEE_CODE_KG value used to find SAB</param> /// <param name="Value">List of related SAB entities</param> /// <returns>True if the list of related SAB entities is found</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public bool TryFindByFEE_CODE_KG(string FEE_CODE_KG, out IReadOnlyList<SAB> Value) { return Index_FEE_CODE_KG.Value.TryGetValue(FEE_CODE_KG, out Value); } /// <summary> /// Attempt to find SAB by FEE_CODE_KG field /// </summary> /// <param name="FEE_CODE_KG">FEE_CODE_KG value used to find SAB</param> /// <returns>List of related SAB entities, or null if not found</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public IReadOnlyList<SAB> TryFindByFEE_CODE_KG(string FEE_CODE_KG) { IReadOnlyList<SAB> value; if (Index_FEE_CODE_KG.Value.TryGetValue(FEE_CODE_KG, out value)) { return value; } else { return null; } } /// <summary> /// Find SAB by SABKEY field /// </summary> /// <param name="SABKEY">SABKEY value used to find SAB</param> /// <returns>Related SAB entity</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public SAB FindBySABKEY(string SABKEY) { return Index_SABKEY.Value[SABKEY]; } /// <summary> /// Attempt to find SAB by SABKEY field /// </summary> /// <param name="SABKEY">SABKEY value used to find SAB</param> /// <param name="Value">Related SAB entity</param> /// <returns>True if the related SAB entity is found</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public bool TryFindBySABKEY(string SABKEY, out SAB Value) { return Index_SABKEY.Value.TryGetValue(SABKEY, out Value); } /// <summary> /// Attempt to find SAB by SABKEY field /// </summary> /// <param name="SABKEY">SABKEY value used to find SAB</param> /// <returns>Related SAB entity, or null if not found</returns> /// <exception cref="ArgumentOutOfRangeException">No match was found</exception> public SAB TryFindBySABKEY(string SABKEY) { SAB value; if (Index_SABKEY.Value.TryGetValue(SABKEY, out value)) { return value; } else { return null; } } #endregion #region SQL Integration /// <summary> /// Returns a <see cref="SqlCommand"/> which checks for the existence of a SAB table, and if not found, creates the table and associated indexes. 
/// </summary> /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param> public override SqlCommand GetSqlCreateTableCommand(SqlConnection SqlConnection) { return new SqlCommand( connection: SqlConnection, cmdText: @"IF NOT EXISTS (SELECT * FROM dbo.sysobjects WHERE id = OBJECT_ID(N'[dbo].[SAB]') AND OBJECTPROPERTY(id, N'IsUserTable') = 1) BEGIN CREATE TABLE [dbo].[SAB]( [SABKEY] varchar(10) NOT NULL, [DESCRIPTION] varchar(30) NULL, [BILL_TYPE] varchar(10) NULL, [FROM_CLASS] varchar(4) NULL, [TO_CLASS] varchar(4) NULL, [FROM_YEAR] varchar(4) NULL, [TO_YEAR] varchar(4) NULL, [RES_STATUS] varchar(1) NULL, [FEE_CODE_1ST] varchar(10) NULL, [FEE_CODE_2ND] varchar(10) NULL, [FEE_CODE_3RD] varchar(10) NULL, [FEE_CODE_4TH] varchar(10) NULL, [FEE_CODE_KG] varchar(10) NULL, [LW_DATE] datetime NULL, [LW_TIME] smallint NULL, [LW_USER] varchar(128) NULL, CONSTRAINT [SAB_Index_SABKEY] PRIMARY KEY CLUSTERED ( [SABKEY] ASC ) ); CREATE NONCLUSTERED INDEX [SAB_Index_FEE_CODE_1ST] ON [dbo].[SAB] ( [FEE_CODE_1ST] ASC ); CREATE NONCLUSTERED INDEX [SAB_Index_FEE_CODE_2ND] ON [dbo].[SAB] ( [FEE_CODE_2ND] ASC ); CREATE NONCLUSTERED INDEX [SAB_Index_FEE_CODE_3RD] ON [dbo].[SAB] ( [FEE_CODE_3RD] ASC ); CREATE NONCLUSTERED INDEX [SAB_Index_FEE_CODE_4TH] ON [dbo].[SAB] ( [FEE_CODE_4TH] ASC ); CREATE NONCLUSTERED INDEX [SAB_Index_FEE_CODE_KG] ON [dbo].[SAB] ( [FEE_CODE_KG] ASC ); END"); } /// <summary> /// Returns a <see cref="SqlCommand"/> which disables all non-clustered table indexes. /// Typically called before <see cref="SqlBulkCopy"/> to improve performance. /// <see cref="GetSqlRebuildIndexesCommand(SqlConnection)"/> should be called to rebuild and enable indexes after performance sensitive work is completed. /// </summary> /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param> /// <returns>A <see cref="SqlCommand"/> which (when executed) will disable all non-clustered table indexes</returns> public override SqlCommand GetSqlDisableIndexesCommand(SqlConnection SqlConnection) { return new SqlCommand( connection: SqlConnection, cmdText: @"IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[SAB]') AND name = N'SAB_Index_FEE_CODE_1ST') ALTER INDEX [SAB_Index_FEE_CODE_1ST] ON [dbo].[SAB] DISABLE; IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[SAB]') AND name = N'SAB_Index_FEE_CODE_2ND') ALTER INDEX [SAB_Index_FEE_CODE_2ND] ON [dbo].[SAB] DISABLE; IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[SAB]') AND name = N'SAB_Index_FEE_CODE_3RD') ALTER INDEX [SAB_Index_FEE_CODE_3RD] ON [dbo].[SAB] DISABLE; IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[SAB]') AND name = N'SAB_Index_FEE_CODE_4TH') ALTER INDEX [SAB_Index_FEE_CODE_4TH] ON [dbo].[SAB] DISABLE; IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[SAB]') AND name = N'SAB_Index_FEE_CODE_KG') ALTER INDEX [SAB_Index_FEE_CODE_KG] ON [dbo].[SAB] DISABLE; "); } /// <summary> /// Returns a <see cref="SqlCommand"/> which rebuilds and enables all non-clustered table indexes. 
/// </summary> /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param> /// <returns>A <see cref="SqlCommand"/> which (when executed) will rebuild and enable all non-clustered table indexes</returns> public override SqlCommand GetSqlRebuildIndexesCommand(SqlConnection SqlConnection) { return new SqlCommand( connection: SqlConnection, cmdText: @"IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[SAB]') AND name = N'SAB_Index_FEE_CODE_1ST') ALTER INDEX [SAB_Index_FEE_CODE_1ST] ON [dbo].[SAB] REBUILD PARTITION = ALL; IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[SAB]') AND name = N'SAB_Index_FEE_CODE_2ND') ALTER INDEX [SAB_Index_FEE_CODE_2ND] ON [dbo].[SAB] REBUILD PARTITION = ALL; IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[SAB]') AND name = N'SAB_Index_FEE_CODE_3RD') ALTER INDEX [SAB_Index_FEE_CODE_3RD] ON [dbo].[SAB] REBUILD PARTITION = ALL; IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[SAB]') AND name = N'SAB_Index_FEE_CODE_4TH') ALTER INDEX [SAB_Index_FEE_CODE_4TH] ON [dbo].[SAB] REBUILD PARTITION = ALL; IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[SAB]') AND name = N'SAB_Index_FEE_CODE_KG') ALTER INDEX [SAB_Index_FEE_CODE_KG] ON [dbo].[SAB] REBUILD PARTITION = ALL; "); } /// <summary> /// Returns a <see cref="SqlCommand"/> which deletes the <see cref="SAB"/> entities passed /// </summary> /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param> /// <param name="Entities">The <see cref="SAB"/> entities to be deleted</param> public override SqlCommand GetSqlDeleteCommand(SqlConnection SqlConnection, IEnumerable<SAB> Entities) { SqlCommand command = new SqlCommand(); int parameterIndex = 0; StringBuilder builder = new StringBuilder(); List<string> Index_SABKEY = new List<string>(); foreach (var entity in Entities) { Index_SABKEY.Add(entity.SABKEY); } builder.AppendLine("DELETE [dbo].[SAB] WHERE"); // Index_SABKEY builder.Append("[SABKEY] IN ("); for (int index = 0; index < Index_SABKEY.Count; index++) { if (index != 0) builder.Append(", "); // SABKEY var parameterSABKEY = $"@p{parameterIndex++}"; builder.Append(parameterSABKEY); command.Parameters.Add(parameterSABKEY, SqlDbType.VarChar, 10).Value = Index_SABKEY[index]; } builder.Append(");"); command.Connection = SqlConnection; command.CommandText = builder.ToString(); return command; } /// <summary> /// Provides a <see cref="IDataReader"/> for the SAB data set /// </summary> /// <returns>A <see cref="IDataReader"/> for the SAB data set</returns> public override EduHubDataSetDataReader<SAB> GetDataSetDataReader() { return new SABDataReader(Load()); } /// <summary> /// Provides a <see cref="IDataReader"/> for the SAB data set /// </summary> /// <returns>A <see cref="IDataReader"/> for the SAB data set</returns> public override EduHubDataSetDataReader<SAB> GetDataSetDataReader(List<SAB> Entities) { return new SABDataReader(new EduHubDataSetLoadedReader<SAB>(this, Entities)); } // Modest implementation to primarily support SqlBulkCopy private class SABDataReader : EduHubDataSetDataReader<SAB> { public SABDataReader(IEduHubDataSetReader<SAB> Reader) : base (Reader) { } public override int FieldCount { get { return 16; } } public override object GetValue(int i) { switch (i) { case 0: // SABKEY return Current.SABKEY; case 1: // DESCRIPTION return Current.DESCRIPTION; case 2: // BILL_TYPE return 
Current.BILL_TYPE; case 3: // FROM_CLASS return Current.FROM_CLASS; case 4: // TO_CLASS return Current.TO_CLASS; case 5: // FROM_YEAR return Current.FROM_YEAR; case 6: // TO_YEAR return Current.TO_YEAR; case 7: // RES_STATUS return Current.RES_STATUS; case 8: // FEE_CODE_1ST return Current.FEE_CODE_1ST; case 9: // FEE_CODE_2ND return Current.FEE_CODE_2ND; case 10: // FEE_CODE_3RD return Current.FEE_CODE_3RD; case 11: // FEE_CODE_4TH return Current.FEE_CODE_4TH; case 12: // FEE_CODE_KG return Current.FEE_CODE_KG; case 13: // LW_DATE return Current.LW_DATE; case 14: // LW_TIME return Current.LW_TIME; case 15: // LW_USER return Current.LW_USER; default: throw new ArgumentOutOfRangeException(nameof(i)); } } public override bool IsDBNull(int i) { switch (i) { case 1: // DESCRIPTION return Current.DESCRIPTION == null; case 2: // BILL_TYPE return Current.BILL_TYPE == null; case 3: // FROM_CLASS return Current.FROM_CLASS == null; case 4: // TO_CLASS return Current.TO_CLASS == null; case 5: // FROM_YEAR return Current.FROM_YEAR == null; case 6: // TO_YEAR return Current.TO_YEAR == null; case 7: // RES_STATUS return Current.RES_STATUS == null; case 8: // FEE_CODE_1ST return Current.FEE_CODE_1ST == null; case 9: // FEE_CODE_2ND return Current.FEE_CODE_2ND == null; case 10: // FEE_CODE_3RD return Current.FEE_CODE_3RD == null; case 11: // FEE_CODE_4TH return Current.FEE_CODE_4TH == null; case 12: // FEE_CODE_KG return Current.FEE_CODE_KG == null; case 13: // LW_DATE return Current.LW_DATE == null; case 14: // LW_TIME return Current.LW_TIME == null; case 15: // LW_USER return Current.LW_USER == null; default: return false; } } public override string GetName(int ordinal) { switch (ordinal) { case 0: // SABKEY return "SABKEY"; case 1: // DESCRIPTION return "DESCRIPTION"; case 2: // BILL_TYPE return "BILL_TYPE"; case 3: // FROM_CLASS return "FROM_CLASS"; case 4: // TO_CLASS return "TO_CLASS"; case 5: // FROM_YEAR return "FROM_YEAR"; case 6: // TO_YEAR return "TO_YEAR"; case 7: // RES_STATUS return "RES_STATUS"; case 8: // FEE_CODE_1ST return "FEE_CODE_1ST"; case 9: // FEE_CODE_2ND return "FEE_CODE_2ND"; case 10: // FEE_CODE_3RD return "FEE_CODE_3RD"; case 11: // FEE_CODE_4TH return "FEE_CODE_4TH"; case 12: // FEE_CODE_KG return "FEE_CODE_KG"; case 13: // LW_DATE return "LW_DATE"; case 14: // LW_TIME return "LW_TIME"; case 15: // LW_USER return "LW_USER"; default: throw new ArgumentOutOfRangeException(nameof(ordinal)); } } public override int GetOrdinal(string name) { switch (name) { case "SABKEY": return 0; case "DESCRIPTION": return 1; case "BILL_TYPE": return 2; case "FROM_CLASS": return 3; case "TO_CLASS": return 4; case "FROM_YEAR": return 5; case "TO_YEAR": return 6; case "RES_STATUS": return 7; case "FEE_CODE_1ST": return 8; case "FEE_CODE_2ND": return 9; case "FEE_CODE_3RD": return 10; case "FEE_CODE_4TH": return 11; case "FEE_CODE_KG": return 12; case "LW_DATE": return 13; case "LW_TIME": return 14; case "LW_USER": return 15; default: throw new ArgumentOutOfRangeException(nameof(name)); } } } #endregion } }
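GetSqlDeleteCommand() above assembles a parameterised IN (...) clause instead of concatenating key values into the SQL text. A stripped-down sketch of that pattern follows, using the SAB table and SABKEY column from the generated code; as in the original, callers are expected to pass at least one key.

using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Text;

static class DeleteCommandSketch
{
    // Builds: DELETE [dbo].[SAB] WHERE [SABKEY] IN (@p0, @p1, ...);
    public static SqlCommand Build(SqlConnection connection, IEnumerable<string> keys)
    {
        var command = new SqlCommand { Connection = connection };
        var builder = new StringBuilder("DELETE [dbo].[SAB] WHERE [SABKEY] IN (");

        int index = 0;
        foreach (var key in keys)
        {
            if (index != 0)
                builder.Append(", ");

            var parameterName = $"@p{index++}";
            builder.Append(parameterName);
            command.Parameters.Add(parameterName, SqlDbType.VarChar, 10).Value = key;
        }

        builder.Append(");");
        command.CommandText = builder.ToString();
        return command;
    }
}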
// Copyright (c) ppy Pty Ltd <[email protected]>. Licensed under the MIT Licence. // See the LICENCE file in the repository root for full licence text. using System; using System.Collections.Generic; using System.Linq; using osu.Framework.Allocation; using osu.Framework.Extensions; using osu.Framework.Extensions.Color4Extensions; using osu.Framework.Graphics; using osu.Framework.Graphics.Colour; using osu.Framework.Graphics.Containers; using osu.Framework.Graphics.Shapes; using osu.Framework.Graphics.Sprites; using osu.Framework.Graphics.Textures; using osu.Framework.Localisation; using osu.Game.Beatmaps; using osu.Game.Graphics; using osu.Game.Graphics.Sprites; using osu.Game.Graphics.UserInterface; using osu.Game.Online.Leaderboards; using osu.Game.Rulesets.Scoring; using osu.Game.Scoring; using osu.Game.Screens.Play; using osu.Game.Users; using osuTK; using osuTK.Graphics; namespace osu.Game.Screens.Ranking.Pages { public class ScoreResultsPage : ResultsPage { private Container scoreContainer; private ScoreCounter scoreCounter; private readonly ScoreInfo score; public ScoreResultsPage(ScoreInfo score, WorkingBeatmap beatmap) : base(score, beatmap) { this.score = score; } private FillFlowContainer<DrawableScoreStatistic> statisticsContainer; [BackgroundDependencyLoader] private void load(OsuColour colours) { const float user_header_height = 120; Children = new Drawable[] { new Container { RelativeSizeAxes = Axes.Both, Padding = new MarginPadding { Top = user_header_height }, Children = new Drawable[] { new Box { RelativeSizeAxes = Axes.Both, Colour = Color4.White, }, } }, new FillFlowContainer { RelativeSizeAxes = Axes.X, AutoSizeAxes = Axes.Y, Direction = FillDirection.Vertical, Children = new Drawable[] { new DelayedLoadWrapper(new UserHeader(Score.User) { RelativeSizeAxes = Axes.Both, }) { Anchor = Anchor.TopCentre, Origin = Anchor.TopCentre, RelativeSizeAxes = Axes.X, Height = user_header_height, }, new UpdateableRank(Score.Rank) { Anchor = Anchor.TopCentre, Origin = Anchor.TopCentre, Size = new Vector2(150, 60), Margin = new MarginPadding(20), }, scoreContainer = new Container { Anchor = Anchor.TopCentre, Origin = Anchor.TopCentre, RelativeSizeAxes = Axes.X, Height = 60, Children = new Drawable[] { new SongProgressGraph { RelativeSizeAxes = Axes.Both, Alpha = 0.5f, Objects = Beatmap.Beatmap.HitObjects, }, scoreCounter = new SlowScoreCounter(6) { Anchor = Anchor.Centre, Origin = Anchor.Centre, Colour = colours.PinkDarker, Y = 10, TextSize = 56, }, } }, new OsuSpriteText { Anchor = Anchor.TopCentre, Origin = Anchor.TopCentre, Colour = colours.PinkDarker, Shadow = false, Font = OsuFont.GetFont(weight: FontWeight.Bold), Text = "total score", Margin = new MarginPadding { Bottom = 15 }, }, new BeatmapDetails(Beatmap.BeatmapInfo) { Anchor = Anchor.TopCentre, Origin = Anchor.TopCentre, Margin = new MarginPadding { Bottom = 10 }, }, new DateTimeDisplay(Score.Date.LocalDateTime) { Anchor = Anchor.TopCentre, Origin = Anchor.TopCentre, }, new Container { RelativeSizeAxes = Axes.X, Size = new Vector2(0.75f, 1), Anchor = Anchor.TopCentre, Origin = Anchor.TopCentre, Margin = new MarginPadding { Top = 10, Bottom = 10 }, Children = new Drawable[] { new Box { Colour = ColourInfo.GradientHorizontal( colours.GrayC.Opacity(0), colours.GrayC.Opacity(0.9f)), RelativeSizeAxes = Axes.Both, Size = new Vector2(0.5f, 1), }, new Box { Anchor = Anchor.TopRight, Origin = Anchor.TopRight, Colour = ColourInfo.GradientHorizontal( colours.GrayC.Opacity(0.9f), colours.GrayC.Opacity(0)), RelativeSizeAxes = Axes.Both, 
Size = new Vector2(0.5f, 1), }, } }, statisticsContainer = new FillFlowContainer<DrawableScoreStatistic> { AutoSizeAxes = Axes.Both, Anchor = Anchor.TopCentre, Origin = Anchor.TopCentre, Direction = FillDirection.Horizontal, LayoutDuration = 200, LayoutEasing = Easing.OutQuint }, }, }, new FillFlowContainer { Anchor = Anchor.BottomCentre, Origin = Anchor.BottomCentre, Margin = new MarginPadding { Bottom = 10 }, Spacing = new Vector2(5), AutoSizeAxes = Axes.Both, Direction = FillDirection.Horizontal, Children = new Drawable[] { new ReplayDownloadButton(score), new RetryButton() } }, }; statisticsContainer.ChildrenEnumerable = Score.SortedStatistics.Select(s => new DrawableScoreStatistic(s)); } protected override void LoadComplete() { base.LoadComplete(); Schedule(() => { scoreCounter.Increment(Score.TotalScore); int delay = 0; foreach (var s in statisticsContainer.Children) { s.FadeOut() .Then(delay += 200) .FadeIn(300 + delay, Easing.Out); } }); } protected override void UpdateAfterChildren() { base.UpdateAfterChildren(); scoreCounter.Scale = new Vector2(Math.Min(1f, (scoreContainer.DrawWidth - 20) / scoreCounter.DrawWidth)); } private class DrawableScoreStatistic : Container { private readonly KeyValuePair<HitResult, int> statistic; public DrawableScoreStatistic(KeyValuePair<HitResult, int> statistic) { this.statistic = statistic; AutoSizeAxes = Axes.Both; Margin = new MarginPadding { Left = 5, Right = 5 }; } [BackgroundDependencyLoader] private void load(OsuColour colours) { Children = new Drawable[] { new OsuSpriteText { Text = statistic.Value.ToString().PadLeft(4, '0'), Colour = colours.Gray7, Font = OsuFont.GetFont(size: 30), Anchor = Anchor.TopCentre, Origin = Anchor.TopCentre, }, new OsuSpriteText { Text = statistic.Key.GetDescription(), Colour = colours.Gray7, Font = OsuFont.GetFont(weight: FontWeight.Bold), Y = 26, Anchor = Anchor.TopCentre, Origin = Anchor.TopCentre, }, }; } } private class DateTimeDisplay : Container { private readonly DateTime date; public DateTimeDisplay(DateTime date) { this.date = date; AutoSizeAxes = Axes.Both; Masking = true; CornerRadius = 5; } [BackgroundDependencyLoader] private void load(OsuColour colours) { Children = new Drawable[] { new Box { RelativeSizeAxes = Axes.Both, Colour = colours.Gray6, }, new FillFlowContainer { AutoSizeAxes = Axes.Both, Direction = FillDirection.Horizontal, Padding = new MarginPadding { Horizontal = 10, Vertical = 5 }, Spacing = new Vector2(10), Children = new[] { new OsuSpriteText { Text = date.ToShortDateString(), Colour = Color4.White, }, new OsuSpriteText { Text = date.ToShortTimeString(), Colour = Color4.White, } } }, }; } } private class BeatmapDetails : Container { private readonly BeatmapInfo beatmap; private readonly OsuSpriteText title; private readonly OsuSpriteText artist; private readonly OsuSpriteText versionMapper; public BeatmapDetails(BeatmapInfo beatmap) { this.beatmap = beatmap; AutoSizeAxes = Axes.Both; Children = new Drawable[] { new FillFlowContainer { Direction = FillDirection.Vertical, RelativeSizeAxes = Axes.X, AutoSizeAxes = Axes.Y, Children = new Drawable[] { title = new OsuSpriteText { Anchor = Anchor.TopCentre, Origin = Anchor.TopCentre, Shadow = false, Font = OsuFont.GetFont(weight: FontWeight.Bold, size: 24, italics: true), }, artist = new OsuSpriteText { Anchor = Anchor.TopCentre, Origin = Anchor.TopCentre, Shadow = false, Font = OsuFont.GetFont(weight: FontWeight.Bold, size: 20, italics: true), }, versionMapper = new OsuSpriteText { Anchor = Anchor.TopCentre, Origin = Anchor.TopCentre, 
Shadow = false, Font = OsuFont.GetFont(weight: FontWeight.Bold), }, } } }; } [BackgroundDependencyLoader] private void load(OsuColour colours) { title.Colour = artist.Colour = colours.BlueDarker; versionMapper.Colour = colours.Gray8; var creator = beatmap.Metadata.Author?.Username; if (!string.IsNullOrEmpty(creator)) { versionMapper.Text = $"mapped by {creator}"; if (!string.IsNullOrEmpty(beatmap.Version)) versionMapper.Text = $"{beatmap.Version} - " + versionMapper.Text; } title.Text = new LocalisedString((beatmap.Metadata.TitleUnicode, beatmap.Metadata.Title)); artist.Text = new LocalisedString((beatmap.Metadata.ArtistUnicode, beatmap.Metadata.Artist)); } } [LongRunningLoad] private class UserHeader : Container { private readonly User user; private readonly Sprite cover; public UserHeader(User user) { this.user = user; Children = new Drawable[] { cover = new Sprite { RelativeSizeAxes = Axes.Both, FillMode = FillMode.Fill, Anchor = Anchor.Centre, Origin = Anchor.Centre, }, new OsuSpriteText { Anchor = Anchor.BottomCentre, Origin = Anchor.BottomCentre, Text = user.Username, Font = OsuFont.GetFont(size: 30, weight: FontWeight.Regular, italics: true), Padding = new MarginPadding { Bottom = 10 }, } }; } [BackgroundDependencyLoader] private void load(LargeTextureStore textures) { if (!string.IsNullOrEmpty(user.CoverUrl)) cover.Texture = textures.Get(user.CoverUrl); } } private class SlowScoreCounter : ScoreCounter { protected override double RollingDuration => 3000; protected override Easing RollingEasing => Easing.OutPow10; public SlowScoreCounter(uint leading = 0) : base(leading) { DisplayedCountSpriteText.Shadow = false; DisplayedCountSpriteText.Font = DisplayedCountSpriteText.Font.With(Typeface.Venera, weight: FontWeight.Light); UseCommaSeparator = true; } } } }
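UpdateAfterChildren() above shrinks the score counter only when it would overflow its container, keeping 20 pixels of horizontal padding. The rule is plain arithmetic; a small sketch with sample values (the 20 px constant matches the code, the widths are illustrative):

using System;

static class FitToWidthSketch
{
    // Scale factor applied to the counter: never enlarge, only shrink to fit.
    public static float ScaleFor(float containerWidth, float contentWidth)
        => Math.Min(1f, (containerWidth - 20) / contentWidth);
}

// FitToWidthSketch.ScaleFor(600, 400) == 1     -> enough room, no shrinking
// FitToWidthSketch.ScaleFor(300, 400) == 0.7f  -> shrink to the 280 px of usable width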
using System; using System.Collections; using System.Collections.Generic; using System.Collections.Specialized; using System.ComponentModel; using System.Diagnostics.CodeAnalysis; using System.Linq; using Avalonia.Data.Core; namespace Avalonia.Collections { /// <summary> /// A notifying dictionary. /// </summary> /// <typeparam name="TKey">The type of the dictionary key.</typeparam> /// <typeparam name="TValue">The type of the dictionary value.</typeparam> public class AvaloniaDictionary<TKey, TValue> : IDictionary<TKey, TValue>, IDictionary, INotifyCollectionChanged, INotifyPropertyChanged where TKey : notnull { private Dictionary<TKey, TValue> _inner; /// <summary> /// Initializes a new instance of the <see cref="AvaloniaDictionary{TKey, TValue}"/> class. /// </summary> public AvaloniaDictionary() { _inner = new Dictionary<TKey, TValue>(); } /// <summary> /// Occurs when the collection changes. /// </summary> public event NotifyCollectionChangedEventHandler? CollectionChanged; /// <summary> /// Raised when a property on the collection changes. /// </summary> public event PropertyChangedEventHandler? PropertyChanged; /// <inheritdoc/> public int Count => _inner.Count; /// <inheritdoc/> public bool IsReadOnly => false; /// <inheritdoc/> public ICollection<TKey> Keys => _inner.Keys; /// <inheritdoc/> public ICollection<TValue> Values => _inner.Values; bool IDictionary.IsFixedSize => ((IDictionary)_inner).IsFixedSize; ICollection IDictionary.Keys => ((IDictionary)_inner).Keys; ICollection IDictionary.Values => ((IDictionary)_inner).Values; bool ICollection.IsSynchronized => ((IDictionary)_inner).IsSynchronized; object ICollection.SyncRoot => ((IDictionary)_inner).SyncRoot; /// <summary> /// Gets or sets the named resource. /// </summary> /// <param name="key">The resource key.</param> /// <returns>The resource, or null if not found.</returns> public TValue this[TKey key] { get { return _inner[key]; } set { bool replace = _inner.TryGetValue(key, out var old); _inner[key] = value; if (replace) { PropertyChanged?.Invoke(this, new PropertyChangedEventArgs($"Item[{key}]")); if (CollectionChanged != null) { var e = new NotifyCollectionChangedEventArgs( NotifyCollectionChangedAction.Replace, new KeyValuePair<TKey, TValue>(key, value), new KeyValuePair<TKey, TValue>(key, old!)); CollectionChanged(this, e); } } else { NotifyAdd(key, value); } } } object? 
IDictionary.this[object key] { get => ((IDictionary)_inner)[key]; set => ((IDictionary)_inner)[key] = value; } /// <inheritdoc/> public void Add(TKey key, TValue value) { _inner.Add(key, value); NotifyAdd(key, value); } /// <inheritdoc/> public void Clear() { var old = _inner; _inner = new Dictionary<TKey, TValue>(); PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(nameof(Count))); PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(CommonPropertyNames.IndexerName)); if (CollectionChanged != null) { var e = new NotifyCollectionChangedEventArgs( NotifyCollectionChangedAction.Remove, old.ToList(), -1); CollectionChanged(this, e); } } /// <inheritdoc/> public bool ContainsKey(TKey key) => _inner.ContainsKey(key); /// <inheritdoc/> public void CopyTo(KeyValuePair<TKey, TValue>[] array, int arrayIndex) { ((IDictionary<TKey, TValue>)_inner).CopyTo(array, arrayIndex); } /// <inheritdoc/> public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator() => _inner.GetEnumerator(); /// <inheritdoc/> public bool Remove(TKey key) { if (_inner.TryGetValue(key, out var value)) { PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(nameof(Count))); PropertyChanged?.Invoke(this, new PropertyChangedEventArgs($"Item[{key}]")); if (CollectionChanged != null) { var e = new NotifyCollectionChangedEventArgs( NotifyCollectionChangedAction.Remove, new[] { new KeyValuePair<TKey, TValue>(key, value) }, -1); CollectionChanged(this, e); } return true; } else { return false; } } /// <inheritdoc/> public bool TryGetValue(TKey key, [MaybeNullWhen(false)] out TValue value) => _inner.TryGetValue(key, out value); /// <inheritdoc/> IEnumerator IEnumerable.GetEnumerator() => _inner.GetEnumerator(); /// <inheritdoc/> void ICollection.CopyTo(Array array, int index) => ((ICollection)_inner).CopyTo(array, index); /// <inheritdoc/> void ICollection<KeyValuePair<TKey, TValue>>.Add(KeyValuePair<TKey, TValue> item) { Add(item.Key, item.Value); } /// <inheritdoc/> bool ICollection<KeyValuePair<TKey, TValue>>.Contains(KeyValuePair<TKey, TValue> item) { return _inner.Contains(item); } /// <inheritdoc/> bool ICollection<KeyValuePair<TKey, TValue>>.Remove(KeyValuePair<TKey, TValue> item) { return Remove(item.Key); } /// <inheritdoc/> void IDictionary.Add(object key, object? value) => Add((TKey)key, (TValue)value!); /// <inheritdoc/> bool IDictionary.Contains(object key) => ((IDictionary) _inner).Contains(key); /// <inheritdoc/> IDictionaryEnumerator IDictionary.GetEnumerator() => ((IDictionary)_inner).GetEnumerator(); /// <inheritdoc/> void IDictionary.Remove(object key) => Remove((TKey)key); private void NotifyAdd(TKey key, TValue value) { PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(nameof(Count))); PropertyChanged?.Invoke(this, new PropertyChangedEventArgs($"Item[{key}]")); if (CollectionChanged != null) { var e = new NotifyCollectionChangedEventArgs( NotifyCollectionChangedAction.Add, new[] { new KeyValuePair<TKey, TValue>(key, value) }, -1); CollectionChanged(this, e); } } } }
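A short usage sketch for the AvaloniaDictionary defined above, showing which notifications each operation raises; the behaviour is read directly from the indexer, Add and Remove implementations, and the example itself is not part of the original sources.

using System;
using Avalonia.Collections;

class NotificationSketch
{
    static void Main()
    {
        var dictionary = new AvaloniaDictionary<string, int>();

        dictionary.CollectionChanged += (sender, e) =>
            Console.WriteLine($"Collection changed: {e.Action}");
        dictionary.PropertyChanged += (sender, e) =>
            Console.WriteLine($"Property changed: {e.PropertyName}");

        dictionary.Add("answer", 41);   // Add      (+ "Count" and "Item[answer]")
        dictionary["answer"] = 42;      // Replace  (+ "Item[answer]" only)
        dictionary.Remove("answer");    // Remove   (+ "Count" and "Item[answer]")
    }
}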
#region License // Copyright (c) 2007 James Newton-King; 2014 Extesla, LLC. // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation // files (the "Software"), to deal in the Software without // restriction, including without limitation the rights to use, // copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following // conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR // OTHER DEALINGS IN THE SOFTWARE. #endregion using System; using OpenGamingLibrary.Json.Utilities; #if NET20 using OpenGamingLibrary.Json.Utilities.LinqBridge; #endif namespace OpenGamingLibrary.Json.Serialization { /// <summary> /// Maps a JSON property to a .NET member or constructor parameter. /// </summary> public class JsonProperty { internal Required? _required; internal bool _hasExplicitDefaultValue; private object _defaultValue; private bool _hasGeneratedDefaultValue; private string _propertyName; internal bool _skipPropertyNameEscape; private Type _propertyType; // use to cache contract during deserialization internal JsonContract PropertyContract { get; set; } /// <summary> /// Gets or sets the name of the property. /// </summary> /// <value>The name of the property.</value> public string PropertyName { get { return _propertyName; } set { _propertyName = value; _skipPropertyNameEscape = !JavaScriptUtils.ShouldEscapeJavaScriptString(_propertyName, JavaScriptUtils.HtmlCharEscapeFlags); } } /// <summary> /// Gets or sets the type that declared this property. /// </summary> /// <value>The type that declared this property.</value> public Type DeclaringType { get; set; } /// <summary> /// Gets or sets the order of serialization and deserialization of a member. /// </summary> /// <value>The numeric order of serialization or deserialization.</value> public int? Order { get; set; } /// <summary> /// Gets or sets the name of the underlying member or parameter. /// </summary> /// <value>The name of the underlying member or parameter.</value> public string UnderlyingName { get; set; } /// <summary> /// Gets the <see cref="IValueProvider"/> that will get and set the <see cref="JsonProperty"/> during serialization. /// </summary> /// <value>The <see cref="IValueProvider"/> that will get and set the <see cref="JsonProperty"/> during serialization.</value> public IValueProvider ValueProvider { get; set; } /// <summary> /// Gets or sets the type of the property. /// </summary> /// <value>The type of the property.</value> public Type PropertyType { get { return _propertyType; } set { if (_propertyType != value) { _propertyType = value; _hasGeneratedDefaultValue = false; } } } /// <summary> /// Gets or sets the <see cref="JsonConverter" /> for the property. /// If set this converter takes presidence over the contract converter for the property type. 
/// </summary> /// <value>The converter.</value> public JsonConverter Converter { get; set; } /// <summary> /// Gets or sets the member converter. /// </summary> /// <value>The member converter.</value> public JsonConverter MemberConverter { get; set; } /// <summary> /// Gets or sets a value indicating whether this <see cref="JsonProperty"/> is ignored. /// </summary> /// <value><c>true</c> if ignored; otherwise, <c>false</c>.</value> public bool Ignored { get; set; } /// <summary> /// Gets or sets a value indicating whether this <see cref="JsonProperty"/> is readable. /// </summary> /// <value><c>true</c> if readable; otherwise, <c>false</c>.</value> public bool Readable { get; set; } /// <summary> /// Gets or sets a value indicating whether this <see cref="JsonProperty"/> is writable. /// </summary> /// <value><c>true</c> if writable; otherwise, <c>false</c>.</value> public bool Writable { get; set; } /// <summary> /// Gets or sets a value indicating whether this <see cref="JsonProperty"/> has a member attribute. /// </summary> /// <value><c>true</c> if has a member attribute; otherwise, <c>false</c>.</value> public bool HasMemberAttribute { get; set; } /// <summary> /// Gets the default value. /// </summary> /// <value>The default value.</value> public object DefaultValue { get { if (!_hasExplicitDefaultValue) return null; return _defaultValue; } set { _hasExplicitDefaultValue = true; _defaultValue = value; } } internal object GetResolvedDefaultValue() { if (_propertyType == null) return null; if (!_hasExplicitDefaultValue && !_hasGeneratedDefaultValue) { _defaultValue = ReflectionUtils.GetDefaultValue(PropertyType); _hasGeneratedDefaultValue = true; } return _defaultValue; } /// <summary> /// Gets or sets a value indicating whether this <see cref="JsonProperty"/> is required. /// </summary> /// <value>A value indicating whether this <see cref="JsonProperty"/> is required.</value> public Required Required { get { return _required ?? Required.Default; } set { _required = value; } } /// <summary> /// Gets or sets a value indicating whether this property preserves object references. /// </summary> /// <value> /// <c>true</c> if this instance is reference; otherwise, <c>false</c>. /// </value> public bool? IsReference { get; set; } /// <summary> /// Gets or sets the property null value handling. /// </summary> /// <value>The null value handling.</value> public NullValueHandling? NullValueHandling { get; set; } /// <summary> /// Gets or sets the property default value handling. /// </summary> /// <value>The default value handling.</value> public DefaultValueHandling? DefaultValueHandling { get; set; } /// <summary> /// Gets or sets the property reference loop handling. /// </summary> /// <value>The reference loop handling.</value> public ReferenceLoopHandling? ReferenceLoopHandling { get; set; } /// <summary> /// Gets or sets the property object creation handling. /// </summary> /// <value>The object creation handling.</value> public ObjectCreationHandling? ObjectCreationHandling { get; set; } /// <summary> /// Gets or sets or sets the type name handling. /// </summary> /// <value>The type name handling.</value> public TypeNameHandling? TypeNameHandling { get; set; } /// <summary> /// Gets or sets a predicate used to determine whether the property should be serialize. 
/// </summary> /// <value>A predicate used to determine whether the property should be serialize.</value> public Predicate<object> ShouldSerialize { get; set; } /// <summary> /// Gets or sets a predicate used to determine whether the property should be serialized. /// </summary> /// <value>A predicate used to determine whether the property should be serialized.</value> public Predicate<object> GetIsSpecified { get; set; } /// <summary> /// Gets or sets an action used to set whether the property has been deserialized. /// </summary> /// <value>An action used to set whether the property has been deserialized.</value> public Action<object, object> SetIsSpecified { get; set; } /// <summary> /// Returns a <see cref="String"/> that represents this instance. /// </summary> /// <returns> /// A <see cref="String"/> that represents this instance. /// </returns> public override string ToString() { return PropertyName; } /// <summary> /// Gets or sets the converter used when serializing the property's collection items. /// </summary> /// <value>The collection's items converter.</value> public JsonConverter ItemConverter { get; set; } /// <summary> /// Gets or sets whether this property's collection items are serialized as a reference. /// </summary> /// <value>Whether this property's collection items are serialized as a reference.</value> public bool? ItemIsReference { get; set; } /// <summary> /// Gets or sets the the type name handling used when serializing the property's collection items. /// </summary> /// <value>The collection's items type name handling.</value> public TypeNameHandling? ItemTypeNameHandling { get; set; } /// <summary> /// Gets or sets the the reference loop handling used when serializing the property's collection items. /// </summary> /// <value>The collection's items reference loop handling.</value> public ReferenceLoopHandling? ItemReferenceLoopHandling { get; set; } internal void WritePropertyName(JsonWriter writer) { if (_skipPropertyNameEscape) writer.WritePropertyName(PropertyName, false); else writer.WritePropertyName(PropertyName); } } }
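A hedged sketch of how a contract resolver might populate the JsonProperty defined above. The member being described is invented for illustration, only properties declared in the class above are used, and the assumption that ShouldSerialize receives the declaring object follows the Json.NET convention rather than anything stated in this file.

using System;
using OpenGamingLibrary.Json.Serialization;

static class JsonPropertySketch
{
    public static JsonProperty Describe()
    {
        return new JsonProperty
        {
            PropertyName = "displayName",   // JSON name; the setter also precomputes escaping
            UnderlyingName = "DisplayName", // CLR member name (illustrative)
            PropertyType = typeof(string),
            DeclaringType = typeof(object),
            Order = 1,
            Readable = true,
            Writable = true,
            DefaultValue = "",              // marks an explicit default via the DefaultValue setter
            // Assumed Json.NET-style contract: the predicate receives the declaring object.
            ShouldSerialize = declaringObject => declaringObject != null
        };
    }
}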
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Web; using Common.Logging; using Microsoft.Isam.Esent.Interop; using Rhino.Queues.Model; using Rhino.Queues.Protocol; namespace Rhino.Queues.Storage { public class SenderActions : AbstractActions { private readonly QueueManagerConfiguration configuration; private readonly ILog logger = LogManager.GetLogger(typeof (SenderActions)); public SenderActions(JET_INSTANCE instance, ColumnsInformation columnsInformation, string database, Guid instanceId, QueueManagerConfiguration configuration) : base(instance, columnsInformation, database, instanceId) { this.configuration = configuration; } public IList<PersistentMessage> GetMessagesToSendAndMarkThemAsInFlight(int maxNumberOfMessage, int maxSizeOfMessagesInTotal, out Endpoint endPoint) { Api.MoveBeforeFirst(session, outgoing); endPoint = null; string queue = null; var messages = new List<PersistentMessage>(); while (Api.TryMoveNext(session, outgoing)) { var msgId = new Guid(Api.RetrieveColumn(session, outgoing, ColumnsInformation.OutgoingColumns["msg_id"])); var value = (OutgoingMessageStatus)Api.RetrieveColumnAsInt32(session, outgoing, ColumnsInformation.OutgoingColumns["send_status"]).Value; var timeAsDate = Api.RetrieveColumnAsDouble(session, outgoing, ColumnsInformation.OutgoingColumns["time_to_send"]).Value; var time = DateTime.FromOADate(timeAsDate); logger.DebugFormat("Scanning message {0} with status {1} to be sent at {2}", msgId, value, time); if (value != OutgoingMessageStatus.Ready) continue; // Check if the message has expired, and move it to the outgoing history. var deliverBy = Api.RetrieveColumnAsDouble(session, outgoing, ColumnsInformation.OutgoingColumns["deliver_by"]); if (deliverBy != null) { var deliverByTime = DateTime.FromOADate(deliverBy.Value); if (deliverByTime < DateTime.Now) { logger.InfoFormat("Outgoing message {0} was not succesfully sent by its delivery time limit {1}", msgId, deliverByTime); var numOfRetries = Api.RetrieveColumnAsInt32(session, outgoing, ColumnsInformation.OutgoingColumns["number_of_retries"]).Value; MoveFailedMessageToOutgoingHistory(numOfRetries, msgId); continue; } } var maxAttempts = Api.RetrieveColumnAsInt32(session, outgoing, ColumnsInformation.OutgoingColumns["max_attempts"]); if (maxAttempts != null) { var numOfRetries = Api.RetrieveColumnAsInt32(session, outgoing, ColumnsInformation.OutgoingColumns["number_of_retries"]).Value; if (numOfRetries > maxAttempts) { logger.InfoFormat("Outgoing message {0} has reached its max attempts of {1}", msgId, maxAttempts); MoveFailedMessageToOutgoingHistory(numOfRetries, msgId); continue; } } if (time > DateTime.Now) continue; var rowEndpoint = new Endpoint( Api.RetrieveColumnAsString(session, outgoing, ColumnsInformation.OutgoingColumns["address"]), Api.RetrieveColumnAsInt32(session, outgoing, ColumnsInformation.OutgoingColumns["port"]).Value ); if (endPoint == null) endPoint = rowEndpoint; if (endPoint.Equals(rowEndpoint) == false) continue; var rowQueue = Api.RetrieveColumnAsString(session, outgoing, ColumnsInformation.OutgoingColumns["queue"], Encoding.Unicode); if (queue == null) queue = rowQueue; if(queue != rowQueue) continue; var bookmark = new MessageBookmark(); Api.JetGetBookmark(session, outgoing, bookmark.Bookmark, bookmark.Size, out bookmark.Size); logger.DebugFormat("Adding message {0} to returned messages", msgId); var headerAsQueryString = Api.RetrieveColumnAsString(session, outgoing, 
ColumnsInformation.OutgoingColumns["headers"],Encoding.Unicode); messages.Add(new PersistentMessage { Id = new MessageId { SourceInstanceId = instanceId, MessageIdentifier = msgId }, Headers = HttpUtility.ParseQueryString(headerAsQueryString), Queue = rowQueue, SubQueue = Api.RetrieveColumnAsString(session, outgoing, ColumnsInformation.OutgoingColumns["subqueue"], Encoding.Unicode), SentAt = DateTime.FromOADate(Api.RetrieveColumnAsDouble(session, outgoing, ColumnsInformation.OutgoingColumns["sent_at"]).Value), Data = Api.RetrieveColumn(session, outgoing, ColumnsInformation.OutgoingColumns["data"]), Bookmark = bookmark }); using (var update = new Update(session, outgoing, JET_prep.Replace)) { Api.SetColumn(session, outgoing, ColumnsInformation.OutgoingColumns["send_status"], (int)OutgoingMessageStatus.InFlight); update.Save(); } logger.DebugFormat("Marking output message {0} as InFlight", msgId); if (maxNumberOfMessage < messages.Count) break; if (maxSizeOfMessagesInTotal < messages.Sum(x => x.Data.Length)) break; } return messages; } public void MarkOutgoingMessageAsFailedTransmission(MessageBookmark bookmark, bool queueDoesNotExistsInDestination) { Api.JetGotoBookmark(session, outgoing, bookmark.Bookmark, bookmark.Size); var numOfRetries = Api.RetrieveColumnAsInt32(session, outgoing, ColumnsInformation.OutgoingColumns["number_of_retries"]).Value; var msgId = new Guid(Api.RetrieveColumn(session, outgoing, ColumnsInformation.OutgoingColumns["msg_id"])); if (numOfRetries < 100 && queueDoesNotExistsInDestination == false) { using (var update = new Update(session, outgoing, JET_prep.Replace)) { var timeToSend = DateTime.Now.AddSeconds(numOfRetries * numOfRetries); Api.SetColumn(session, outgoing, ColumnsInformation.OutgoingColumns["send_status"], (int)OutgoingMessageStatus.Ready); Api.SetColumn(session, outgoing, ColumnsInformation.OutgoingColumns["time_to_send"], timeToSend.ToOADate()); Api.SetColumn(session, outgoing, ColumnsInformation.OutgoingColumns["number_of_retries"], numOfRetries + 1); logger.DebugFormat("Marking outgoing message {0} as failed with retries: {1}", msgId, numOfRetries); update.Save(); } } else { MoveFailedMessageToOutgoingHistory(numOfRetries, msgId); } } public MessageBookmark MarkOutgoingMessageAsSuccessfullySent(MessageBookmark bookmark) { Api.JetGotoBookmark(session, outgoing, bookmark.Bookmark, bookmark.Size); var newBookmark = new MessageBookmark(); using (var update = new Update(session, outgoingHistory, JET_prep.Insert)) { foreach (var column in ColumnsInformation.OutgoingColumns.Keys) { var bytes = Api.RetrieveColumn(session, outgoing, ColumnsInformation.OutgoingColumns[column]); Api.SetColumn(session, outgoingHistory, ColumnsInformation.OutgoingHistoryColumns[column], bytes); } Api.SetColumn(session, outgoingHistory, ColumnsInformation.OutgoingHistoryColumns["send_status"], (int)OutgoingMessageStatus.Sent); update.Save(newBookmark.Bookmark, newBookmark.Size, out newBookmark.Size); } var msgId = new Guid(Api.RetrieveColumn(session, outgoing, ColumnsInformation.OutgoingColumns["msg_id"])); Api.JetDelete(session, outgoing); logger.DebugFormat("Successfully sent output message {0}", msgId); return newBookmark; } public bool HasMessagesToSend() { Api.MoveBeforeFirst(session, outgoing); return Api.TryMoveNext(session, outgoing); } public IEnumerable<PersistentMessageToSend> GetMessagesToSend() { Api.MoveBeforeFirst(session, outgoing); while (Api.TryMoveNext(session, outgoing)) { var address = Api.RetrieveColumnAsString(session, outgoing, 
ColumnsInformation.OutgoingColumns["address"]); var port = Api.RetrieveColumnAsInt32(session, outgoing, ColumnsInformation.OutgoingColumns["port"]).Value; var bookmark = new MessageBookmark(); Api.JetGetBookmark(session, outgoing, bookmark.Bookmark, bookmark.Size, out bookmark.Size); yield return new PersistentMessageToSend { Id = new MessageId { SourceInstanceId = instanceId, MessageIdentifier = new Guid(Api.RetrieveColumn(session, outgoing, ColumnsInformation.OutgoingColumns["msg_id"])) }, OutgoingStatus = (OutgoingMessageStatus)Api.RetrieveColumnAsInt32(session, outgoing, ColumnsInformation.OutgoingColumns["send_status"]).Value, Endpoint = new Endpoint(address, port), Queue = Api.RetrieveColumnAsString(session, outgoing, ColumnsInformation.OutgoingColumns["queue"], Encoding.Unicode), SubQueue = Api.RetrieveColumnAsString(session, outgoing, ColumnsInformation.OutgoingColumns["subqueue"], Encoding.Unicode), SentAt = DateTime.FromOADate(Api.RetrieveColumnAsDouble(session, outgoing, ColumnsInformation.OutgoingColumns["sent_at"]).Value), Data = Api.RetrieveColumn(session, outgoing, ColumnsInformation.OutgoingColumns["data"]), Bookmark = bookmark }; } } public void RevertBackToSend(MessageBookmark[] bookmarks) { foreach (var bookmark in bookmarks) { Api.JetGotoBookmark(session, outgoingHistory, bookmark.Bookmark, bookmark.Size); var msgId = new Guid(Api.RetrieveColumn(session, outgoingHistory, ColumnsInformation.OutgoingColumns["msg_id"])); using(var update = new Update(session, outgoing, JET_prep.Insert)) { foreach (var column in ColumnsInformation.OutgoingColumns.Keys) { Api.SetColumn(session, outgoing, ColumnsInformation.OutgoingColumns[column], Api.RetrieveColumn(session, outgoingHistory, ColumnsInformation.OutgoingHistoryColumns[column]) ); } Api.SetColumn(session, outgoing, ColumnsInformation.OutgoingColumns["send_status"], (int)OutgoingMessageStatus.Ready); Api.SetColumn(session, outgoing, ColumnsInformation.OutgoingColumns["number_of_retries"], Api.RetrieveColumnAsInt32(session, outgoingHistory, ColumnsInformation.OutgoingHistoryColumns["number_of_retries"]).Value + 1 ); logger.DebugFormat("Reverting output message {0} back to Ready mode", msgId); update.Save(); } Api.JetDelete(session, outgoingHistory); } } private void MoveFailedMessageToOutgoingHistory(int numOfRetries, Guid msgId) { if (configuration.EnableOutgoingMessageHistory) { using (var update = new Update(session, outgoingHistory, JET_prep.Insert)) { foreach (var column in ColumnsInformation.OutgoingColumns.Keys) { Api.SetColumn(session, outgoingHistory, ColumnsInformation.OutgoingHistoryColumns[column], Api.RetrieveColumn(session, outgoing, ColumnsInformation.OutgoingColumns[column]) ); } Api.SetColumn(session, outgoingHistory, ColumnsInformation.OutgoingHistoryColumns["send_status"], (int)OutgoingMessageStatus.Failed); logger.DebugFormat("Marking outgoing message {0} as permenantly failed after {1} retries", msgId, numOfRetries); update.Save(); } } Api.JetDelete(session, outgoing); } } }
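// --- Illustrative sketch (editor's addition, not part of the original sources) ---------------------
// MarkOutgoingMessageAsFailedTransmission above reschedules a failed send retries-squared seconds
// into the future and gives up after 100 retries (or immediately when the destination queue does
// not exist), moving the message to the outgoing history. The helper below restates just that
// schedule so the back-off curve is easy to inspect in isolation; the class name is ours.
using System;

public static class OutgoingRetrySchedule
{
    public const int MaxRetries = 100;

    // Returns null when the message should be moved to the outgoing history instead of retried.
    public static DateTime? NextAttempt(DateTime now, int numberOfRetries, bool queueDoesNotExistInDestination = false)
    {
        if (numberOfRetries >= MaxRetries || queueDoesNotExistInDestination)
            return null;

        // Quadratic back-off (retries * retries seconds), matching the time_to_send update above.
        return now.AddSeconds(numberOfRetries * numberOfRetries);
    }

    public static void Main()
    {
        DateTime now = DateTime.Now;
        foreach (int retries in new[] { 1, 5, 10, 50, 100 })
            Console.WriteLine("after {0} retries -> {1}", retries, NextAttempt(now, retries));
    }
}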
using System; using System.Collections; using System.ComponentModel; using System.Drawing; using System.Data; using System.Windows.Forms; namespace EvoXP { /// <summary> /// Summary description for WayPointEditor. /// </summary> public class WayPointEditor : System.Windows.Forms.Control { private Font _Font = new Font("Arial", 8); private Pen _GPen = new Pen(Color.White, 1); private Pen _GPenS = new Pen(Color.DarkGray, 1); private Pen _CPen = new Pen(Color.CornflowerBlue, 1); private Pen _SPen = new Pen(Color.Aqua, 4); private Pen _SPenS = new Pen(Color.Orange, 4); private EconomyClient _EconClient = null; private Point _start_pan = new Point(0,0); private Point _pan = new Point(0,0); private double _scale = 1.0; private bool _dragging = false; private bool _scaling = false; private double _init_scale; public int sel = -1; public int sel2 = -1; public double _cursorX = 0.0; public double _cursorY = 0.0; private double _cursorX_capture = 0.0; private double _cursorY_capture = 0.0; public bool _bShowShips = true; public bool _bShowTradeDepots = true; public bool _bShowJumpGates = true; public bool _bShowStars = true; public WayPointEditor(EconomyClient ec) { _EconClient = ec; } private void DrawNode(Graphics g, int x, int y, bool sel, LocalGalaxyDetail d) { int w = 5; Pen OutLine = _GPen; if(sel) OutLine = _GPenS; g.FillEllipse(OutLine.Brush, x-w, y-w, 2*w,2*w); g.DrawString(d.name, _Font, OutLine.Brush, x,y+w+2); } private void DrawShip(Graphics g, int x, int y, bool sel, LocalShip s) { int w = 3; Pen OutLine = _SPen; if(sel) OutLine = _SPenS; g.FillEllipse(OutLine.Brush, x-w, y-w, 2*w,2*w); g.DrawString(s._name, _Font, OutLine.Brush, x,y+w+2); } private void DrawCursor(Graphics g, int x, int y) { g.DrawLine(_CPen, x,y-5,x,y+5); g.DrawLine(_CPen, x-5,y,x+5,y); } protected override void OnPaint(PaintEventArgs pe) { //DrawNode(pe.Graphics, 5, 5, false, "WTF"); /* int gTx = (int) ((G.GetPosition().X ) * _scale + _pan.X + Width/2); int gTy = (int) ((G.GetPosition().Y ) * _scale + _pan.Y + Height/2); */ int i, gTx, gTy; gTx = (int) ((_cursorX) * _scale + _pan.X + Width/2); gTy = (int) ((_cursorY) * _scale + _pan.Y + Height/2); DrawCursor(pe.Graphics, gTx, gTy); for(i=0;i<_EconClient._LocalGalaxy.Count;i++) { LocalGalaxyDetail LGD = _EconClient._LocalGalaxy[i] as LocalGalaxyDetail; if((LGD.isTradeDepot && _bShowTradeDepots) || (LGD.isJumpGate && _bShowJumpGates) || (!LGD.isJumpGate && !LGD.isTradeDepot && _bShowStars )) { gTx = (int) ((LGD.x) * _scale + _pan.X + Width/2); gTy = (int) ((LGD.y) * _scale + _pan.Y + Height/2); DrawNode(pe.Graphics, gTx, gTy, i==sel, LGD); } } for(i=0;i<_EconClient._LocalShips._Ships.Count;i++) { if(_bShowShips || i==sel2) { LocalShip LS = _EconClient._LocalShips._Ships.GetByIndex(i) as LocalShip; gTx = (int) ((LS._x) * _scale + _pan.X + Width/2); gTy = (int) ((LS._y) * _scale + _pan.Y + Height/2); DrawShip(pe.Graphics, gTx, gTy, i==sel2, LS); } } } protected override void OnPaintBackground(PaintEventArgs pe) { pe.Graphics.Clear(Color.Black); } private int GetSelected(int x, int y) { int i, gTx, gTy; for(i=0;i<_EconClient._LocalGalaxy.Count;i++) { LocalGalaxyDetail LGD = _EconClient._LocalGalaxy[i] as LocalGalaxyDetail; if((LGD.isTradeDepot && _bShowTradeDepots) || (LGD.isJumpGate && _bShowJumpGates)) { gTx = (int) ((LGD.x) * _scale + _pan.X + Width/2); gTy = (int) ((LGD.y) * _scale + _pan.Y + Height/2); int d = (gTx - x)*(gTx - x) + (gTy - y)*(gTy - y); if(d<=25) return i; } } return -1; } private int GetSelected2(int x, int y) { if(!_bShowShips) return sel2; int i, 
gTx, gTy; for(i=0;i<_EconClient._LocalShips._Ships.Count;i++) { LocalShip LS = _EconClient._LocalShips._Ships.GetByIndex(i) as LocalShip; gTx = (int) ((LS._x) * _scale + _pan.X + Width/2); gTy = (int) ((LS._y) * _scale + _pan.Y + Height/2); int d = (gTx - x)*(gTx - x) + (gTy - y)*(gTy - y); if(d<=25) return i; } return -1; } private int _CaptureState = 0; private int _TickDown = 10; private void OnVectorCaptureStart() { _TickDown = 10; // confirm // if this matchs what we want to click on, good } private void OnVectorCaptureEnd() { _TickDown = 10; } protected override void OnMouseDown(System.Windows.Forms.MouseEventArgs e) { if(_scale != 0.0) { _cursorX_capture = _cursorX; _cursorY_capture = _cursorY; _cursorX = (int)((e.X - Width/2 -_pan.X) / _scale); _cursorY = (int)((e.Y - Height/2 -_pan.Y) / _scale); _TickDown = 10; if(_CaptureState == 1) { OnVectorCaptureStart(); } _EconClient._LocalShips._sel_pos_x = _cursorX; _EconClient._LocalShips._sel_pos_y = _cursorY; _EconClient._LocalShips._SelectedGal = true; _EconClient._LocalShips.SyncData(); } sel = GetSelected(e.X,e.Y); if(sel>=0) { LocalGalaxyDetail LGD = _EconClient._LocalGalaxy[sel] as LocalGalaxyDetail; _EconClient._gameclient.NotifyGalaxySelect(LGD.name); } int dSel = sel2; sel2 = GetSelected2(e.X, e.Y); if(sel2 != dSel && sel2 >= 0) { _EconClient._LocalShips._SE.LookAt(sel2); } else { sel2 = dSel; } _start_pan.X = e.X; _start_pan.Y = e.Y; if(e.Button == MouseButtons.Left) { _dragging = true; } if(e.Button == MouseButtons.Right) { _init_scale = _scale; _scaling = true; } Refresh(); } protected override void OnMouseWheel(System.Windows.Forms.MouseEventArgs e) { /* _scale += (e.Delta / 2400.0); if(_scale < 0.01) _scale = 0.01; Refresh(); */ } public void MOnMouseWheel(System.Windows.Forms.MouseEventArgs e) { // OnMouseWheel(e); } protected override void OnMouseUp(System.Windows.Forms.MouseEventArgs e) { if(_CaptureState == 3 && _TickDown > 0) { OnVectorCaptureStart(); } if(_CaptureState == 2) { _CaptureState = 3; } _dragging = false; _scaling = false; Refresh(); } protected override void OnMouseLeave(System.EventArgs e) { _dragging = false; if(_CaptureState > 1) _CaptureState = 1; } protected override void OnMouseMove(System.Windows.Forms.MouseEventArgs e) { if(_dragging) { if(_TickDown > 0 ) _TickDown--; _pan.X+= ((e.X - _start_pan.X)); _pan.Y+= ((e.Y - _start_pan.Y)); _start_pan.X = e.X; _start_pan.Y = e.Y; Refresh(); } if(_scaling) { double d = Math.Exp ( (_start_pan.X - e.X) / 100.0); _scale = _init_scale * (d+.01); //_scale = (d + 1) / 50.0; Refresh(); } } } }
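// --- Illustrative sketch (editor's addition, not part of the original sources) ---------------------
// WayPointEditor maps galaxy coordinates to screen pixels with
//   screen = world * scale + pan + clientSize / 2
// and inverts the same formula in OnMouseDown to place the cursor. The helpers below restate that
// transform on its own; the class name is ours, and scale is assumed to be non-zero, as the
// control itself checks before dividing.
using System.Drawing;

public static class MapTransform
{
    public static Point WorldToScreen(double worldX, double worldY, double scale, Point pan, Size client)
    {
        return new Point(
            (int)(worldX * scale + pan.X + client.Width / 2),
            (int)(worldY * scale + pan.Y + client.Height / 2));
    }

    public static PointF ScreenToWorld(Point screen, double scale, Point pan, Size client)
    {
        return new PointF(
            (float)((screen.X - client.Width / 2 - pan.X) / scale),
            (float)((screen.Y - client.Height / 2 - pan.Y) / scale));
    }
}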
using System.Collections.Concurrent; using System.Collections.Generic; using System.Threading; using Microsoft.Orleans.ServiceFabric.Models; namespace Microsoft.Orleans.ServiceFabric.Utilities { using System; using System.Fabric; using System.Fabric.Query; using System.Threading.Tasks; using Microsoft.ServiceFabric.Services.Client; internal class FabricQueryManager : IFabricQueryManager { private readonly ConcurrentDictionary<Uri, ConcurrentDictionary<ServicePartitionKey, ResolvedServicePartition>> previousResolves = new ConcurrentDictionary<Uri, ConcurrentDictionary<ServicePartitionKey, ResolvedServicePartition>>(); private readonly FabricClient fabricClient; private readonly IServicePartitionResolver resolver; private readonly TimeSpan timeoutPerAttempt; private readonly TimeSpan maxBackoffInterval; public FabricQueryManager( FabricClient fabricClient, IServicePartitionResolver resolver) { this.fabricClient = fabricClient; this.resolver = resolver; this.timeoutPerAttempt = TimeSpan.FromSeconds(30); this.maxBackoffInterval = TimeSpan.FromSeconds(90); } /// <inheritdoc /> public void UnregisterPartitionChangeHandler(long id) { this.fabricClient.ServiceManager.UnregisterServicePartitionResolutionChangeHandler(id); } /// <inheritdoc /> public long RegisterPartitionChangeHandler( Uri serviceName, IResolvedServicePartition servicePartition, FabricPartitionResolutionChangeHandler handler) { var partition = servicePartition as ResolvedServicePartitionWrapper; if (partition == null) { throw new ArgumentException( string.Format( "Only partitions of type {0} are supported. Provided type {1} is not supported.", nameof(ResolvedServicePartitionWrapper), servicePartition.GetType()), nameof(servicePartition)); } // Wrap the provided handler so that it's compatible with Service Fabric. 
void ChangeHandler(FabricClient source, long id, ServicePartitionResolutionChange args) { ServicePartitionSilos result = null; if (!args.HasException) { result = new ServicePartitionSilos( new ResolvedServicePartitionWrapper(args.Result), args.Result.GetPartitionEndpoints()); } handler(id, new FabricPartitionResolutionChange(result, args.Exception)); } var sm = this.fabricClient.ServiceManager; switch (servicePartition.Kind) { case ServicePartitionKind.Int64Range: return sm.RegisterServicePartitionResolutionChangeHandler( serviceName, ((Int64RangePartitionInformation) partition.Partition.Info).LowKey, ChangeHandler); case ServicePartitionKind.Named: return sm.RegisterServicePartitionResolutionChangeHandler( serviceName, ((NamedPartitionInformation) partition.Partition.Info).Name, ChangeHandler); case ServicePartitionKind.Singleton: return sm.RegisterServicePartitionResolutionChangeHandler(serviceName, ChangeHandler); default: throw new ArgumentOutOfRangeException( nameof(servicePartition), $"Partition kind {servicePartition.Kind} is not supported"); } } /// <inheritdoc /> public async Task<ServicePartitionSilos[]> ResolveSilos(Uri serviceName) { var fabricPartitions = await this.QueryServicePartitions(serviceName); var resolvedPartitions = new List<ServicePartitionSilos>(fabricPartitions.Count); foreach (var fabricPartition in fabricPartitions) { var partitionKey = fabricPartition.PartitionInformation.GetPartitionKey(); var resolvedPartition = await this.ResolvePartition(serviceName, partitionKey, CancellationToken.None); resolvedPartitions.Add(resolvedPartition); } return resolvedPartitions.ToArray(); } /// <inheritdoc /> public async Task<ServicePartitionSilos> ResolvePartition( Uri serviceName, ServicePartitionKey partitionKey, CancellationToken cancellationToken) { ResolvedServicePartition result; var cache = this.previousResolves.GetOrAdd(serviceName, CreateCache); if (cache.TryGetValue(partitionKey, out var previousResult)) { // Re-resolve the partition and avoid caching. result = await this.resolver.ResolveAsync( previousResult, this.timeoutPerAttempt, this.maxBackoffInterval, cancellationToken); } else { // Perform an initial resolution for the partition. result = await this.resolver.ResolveAsync( serviceName, partitionKey, this.timeoutPerAttempt, this.maxBackoffInterval, cancellationToken); } // Cache the results of this resolution to provide to the next resolution call. cache.AddOrUpdate( partitionKey, _ => result, (key, existing) => existing.CompareVersion(result) < 0 ? result : existing); return new ServicePartitionSilos( new ResolvedServicePartitionWrapper(result), result.GetPartitionEndpoints()); ConcurrentDictionary<ServicePartitionKey, ResolvedServicePartition> CreateCache(Uri uri) { return new ConcurrentDictionary<ServicePartitionKey, ResolvedServicePartition>(ServicePartitionKeyComparer.Instance); } } /// <summary> /// Returns the list of Service Fabric partitions for the given service. 
/// </summary> /// <returns>The list of Service Fabric partitions for the given service.</returns> private async Task<List<Partition>> QueryServicePartitions(Uri serviceName) { var partitions = new List<Partition>(); var continuationToken = default(string); do { var batch = await this.fabricClient.QueryManager.GetPartitionListAsync(serviceName, continuationToken); if (batch.Count > 0) partitions.AddRange(batch); continuationToken = batch.ContinuationToken; } while (!string.IsNullOrWhiteSpace(continuationToken)); partitions.Sort( (partition1, partition2) => partition1.PartitionInformation.Id.CompareTo(partition2.PartitionInformation.Id)); return partitions; } private class ResolvedServicePartitionWrapper : IResolvedServicePartition { public ResolvedServicePartitionWrapper(ResolvedServicePartition partition) { this.Partition = partition; } public ResolvedServicePartition Partition { get; } public Guid Id => this.Partition.Info.Id; public ServicePartitionKind Kind => this.Partition.Info.Kind; public bool IsSamePartitionAs(IResolvedServicePartition other) { if (other is ResolvedServicePartitionWrapper otherWrapper) { return this.Partition.IsSamePartitionAs(otherWrapper.Partition); } return false; } public override string ToString() => this.Partition.ToPartitionString(); } /// <summary> /// Equality comparer for <see cref="ServicePartitionKey"/>. /// </summary> private struct ServicePartitionKeyComparer : IEqualityComparer<ServicePartitionKey> { /// <summary> /// Gets a singleton instance of this class. /// </summary> public static ServicePartitionKeyComparer Instance { get; } = new ServicePartitionKeyComparer(); /// <inheritdoc /> public bool Equals(ServicePartitionKey x, ServicePartitionKey y) { if (ReferenceEquals(x, y)) return true; if (ReferenceEquals(x, null)) return false; if (ReferenceEquals(y, null)) return false; if (x.Kind != y.Kind) return false; switch (x.Kind) { case ServicePartitionKind.Int64Range: return (long) x.Value == (long) y.Value; case ServicePartitionKind.Named: return string.Equals(x.Value as string, y.Value as string, StringComparison.Ordinal); case ServicePartitionKind.Singleton: return true; default: ThrowKindOutOfRange(x); return false; } } /// <inheritdoc /> public int GetHashCode(ServicePartitionKey obj) { switch (obj.Kind) { case ServicePartitionKind.Int64Range: return ((long) obj.Value).GetHashCode(); case ServicePartitionKind.Named: return ((string) obj.Value).GetHashCode(); case ServicePartitionKind.Singleton: return 0; default: ThrowKindOutOfRange(obj); return -1; } } private static void ThrowKindOutOfRange(ServicePartitionKey x) { throw new ArgumentOutOfRangeException(nameof(x), $"Partition kind {x.Kind} is not supported"); } } } }
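// --- Illustrative sketch (editor's addition, not part of the original sources) ---------------------
// A minimal, hypothetical way to drive FabricQueryManager from within the same assembly.
// FabricClient and ServicePartitionResolver.GetDefault() come from the Service Fabric SDK; the
// service URI below is made up and the example class is ours.
using System;
using System.Fabric;
using System.Threading.Tasks;
using Microsoft.Orleans.ServiceFabric.Utilities;
using Microsoft.ServiceFabric.Services.Client;

internal static class FabricQueryManagerExample
{
    public static async Task RunAsync()
    {
        var queryManager = new FabricQueryManager(new FabricClient(), ServicePartitionResolver.GetDefault());

        // Resolve every partition of a (hypothetical) silo service; each entry carries the
        // resolved partition plus its silo endpoints.
        var partitions = await queryManager.ResolveSilos(new Uri("fabric:/MyApp/MySiloService"));
        Console.WriteLine("Resolved {0} partition(s)", partitions.Length);
    }
}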
using System; using System.Collections.Generic; using System.Text; using OpenCVProxy; using OpenCVProxy.Interop; namespace RCubeCapture { class PointTracker { static TrackPointContext context0, context1; static ByteSafeMemoryBox status; static TrackedPoint[] trackedPoints; static bool needToInitialize = true; static int flags = 0; static CvTermCriteria defaultTermCriteria = new CvTermCriteria(20, 0.03); const int MaxPointsCount = 500; const int WinSize = 10; public static bool HasData { get { return !needToInitialize && trackedPoints != null; } } public static TrackedPoint[] GetTrackedPoints() { if (!HasData) return null; return trackedPoints; } public static void InitializeTrackPoints(IplImage image) { status = new ByteSafeMemoryBox(MaxPointsCount); CvSize imageSize = image.Size; context0 = new TrackPointContext(imageSize, MaxPointsCount); context1 = new TrackPointContext(imageSize, MaxPointsCount); } private static bool InPoints(CvPoint p, IList<CvPoint> points, int index, int count) { for (int j = 0; j < count; j++) { if (CvPoint.Distance2(p, points[index + j]) < 25) return true; } return false; } public static void TrackPoints(IplImage image, List<CvPoint> rects) { if (rects.Count == 0) { needToInitialize = true; return; } CvSize imageSize = image.Size; image.ConvertColor(context1.Gray, Cv.CV_BGR2GRAY); if (needToInitialize) { using (IplImage eig = new IplImage(imageSize, 32, 1), temp = new IplImage(imageSize, 32, 1)) { int k = 0; for (int i = 0; i < rects.Count; i++) { CvPoint p = rects[i]; if (!InPoints(p, rects, 0, i - 1)) context1.Points[k++] = new CvPoint2D32f(p.x, p.y); } context1.Count = k; Cv.cvFindCornerSubPix(context1.Gray, context1.Points.Pointer, context1.Count, new CvSize(WinSize, WinSize), new CvSize(-1, -1), defaultTermCriteria); } } else if (context0.Count > 0) { Cv.cvCalcOpticalFlowPyrLK(context0.Gray, context1.Gray, context0.Pyramid, context1.Pyramid, context0.Points.Pointer, context1.Points.Pointer, context0.Count, new CvSize(WinSize, WinSize), 3, status.Pointer, IntPtr.Zero, new CvTermCriteria(20, 0.03), flags); flags |= Cv.CV_LKFLOW_PYR_A_READY; bool[] inTracking = new bool[rects.Count]; List<CvPoint> pointsInTrack = new List<CvPoint>(); List<TrackedPoint> trackedPoints = new List<TrackedPoint>(); int k = 0; for (int i = 0; i < context0.Count; i++) { if (status[i] == 0) continue; CvPoint2D32f sp = context1.Points[i]; bool valid = false; CvPoint p = sp.ToCvPoint(); for (int j = 0; j < rects.Count; j++) { if (CvPoint.Distance2(p, rects[j]) < 25) { inTracking[j] = true; valid = true; } } if (!valid && context0.IsFading[i]) continue; pointsInTrack.Add(p); context1.Points[k] = sp; context1.IsFading[k] = !valid; ++k; TrackedPoint tp = new TrackedPoint(); tp.p = sp; CvPoint2D32f sp0 = context0.Points[i]; tp.offset = new CvPoint2D32f(sp.x - sp0.x, sp.y - sp0.y); tp.isNewPoint = false; trackedPoints.Add(tp); } int l = k; for (int i = 0; i < rects.Count; i++) { CvPoint p = rects[i]; if (!inTracking[i] && !InPoints(p, pointsInTrack, 0, pointsInTrack.Count)) { pointsInTrack.Add(rects[i]); CvPoint2D32f sp = new CvPoint2D32f(p.x, p.y); context1.Points[k++] = sp; TrackedPoint tp = new TrackedPoint(); tp.p = sp; tp.isNewPoint = true; trackedPoints.Add(tp); } } if (l < k) { Cv.cvFindCornerSubPix(context1.Gray, new IntPtr(context1.Points.Pointer.ToInt32() + 4 * l), k - l, new CvSize(WinSize, WinSize), new CvSize(-1, -1), defaultTermCriteria); } context1.Count = k; PointTracker.trackedPoints = trackedPoints.ToArray(); } else { context1.Count = 0; PointTracker.trackedPoints = null; } 
if (context1.Count == 0) { needToInitialize = true; return; } TrackPointContext c = context1; context1 = context0; context0 = c; needToInitialize = false; } public static bool GetMovement(out MovementType type, out double step) { step = 0; type = MovementType.Nothing; if (HasData) { List<double> angles = new List<double>(); List<double> distances = new List<double>(); double sumX = 0; double sumY = 0; for (int i = 0; i < trackedPoints.Length; i++) { if (!trackedPoints[i].isNewPoint) { CvPoint2D32f p = trackedPoints[i].offset; double distance = Math.Sqrt(p.x * p.x + p.y * p.y); distances.Add(distance); if(distance < 1e-5) continue; CvPoint2D32f p0 = trackedPoints[i].offset; sumX += p0.x; sumY += p0.y; double angle = Math.Atan2(p.y, p.x); angles.Add(angle); } } if (angles.Count > 2) { angles.Sort(); distances.Sort(); NormalizeAngles(angles); double angleDiff = angles[angles.Count - 1] - angles[0]; double distanceDiff = distances[distances.Count - 1] - angles[0]; if (angleDiff < 0.25) { step = distanceDiff; if (-Math.PI / 4 <= angles[0] && angles[0] < Math.PI / 4) { type = MovementType.ToRight; } else if (Math.PI / 4 <= angles[0] && angles[0] < Math.PI * 3 / 4) { type = MovementType.ToDown; } else if (-Math.PI * 3 / 4 <= angles[0] && angles[0] < -Math.PI / 4) { type = MovementType.ToUp; } else { type = MovementType.ToLeft; } } else { sumX /= angles.Count; sumY /= angles.Count; } } } return type != MovementType.Nothing; } private static void CalcMeanAndDeviation(IList<double> x, out double mean, out double deviation) { int n = x.Count; double sum = 0; for (int i = 0; i < n; i++) { sum += x[i]; } mean = sum / n; sum = 0; for (int i = 0; i < n; i++) { double diff = x[i] - mean; sum += diff * diff; } deviation = Math.Sqrt(sum / n); } private static void NormalizeAngles(List<double> angles) { int k = angles.Count; while (k > 0 && (angles[angles.Count - 1] - angles[0]) > Math.PI) { double angle = angles[0] + 2 * Math.PI; angles.RemoveAt(0); angles.Add(angle); k--; } } } }
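// --- Illustrative sketch (editor's addition, not part of the original sources) ---------------------
// GetMovement above classifies the dominant motion by the atan2 angle of the tracked-point offsets:
// angles in [-pi/4, pi/4) mean "right", [pi/4, 3pi/4) mean "down", [-3pi/4, -pi/4) mean "up" and
// the rest mean "left" (screen coordinates, y grows downwards). The helper below restates just that
// mapping; the class name and the string labels stand in for the project's MovementType enum.
using System;

public static class MovementClassifier
{
    public static string Classify(double offsetX, double offsetY)
    {
        double angle = Math.Atan2(offsetY, offsetX);

        if (-Math.PI / 4 <= angle && angle < Math.PI / 4) return "ToRight";
        if (Math.PI / 4 <= angle && angle < Math.PI * 3 / 4) return "ToDown";
        if (-Math.PI * 3 / 4 <= angle && angle < -Math.PI / 4) return "ToUp";
        return "ToLeft";
    }
}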
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.Collections.Generic; using System.Diagnostics; using System.Xml.XPath; using FT = MS.Internal.Xml.XPath.Function.FunctionType; namespace MS.Internal.Xml.XPath { internal sealed class QueryBuilder { // Note: Up->Down, Down->Up: // For operators the order is normal: 1 + 2 --> Operator+(1, 2) // For paths the order is reversed: a/b -> ChildQuery_B(input: ChildQuery_A(input: ContextQuery())) // Input flags. We pass them Up->Down. // Through them an upper query sets state that controls how the inner query will be built. enum Flags { None = 0x00, SmartDesc = 0x01, PosFilter = 0x02, // Node has this flag set when it has a position predicate applied to it Filter = 0x04, // The subtree we are compiling will be filtered, i.e. the flag is not set on the rightmost filter. } // Output props. We return them Down->Up. // These are properties of the Query tree we have built already. // These properties are closely related to the QueryProps exposed by the Query node itself. // They differ as follows: // QueryProps describe a property of the node they belong to (like Reverse), // while these Props describe accumulated properties of the tree (like NonFlat). enum Props { None = 0x00, PosFilter = 0x01, // This filter or an inner filter was positional: foo[1] or foo[1][true()] HasPosition = 0x02, // Expression may ask position() of the context HasLast = 0x04, // Expression may ask last() of the context NonFlat = 0x08, // Some nodes may be descendants of others } // Comments are approximate. This is my best understanding: private string _query; private bool _allowVar; private bool _allowKey; private bool _allowCurrent; private bool _needContext; private BaseAxisQuery _firstInput; // Input of the leftmost predicate.
Set by leftmost predicate, used in rightmost one private void Reset() { _parseDepth = 0; _needContext = false; } private Query ProcessAxis(Axis root, Flags flags, out Props props) { Query result = null; if (root.Prefix.Length > 0) { _needContext = true; } _firstInput = null; Query qyInput; { if (root.Input != null) { Flags inputFlags = Flags.None; if ((flags & Flags.PosFilter) == 0) { Axis input = root.Input as Axis; if (input != null) { if ( root.TypeOfAxis == Axis.AxisType.Child && input.TypeOfAxis == Axis.AxisType.DescendantOrSelf && input.NodeType == XPathNodeType.All ) { Query qyGrandInput; if (input.Input != null) { qyGrandInput = ProcessNode(input.Input, Flags.SmartDesc, out props); } else { qyGrandInput = new ContextQuery(); props = Props.None; } result = new DescendantQuery(qyGrandInput, root.Name, root.Prefix, root.NodeType, false, input.AbbrAxis); if ((props & Props.NonFlat) != 0) { result = new DocumentOrderQuery(result); } props |= Props.NonFlat; return result; } } if (root.TypeOfAxis == Axis.AxisType.Descendant || root.TypeOfAxis == Axis.AxisType.DescendantOrSelf) { inputFlags |= Flags.SmartDesc; } } qyInput = ProcessNode(root.Input, inputFlags, out props); } else { qyInput = new ContextQuery(); props = Props.None; } } switch (root.TypeOfAxis) { case Axis.AxisType.Ancestor: result = new XPathAncestorQuery(qyInput, root.Name, root.Prefix, root.NodeType, false); props |= Props.NonFlat; break; case Axis.AxisType.AncestorOrSelf: result = new XPathAncestorQuery(qyInput, root.Name, root.Prefix, root.NodeType, true); props |= Props.NonFlat; break; case Axis.AxisType.Child: if ((props & Props.NonFlat) != 0) { result = new CacheChildrenQuery(qyInput, root.Name, root.Prefix, root.NodeType); } else { result = new ChildrenQuery(qyInput, root.Name, root.Prefix, root.NodeType); } break; case Axis.AxisType.Parent: result = new ParentQuery(qyInput, root.Name, root.Prefix, root.NodeType); break; case Axis.AxisType.Descendant: if ((flags & Flags.SmartDesc) != 0) { result = new DescendantOverDescendantQuery(qyInput, false, root.Name, root.Prefix, root.NodeType, /*abbrAxis:*/false); } else { result = new DescendantQuery(qyInput, root.Name, root.Prefix, root.NodeType, false, /*abbrAxis:*/false); if ((props & Props.NonFlat) != 0) { result = new DocumentOrderQuery(result); } } props |= Props.NonFlat; break; case Axis.AxisType.DescendantOrSelf: if ((flags & Flags.SmartDesc) != 0) { result = new DescendantOverDescendantQuery(qyInput, true, root.Name, root.Prefix, root.NodeType, root.AbbrAxis); } else { result = new DescendantQuery(qyInput, root.Name, root.Prefix, root.NodeType, true, root.AbbrAxis); if ((props & Props.NonFlat) != 0) { result = new DocumentOrderQuery(result); } } props |= Props.NonFlat; break; case Axis.AxisType.Preceding: result = new PrecedingQuery(qyInput, root.Name, root.Prefix, root.NodeType); props |= Props.NonFlat; break; case Axis.AxisType.Following: result = new FollowingQuery(qyInput, root.Name, root.Prefix, root.NodeType); props |= Props.NonFlat; break; case Axis.AxisType.FollowingSibling: result = new FollSiblingQuery(qyInput, root.Name, root.Prefix, root.NodeType); if ((props & Props.NonFlat) != 0) { result = new DocumentOrderQuery(result); } break; case Axis.AxisType.PrecedingSibling: result = new PreSiblingQuery(qyInput, root.Name, root.Prefix, root.NodeType); break; case Axis.AxisType.Attribute: result = new AttributeQuery(qyInput, root.Name, root.Prefix, root.NodeType); break; case Axis.AxisType.Self: result = new XPathSelfQuery(qyInput, root.Name, root.Prefix, 
root.NodeType); break; case Axis.AxisType.Namespace: if ((root.NodeType == XPathNodeType.All || root.NodeType == XPathNodeType.Element || root.NodeType == XPathNodeType.Attribute) && root.Prefix.Length == 0) { result = new NamespaceQuery(qyInput, root.Name, root.Prefix, root.NodeType); } else { result = new EmptyQuery(); } break; default: throw XPathException.Create(SR.Xp_NotSupported, _query); } return result; } private static bool CanBeNumber(Query q) { return ( q.StaticType == XPathResultType.Any || q.StaticType == XPathResultType.Number ); } private Query ProcessFilter(Filter root, Flags flags, out Props props) { bool first = ((flags & Flags.Filter) == 0); Props propsCond; Query cond = ProcessNode(root.Condition, Flags.None, out propsCond); if ( CanBeNumber(cond) || (propsCond & (Props.HasPosition | Props.HasLast)) != 0 ) { propsCond |= Props.HasPosition; flags |= Flags.PosFilter; } // We don't want DescendantOverDescendant pattern to be recognized here (in case descendent::foo[expr]/descendant::bar) // So we clean this flag here: flags &= ~Flags.SmartDesc; // ToDo: Instead it would be nice to wrap descendent::foo[expr] into special query that will flatten it -- i.e. // remove all nodes that are descendant of other nodes. This is very easy because for sorted nodesets all children // follow its parent. One step caching. This can be easily done by rightmost DescendantQuery itself. // Interesting note! Can we guarantee that DescendantOverDescendant returns flat nodeset? This definitely true if it's input is flat. Query qyInput = ProcessNode(root.Input, flags | Flags.Filter, out props); if (root.Input.Type != AstNode.AstType.Filter) { // Props.PosFilter is for nested filters only. // We clean it here to avoid cleaning it in all other ast nodes. props &= ~Props.PosFilter; } if ((propsCond & Props.HasPosition) != 0) { // this condition is positional rightmost filter should be avare of this. props |= Props.PosFilter; } /*merging predicates*/ { FilterQuery qyFilter = qyInput as FilterQuery; if (qyFilter != null && (propsCond & Props.HasPosition) == 0 && qyFilter.Condition.StaticType != XPathResultType.Any) { Query prevCond = qyFilter.Condition; if (prevCond.StaticType == XPathResultType.Number) { prevCond = new LogicalExpr(Operator.Op.EQ, new NodeFunctions(FT.FuncPosition, null), prevCond); } cond = new BooleanExpr(Operator.Op.AND, prevCond, cond); qyInput = qyFilter.qyInput; } } if ((props & Props.PosFilter) != 0 && qyInput is DocumentOrderQuery) { qyInput = ((DocumentOrderQuery)qyInput).input; } if (_firstInput == null) { _firstInput = qyInput as BaseAxisQuery; } bool merge = (qyInput.Properties & QueryProps.Merge) != 0; bool reverse = (qyInput.Properties & QueryProps.Reverse) != 0; if ((propsCond & Props.HasPosition) != 0) { if (reverse) { qyInput = new ReversePositionQuery(qyInput); } else if ((propsCond & Props.HasLast) != 0) { qyInput = new ForwardPositionQuery(qyInput); } } if (first && _firstInput != null) { if (merge && (props & Props.PosFilter) != 0) { qyInput = new FilterQuery(qyInput, cond, /*noPosition:*/false); Query parent = _firstInput.qyInput; if (!(parent is ContextQuery)) { // we don't need to wrap filter with MergeFilterQuery when cardinality is parent <: ? 
_firstInput.qyInput = new ContextQuery(); _firstInput = null; return new MergeFilterQuery(parent, qyInput); } _firstInput = null; return qyInput; } _firstInput = null; } return new FilterQuery(qyInput, cond, /*noPosition:*/(propsCond & Props.HasPosition) == 0); } private Query ProcessOperator(Operator root, out Props props) { Props props1, props2; Query op1 = ProcessNode(root.Operand1, Flags.None, out props1); Query op2 = ProcessNode(root.Operand2, Flags.None, out props2); props = props1 | props2; switch (root.OperatorType) { case Operator.Op.PLUS: case Operator.Op.MINUS: case Operator.Op.MUL: case Operator.Op.MOD: case Operator.Op.DIV: return new NumericExpr(root.OperatorType, op1, op2); case Operator.Op.LT: case Operator.Op.GT: case Operator.Op.LE: case Operator.Op.GE: case Operator.Op.EQ: case Operator.Op.NE: return new LogicalExpr(root.OperatorType, op1, op2); case Operator.Op.OR: case Operator.Op.AND: return new BooleanExpr(root.OperatorType, op1, op2); case Operator.Op.UNION: props |= Props.NonFlat; return new UnionExpr(op1, op2); default: return null; } } private Query ProcessVariable(Variable root) { _needContext = true; if (!_allowVar) { throw XPathException.Create(SR.Xp_InvalidKeyPattern, _query); } return new VariableQuery(root.Localname, root.Prefix); } private Query ProcessFunction(Function root, out Props props) { props = Props.None; Query qy = null; switch (root.TypeOfFunction) { case FT.FuncLast: qy = new NodeFunctions(root.TypeOfFunction, null); props |= Props.HasLast; return qy; case FT.FuncPosition: qy = new NodeFunctions(root.TypeOfFunction, null); props |= Props.HasPosition; return qy; case FT.FuncCount: return new NodeFunctions(FT.FuncCount, ProcessNode((AstNode)(root.ArgumentList[0]), Flags.None, out props) ); case FT.FuncID: qy = new IDQuery(ProcessNode((AstNode)(root.ArgumentList[0]), Flags.None, out props)); props |= Props.NonFlat; return qy; case FT.FuncLocalName: case FT.FuncNameSpaceUri: case FT.FuncName: if (root.ArgumentList != null && root.ArgumentList.Count > 0) { return new NodeFunctions(root.TypeOfFunction, ProcessNode((AstNode)(root.ArgumentList[0]), Flags.None, out props) ); } else { return new NodeFunctions(root.TypeOfFunction, null); } case FT.FuncString: case FT.FuncConcat: case FT.FuncStartsWith: case FT.FuncContains: case FT.FuncSubstringBefore: case FT.FuncSubstringAfter: case FT.FuncSubstring: case FT.FuncStringLength: case FT.FuncNormalize: case FT.FuncTranslate: return new StringFunctions(root.TypeOfFunction, ProcessArguments(root.ArgumentList, out props)); case FT.FuncNumber: case FT.FuncSum: case FT.FuncFloor: case FT.FuncCeiling: case FT.FuncRound: if (root.ArgumentList != null && root.ArgumentList.Count > 0) { return new NumberFunctions(root.TypeOfFunction, ProcessNode((AstNode)root.ArgumentList[0], Flags.None, out props) ); } else { return new NumberFunctions(Function.FunctionType.FuncNumber, null); } case FT.FuncTrue: case FT.FuncFalse: return new BooleanFunctions(root.TypeOfFunction, null); case FT.FuncNot: case FT.FuncLang: case FT.FuncBoolean: return new BooleanFunctions(root.TypeOfFunction, ProcessNode((AstNode)root.ArgumentList[0], Flags.None, out props) ); case FT.FuncUserDefined: _needContext = true; if (!_allowCurrent && root.Name == "current" && root.Prefix.Length == 0) { throw XPathException.Create(SR.Xp_CurrentNotAllowed); } if (!_allowKey && root.Name == "key" && root.Prefix.Length == 0) { throw XPathException.Create(SR.Xp_InvalidKeyPattern, _query); } qy = new FunctionQuery(root.Prefix, root.Name, 
ProcessArguments(root.ArgumentList, out props)); props |= Props.NonFlat; return qy; default: throw XPathException.Create(SR.Xp_NotSupported, _query); } } List<Query> ProcessArguments(List<AstNode> args, out Props props) { int numArgs = args != null ? args.Count : 0; List<Query> argList = new List<Query>(numArgs); props = Props.None; for (int count = 0; count < numArgs; count++) { Props argProps; argList.Add(ProcessNode((AstNode)args[count], Flags.None, out argProps)); props |= argProps; } return argList; } private int _parseDepth = 0; private const int MaxParseDepth = 1024; private Query ProcessNode(AstNode root, Flags flags, out Props props) { if (++_parseDepth > MaxParseDepth) { throw XPathException.Create(SR.Xp_QueryTooComplex); } Debug.Assert(root != null, "root != null"); Query result = null; props = Props.None; switch (root.Type) { case AstNode.AstType.Axis: result = ProcessAxis((Axis)root, flags, out props); break; case AstNode.AstType.Operator: result = ProcessOperator((Operator)root, out props); break; case AstNode.AstType.Filter: result = ProcessFilter((Filter)root, flags, out props); break; case AstNode.AstType.ConstantOperand: result = new OperandQuery(((Operand)root).OperandValue); break; case AstNode.AstType.Variable: result = ProcessVariable((Variable)root); break; case AstNode.AstType.Function: result = ProcessFunction((Function)root, out props); break; case AstNode.AstType.Group: result = new GroupQuery(ProcessNode(((Group)root).GroupNode, Flags.None, out props)); break; case AstNode.AstType.Root: result = new AbsoluteQuery(); break; default: Debug.Fail("Unknown QueryType encountered!!"); break; } --_parseDepth; return result; } private Query Build(AstNode root, string query) { Reset(); Props props; _query = query; Query result = ProcessNode(root, Flags.None, out props); return result; } internal Query Build(string query, bool allowVar, bool allowKey) { _allowVar = allowVar; _allowKey = allowKey; _allowCurrent = true; return Build(XPathParser.ParseXPathExpression(query), query); } internal Query Build(string query, out bool needContext) { Query result = Build(query, true, true); needContext = _needContext; return result; } internal Query BuildPatternQuery(string query, bool allowVar, bool allowKey) { _allowVar = allowVar; _allowKey = allowKey; _allowCurrent = false; return Build(XPathParser.ParseXPathPattern(query), query); } internal Query BuildPatternQuery(string query, out bool needContext) { Query result = BuildPatternQuery(query, true, true); needContext = _needContext; return result; } } }
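// --- Illustrative sketch (editor's addition, not part of the original sources) ---------------------
// QueryBuilder sits behind the public System.Xml.XPath surface, so an expression such as
// "//item[2]" exercises the descendant-axis and positional-filter paths built above. The XML,
// the expression and the class name are made-up examples.
using System;
using System.Xml;
using System.Xml.XPath;

public static class XPathQueryBuilderExample
{
    public static void Main()
    {
        var doc = new XmlDocument();
        doc.LoadXml("<root><list><item>a</item><item>b</item></list></root>");

        XPathNavigator nav = doc.CreateNavigator();
        XPathExpression expr = nav.Compile("//item[2]"); // abbreviated descendant axis + positional filter

        foreach (XPathNavigator item in nav.Select(expr))
            Console.WriteLine(item.Value); // prints "b"
    }
}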
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections.Generic; using System.IO; using System.Net; using System.Net.Sockets; using System.Reflection; using System.Threading; using log4net; using Nini.Config; using OpenMetaverse.Packets; using OpenSim.Framework; using OpenSim.Framework.Statistics; using OpenSim.Region.Framework.Scenes; using OpenMetaverse; using TokenBucket = OpenSim.Region.ClientStack.LindenUDP.TokenBucket; namespace OpenSim.Region.ClientStack.LindenUDP { /// <summary> /// A shim around LLUDPServer that implements the IClientNetworkServer interface /// </summary> public sealed class LLUDPServerShim : IClientNetworkServer { LLUDPServer m_udpServer; public LLUDPServerShim() { } public void Initialise(IPAddress listenIP, ref uint port, int proxyPortOffsetParm, bool allow_alternate_port, IConfigSource configSource, AgentCircuitManager circuitManager) { m_udpServer = new LLUDPServer(listenIP, ref port, proxyPortOffsetParm, allow_alternate_port, configSource, circuitManager); } public void NetworkStop() { m_udpServer.Stop(); } public void AddScene(IScene scene) { m_udpServer.AddScene(scene); } public bool HandlesRegion(Location x) { return m_udpServer.HandlesRegion(x); } public void Start() { m_udpServer.Start(); } public void Stop() { m_udpServer.Stop(); } } /// <summary> /// The LLUDP server for a region. 
This handles incoming and outgoing /// packets for all UDP connections to the region /// </summary> public class LLUDPServer : OpenSimUDPBase { /// <summary>Maximum transmission unit, or UDP packet size, for the LLUDP protocol</summary> public const int MTU = 1400; private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); /// <summary>The measured resolution of Environment.TickCount</summary> public readonly float TickCountResolution; /// <summary>Number of prim updates to put on the queue each time the /// OnQueueEmpty event is triggered for updates</summary> public readonly int PrimUpdatesPerCallback; /// <summary>Number of texture packets to put on the queue each time the /// OnQueueEmpty event is triggered for textures</summary> public readonly int TextureSendLimit; /// <summary>Handlers for incoming packets</summary> //PacketEventDictionary packetEvents = new PacketEventDictionary(); /// <summary>Incoming packets that are awaiting handling</summary> private OpenMetaverse.BlockingQueue<IncomingPacket> packetInbox = new OpenMetaverse.BlockingQueue<IncomingPacket>(); /// <summary></summary> //private UDPClientCollection m_clients = new UDPClientCollection(); /// <summary>Bandwidth throttle for this UDP server</summary> protected TokenBucket m_throttle; /// <summary>Bandwidth throttle rates for this UDP server</summary> protected ThrottleRates m_throttleRates; /// <summary>Manages authentication for agent circuits</summary> private AgentCircuitManager m_circuitManager; /// <summary>Reference to the scene this UDP server is attached to</summary> protected Scene m_scene; /// <summary>The X/Y coordinates of the scene this UDP server is attached to</summary> private Location m_location; /// <summary>The size of the receive buffer for the UDP socket. This value /// is passed up to the operating system and used in the system networking /// stack. 
Use zero to leave this value as the default</summary> private int m_recvBufferSize; /// <summary>Flag to process packets asynchronously or synchronously</summary> private bool m_asyncPacketHandling; /// <summary>Tracks whether or not a packet was sent each round so we know /// whether or not to sleep</summary> private bool m_packetSent; /// <summary>Environment.TickCount of the last time that packet stats were reported to the scene</summary> private int m_elapsedMSSinceLastStatReport = 0; /// <summary>Environment.TickCount of the last time the outgoing packet handler executed</summary> private int m_tickLastOutgoingPacketHandler; /// <summary>Keeps track of the number of elapsed milliseconds since the last time the outgoing packet handler looped</summary> private int m_elapsedMSOutgoingPacketHandler; /// <summary>Keeps track of the number of 100 millisecond periods elapsed in the outgoing packet handler executed</summary> private int m_elapsed100MSOutgoingPacketHandler; /// <summary>Keeps track of the number of 500 millisecond periods elapsed in the outgoing packet handler executed</summary> private int m_elapsed500MSOutgoingPacketHandler; /// <summary>Flag to signal when clients should check for resends</summary> private bool m_resendUnacked; /// <summary>Flag to signal when clients should send ACKs</summary> private bool m_sendAcks; /// <summary>Flag to signal when clients should send pings</summary> private bool m_sendPing; private int m_defaultRTO = 0; private int m_maxRTO = 0; private bool m_disableFacelights = false; public Socket Server { get { return null; } } public LLUDPServer(IPAddress listenIP, ref uint port, int proxyPortOffsetParm, bool allow_alternate_port, IConfigSource configSource, AgentCircuitManager circuitManager) : base(listenIP, (int)port) { #region Environment.TickCount Measurement // Measure the resolution of Environment.TickCount TickCountResolution = 0f; for (int i = 0; i < 5; i++) { int start = Environment.TickCount; int now = start; while (now == start) now = Environment.TickCount; TickCountResolution += (float)(now - start) * 0.2f; } m_log.Info("[LLUDPSERVER]: Average Environment.TickCount resolution: " + TickCountResolution + "ms"); TickCountResolution = (float)Math.Ceiling(TickCountResolution); #endregion Environment.TickCount Measurement m_circuitManager = circuitManager; int sceneThrottleBps = 0; IConfig config = configSource.Configs["ClientStack.LindenUDP"]; if (config != null) { m_asyncPacketHandling = config.GetBoolean("async_packet_handling", true); m_recvBufferSize = config.GetInt("client_socket_rcvbuf_size", 0); sceneThrottleBps = config.GetInt("scene_throttle_max_bps", 0); PrimUpdatesPerCallback = config.GetInt("PrimUpdatesPerCallback", 100); TextureSendLimit = config.GetInt("TextureSendLimit", 20); m_defaultRTO = config.GetInt("DefaultRTO", 0); m_maxRTO = config.GetInt("MaxRTO", 0); m_disableFacelights = config.GetBoolean("DisableFacelights", false); } else { PrimUpdatesPerCallback = 100; TextureSendLimit = 20; } #region BinaryStats config = configSource.Configs["Statistics.Binary"]; m_shouldCollectStats = false; if (config != null) { if (config.Contains("enabled") && config.GetBoolean("enabled")) { if (config.Contains("collect_packet_headers")) m_shouldCollectStats = config.GetBoolean("collect_packet_headers"); if (config.Contains("packet_headers_period_seconds")) { binStatsMaxFilesize = TimeSpan.FromSeconds(config.GetInt("region_stats_period_seconds")); } if (config.Contains("stats_dir")) { binStatsDir = config.GetString("stats_dir"); } } else { 
m_shouldCollectStats = false; } } #endregion BinaryStats m_throttle = new TokenBucket(null, sceneThrottleBps, sceneThrottleBps); m_throttleRates = new ThrottleRates(configSource); } public void Start() { if (m_scene == null) throw new InvalidOperationException("[LLUDPSERVER]: Cannot LLUDPServer.Start() without an IScene reference"); m_log.Info("[LLUDPSERVER]: Starting the LLUDP server in " + (m_asyncPacketHandling ? "asynchronous" : "synchronous") + " mode"); base.Start(m_recvBufferSize, m_asyncPacketHandling); // Start the packet processing threads Watchdog.StartThread(IncomingPacketHandler, "Incoming Packets (" + m_scene.RegionInfo.RegionName + ")", ThreadPriority.Normal, false); Watchdog.StartThread(OutgoingPacketHandler, "Outgoing Packets (" + m_scene.RegionInfo.RegionName + ")", ThreadPriority.Normal, false); m_elapsedMSSinceLastStatReport = Environment.TickCount; } public new void Stop() { m_log.Info("[LLUDPSERVER]: Shutting down the LLUDP server for " + m_scene.RegionInfo.RegionName); base.Stop(); } public void AddScene(IScene scene) { if (m_scene != null) { m_log.Error("[LLUDPSERVER]: AddScene() called on an LLUDPServer that already has a scene"); return; } if (!(scene is Scene)) { m_log.Error("[LLUDPSERVER]: AddScene() called with an unrecognized scene type " + scene.GetType()); return; } m_scene = (Scene)scene; m_location = new Location(m_scene.RegionInfo.RegionHandle); } public bool HandlesRegion(Location x) { return x == m_location; } public void BroadcastPacket(Packet packet, ThrottleOutPacketType category, bool sendToPausedAgents, bool allowSplitting) { // CoarseLocationUpdate and AvatarGroupsReply packets cannot be split in an automated way if ((packet.Type == PacketType.CoarseLocationUpdate || packet.Type == PacketType.AvatarGroupsReply) && allowSplitting) allowSplitting = false; if (allowSplitting && packet.HasVariableBlocks) { byte[][] datas = packet.ToBytesMultiple(); int packetCount = datas.Length; if (packetCount < 1) m_log.Error("[LLUDPSERVER]: Failed to split " + packet.Type + " with estimated length " + packet.Length); for (int i = 0; i < packetCount; i++) { byte[] data = datas[i]; m_scene.ForEachClient( delegate(IClientAPI client) { if (client is LLClientView) SendPacketData(((LLClientView)client).UDPClient, data, packet.Type, category); } ); } } else { byte[] data = packet.ToBytes(); m_scene.ForEachClient( delegate(IClientAPI client) { if (client is LLClientView) SendPacketData(((LLClientView)client).UDPClient, data, packet.Type, category); } ); } } public void SendPacket(LLUDPClient udpClient, Packet packet, ThrottleOutPacketType category, bool allowSplitting) { // CoarseLocationUpdate packets cannot be split in an automated way if (packet.Type == PacketType.CoarseLocationUpdate && allowSplitting) allowSplitting = false; if (allowSplitting && packet.HasVariableBlocks) { byte[][] datas = packet.ToBytesMultiple(); int packetCount = datas.Length; if (packetCount < 1) m_log.Error("[LLUDPSERVER]: Failed to split " + packet.Type + " with estimated length " + packet.Length); for (int i = 0; i < packetCount; i++) { byte[] data = datas[i]; SendPacketData(udpClient, data, packet.Type, category); } } else { byte[] data = packet.ToBytes(); SendPacketData(udpClient, data, packet.Type, category); } } public void SendPacketData(LLUDPClient udpClient, byte[] data, PacketType type, ThrottleOutPacketType category) { int dataLength = data.Length; bool doZerocode = (data[0] & Helpers.MSG_ZEROCODED) != 0; bool doCopy = true; // Frequency analysis of outgoing packet sizes shows a 
large clump of packets at each end of the spectrum. // The vast majority of packets are less than 200 bytes, although due to asset transfers and packet splitting // there are a decent number of packets in the 1000-1140 byte range. We allocate one of two sizes of data here // to accomodate for both common scenarios and provide ample room for ACK appending in both int bufferSize = (dataLength > 180) ? LLUDPServer.MTU : 200; UDPPacketBuffer buffer = new UDPPacketBuffer(udpClient.RemoteEndPoint, bufferSize); // Zerocode if needed if (doZerocode) { try { dataLength = Helpers.ZeroEncode(data, dataLength, buffer.Data); doCopy = false; } catch (IndexOutOfRangeException) { // The packet grew larger than the bufferSize while zerocoding. // Remove the MSG_ZEROCODED flag and send the unencoded data // instead m_log.Debug("[LLUDPSERVER]: Packet exceeded buffer size during zerocoding for " + type + ". DataLength=" + dataLength + " and BufferLength=" + buffer.Data.Length + ". Removing MSG_ZEROCODED flag"); data[0] = (byte)(data[0] & ~Helpers.MSG_ZEROCODED); } } // If the packet data wasn't already copied during zerocoding, copy it now if (doCopy) { if (dataLength <= buffer.Data.Length) { Buffer.BlockCopy(data, 0, buffer.Data, 0, dataLength); } else { bufferSize = dataLength; buffer = new UDPPacketBuffer(udpClient.RemoteEndPoint, bufferSize); // m_log.Error("[LLUDPSERVER]: Packet exceeded buffer size! This could be an indication of packet assembly not obeying the MTU. Type=" + // type + ", DataLength=" + dataLength + ", BufferLength=" + buffer.Data.Length + ". Dropping packet"); Buffer.BlockCopy(data, 0, buffer.Data, 0, dataLength); } } buffer.DataLength = dataLength; #region Queue or Send OutgoingPacket outgoingPacket = new OutgoingPacket(udpClient, buffer, category); if (!outgoingPacket.Client.EnqueueOutgoing(outgoingPacket)) SendPacketFinal(outgoingPacket); #endregion Queue or Send } public void SendAcks(LLUDPClient udpClient) { uint ack; if (udpClient.PendingAcks.Dequeue(out ack)) { List<PacketAckPacket.PacketsBlock> blocks = new List<PacketAckPacket.PacketsBlock>(); PacketAckPacket.PacketsBlock block = new PacketAckPacket.PacketsBlock(); block.ID = ack; blocks.Add(block); while (udpClient.PendingAcks.Dequeue(out ack)) { block = new PacketAckPacket.PacketsBlock(); block.ID = ack; blocks.Add(block); } PacketAckPacket packet = new PacketAckPacket(); packet.Header.Reliable = false; packet.Packets = blocks.ToArray(); SendPacket(udpClient, packet, ThrottleOutPacketType.Unknown, true); } } public void SendPing(LLUDPClient udpClient) { StartPingCheckPacket pc = (StartPingCheckPacket)PacketPool.Instance.GetPacket(PacketType.StartPingCheck); pc.Header.Reliable = false; pc.PingID.PingID = (byte)udpClient.CurrentPingSequence++; // We *could* get OldestUnacked, but it would hurt performance and not provide any benefit pc.PingID.OldestUnacked = 0; SendPacket(udpClient, pc, ThrottleOutPacketType.Unknown, false); } public void CompletePing(LLUDPClient udpClient, byte pingID) { CompletePingCheckPacket completePing = new CompletePingCheckPacket(); completePing.PingID.PingID = pingID; SendPacket(udpClient, completePing, ThrottleOutPacketType.Unknown, false); } public void ResendUnacked(LLUDPClient udpClient) { if (!udpClient.IsConnected) return; // Disconnect an agent if no packets are received for some time //FIXME: Make 60 an .ini setting if ((Environment.TickCount & Int32.MaxValue) - udpClient.TickLastPacketReceived > 1000 * 60) { m_log.Warn("[LLUDPSERVER]: Ack timeout, disconnecting " + udpClient.AgentID); 
RemoveClient(udpClient); return; } // Get a list of all of the packets that have been sitting unacked longer than udpClient.RTO List<OutgoingPacket> expiredPackets = udpClient.NeedAcks.GetExpiredPackets(udpClient.RTO); if (expiredPackets != null) { //m_log.Debug("[LLUDPSERVER]: Resending " + expiredPackets.Count + " packets to " + udpClient.AgentID + ", RTO=" + udpClient.RTO); // Exponential backoff of the retransmission timeout udpClient.BackoffRTO(); // Resend packets for (int i = 0; i < expiredPackets.Count; i++) { OutgoingPacket outgoingPacket = expiredPackets[i]; //m_log.DebugFormat("[LLUDPSERVER]: Resending packet #{0} (attempt {1}), {2}ms have passed", // outgoingPacket.SequenceNumber, outgoingPacket.ResendCount, Environment.TickCount - outgoingPacket.TickCount); // Set the resent flag outgoingPacket.Buffer.Data[0] = (byte)(outgoingPacket.Buffer.Data[0] | Helpers.MSG_RESENT); outgoingPacket.Category = ThrottleOutPacketType.Resend; // Bump up the resend count on this packet Interlocked.Increment(ref outgoingPacket.ResendCount); //Interlocked.Increment(ref Stats.ResentPackets); // Requeue or resend the packet if (!outgoingPacket.Client.EnqueueOutgoing(outgoingPacket)) SendPacketFinal(outgoingPacket); } } } public void Flush(LLUDPClient udpClient) { // FIXME: Implement? } /// <summary> /// Actually send a packet to a client. /// </summary> /// <param name="outgoingPacket"></param> internal void SendPacketFinal(OutgoingPacket outgoingPacket) { UDPPacketBuffer buffer = outgoingPacket.Buffer; byte flags = buffer.Data[0]; bool isResend = (flags & Helpers.MSG_RESENT) != 0; bool isReliable = (flags & Helpers.MSG_RELIABLE) != 0; bool isZerocoded = (flags & Helpers.MSG_ZEROCODED) != 0; LLUDPClient udpClient = outgoingPacket.Client; if (!udpClient.IsConnected) return; #region ACK Appending int dataLength = buffer.DataLength; // NOTE: I'm seeing problems with some viewers when ACKs are appended to zerocoded packets so I've disabled that here if (!isZerocoded) { // Keep appending ACKs until there is no room left in the buffer or there are // no more ACKs to append uint ackCount = 0; uint ack; while (dataLength + 5 < buffer.Data.Length && udpClient.PendingAcks.Dequeue(out ack)) { Utils.UIntToBytesBig(ack, buffer.Data, dataLength); dataLength += 4; ++ackCount; } if (ackCount > 0) { // Set the last byte of the packet equal to the number of appended ACKs buffer.Data[dataLength++] = (byte)ackCount; // Set the appended ACKs flag on this packet buffer.Data[0] = (byte)(buffer.Data[0] | Helpers.MSG_APPENDED_ACKS); } } buffer.DataLength = dataLength; #endregion ACK Appending #region Sequence Number Assignment if (!isResend) { // Not a resend, assign a new sequence number uint sequenceNumber = (uint)Interlocked.Increment(ref udpClient.CurrentSequence); Utils.UIntToBytesBig(sequenceNumber, buffer.Data, 1); outgoingPacket.SequenceNumber = sequenceNumber; if (isReliable) { // Add this packet to the list of ACK responses we are waiting on from the server udpClient.NeedAcks.Add(outgoingPacket); } } #endregion Sequence Number Assignment // Stats tracking Interlocked.Increment(ref udpClient.PacketsSent); if (isReliable) Interlocked.Add(ref udpClient.UnackedBytes, outgoingPacket.Buffer.DataLength); // Put the UDP payload on the wire AsyncBeginSend(buffer); // Keep track of when this packet was sent out (right now) outgoingPacket.TickCount = Environment.TickCount & Int32.MaxValue; } protected override void PacketReceived(UDPPacketBuffer buffer) { // Debugging/Profiling //try { Thread.CurrentThread.Name = 
"PacketReceived (" + m_scene.RegionInfo.RegionName + ")"; } //catch (Exception) { } LLUDPClient udpClient = null; Packet packet = null; int packetEnd = buffer.DataLength - 1; IPEndPoint address = (IPEndPoint)buffer.RemoteEndPoint; #region Decoding try { packet = Packet.BuildPacket(buffer.Data, ref packetEnd, // Only allocate a buffer for zerodecoding if the packet is zerocoded ((buffer.Data[0] & Helpers.MSG_ZEROCODED) != 0) ? new byte[4096] : null); } catch (MalformedDataException) { } // Fail-safe check if (packet == null) { m_log.ErrorFormat("[LLUDPSERVER]: Malformed data, cannot parse {0} byte packet from {1}:", buffer.DataLength, buffer.RemoteEndPoint); m_log.Error(Utils.BytesToHexString(buffer.Data, buffer.DataLength, null)); return; } #endregion Decoding #region Packet to Client Mapping // UseCircuitCode handling if (packet.Type == PacketType.UseCircuitCode) { object[] array = new object[] { buffer, packet }; if (m_asyncPacketHandling) Util.FireAndForget(HandleUseCircuitCode, array); else HandleUseCircuitCode(array); return; } // Determine which agent this packet came from IClientAPI client; if (!m_scene.TryGetClient(address, out client) || !(client is LLClientView)) { //m_log.Debug("[LLUDPSERVER]: Received a " + packet.Type + " packet from an unrecognized source: " + address + " in " + m_scene.RegionInfo.RegionName); return; } udpClient = ((LLClientView)client).UDPClient; if (!udpClient.IsConnected) return; #endregion Packet to Client Mapping // Stats tracking Interlocked.Increment(ref udpClient.PacketsReceived); int now = Environment.TickCount & Int32.MaxValue; udpClient.TickLastPacketReceived = now; #region ACK Receiving // Handle appended ACKs if (packet.Header.AppendedAcks && packet.Header.AckList != null) { for (int i = 0; i < packet.Header.AckList.Length; i++) udpClient.NeedAcks.Remove(packet.Header.AckList[i], now, packet.Header.Resent); } // Handle PacketAck packets if (packet.Type == PacketType.PacketAck) { PacketAckPacket ackPacket = (PacketAckPacket)packet; for (int i = 0; i < ackPacket.Packets.Length; i++) udpClient.NeedAcks.Remove(ackPacket.Packets[i].ID, now, packet.Header.Resent); // We don't need to do anything else with PacketAck packets return; } #endregion ACK Receiving #region ACK Sending if (packet.Header.Reliable) { udpClient.PendingAcks.Enqueue(packet.Header.Sequence); // This is a somewhat odd sequence of steps to pull the client.BytesSinceLastACK value out, // add the current received bytes to it, test if 2*MTU bytes have been sent, if so remove // 2*MTU bytes from the value and send ACKs, and finally add the local value back to // client.BytesSinceLastACK. 
Lockless thread safety int bytesSinceLastACK = Interlocked.Exchange(ref udpClient.BytesSinceLastACK, 0); bytesSinceLastACK += buffer.DataLength; if (bytesSinceLastACK > LLUDPServer.MTU * 2) { bytesSinceLastACK -= LLUDPServer.MTU * 2; SendAcks(udpClient); } Interlocked.Add(ref udpClient.BytesSinceLastACK, bytesSinceLastACK); } #endregion ACK Sending #region Incoming Packet Accounting // Check the archive of received reliable packet IDs to see whether we already received this packet if (packet.Header.Reliable && !udpClient.PacketArchive.TryEnqueue(packet.Header.Sequence)) { if (packet.Header.Resent) m_log.DebugFormat( "[LLUDPSERVER]: Received a resend of already processed packet #{0}, type {1} from {2}", packet.Header.Sequence, packet.Type, client.Name); else m_log.WarnFormat( "[LLUDPSERVER]: Received a duplicate (not marked as resend) of packet #{0}, type {1} from {2}", packet.Header.Sequence, packet.Type, client.Name); // Avoid firing a callback twice for the same packet return; } #endregion Incoming Packet Accounting #region BinaryStats LogPacketHeader(true, udpClient.CircuitCode, 0, packet.Type, (ushort)packet.Length); #endregion BinaryStats #region Ping Check Handling if (packet.Type == PacketType.StartPingCheck) { // We don't need to do anything else with ping checks StartPingCheckPacket startPing = (StartPingCheckPacket)packet; CompletePing(udpClient, startPing.PingID.PingID); if ((Environment.TickCount - m_elapsedMSSinceLastStatReport) >= 3000) { udpClient.SendPacketStats(); m_elapsedMSSinceLastStatReport = Environment.TickCount; } return; } else if (packet.Type == PacketType.CompletePingCheck) { // We don't currently track client ping times return; } #endregion Ping Check Handling // Inbox insertion packetInbox.Enqueue(new IncomingPacket(udpClient, packet)); } #region BinaryStats public class PacketLogger { public DateTime StartTime; public string Path = null; public System.IO.BinaryWriter Log = null; } public static PacketLogger PacketLog; protected static bool m_shouldCollectStats = false; // Number of seconds to log for static TimeSpan binStatsMaxFilesize = TimeSpan.FromSeconds(300); static object binStatsLogLock = new object(); static string binStatsDir = ""; public static void LogPacketHeader(bool incoming, uint circuit, byte flags, PacketType packetType, ushort size) { if (!m_shouldCollectStats) return; // Binary logging format is TTTTTTTTCCCCFPPPSS, T=Time, C=Circuit, F=Flags, P=PacketType, S=size // Put the incoming bit into the least significant bit of the flags byte if (incoming) flags |= 0x01; else flags &= 0xFE; // Put the flags byte into the most significant bits of the type integer uint type = (uint)packetType; type |= (uint)flags << 24; // m_log.Debug("1 LogPacketHeader(): Outside lock"); lock (binStatsLogLock) { DateTime now = DateTime.Now; // m_log.Debug("2 LogPacketHeader(): Inside lock. now is " + now.Ticks); try { if (PacketLog == null || (now > PacketLog.StartTime + binStatsMaxFilesize)) { if (PacketLog != null && PacketLog.Log != null) { PacketLog.Log.Close(); } // First log file or time has expired, start writing to a new log file PacketLog = new PacketLogger(); PacketLog.StartTime = now; PacketLog.Path = (binStatsDir.Length > 0 ? 
binStatsDir + System.IO.Path.DirectorySeparatorChar.ToString() : "") + String.Format("packets-{0}.log", now.ToString("yyyyMMddHHmmss")); PacketLog.Log = new BinaryWriter(File.Open(PacketLog.Path, FileMode.Append, FileAccess.Write)); } // Serialize the data byte[] output = new byte[18]; Buffer.BlockCopy(BitConverter.GetBytes(now.Ticks), 0, output, 0, 8); Buffer.BlockCopy(BitConverter.GetBytes(circuit), 0, output, 8, 4); Buffer.BlockCopy(BitConverter.GetBytes(type), 0, output, 12, 4); Buffer.BlockCopy(BitConverter.GetBytes(size), 0, output, 16, 2); // Write the serialized data to disk if (PacketLog != null && PacketLog.Log != null) PacketLog.Log.Write(output); } catch (Exception ex) { m_log.Error("Packet statistics gathering failed: " + ex.Message, ex); if (PacketLog.Log != null) { PacketLog.Log.Close(); } PacketLog = null; } } } #endregion BinaryStats private void HandleUseCircuitCode(object o) { DateTime startTime = DateTime.Now; object[] array = (object[])o; UDPPacketBuffer buffer = (UDPPacketBuffer)array[0]; UseCircuitCodePacket packet = (UseCircuitCodePacket)array[1]; m_log.DebugFormat("[LLUDPSERVER]: Handling UseCircuitCode request from {0}", buffer.RemoteEndPoint); IPEndPoint remoteEndPoint = (IPEndPoint)buffer.RemoteEndPoint; // Begin the process of adding the client to the simulator AddNewClient((UseCircuitCodePacket)packet, remoteEndPoint); // Acknowledge the UseCircuitCode packet SendAckImmediate(remoteEndPoint, packet.Header.Sequence); m_log.DebugFormat( "[LLUDPSERVER]: Handling UseCircuitCode request from {0} took {1}ms", buffer.RemoteEndPoint, (DateTime.Now - startTime).Milliseconds); } private void SendAckImmediate(IPEndPoint remoteEndpoint, uint sequenceNumber) { PacketAckPacket ack = new PacketAckPacket(); ack.Header.Reliable = false; ack.Packets = new PacketAckPacket.PacketsBlock[1]; ack.Packets[0] = new PacketAckPacket.PacketsBlock(); ack.Packets[0].ID = sequenceNumber; byte[] packetData = ack.ToBytes(); int length = packetData.Length; UDPPacketBuffer buffer = new UDPPacketBuffer(remoteEndpoint, length); buffer.DataLength = length; Buffer.BlockCopy(packetData, 0, buffer.Data, 0, length); AsyncBeginSend(buffer); } private bool IsClientAuthorized(UseCircuitCodePacket useCircuitCode, out AuthenticateResponse sessionInfo) { UUID agentID = useCircuitCode.CircuitCode.ID; UUID sessionID = useCircuitCode.CircuitCode.SessionID; uint circuitCode = useCircuitCode.CircuitCode.Code; sessionInfo = m_circuitManager.AuthenticateSession(sessionID, agentID, circuitCode); return sessionInfo.Authorised; } private void AddNewClient(UseCircuitCodePacket useCircuitCode, IPEndPoint remoteEndPoint) { UUID agentID = useCircuitCode.CircuitCode.ID; UUID sessionID = useCircuitCode.CircuitCode.SessionID; uint circuitCode = useCircuitCode.CircuitCode.Code; if (m_scene.RegionStatus != RegionStatus.SlaveScene) { AuthenticateResponse sessionInfo; if (IsClientAuthorized(useCircuitCode, out sessionInfo)) { AddClient(circuitCode, agentID, sessionID, remoteEndPoint, sessionInfo); } else { // Don't create circuits for unauthorized clients m_log.WarnFormat( "[LLUDPSERVER]: Connection request for client {0} connecting with unnotified circuit code {1} from {2}", useCircuitCode.CircuitCode.ID, useCircuitCode.CircuitCode.Code, remoteEndPoint); } } else { // Slave regions don't accept new clients m_log.Debug("[LLUDPSERVER]: Slave region " + m_scene.RegionInfo.RegionName + " ignoring UseCircuitCode packet"); } } protected virtual void AddClient(uint circuitCode, UUID agentID, UUID sessionID, IPEndPoint remoteEndPoint, 
AuthenticateResponse sessionInfo) { // Create the LLUDPClient LLUDPClient udpClient = new LLUDPClient(this, m_throttleRates, m_throttle, circuitCode, agentID, remoteEndPoint, m_defaultRTO, m_maxRTO); IClientAPI existingClient; if (!m_scene.TryGetClient(agentID, out existingClient)) { // Create the LLClientView LLClientView client = new LLClientView(remoteEndPoint, m_scene, this, udpClient, sessionInfo, agentID, sessionID, circuitCode); client.OnLogout += LogoutHandler; client.DisableFacelights = m_disableFacelights; // Start the IClientAPI client.Start(); } else { m_log.WarnFormat("[LLUDPSERVER]: Ignoring a repeated UseCircuitCode from {0} at {1} for circuit {2}", udpClient.AgentID, remoteEndPoint, circuitCode); } } private void RemoveClient(LLUDPClient udpClient) { // Remove this client from the scene IClientAPI client; if (m_scene.TryGetClient(udpClient.AgentID, out client)) { client.IsLoggingOut = true; client.Close(); } } private void IncomingPacketHandler() { // Set this culture for the thread that incoming packets are received // on to en-US to avoid number parsing issues Culture.SetCurrentCulture(); while (base.IsRunning) { try { IncomingPacket incomingPacket = null; // HACK: This is a test to try and rate limit packet handling on Mono. // If it works, a more elegant solution can be devised if (Util.FireAndForgetCount() < 2) { //m_log.Debug("[LLUDPSERVER]: Incoming packet handler is sleeping"); Thread.Sleep(30); } if (packetInbox.Dequeue(100, ref incomingPacket)) ProcessInPacket(incomingPacket);//, incomingPacket); Util.FireAndForget(ProcessInPacket, incomingPacket); } catch (Exception ex) { m_log.Error("[LLUDPSERVER]: Error in the incoming packet handler loop: " + ex.Message, ex); } Watchdog.UpdateThread(); } if (packetInbox.Count > 0) m_log.Warn("[LLUDPSERVER]: IncomingPacketHandler is shutting down, dropping " + packetInbox.Count + " packets"); packetInbox.Clear(); Watchdog.RemoveThread(); } private void OutgoingPacketHandler() { // Set this culture for the thread that outgoing packets are sent // on to en-US to avoid number parsing issues Culture.SetCurrentCulture(); // Typecast the function to an Action<IClientAPI> once here to avoid allocating a new // Action generic every round Action<IClientAPI> clientPacketHandler = ClientOutgoingPacketHandler; while (base.IsRunning) { try { m_packetSent = false; #region Update Timers m_resendUnacked = false; m_sendAcks = false; m_sendPing = false; // Update elapsed time int thisTick = Environment.TickCount & Int32.MaxValue; if (m_tickLastOutgoingPacketHandler > thisTick) m_elapsedMSOutgoingPacketHandler += ((Int32.MaxValue - m_tickLastOutgoingPacketHandler) + thisTick); else m_elapsedMSOutgoingPacketHandler += (thisTick - m_tickLastOutgoingPacketHandler); m_tickLastOutgoingPacketHandler = thisTick; // Check for pending outgoing resends every 100ms if (m_elapsedMSOutgoingPacketHandler >= 100) { m_resendUnacked = true; m_elapsedMSOutgoingPacketHandler = 0; m_elapsed100MSOutgoingPacketHandler += 1; } // Check for pending outgoing ACKs every 500ms if (m_elapsed100MSOutgoingPacketHandler >= 5) { m_sendAcks = true; m_elapsed100MSOutgoingPacketHandler = 0; m_elapsed500MSOutgoingPacketHandler += 1; } // Send pings to clients every 5000ms if (m_elapsed500MSOutgoingPacketHandler >= 10) { m_sendPing = true; m_elapsed500MSOutgoingPacketHandler = 0; } #endregion Update Timers // Handle outgoing packets, resends, acknowledgements, and pings for each // client. 
m_packetSent will be set to true if a packet is sent m_scene.ForEachClient(clientPacketHandler); // If nothing was sent, sleep for the minimum amount of time before a // token bucket could get more tokens if (!m_packetSent) Thread.Sleep((int)TickCountResolution); Watchdog.UpdateThread(); } catch (Exception ex) { m_log.Error("[LLUDPSERVER]: OutgoingPacketHandler loop threw an exception: " + ex.Message, ex); } } Watchdog.RemoveThread(); } private void ClientOutgoingPacketHandler(IClientAPI client) { try { if (client is LLClientView) { LLUDPClient udpClient = ((LLClientView)client).UDPClient; if (udpClient.IsConnected) { if (m_resendUnacked) ResendUnacked(udpClient); if (m_sendAcks) SendAcks(udpClient); if (m_sendPing) SendPing(udpClient); // Dequeue any outgoing packets that are within the throttle limits if (udpClient.DequeueOutgoing()) m_packetSent = true; } } } catch (Exception ex) { m_log.Error("[LLUDPSERVER]: OutgoingPacketHandler iteration for " + client.Name + " threw an exception: " + ex.Message, ex); } } private void ProcessInPacket(object state) { IncomingPacket incomingPacket = (IncomingPacket)state; Packet packet = incomingPacket.Packet; LLUDPClient udpClient = incomingPacket.Client; IClientAPI client; // Sanity check if (packet == null || udpClient == null) { m_log.WarnFormat("[LLUDPSERVER]: Processing a packet with incomplete state. Packet=\"{0}\", UDPClient=\"{1}\"", packet, udpClient); } // Make sure this client is still alive if (m_scene.TryGetClient(udpClient.AgentID, out client)) { try { // Process this packet client.ProcessInPacket(packet); } catch (ThreadAbortException) { // If something is trying to abort the packet processing thread, take that as a hint that it's time to shut down m_log.Info("[LLUDPSERVER]: Caught a thread abort, shutting down the LLUDP server"); Stop(); } catch (Exception e) { // Don't let a failure in an individual client thread crash the whole sim. m_log.ErrorFormat("[LLUDPSERVER]: Client packet handler for {0} for packet {1} threw an exception", udpClient.AgentID, packet.Type); m_log.Error(e.Message, e); } } else { m_log.DebugFormat("[LLUDPSERVER]: Dropping incoming {0} packet for dead client {1}", packet.Type, udpClient.AgentID); } } protected void LogoutHandler(IClientAPI client) { client.SendLogoutPacket(); if (client.IsActive) RemoveClient(((LLClientView)client).UDPClient); } } }
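// --- Editor's note: the block below is a standalone, hedged sketch, not part of the OpenSim
// --- source above. It isolates the lock-free "exchange, accumulate, add back" pattern that
// --- PacketReceived applies to udpClient.BytesSinceLastACK before deciding to send ACKs, so
// --- the idea can be read and tested without the surrounding UDP machinery. The type name
// --- AckThresholdCounter and the MTU constant value are illustrative assumptions.
using System.Threading;

public sealed class AckThresholdCounter
{
    private const int MTU = 1400;      // assumed value, for illustration only
    private int _bytesSinceLastAck;    // mirrors LLUDPClient.BytesSinceLastACK

    // Returns true when roughly 2*MTU bytes have accumulated since the last ACK burst.
    public bool AddAndCheck(int receivedBytes)
    {
        // Take the shared counter for this thread (other threads now see 0)...
        int local = Interlocked.Exchange(ref _bytesSinceLastAck, 0);
        local += receivedBytes;

        bool sendAcks = false;
        if (local > MTU * 2)
        {
            local -= MTU * 2;          // keep the remainder for the next window
            sendAcks = true;
        }

        // ...then merge the remainder back with whatever other threads added meanwhile.
        Interlocked.Add(ref _bytesSinceLastAck, local);
        return sendAcks;
    }
}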
//------------------------------------------------------------------------------ // <copyright file="HttpContextWrapper2.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> //------------------------------------------------------------------------------ namespace System.Web { using System; using System.Collections; using System.Collections.Generic; using System.ComponentModel; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Runtime.CompilerServices; using System.Security.Principal; using System.Threading.Tasks; using System.Web.Caching; using System.Web.Configuration; using System.Web.Instrumentation; using System.Web.Profile; using System.Web.SessionState; using System.Web.WebSockets; [TypeForwardedFrom("System.Web.Abstractions, Version=3.5.0.0, Culture=Neutral, PublicKeyToken=31bf3856ad364e35")] public class HttpContextWrapper : HttpContextBase { private readonly HttpContext _context; public HttpContextWrapper(HttpContext httpContext) { if (httpContext == null) { throw new ArgumentNullException("httpContext"); } _context = httpContext; } public override ISubscriptionToken AddOnRequestCompleted(Action<HttpContextBase> callback) { return _context.AddOnRequestCompleted(WrapCallback(callback)); } public override Exception[] AllErrors { get { return _context.AllErrors; } } [EditorBrowsable(EditorBrowsableState.Advanced)] public override bool AllowAsyncDuringSyncStages { get { return _context.AllowAsyncDuringSyncStages; } set { _context.AllowAsyncDuringSyncStages = value; } } public override HttpApplicationStateBase Application { get { return new HttpApplicationStateWrapper(_context.Application); } } // public override HttpApplication ApplicationInstance { get { return _context.ApplicationInstance; } set { _context.ApplicationInstance = value; } } public override AsyncPreloadModeFlags AsyncPreloadMode { get { return _context.AsyncPreloadMode; } set { _context.AsyncPreloadMode = value; } } // public override Cache Cache { get { return _context.Cache; } } public override IHttpHandler CurrentHandler { get { return _context.CurrentHandler; } } public override RequestNotification CurrentNotification { get { return _context.CurrentNotification; } } public override Exception Error { get { return _context.Error; } } public override IHttpHandler Handler { get { return _context.Handler; } set { _context.Handler = value; } } public override bool IsCustomErrorEnabled { get { return _context.IsCustomErrorEnabled; } } public override bool IsDebuggingEnabled { get { return _context.IsDebuggingEnabled; } } public override bool IsPostNotification { get { return _context.IsPostNotification; } } public override bool IsWebSocketRequest { get { return _context.IsWebSocketRequest; } } public override bool IsWebSocketRequestUpgrading { get { return _context.IsWebSocketRequestUpgrading; } } public override IDictionary Items { get { return _context.Items; } } public override PageInstrumentationService PageInstrumentation { get { return _context.PageInstrumentation; } } public override IHttpHandler PreviousHandler { get { return _context.PreviousHandler; } } // public override ProfileBase Profile { get { return _context.Profile; } } public override HttpRequestBase Request { get { return new HttpRequestWrapper(_context.Request); } } public override HttpResponseBase Response { get { return new HttpResponseWrapper(_context.Response); } } public override HttpServerUtilityBase Server { get { return new 
HttpServerUtilityWrapper(_context.Server); } } public override HttpSessionStateBase Session { get { HttpSessionState session = _context.Session; return (session != null) ? new HttpSessionStateWrapper(session) : null; } } public override bool SkipAuthorization { get { return _context.SkipAuthorization; } set { _context.SkipAuthorization = value; } } public override DateTime Timestamp { get { return _context.Timestamp; } } public override bool ThreadAbortOnTimeout { get { return _context.ThreadAbortOnTimeout; } set { _context.ThreadAbortOnTimeout = value; } } // public override TraceContext Trace { get { return _context.Trace; } } public override IPrincipal User { get { return _context.User; } set { _context.User = value; } } public override string WebSocketNegotiatedProtocol { get { return _context.WebSocketNegotiatedProtocol; } } public override IList<string> WebSocketRequestedProtocols { get { return _context.WebSocketRequestedProtocols; } } public override void AcceptWebSocketRequest(Func<AspNetWebSocketContext, Task> userFunc) { _context.AcceptWebSocketRequest(userFunc); } public override void AcceptWebSocketRequest(Func<AspNetWebSocketContext, Task> userFunc, AspNetWebSocketOptions options) { _context.AcceptWebSocketRequest(userFunc, options); } public override void AddError(Exception errorInfo) { _context.AddError(errorInfo); } public override void ClearError() { _context.ClearError(); } public override ISubscriptionToken DisposeOnPipelineCompleted(IDisposable target) { return _context.DisposeOnPipelineCompleted(target); } [SuppressMessage("Microsoft.Globalization", "CA1304:SpecifyCultureInfo", Justification = "Matches HttpContext class")] public override object GetGlobalResourceObject(string classKey, string resourceKey) { return HttpContext.GetGlobalResourceObject(classKey, resourceKey); } public override object GetGlobalResourceObject(string classKey, string resourceKey, CultureInfo culture) { return HttpContext.GetGlobalResourceObject(classKey, resourceKey, culture); } [SuppressMessage("Microsoft.Globalization", "CA1304:SpecifyCultureInfo", Justification = "Matches HttpContext class")] public override object GetLocalResourceObject(string virtualPath, string resourceKey) { return HttpContext.GetLocalResourceObject(virtualPath, resourceKey); } public override object GetLocalResourceObject(string virtualPath, string resourceKey, CultureInfo culture) { return HttpContext.GetLocalResourceObject(virtualPath, resourceKey, culture); } public override object GetSection(string sectionName) { return _context.GetSection(sectionName); } public override void RemapHandler(IHttpHandler handler) { _context.RemapHandler(handler); } public override void RewritePath(string path) { _context.RewritePath(path); } public override void RewritePath(string path, bool rebaseClientPath) { _context.RewritePath(path, rebaseClientPath); } public override void RewritePath(string filePath, string pathInfo, string queryString) { _context.RewritePath(filePath, pathInfo, queryString); } public override void RewritePath(string filePath, string pathInfo, string queryString, bool setClientFilePath) { _context.RewritePath(filePath, pathInfo, queryString, setClientFilePath); } public override void SetSessionStateBehavior(SessionStateBehavior sessionStateBehavior) { _context.SetSessionStateBehavior(sessionStateBehavior); } public override object GetService(Type serviceType) { return ((IServiceProvider)_context).GetService(serviceType); } internal static Action<HttpContext> WrapCallback(Action<HttpContextBase> callback) { 
            if (callback != null)
            {
                return context => callback(new HttpContextWrapper(context));
            }
            else
            {
                return null;
            }
        }
    }
}
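// --- Editor's note: a minimal, hedged usage sketch, not part of System.Web. It shows the point
// --- of the wrapper above: code written against the abstract HttpContextBase can be driven by
// --- HttpContextWrapper in production and by a hand-rolled test double in unit tests. The
// --- RequestDescriber name is illustrative, not taken from the original source.
using System.Web;

public static class RequestDescriber
{
    // Depends only on members declared on HttpContextBase / HttpRequestBase.
    public static string Describe(HttpContextBase context)
    {
        return context.Request.HttpMethod + " " + context.Request.RawUrl;
    }
}

// Production call site:
//   string line = RequestDescriber.Describe(new HttpContextWrapper(HttpContext.Current));
// A test can pass any HttpContextBase subclass whose Request property returns a canned
// HttpRequestBase, with no real HTTP pipeline involved.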
/* * UltraCart Rest API V2 * * UltraCart REST API Version 2 * * OpenAPI spec version: 2.0.0 * Contact: [email protected] * Generated by: https://github.com/swagger-api/swagger-codegen.git */ using System; using System.Linq; using System.IO; using System.Text; using System.Text.RegularExpressions; using System.Collections; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Runtime.Serialization; using Newtonsoft.Json; using Newtonsoft.Json.Converters; using System.ComponentModel.DataAnnotations; using SwaggerDateConverter = com.ultracart.admin.v2.Client.SwaggerDateConverter; namespace com.ultracart.admin.v2.Model { /// <summary> /// ItemShippingMethod /// </summary> [DataContract] public partial class ItemShippingMethod : IEquatable<ItemShippingMethod>, IValidatableObject { /// <summary> /// Shipping method validity /// </summary> /// <value>Shipping method validity</value> [JsonConverter(typeof(StringEnumConverter))] public enum ShippingMethodValidityEnum { /// <summary> /// Enum Invalidfor for value: invalid for /// </summary> [EnumMember(Value = "invalid for")] Invalidfor = 1, /// <summary> /// Enum Validfor for value: valid for /// </summary> [EnumMember(Value = "valid for")] Validfor = 2, /// <summary> /// Enum Validonlyfor for value: valid only for /// </summary> [EnumMember(Value = "valid only for")] Validonlyfor = 3 } /// <summary> /// Shipping method validity /// </summary> /// <value>Shipping method validity</value> [DataMember(Name="shipping_method_validity", EmitDefaultValue=false)] public ShippingMethodValidityEnum? ShippingMethodValidity { get; set; } /// <summary> /// Initializes a new instance of the <see cref="ItemShippingMethod" /> class. /// </summary> /// <param name="cost">Cost.</param> /// <param name="eachAdditionalItemMarkup">Each additional item markup.</param> /// <param name="filterToIfAvailable">Filter to this method if available.</param> /// <param name="firstItemMarkup">First item markup.</param> /// <param name="fixedShippingCost">Fixed shipping cost.</param> /// <param name="flatFeeMarkup">Flat fee markup.</param> /// <param name="freeShipping">Free shipping.</param> /// <param name="perItemFeeMarkup">Per item fee markup.</param> /// <param name="percentageMarkup">Percentage markup.</param> /// <param name="percentageOfItemMarkup">Percentage of item markup.</param> /// <param name="relaxRestrictionsOnUpsell">Relax restrictions on upsell.</param> /// <param name="shippingMethod">Shipping method name.</param> /// <param name="shippingMethodOid">Shipping method object identifier.</param> /// <param name="shippingMethodValidity">Shipping method validity.</param> /// <param name="signatureRequired">Signature required.</param> public ItemShippingMethod(decimal? cost = default(decimal?), decimal? eachAdditionalItemMarkup = default(decimal?), bool? filterToIfAvailable = default(bool?), decimal? firstItemMarkup = default(decimal?), decimal? fixedShippingCost = default(decimal?), decimal? flatFeeMarkup = default(decimal?), bool? freeShipping = default(bool?), decimal? perItemFeeMarkup = default(decimal?), decimal? percentageMarkup = default(decimal?), decimal? percentageOfItemMarkup = default(decimal?), bool? relaxRestrictionsOnUpsell = default(bool?), string shippingMethod = default(string), int? shippingMethodOid = default(int?), ShippingMethodValidityEnum? shippingMethodValidity = default(ShippingMethodValidityEnum?), bool? 
signatureRequired = default(bool?)) { this.Cost = cost; this.EachAdditionalItemMarkup = eachAdditionalItemMarkup; this.FilterToIfAvailable = filterToIfAvailable; this.FirstItemMarkup = firstItemMarkup; this.FixedShippingCost = fixedShippingCost; this.FlatFeeMarkup = flatFeeMarkup; this.FreeShipping = freeShipping; this.PerItemFeeMarkup = perItemFeeMarkup; this.PercentageMarkup = percentageMarkup; this.PercentageOfItemMarkup = percentageOfItemMarkup; this.RelaxRestrictionsOnUpsell = relaxRestrictionsOnUpsell; this.ShippingMethod = shippingMethod; this.ShippingMethodOid = shippingMethodOid; this.ShippingMethodValidity = shippingMethodValidity; this.SignatureRequired = signatureRequired; } /// <summary> /// Cost /// </summary> /// <value>Cost</value> [DataMember(Name="cost", EmitDefaultValue=false)] public decimal? Cost { get; set; } /// <summary> /// Each additional item markup /// </summary> /// <value>Each additional item markup</value> [DataMember(Name="each_additional_item_markup", EmitDefaultValue=false)] public decimal? EachAdditionalItemMarkup { get; set; } /// <summary> /// Filter to this method if available /// </summary> /// <value>Filter to this method if available</value> [DataMember(Name="filter_to_if_available", EmitDefaultValue=false)] public bool? FilterToIfAvailable { get; set; } /// <summary> /// First item markup /// </summary> /// <value>First item markup</value> [DataMember(Name="first_item_markup", EmitDefaultValue=false)] public decimal? FirstItemMarkup { get; set; } /// <summary> /// Fixed shipping cost /// </summary> /// <value>Fixed shipping cost</value> [DataMember(Name="fixed_shipping_cost", EmitDefaultValue=false)] public decimal? FixedShippingCost { get; set; } /// <summary> /// Flat fee markup /// </summary> /// <value>Flat fee markup</value> [DataMember(Name="flat_fee_markup", EmitDefaultValue=false)] public decimal? FlatFeeMarkup { get; set; } /// <summary> /// Free shipping /// </summary> /// <value>Free shipping</value> [DataMember(Name="free_shipping", EmitDefaultValue=false)] public bool? FreeShipping { get; set; } /// <summary> /// Per item fee markup /// </summary> /// <value>Per item fee markup</value> [DataMember(Name="per_item_fee_markup", EmitDefaultValue=false)] public decimal? PerItemFeeMarkup { get; set; } /// <summary> /// Percentage markup /// </summary> /// <value>Percentage markup</value> [DataMember(Name="percentage_markup", EmitDefaultValue=false)] public decimal? PercentageMarkup { get; set; } /// <summary> /// Percentage of item markup /// </summary> /// <value>Percentage of item markup</value> [DataMember(Name="percentage_of_item_markup", EmitDefaultValue=false)] public decimal? PercentageOfItemMarkup { get; set; } /// <summary> /// Relax restrictions on upsell /// </summary> /// <value>Relax restrictions on upsell</value> [DataMember(Name="relax_restrictions_on_upsell", EmitDefaultValue=false)] public bool? RelaxRestrictionsOnUpsell { get; set; } /// <summary> /// Shipping method name /// </summary> /// <value>Shipping method name</value> [DataMember(Name="shipping_method", EmitDefaultValue=false)] public string ShippingMethod { get; set; } /// <summary> /// Shipping method object identifier /// </summary> /// <value>Shipping method object identifier</value> [DataMember(Name="shipping_method_oid", EmitDefaultValue=false)] public int? ShippingMethodOid { get; set; } /// <summary> /// Signature required /// </summary> /// <value>Signature required</value> [DataMember(Name="signature_required", EmitDefaultValue=false)] public bool? 
SignatureRequired { get; set; } /// <summary> /// Returns the string presentation of the object /// </summary> /// <returns>String presentation of the object</returns> public override string ToString() { var sb = new StringBuilder(); sb.Append("class ItemShippingMethod {\n"); sb.Append(" Cost: ").Append(Cost).Append("\n"); sb.Append(" EachAdditionalItemMarkup: ").Append(EachAdditionalItemMarkup).Append("\n"); sb.Append(" FilterToIfAvailable: ").Append(FilterToIfAvailable).Append("\n"); sb.Append(" FirstItemMarkup: ").Append(FirstItemMarkup).Append("\n"); sb.Append(" FixedShippingCost: ").Append(FixedShippingCost).Append("\n"); sb.Append(" FlatFeeMarkup: ").Append(FlatFeeMarkup).Append("\n"); sb.Append(" FreeShipping: ").Append(FreeShipping).Append("\n"); sb.Append(" PerItemFeeMarkup: ").Append(PerItemFeeMarkup).Append("\n"); sb.Append(" PercentageMarkup: ").Append(PercentageMarkup).Append("\n"); sb.Append(" PercentageOfItemMarkup: ").Append(PercentageOfItemMarkup).Append("\n"); sb.Append(" RelaxRestrictionsOnUpsell: ").Append(RelaxRestrictionsOnUpsell).Append("\n"); sb.Append(" ShippingMethod: ").Append(ShippingMethod).Append("\n"); sb.Append(" ShippingMethodOid: ").Append(ShippingMethodOid).Append("\n"); sb.Append(" ShippingMethodValidity: ").Append(ShippingMethodValidity).Append("\n"); sb.Append(" SignatureRequired: ").Append(SignatureRequired).Append("\n"); sb.Append("}\n"); return sb.ToString(); } /// <summary> /// Returns the JSON string presentation of the object /// </summary> /// <returns>JSON string presentation of the object</returns> public virtual string ToJson() { return JsonConvert.SerializeObject(this, Formatting.Indented); } /// <summary> /// Returns true if objects are equal /// </summary> /// <param name="input">Object to be compared</param> /// <returns>Boolean</returns> public override bool Equals(object input) { return this.Equals(input as ItemShippingMethod); } /// <summary> /// Returns true if ItemShippingMethod instances are equal /// </summary> /// <param name="input">Instance of ItemShippingMethod to be compared</param> /// <returns>Boolean</returns> public bool Equals(ItemShippingMethod input) { if (input == null) return false; return ( this.Cost == input.Cost || (this.Cost != null && this.Cost.Equals(input.Cost)) ) && ( this.EachAdditionalItemMarkup == input.EachAdditionalItemMarkup || (this.EachAdditionalItemMarkup != null && this.EachAdditionalItemMarkup.Equals(input.EachAdditionalItemMarkup)) ) && ( this.FilterToIfAvailable == input.FilterToIfAvailable || (this.FilterToIfAvailable != null && this.FilterToIfAvailable.Equals(input.FilterToIfAvailable)) ) && ( this.FirstItemMarkup == input.FirstItemMarkup || (this.FirstItemMarkup != null && this.FirstItemMarkup.Equals(input.FirstItemMarkup)) ) && ( this.FixedShippingCost == input.FixedShippingCost || (this.FixedShippingCost != null && this.FixedShippingCost.Equals(input.FixedShippingCost)) ) && ( this.FlatFeeMarkup == input.FlatFeeMarkup || (this.FlatFeeMarkup != null && this.FlatFeeMarkup.Equals(input.FlatFeeMarkup)) ) && ( this.FreeShipping == input.FreeShipping || (this.FreeShipping != null && this.FreeShipping.Equals(input.FreeShipping)) ) && ( this.PerItemFeeMarkup == input.PerItemFeeMarkup || (this.PerItemFeeMarkup != null && this.PerItemFeeMarkup.Equals(input.PerItemFeeMarkup)) ) && ( this.PercentageMarkup == input.PercentageMarkup || (this.PercentageMarkup != null && this.PercentageMarkup.Equals(input.PercentageMarkup)) ) && ( this.PercentageOfItemMarkup == input.PercentageOfItemMarkup || 
(this.PercentageOfItemMarkup != null && this.PercentageOfItemMarkup.Equals(input.PercentageOfItemMarkup)) ) && ( this.RelaxRestrictionsOnUpsell == input.RelaxRestrictionsOnUpsell || (this.RelaxRestrictionsOnUpsell != null && this.RelaxRestrictionsOnUpsell.Equals(input.RelaxRestrictionsOnUpsell)) ) && ( this.ShippingMethod == input.ShippingMethod || (this.ShippingMethod != null && this.ShippingMethod.Equals(input.ShippingMethod)) ) && ( this.ShippingMethodOid == input.ShippingMethodOid || (this.ShippingMethodOid != null && this.ShippingMethodOid.Equals(input.ShippingMethodOid)) ) && ( this.ShippingMethodValidity == input.ShippingMethodValidity || (this.ShippingMethodValidity != null && this.ShippingMethodValidity.Equals(input.ShippingMethodValidity)) ) && ( this.SignatureRequired == input.SignatureRequired || (this.SignatureRequired != null && this.SignatureRequired.Equals(input.SignatureRequired)) ); } /// <summary> /// Gets the hash code /// </summary> /// <returns>Hash code</returns> public override int GetHashCode() { unchecked // Overflow is fine, just wrap { int hashCode = 41; if (this.Cost != null) hashCode = hashCode * 59 + this.Cost.GetHashCode(); if (this.EachAdditionalItemMarkup != null) hashCode = hashCode * 59 + this.EachAdditionalItemMarkup.GetHashCode(); if (this.FilterToIfAvailable != null) hashCode = hashCode * 59 + this.FilterToIfAvailable.GetHashCode(); if (this.FirstItemMarkup != null) hashCode = hashCode * 59 + this.FirstItemMarkup.GetHashCode(); if (this.FixedShippingCost != null) hashCode = hashCode * 59 + this.FixedShippingCost.GetHashCode(); if (this.FlatFeeMarkup != null) hashCode = hashCode * 59 + this.FlatFeeMarkup.GetHashCode(); if (this.FreeShipping != null) hashCode = hashCode * 59 + this.FreeShipping.GetHashCode(); if (this.PerItemFeeMarkup != null) hashCode = hashCode * 59 + this.PerItemFeeMarkup.GetHashCode(); if (this.PercentageMarkup != null) hashCode = hashCode * 59 + this.PercentageMarkup.GetHashCode(); if (this.PercentageOfItemMarkup != null) hashCode = hashCode * 59 + this.PercentageOfItemMarkup.GetHashCode(); if (this.RelaxRestrictionsOnUpsell != null) hashCode = hashCode * 59 + this.RelaxRestrictionsOnUpsell.GetHashCode(); if (this.ShippingMethod != null) hashCode = hashCode * 59 + this.ShippingMethod.GetHashCode(); if (this.ShippingMethodOid != null) hashCode = hashCode * 59 + this.ShippingMethodOid.GetHashCode(); if (this.ShippingMethodValidity != null) hashCode = hashCode * 59 + this.ShippingMethodValidity.GetHashCode(); if (this.SignatureRequired != null) hashCode = hashCode * 59 + this.SignatureRequired.GetHashCode(); return hashCode; } } /// <summary> /// To validate all properties of the instance /// </summary> /// <param name="validationContext">Validation context</param> /// <returns>Validation Result</returns> IEnumerable<System.ComponentModel.DataAnnotations.ValidationResult> IValidatableObject.Validate(ValidationContext validationContext) { yield break; } } }
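// --- Editor's note: a hedged construction/serialization sketch (assumed consumer code, not
// --- generated by swagger-codegen). It exercises the optional-parameter constructor and the
// --- ToJson() helper defined above; the literal values are illustrative only.
using System;
using com.ultracart.admin.v2.Model;

public static class ItemShippingMethodExample
{
    public static void Main()
    {
        var ground = new ItemShippingMethod(
            cost: 4.99m,
            freeShipping: false,
            shippingMethod: "Ground",
            shippingMethodOid: 12345,
            shippingMethodValidity: ItemShippingMethod.ShippingMethodValidityEnum.Validfor);

        // Json.NET honours the [DataMember] names, so the output uses snake_case keys such as
        // "shipping_method", and the StringEnumConverter emits "valid for" for the validity enum.
        Console.WriteLine(ground.ToJson());
    }
}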
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /*============================================================ ** ** ** ** Purpose: Platform independent integer ** ** ===========================================================*/ using System.Globalization; using System.Runtime.CompilerServices; using System.Runtime.Versioning; using System.Security; namespace System { // CONTRACT with Runtime // The UIntPtr type is one of the primitives understood by the compilers and runtime // Data Contract: Single field of type void * [CLSCompliant(false)] public struct UIntPtr : IEquatable<UIntPtr> { unsafe private void* _value; [Intrinsic] public static readonly UIntPtr Zero; [Intrinsic] [NonVersionable] public unsafe UIntPtr(uint value) { _value = (void*)value; } [Intrinsic] [NonVersionable] public unsafe UIntPtr(ulong value) { #if BIT64 _value = (void*)value; #else _value = (void*)checked((uint)value); #endif } [Intrinsic] [NonVersionable] public unsafe UIntPtr(void* value) { _value = value; } [Intrinsic] [NonVersionable] public unsafe void* ToPointer() { return _value; } [Intrinsic] [NonVersionable] public unsafe uint ToUInt32() { #if BIT64 return checked((uint)_value); #else return (uint)_value; #endif } [Intrinsic] [NonVersionable] public unsafe ulong ToUInt64() { return (ulong)_value; } [Intrinsic] [NonVersionable] public static explicit operator UIntPtr(uint value) { return new UIntPtr(value); } [Intrinsic] [NonVersionable] public static explicit operator UIntPtr(ulong value) { return new UIntPtr(value); } [Intrinsic] [NonVersionable] public static unsafe explicit operator UIntPtr(void* value) { return new UIntPtr(value); } [Intrinsic] [NonVersionable] public static unsafe explicit operator void* (UIntPtr value) { return value._value; } [Intrinsic] [NonVersionable] public static unsafe explicit operator uint(UIntPtr value) { #if BIT64 return checked((uint)value._value); #else return (uint)value._value; #endif } [Intrinsic] [NonVersionable] public static unsafe explicit operator ulong(UIntPtr value) { return (ulong)value._value; } unsafe bool IEquatable<UIntPtr>.Equals(UIntPtr value) { return _value == value._value; } [Intrinsic] [NonVersionable] public static unsafe bool operator ==(UIntPtr value1, UIntPtr value2) { return value1._value == value2._value; } [Intrinsic] [NonVersionable] public static unsafe bool operator !=(UIntPtr value1, UIntPtr value2) { return value1._value != value2._value; } public static unsafe int Size { [Intrinsic] [NonVersionable] get { #if BIT64 return 8; #else return 4; #endif } } public unsafe override String ToString() { #if BIT64 return ((ulong)_value).ToString(FormatProvider.InvariantCulture); #else return ((uint)_value).ToString(FormatProvider.InvariantCulture); #endif } public unsafe override bool Equals(Object obj) { if (obj is UIntPtr) { return (_value == ((UIntPtr)obj)._value); } return false; } public unsafe override int GetHashCode() { #if BIT64 ulong l = (ulong)_value; return (unchecked((int)l) ^ (int)(l >> 32)); #else return unchecked((int)_value); #endif } [NonVersionable] public static UIntPtr Add(UIntPtr pointer, int offset) { return pointer + offset; } [Intrinsic] [NonVersionable] public static UIntPtr operator +(UIntPtr pointer, int offset) { #if BIT64 return new UIntPtr(pointer.ToUInt64() + (ulong)offset); #else return new UIntPtr(pointer.ToUInt32() + (uint)offset); #endif } [NonVersionable] 
public static UIntPtr Subtract(UIntPtr pointer, int offset) { return pointer - offset; } [Intrinsic] [NonVersionable] public static UIntPtr operator -(UIntPtr pointer, int offset) { #if BIT64 return new UIntPtr(pointer.ToUInt64() - (ulong)offset); #else return new UIntPtr(pointer.ToUInt32() - (uint)offset); #endif } } }
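// --- Editor's note: a small, hedged sketch of the arithmetic surface defined above; the
// --- variable names and the 0x1000 base value are illustrative only. The results differ by
// --- platform exactly as the BIT64 conditionals above describe.
using System;

public static class UIntPtrExample
{
    public static void Main()
    {
        UIntPtr baseAddress = (UIntPtr)0x1000u;

        // Advance by one native word: +4 in a 32-bit process, +8 in a 64-bit process.
        UIntPtr next = UIntPtr.Add(baseAddress, UIntPtr.Size);

        Console.WriteLine("0x{0:X}", next.ToUInt64());  // 0x1004 or 0x1008
        Console.WriteLine(UIntPtr.Size);                // 4 or 8
    }
}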
using System; using System.Windows.Forms; using System.Collections; using System.Drawing; namespace SpaceInvaders { class Game { private AlienManager aliens; private Player player; private Saucer saucer; private BulletManager bullets; private StarManager stars; private Image buffer; private Graphics bufferGraphics; private Graphics displayGraphics; private Form form; private Font font = new Font("Impact", 14); private Font largeFont = new Font("Impact", 26); private Brush fontBrush = Brushes.White; private double renderElapsed = 0d; bool saucerLaunchedThisLevel = false; private Game() {} private static Game instance = new Game(); internal static Game Instance { get { return instance; } } internal void FireBullet(Bullet bullet) { bullets.AlienBullets.Add(bullet); } internal void Initialize(Form mainForm) { this.form = mainForm; // Set up the off-screen buffer used for double-buffering buffer = new Bitmap(mainForm.Width, mainForm.Height); bufferGraphics = Graphics.FromImage(buffer); displayGraphics = mainForm.CreateGraphics(); stars = new StarManager(); } /// <summary> /// Input handler for the whole game. /// </summary> internal void OnKeyDown(object sender, KeyEventArgs e) { // The escape key will exit the application regardless // what state the game is in. if (e.KeyCode == Keys.Escape) Application.Exit(); // If game is not active if (Global.GameOver) { if (e.KeyCode == Keys.F5) startNewGame(); } else { // In game allowed keys switch (e.KeyCode) { case Keys.Left: Global.PlayerDirection = Directions.Left; break; case Keys.Right: Global.PlayerDirection = Directions.Right; break; case Keys.Down: Global.PlayerDirection = Directions.None; break; case Keys.Space: if (!player.Dead) bullets.PlayerBullets.Add(new Bullet(player.GetBulletStartLocation(), Directions.Up)); break; } } } /// <summary> /// Main game loop. /// </summary> internal void GameLoop() { DateTime start; double elapsed = 0d; while (form.Created) { start = DateTime.Now; stars.Step(elapsed); render(elapsed); Application.DoEvents(); if (!Global.GameOver) { step(elapsed); detectCollision(); } elapsed = (DateTime.Now - start).TotalMilliseconds; } } /// <summary> /// Main step method. /// </summary> private void step(double elapsed) { bullets.Step(elapsed); aliens.Step(elapsed); player.Step(elapsed); // Should a saucer be launched now? shouldSaucerLaunch(); if (saucer != null && saucer.Active) { saucer.Step(elapsed); if (!saucer.Active) saucer = null; } if (Global.PlayersRemaining == 0) { Global.GameOver = true; return; } if (Global.LevelFinished) { Global.CurrentLevel++; Global.BulletChancePerSecond *= 1.2f; Global.AlienSpeed *= 1.1f; startNewBoard(); } } /// <summary> /// Main render method. 
/// </summary> private void render(double elapsed) { // Shouldn't render every game loop iteration, so // throttle it down to a reasonable level renderElapsed += elapsed; if (renderElapsed < 30) return; renderElapsed = 0; bufferGraphics.Clear(Color.Black); stars.Render(bufferGraphics); // While the game is in progress if (!Global.GameOver) { aliens.Render(bufferGraphics); if (saucer != null && saucer.Active) saucer.Render(bufferGraphics); bullets.Render(bufferGraphics); player.Render(bufferGraphics); } else { // Show "Game Over" message in the center of the screen bufferGraphics.DrawString("Game Over", largeFont, fontBrush, 310, 290); bufferGraphics.DrawString("Press F5 to start a new game", font, fontBrush, 278, 350); } // Display banner bufferGraphics.DrawString("Score: " + Global.Score, font, fontBrush, 10, 10); bufferGraphics.DrawString("Level: " + Global.CurrentLevel, font, fontBrush, 630, 10); bufferGraphics.DrawString("Players: " + Global.PlayersRemaining, font, fontBrush, 710, 10); // Blit the off-screen buffer on to the display displayGraphics.DrawImage(buffer, 0, 0); } /// <summary> /// Called to check for any collisions between bullets and /// the player and/or aliens. /// </summary> private void detectCollision() { // Check to see if the player hit any aliens foreach (Bullet bullet in bullets.PlayerBullets) { if (!bullet.Active) continue; aliens.CheckForCollision(bullet); if (saucer != null && saucer.Active) saucer.CheckForCollision(bullet); } // Check to see if the aliens have hit the player if (!player.Dead) { foreach (Bullet bullet in bullets.AlienBullets) { if (!bullet.Active) continue; player.CheckForCollision(bullet); } } } private void shouldSaucerLaunch() { // Should launch a saucer only once per level, when there are 10 aliens left // in the level. if (saucer == null && !saucerLaunchedThisLevel && aliens.RemainingAliens == 10) { saucer = new Saucer(); saucerLaunchedThisLevel = true; } } /// <summary> /// Resets the game's state to start a new game /// </summary> private void startNewGame() { Global.GameOver = false; Global.Score = 0; Global.PlayersRemaining = 3; Global.CurrentLevel = 1; Global.BulletChancePerSecond = Global.BulletChancePerSecondStartValue; Global.AlienSpeed = Global.AlienSpeedStartValue; player = new Player(new Point(form.ClientSize.Width / 2 - 20, form.ClientSize.Height - 50)); startNewBoard(); } /// <summary> /// Called at the beginning of a new game and at the start /// of each new level a player reaches. Initializes bullet lists /// and places a new set of aliens at their starting positions. /// </summary> private void startNewBoard() { bullets = new BulletManager(); Global.LevelFinished = false; saucerLaunchedThisLevel = false; aliens = new AlienManager(Global.AliensPerRow, Global.AlienRows); } } }
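// --- Editor's note: hypothetical wiring for the Game singleton above; the original entry point
// --- is not part of this excerpt. The Program class, the Form settings and the window title are
// --- assumptions; only Game.Instance, Initialize, OnKeyDown and GameLoop come from the code above.
using System;
using System.Windows.Forms;

namespace SpaceInvaders
{
    static class Program
    {
        [STAThread]
        static void Main()
        {
            var form = new Form { Width = 800, Height = 600, Text = "Space Invaders" };

            Game.Instance.Initialize(form);          // builds the off-screen buffer for this form
            form.KeyDown += Game.Instance.OnKeyDown; // route keyboard input to the game
            form.Show();

            Game.Instance.GameLoop();                // runs until the form is closed
        }
    }
}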
using System.Collections.Generic; using System.Diagnostics; using System.Linq; using Microsoft.AspNetCore.Razor.Language; namespace OrchardCore.DisplayManagement.Liquid.TagHelpers { // Internal for testing internal static class RequiredAttributeParser { public static void AddRequiredAttributes(string requiredAttributes, TagMatchingRuleDescriptorBuilder ruleBuilder) { var requiredAttributeParser = new DefaultRequiredAttributeParser(requiredAttributes); requiredAttributeParser.AddRequiredAttributes(ruleBuilder); } private class DefaultRequiredAttributeParser { private const char RequiredAttributeWildcardSuffix = '*'; private static readonly IReadOnlyDictionary<char, RequiredAttributeDescriptor.ValueComparisonMode> CssValueComparisons = new Dictionary<char, RequiredAttributeDescriptor.ValueComparisonMode> { { '=', RequiredAttributeDescriptor.ValueComparisonMode.FullMatch }, { '^', RequiredAttributeDescriptor.ValueComparisonMode.PrefixMatch }, { '$', RequiredAttributeDescriptor.ValueComparisonMode.SuffixMatch } }; private static readonly char[] InvalidPlainAttributeNameCharacters = { ' ', '\t', ',', RequiredAttributeWildcardSuffix }; private static readonly char[] InvalidCssAttributeNameCharacters = (new[] { ' ', '\t', ',', ']' }) .Concat(CssValueComparisons.Keys) .ToArray(); private static readonly char[] InvalidCssQuotelessValueCharacters = { ' ', '\t', ']' }; private int _index; private string _requiredAttributes; public DefaultRequiredAttributeParser(string requiredAttributes) { _requiredAttributes = requiredAttributes; } private char Current => _requiredAttributes[_index]; private bool AtEnd => _index >= _requiredAttributes.Length; public void AddRequiredAttributes(TagMatchingRuleDescriptorBuilder ruleBuilder) { if (string.IsNullOrEmpty(_requiredAttributes)) { return; } var descriptors = new List<RequiredAttributeDescriptor>(); PassOptionalWhitespace(); do { var successfulParse = true; ruleBuilder.Attribute(attributeBuilder => { if (At('[')) { if (!TryParseCssSelector(attributeBuilder)) { successfulParse = false; return; } } else { ParsePlainSelector(attributeBuilder); } PassOptionalWhitespace(); if (At(',')) { _index++; if (!EnsureNotAtEnd(attributeBuilder)) { successfulParse = false; return; } } else if (!AtEnd) { //var diagnostic = RazorDiagnosticFactory.CreateTagHelper_InvalidRequiredAttributeCharacter(Current, _requiredAttributes); //attributeBuilder.Diagnostics.Add(diagnostic); successfulParse = false; return; } PassOptionalWhitespace(); }); if (!successfulParse) { break; } } while (!AtEnd); } private void ParsePlainSelector(RequiredAttributeDescriptorBuilder attributeBuilder) { var nameEndIndex = _requiredAttributes.IndexOfAny(InvalidPlainAttributeNameCharacters, _index); string attributeName; var nameComparison = RequiredAttributeDescriptor.NameComparisonMode.FullMatch; if (nameEndIndex == -1) { attributeName = _requiredAttributes.Substring(_index); _index = _requiredAttributes.Length; } else { attributeName = _requiredAttributes.Substring(_index, nameEndIndex - _index); _index = nameEndIndex; if (_requiredAttributes[nameEndIndex] == RequiredAttributeWildcardSuffix) { nameComparison = RequiredAttributeDescriptor.NameComparisonMode.PrefixMatch; // Move past wild card _index++; } } attributeBuilder.Name = attributeName; attributeBuilder.NameComparisonMode = nameComparison; } private void ParseCssAttributeName(RequiredAttributeDescriptorBuilder builder) { var nameStartIndex = _index; var nameEndIndex = _requiredAttributes.IndexOfAny(InvalidCssAttributeNameCharacters, _index); 
nameEndIndex = nameEndIndex == -1 ? _requiredAttributes.Length : nameEndIndex; _index = nameEndIndex; var attributeName = _requiredAttributes.Substring(nameStartIndex, nameEndIndex - nameStartIndex); builder.Name = attributeName; } private bool TryParseCssValueComparison(RequiredAttributeDescriptorBuilder builder, out RequiredAttributeDescriptor.ValueComparisonMode valueComparison) { Debug.Assert(!AtEnd); if (CssValueComparisons.TryGetValue(Current, out valueComparison)) { var op = Current; _index++; if (op != '=' && At('=')) { // Two length operator (ex: ^=). Move past the second piece _index++; } else if (op != '=') // We're at an incomplete operator (ex: [foo^] { //var diagnostic = RazorDiagnosticFactory.CreateTagHelper_PartialRequiredAttributeOperator(op, _requiredAttributes); //builder.Diagnostics.Add(diagnostic); return false; } } else if (!At(']')) { //var diagnostic = RazorDiagnosticFactory.CreateTagHelper_InvalidRequiredAttributeOperator(Current, _requiredAttributes); //builder.Diagnostics.Add(diagnostic); return false; } builder.ValueComparisonMode = valueComparison; return true; } private bool TryParseCssValue(RequiredAttributeDescriptorBuilder builder) { int valueStart; int valueEnd; if (At('\'') || At('"')) { var quote = Current; // Move past the quote _index++; valueStart = _index; valueEnd = _requiredAttributes.IndexOf(quote, _index); if (valueEnd == -1) { //var diagnostic = RazorDiagnosticFactory.CreateTagHelper_InvalidRequiredAttributeMismatchedQuotes(quote, _requiredAttributes); //builder.Diagnostics.Add(diagnostic); return false; } _index = valueEnd + 1; } else { valueStart = _index; var valueEndIndex = _requiredAttributes.IndexOfAny(InvalidCssQuotelessValueCharacters, _index); valueEnd = valueEndIndex == -1 ? _requiredAttributes.Length : valueEndIndex; _index = valueEnd; } var value = _requiredAttributes.Substring(valueStart, valueEnd - valueStart); builder.Value = value; return true; } private bool TryParseCssSelector(RequiredAttributeDescriptorBuilder attributeBuilder) { Debug.Assert(At('[')); // Move past '['. _index++; PassOptionalWhitespace(); ParseCssAttributeName(attributeBuilder); PassOptionalWhitespace(); if (!EnsureNotAtEnd(attributeBuilder)) { return false; } if (!TryParseCssValueComparison(attributeBuilder, out RequiredAttributeDescriptor.ValueComparisonMode valueComparison)) { return false; } PassOptionalWhitespace(); if (!EnsureNotAtEnd(attributeBuilder)) { return false; } if (valueComparison != RequiredAttributeDescriptor.ValueComparisonMode.None && !TryParseCssValue(attributeBuilder)) { return false; } PassOptionalWhitespace(); if (At(']')) { // Move past the ending bracket. _index++; return true; } else if (AtEnd) { //var diagnostic = RazorDiagnosticFactory.CreateTagHelper_CouldNotFindMatchingEndBrace(_requiredAttributes); //attributeBuilder.Diagnostics.Add(diagnostic); } else { //var diagnostic = RazorDiagnosticFactory.CreateTagHelper_InvalidRequiredAttributeCharacter(Current, _requiredAttributes); //attributeBuilder.Diagnostics.Add(diagnostic); } return false; } private bool EnsureNotAtEnd(RequiredAttributeDescriptorBuilder builder) { if (AtEnd) { //var diagnostic = RazorDiagnosticFactory.CreateTagHelper_CouldNotFindMatchingEndBrace(_requiredAttributes); //builder.Diagnostics.Add(diagnostic); return false; } return true; } private bool At(char c) { return !AtEnd && Current == c; } private void PassOptionalWhitespace() { while (!AtEnd && (Current == ' ' || Current == '\t')) { _index++; } } } } }
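// --- Editor's note: illustrative inputs for the parser above, derived from its parsing logic
// --- rather than from any external documentation. Each string is a value that could be passed
// --- as `requiredAttributes` to RequiredAttributeParser.AddRequiredAttributes(...):
//
//   "asp-for"                  -> plain selector, full-name match
//   "asp-route-*"              -> trailing '*' switches to prefix-name match
//   "[class^='btn']"           -> CSS-style selector: attribute 'class' whose value starts with 'btn'
//   "[type=submit]"            -> full value match; quotes are optional for values without spaces
//   "asp-for, [method$=post]"  -> comma-separated requirements; '$=' is a suffix value match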
using System; using System.Globalization; using System.Net; using Assert = Microsoft.VisualStudio.TestTools.UnitTesting.Assert; using Orleans; using Orleans.Runtime; using Orleans.Runtime.Configuration; using Orleans.Serialization; using Orleans.TestingHost; using Orleans.TestingHost.Utils; using Xunit; using Xunit.Abstractions; namespace UnitTests.General { public class Identifiertests { private readonly ITestOutputHelper output; private static readonly Random random = new Random(); class A { } class B : A { } public Identifiertests(ITestOutputHelper output) { this.output = output; SerializationManager.InitializeForTesting(); BufferPool.InitGlobalBufferPool(new MessagingConfiguration(false)); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void ID_IsSystem() { GrainId testGrain = Constants.DirectoryServiceId; output.WriteLine("Testing GrainID " + testGrain); Assert.IsTrue(testGrain.IsSystemTarget, "System grain ID is not flagged as a system ID"); GrainId sGrain = (GrainId)SerializationManager.DeepCopy(testGrain); output.WriteLine("Testing GrainID " + sGrain); Assert.IsTrue(sGrain.IsSystemTarget, "String round-trip grain ID is not flagged as a system ID"); Assert.AreEqual(testGrain, sGrain, "Should be equivalent GrainId object"); Assert.AreSame(testGrain, sGrain, "Should be same / intern'ed GrainId object"); ActivationId testActivation = ActivationId.GetSystemActivation(testGrain, SiloAddress.New(new IPEndPoint(IPAddress.Loopback, 2456), 0)); output.WriteLine("Testing ActivationID " + testActivation); Assert.IsTrue(testActivation.IsSystem, "System activation ID is not flagged as a system ID"); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void UniqueKeyKeyExtGrainCategoryDisallowsNullKeyExtension() { Xunit.Assert.Throws<ArgumentNullException>(() => UniqueKey.NewKey(Guid.NewGuid(), category: UniqueKey.Category.KeyExtGrain, keyExt: null)); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void UniqueKeyKeyExtGrainCategoryDisallowsEmptyKeyExtension() { Xunit.Assert.Throws<ArgumentException>(() => UniqueKey.NewKey(Guid.NewGuid(), category: UniqueKey.Category.KeyExtGrain, keyExt: "")); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void UniqueKeyKeyExtGrainCategoryDisallowsWhiteSpaceKeyExtension() { Xunit.Assert.Throws<ArgumentException>(() => UniqueKey.NewKey(Guid.NewGuid(), category: UniqueKey.Category.KeyExtGrain, keyExt: " \t\n\r")); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void UniqueKeySerializationShouldReproduceAnIdenticalObject() { { var expected = UniqueKey.NewKey(Guid.NewGuid()); BinaryTokenStreamWriter writer = new BinaryTokenStreamWriter(); writer.Write(expected); BinaryTokenStreamReader reader = new BinaryTokenStreamReader(writer.ToBytes()); var actual = reader.ReadUniqueKey(); Assert.AreEqual(expected, actual, "UniqueKey.Serialize() and UniqueKey.Deserialize() failed to reproduce an identical object (case #1)."); } { var kx = random.Next().ToString(CultureInfo.InvariantCulture); var expected = UniqueKey.NewKey(Guid.NewGuid(), category: UniqueKey.Category.KeyExtGrain, keyExt: kx); BinaryTokenStreamWriter writer = new BinaryTokenStreamWriter(); writer.Write(expected); BinaryTokenStreamReader reader = new BinaryTokenStreamReader(writer.ToBytes()); var actual = reader.ReadUniqueKey(); Assert.AreEqual(expected, actual, "UniqueKey.Serialize() and 
UniqueKey.Deserialize() failed to reproduce an identical object (case #2)."); } { var kx = random.Next().ToString(CultureInfo.InvariantCulture) + new String('*', 400); var expected = UniqueKey.NewKey(Guid.NewGuid(), category: UniqueKey.Category.KeyExtGrain, keyExt: kx); BinaryTokenStreamWriter writer = new BinaryTokenStreamWriter(); writer.Write(expected); BinaryTokenStreamReader reader = new BinaryTokenStreamReader(writer.ToBytes()); var actual = reader.ReadUniqueKey(); Assert.AreEqual(expected, actual, "UniqueKey.Serialize() and UniqueKey.Deserialize() failed to reproduce an identical object (case #3)."); } } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void ParsingUniqueKeyStringificationShouldReproduceAnIdenticalObject() { UniqueKey expected1 = UniqueKey.NewKey(Guid.NewGuid()); string str1 = expected1.ToHexString(); UniqueKey actual1 = UniqueKey.Parse(str1); Assert.AreEqual(expected1, actual1, "UniqueKey.ToString() and UniqueKey.Parse() failed to reproduce an identical object (case 1)."); string kx3 = "case 3"; UniqueKey expected3 = UniqueKey.NewKey(Guid.NewGuid(), category: UniqueKey.Category.KeyExtGrain, keyExt: kx3); string str3 = expected3.ToHexString(); UniqueKey actual3 = UniqueKey.Parse(str3); Assert.AreEqual(expected3, actual3, "UniqueKey.ToString() and UniqueKey.Parse() failed to reproduce an identical object (case 3)."); long pk = random.Next(); UniqueKey expected4 = UniqueKey.NewKey(pk); string str4 = expected4.ToHexString(); UniqueKey actual4 = UniqueKey.Parse(str4); Assert.AreEqual(expected4, actual4, "UniqueKey.ToString() and UniqueKey.Parse() failed to reproduce an identical object (case 4)."); pk = random.Next(); string kx5 = "case 5"; UniqueKey expected5 = UniqueKey.NewKey(pk, category: UniqueKey.Category.KeyExtGrain, keyExt: kx5); string str5 = expected5.ToHexString(); UniqueKey actual5 = UniqueKey.Parse(str5); Assert.AreEqual(expected5, actual5, "UniqueKey.ToString() and UniqueKey.Parse() failed to reproduce an identical object (case 5)."); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void GrainIdShouldEncodeAndDecodePrimaryKeyGuidCorrectly() { const int repeat = 100; for (int i = 0; i < repeat; ++i) { Guid expected = Guid.NewGuid(); GrainId grainId = GrainId.GetGrainIdForTesting(expected); Guid actual = grainId.Key.PrimaryKeyToGuid(); Assert.AreEqual(expected, actual, string.Format("Failed to encode and decode grain id with GUID {0}", expected)); } } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void GrainId_ToFromPrintableString() { Guid guid = Guid.NewGuid(); GrainId grainId = GrainId.GetGrainIdForTesting(guid); GrainId roundTripped = RoundTripGrainIdToParsable(grainId); Assert.AreEqual(grainId, roundTripped, "GrainId.ToPrintableString -- Guid key"); string extKey = "Guid-ExtKey-1"; guid = Guid.NewGuid(); grainId = GrainId.GetGrainId(0, guid, extKey); roundTripped = RoundTripGrainIdToParsable(grainId); Assert.AreEqual(grainId, roundTripped, "GrainId.ToPrintableString -- Guid key + Extended Key"); grainId = GrainId.GetGrainId(0, guid, null); roundTripped = RoundTripGrainIdToParsable(grainId); Assert.AreEqual(grainId, roundTripped, "GrainId.ToPrintableString -- Guid key + null Extended Key"); long key = random.Next(); guid = UniqueKey.NewKey(key).PrimaryKeyToGuid(); grainId = GrainId.GetGrainIdForTesting(guid); roundTripped = RoundTripGrainIdToParsable(grainId); Assert.AreEqual(grainId, roundTripped, "GrainId.ToPrintableString 
-- Int64 key"); extKey = "Long-ExtKey-2"; key = random.Next(); guid = UniqueKey.NewKey(key).PrimaryKeyToGuid(); grainId = GrainId.GetGrainId(0, guid, extKey); roundTripped = RoundTripGrainIdToParsable(grainId); Assert.AreEqual(grainId, roundTripped, "GrainId.ToPrintableString -- Int64 key + Extended Key"); guid = UniqueKey.NewKey(key).PrimaryKeyToGuid(); grainId = GrainId.GetGrainId(0, guid, null); roundTripped = RoundTripGrainIdToParsable(grainId); Assert.AreEqual(grainId, roundTripped, "GrainId.ToPrintableString -- Int64 key + null Extended Key"); } private GrainId RoundTripGrainIdToParsable(GrainId input) { string str = input.ToParsableString(); GrainId output = GrainId.FromParsableString(str); return output; } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void UniqueTypeCodeDataShouldStore32BitsOfInformation() { const int expected = unchecked((int)0xfabccbaf); var uk = UniqueKey.NewKey(0, UniqueKey.Category.None, expected); var actual = uk.BaseTypeCode; Assert.AreEqual( expected, actual, "UniqueKey.BaseTypeCode should store at least 32 bits of information."); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void UniqueKeysShouldPreserveTheirPrimaryKeyValueIfItIsGuid() { const int all32Bits = unchecked((int)0xffffffff); var expectedKey1 = Guid.NewGuid(); const string expectedKeyExt1 = "1"; var uk1 = UniqueKey.NewKey(expectedKey1, UniqueKey.Category.KeyExtGrain, all32Bits, expectedKeyExt1); string actualKeyExt1; var actualKey1 = uk1.PrimaryKeyToGuid(out actualKeyExt1); Assert.AreEqual( expectedKey1, actualKey1, "UniqueKey objects should preserve the value of their primary key (Guid case #1)."); Assert.AreEqual( expectedKeyExt1, actualKeyExt1, "UniqueKey objects should preserve the value of their key extension (Guid case #1)."); var expectedKey2 = Guid.NewGuid(); const string expectedKeyExt2 = "2"; var uk2 = UniqueKey.NewKey(expectedKey2, UniqueKey.Category.KeyExtGrain, all32Bits, expectedKeyExt2); string actualKeyExt2; var actualKey2 = uk2.PrimaryKeyToGuid(out actualKeyExt2); Assert.AreEqual( expectedKey2, actualKey2, "UniqueKey objects should preserve the value of their primary key (Guid case #2)."); Assert.AreEqual( expectedKeyExt2, actualKeyExt2, "UniqueKey objects should preserve the value of their key extension (Guid case #2)."); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void UniqueKeysShouldPreserveTheirPrimaryKeyValueIfItIsLong() { const int all32Bits = unchecked((int)0xffffffff); var n1 = random.Next(); var n2 = random.Next(); const string expectedKeyExt = "1"; var expectedKey = unchecked((long)((((ulong)((uint)n1)) << 32) | ((uint)n2))); var uk = UniqueKey.NewKey(expectedKey, UniqueKey.Category.KeyExtGrain, all32Bits, expectedKeyExt); string actualKeyExt; var actualKey = uk.PrimaryKeyToLong(out actualKeyExt); Assert.AreEqual( expectedKey, actualKey, "UniqueKey objects should preserve the value of their primary key (long case)."); Assert.AreEqual( expectedKeyExt, actualKeyExt, "UniqueKey objects should preserve the value of their key extension (long case)."); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void ID_HashCorrectness() { // This tests that our optimized Jenkins hash computes the same value as the reference implementation int testCount = 1000; JenkinsHash jenkinsHash = JenkinsHash.Factory.GetHashGenerator(false); for (int i = 0; i < testCount; i++) { byte[] byteData = new byte[24]; 
random.NextBytes(byteData); ulong u1 = BitConverter.ToUInt64(byteData, 0); ulong u2 = BitConverter.ToUInt64(byteData, 8); ulong u3 = BitConverter.ToUInt64(byteData, 16); var referenceHash = jenkinsHash.ComputeHash(byteData); var optimizedHash = jenkinsHash.ComputeHash(u1, u2, u3); Assert.AreEqual(referenceHash, optimizedHash, "Optimized hash value doesn't match the reference value for inputs {0}, {1}, {2}", u1, u2, u3); } } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void ID_Interning_GrainID() { Guid guid = new Guid(); GrainId gid1 = GrainId.FromParsableString(guid.ToString("B")); GrainId gid2 = GrainId.FromParsableString(guid.ToString("N")); Assert.AreEqual(gid1, gid2, "Should be equal GrainId's"); Assert.AreSame(gid1, gid2, "Should be same / intern'ed GrainId object"); // Round-trip through Serializer GrainId gid3 = (GrainId)SerializationManager.RoundTripSerializationForTesting(gid1); Assert.AreEqual(gid1, gid3, "Should be equal GrainId's"); Assert.AreEqual(gid2, gid3, "Should be equal GrainId's"); Assert.AreSame(gid1, gid3, "Should be same / intern'ed GrainId object"); Assert.AreSame(gid2, gid3, "Should be same / intern'ed GrainId object"); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void ID_Interning_string_equals() { Interner<string, string> interner = new Interner<string, string>(); const string str = "1"; string r1 = interner.FindOrCreate("1", () => str); string r2 = interner.FindOrCreate("1", () => null); // Should always be found Assert.AreEqual(r1, r2, "1: Objects should be equal"); Assert.AreSame(r1, r2, "2: Objects should be same / intern'ed"); // Round-trip through Serializer string r3 = (string)SerializationManager.RoundTripSerializationForTesting(r1); Assert.AreEqual(r1, r3, "3: Should be equal"); Assert.AreEqual(r2, r3, "4: Should be equal"); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void ID_Intern_derived_class() { Interner<int, A> interner = new Interner<int, A>(); var obj1 = new A(); var obj2 = new B(); var obj3 = new B(); var r1 = interner.InternAndUpdateWithMoreDerived(1, obj1); Assert.AreEqual(obj1, r1, "Objects should be equal"); Assert.AreSame(obj1, r1, "Objects should be same / intern'ed"); var r2 = interner.InternAndUpdateWithMoreDerived(2, obj2); Assert.AreEqual(obj2, r2, "Objects should be equal"); Assert.AreSame(obj2, r2, "Objects should be same / intern'ed"); // Interning should not replace instances of same class var r3 = interner.InternAndUpdateWithMoreDerived(2, obj3); Assert.AreSame(obj2, r3, "Interning should return previous object"); Assert.AreNotSame(obj3, r3, "Interning should not replace previous object of same class"); // Interning should return instances of most derived class var r4 = interner.InternAndUpdateWithMoreDerived(1, obj2); Assert.AreSame(obj2, r4, "Interning should return most derived object"); Assert.AreNotSame(obj1, r4, "Interning should replace cached instances of less derived object"); // Interning should not return instances of less derived class var r5 = interner.InternAndUpdateWithMoreDerived(2, obj1); Assert.AreNotSame(obj1, r5, "Interning should not return less derived object"); Assert.AreSame(obj2, r5, "Interning should return previously cached instances of more derived object"); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void ID_Intern_FindOrCreate_derived_class() { Interner<int, A> interner = new Interner<int, A>(); var obj1 = 
new A(); var obj2 = new B(); var obj3 = new B(); var r1 = interner.FindOrCreate(1, () => obj1); Assert.AreEqual(obj1, r1, "Objects should be equal"); Assert.AreSame(obj1, r1, "Objects should be same / intern'ed"); var r2 = interner.FindOrCreate(2, () => obj2); Assert.AreEqual(obj2, r2, "Objects should be equal"); Assert.AreSame(obj2, r2, "Objects should be same / intern'ed"); // FindOrCreate should not replace instances of same class var r3 = interner.FindOrCreate(2, () => obj3); Assert.AreSame(obj2, r3, "FindOrCreate should return previous object"); Assert.AreNotSame(obj3, r3, "FindOrCreate should not replace previous object of same class"); // FindOrCreate should not replace cached instances with instances of most derived class var r4 = interner.FindOrCreate(1, () => obj2); Assert.AreSame(obj1, r4, "FindOrCreate return previously cached object"); Assert.AreNotSame(obj2, r4, "FindOrCreate should not replace previously cached object"); // FindOrCreate should not replace cached instances with instances of less derived class var r5 = interner.FindOrCreate(2, () => obj1); Assert.AreNotSame(obj1, r5, "FindOrCreate should not replace previously cached object"); Assert.AreSame(obj2, r5, "FindOrCreate return previously cached object"); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void Interning_SiloAddress() { //string addrStr1 = "1.2.3.4@11111@1"; SiloAddress a1 = SiloAddress.New(new IPEndPoint(IPAddress.Loopback, 1111), 12345); SiloAddress a2 = SiloAddress.New(new IPEndPoint(IPAddress.Loopback, 1111), 12345); Assert.AreEqual(a1, a2, "Should be equal SiloAddress's"); Assert.AreSame(a1, a2, "Should be same / intern'ed SiloAddress object"); // Round-trip through Serializer SiloAddress a3 = (SiloAddress)SerializationManager.RoundTripSerializationForTesting(a1); Assert.AreEqual(a1, a3, "Should be equal SiloAddress's"); Assert.AreEqual(a2, a3, "Should be equal SiloAddress's"); Assert.AreSame(a1, a3, "Should be same / intern'ed SiloAddress object"); Assert.AreSame(a2, a3, "Should be same / intern'ed SiloAddress object"); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void Interning_SiloAddress2() { SiloAddress a1 = SiloAddress.New(new IPEndPoint(IPAddress.Loopback, 1111), 12345); SiloAddress a2 = SiloAddress.New(new IPEndPoint(IPAddress.Loopback, 2222), 12345); Assert.AreNotEqual(a1, a2, "Should not be equal SiloAddress's"); Assert.AreNotSame(a1, a2, "Should not be same / intern'ed SiloAddress object"); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void Interning_SiloAddress_Serialization() { SiloAddress a1 = SiloAddress.New(new IPEndPoint(IPAddress.Loopback, 1111), 12345); // Round-trip through Serializer SiloAddress a3 = (SiloAddress)SerializationManager.RoundTripSerializationForTesting(a1); Assert.AreEqual(a1, a3, "Should be equal SiloAddress's"); Assert.AreSame(a1, a3, "Should be same / intern'ed SiloAddress object"); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void GrainID_AsGuid() { string guidString = "0699605f-884d-4343-9977-f40a39ab7b2b"; Guid grainIdGuid = Guid.Parse(guidString); GrainId grainId = GrainId.GetGrainIdForTesting(grainIdGuid); //string grainIdToKeyString = grainId.ToKeyString(); string grainIdToFullString = grainId.ToFullString(); string grainIdToGuidString = GrainIdToGuidString(grainId); string grainIdKeyString = grainId.Key.ToString(); output.WriteLine("Guid={0}", grainIdGuid); 
output.WriteLine("GrainId={0}", grainId); //output.WriteLine("GrainId.ToKeyString={0}", grainIdToKeyString); output.WriteLine("GrainId.Key.ToString={0}", grainIdKeyString); output.WriteLine("GrainIdToGuidString={0}", grainIdToGuidString); output.WriteLine("GrainId.ToFullString={0}", grainIdToFullString); // Equal: Public APIs //Assert.AreEqual(guidString, grainIdToKeyString, "GrainId.ToKeyString"); Assert.AreEqual(guidString, grainIdToGuidString, "GrainIdToGuidString"); // Equal: Internal APIs Assert.AreEqual(grainIdGuid, grainId.GetPrimaryKey(), "GetPrimaryKey Guid"); // NOT-Equal: Internal APIs Assert.AreNotEqual(guidString, grainIdKeyString, "GrainId.Key.ToString"); } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")] public void SiloAddress_ToFrom_ParsableString() { SiloAddress address1 = SiloAddress.NewLocalAddress(12345); string addressStr1 = address1.ToParsableString(); SiloAddress addressObj1 = SiloAddress.FromParsableString(addressStr1); output.WriteLine("Convert -- From: {0} Got result string: '{1}' object: {2}", address1, addressStr1, addressObj1); Assert.AreEqual(address1, addressObj1, "SiloAddress equal after To-From-ParsableString"); //const string addressStr2 = "127.0.0.1-11111-144611139"; const string addressStr2 = "127.0.0.1:11111@144611139"; SiloAddress addressObj2 = SiloAddress.FromParsableString(addressStr2); string addressStr2Out = addressObj2.ToParsableString(); output.WriteLine("Convert -- From: {0} Got result string: '{1}' object: {2}", addressStr2, addressStr2Out, addressObj2); Assert.AreEqual(addressStr2, addressStr2Out, "SiloAddress equal after From-To-ParsableString"); } internal string GrainIdToGuidString(GrainId grainId) { const string pkIdentifierStr = "PrimaryKey:"; string grainIdFullString = grainId.ToFullString(); int pkStartIdx = grainIdFullString.IndexOf(pkIdentifierStr, StringComparison.Ordinal) + pkIdentifierStr.Length + 1; string pkGuidString = grainIdFullString.Substring(pkStartIdx, Guid.Empty.ToString().Length); return pkGuidString; } [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers"), TestCategory("GrainReference")] public void GrainReference_Test1() { Guid guid = Guid.NewGuid(); GrainId regularGrainId = GrainId.GetGrainIdForTesting(guid); GrainReference grainRef = GrainReference.FromGrainId(regularGrainId); TestGrainReference(grainRef); grainRef = GrainReference.FromGrainId(regularGrainId, "generic"); TestGrainReference(grainRef); GrainId systemTragetGrainId = GrainId.NewSystemTargetGrainIdByTypeCode(2); grainRef = GrainReference.FromGrainId(systemTragetGrainId, null, SiloAddress.NewLocalAddress(1)); TestGrainReference(grainRef); GrainId observerGrainId = GrainId.NewClientId(); grainRef = GrainReference.NewObserverGrainReference(observerGrainId, GuidId.GetNewGuidId()); TestGrainReference(grainRef); } private void TestGrainReference(GrainReference grainRef) { GrainReference roundTripped = RoundTripGrainReferenceToKey(grainRef); Assert.AreEqual(grainRef, roundTripped, "GrainReference.ToKeyString"); roundTripped = SerializationManager.RoundTripSerializationForTesting(grainRef); Assert.AreEqual(grainRef, roundTripped, "GrainReference.OrleansSerializer"); roundTripped = TestingUtils.RoundTripDotNetSerializer(grainRef); Assert.AreEqual(grainRef, roundTripped, "GrainReference.DotNetSerializer"); } private GrainReference RoundTripGrainReferenceToKey(GrainReference input) { string str = input.ToKeyString(); GrainReference output = GrainReference.FromKeyString(str); return output; } } }
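// ---------------------------------------------------------------------------
// Note: several of the identifier tests above (ID_Interning_GrainID,
// ID_Intern_FindOrCreate_derived_class, Interning_SiloAddress) assert not only
// equality but reference identity: constructing the same id twice must return
// the same cached object. The sketch below shows the FindOrCreate pattern in
// its simplest form; it is an assumed simplification, not the Orleans
// Interner<TKey, TValue> implementation.
// ---------------------------------------------------------------------------
using System;
using System.Collections.Concurrent;

public sealed class SimpleInterner<TKey, TValue> where TValue : class
{
    private readonly ConcurrentDictionary<TKey, TValue> cache =
        new ConcurrentDictionary<TKey, TValue>();

    // Returns the previously cached instance for the key when present;
    // otherwise creates the value, caches it, and returns it.
    public TValue FindOrCreate(TKey key, Func<TValue> factory)
        => cache.GetOrAdd(key, _ => factory());
}

public static class InterningSketch
{
    public static void Main()
    {
        var interner = new SimpleInterner<string, string>();
        string r1 = interner.FindOrCreate("1", () => new string('x', 3));
        string r2 = interner.FindOrCreate("1", () => new string('y', 3)); // factory ignored: key already cached

        Console.WriteLine(r1 == r2);                // True  -- equal
        Console.WriteLine(ReferenceEquals(r1, r2)); // True  -- same / intern'ed instance
    }
}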
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /*============================================================ ** ** ** ** Purpose: Some floating-point math operations ** ** ===========================================================*/ namespace System { //This class contains only static members and doesn't require serialization. using System; using System.Runtime; using System.Runtime.CompilerServices; using System.Runtime.ConstrainedExecution; using System.Runtime.Versioning; using System.Diagnostics.Contracts; public static class Math { private static double doubleRoundLimit = 1e16d; private const int maxRoundingDigits = 15; // This table is required for the Round function which can specify the number of digits to round to private static double[] roundPower10Double = new double[] { 1E0, 1E1, 1E2, 1E3, 1E4, 1E5, 1E6, 1E7, 1E8, 1E9, 1E10, 1E11, 1E12, 1E13, 1E14, 1E15 }; public const double PI = 3.14159265358979323846; public const double E = 2.7182818284590452354; [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Acos(double d); [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Asin(double d); [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Atan(double d); [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Atan2(double y,double x); public static Decimal Ceiling(Decimal d) { return Decimal.Ceiling(d); } [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Ceiling(double a); [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Cos (double d); [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Cosh(double value); public static Decimal Floor(Decimal d) { return Decimal.Floor(d); } [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Floor(double d); [System.Security.SecuritySafeCritical] // auto-generated private static unsafe double InternalRound(double value, int digits, MidpointRounding mode) { if (Abs(value) < doubleRoundLimit) { Double power10 = roundPower10Double[digits]; value *= power10; if (mode == MidpointRounding.AwayFromZero) { double fraction = SplitFractionDouble(&value); if (Abs(fraction) >= 0.5d) { value += Sign(fraction); } } else { // On X86 this can be inlined to just a few instructions value = Round(value); } value /= power10; } return value; } [System.Security.SecuritySafeCritical] // auto-generated private unsafe static double InternalTruncate(double d) { SplitFractionDouble(&d); return d; } [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Sin(double a); [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Tan(double a); 
[System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Sinh(double value); [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Tanh(double value); [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Round(double a); public static double Round(double value, int digits) { if ((digits < 0) || (digits > maxRoundingDigits)) throw new ArgumentOutOfRangeException("digits", Environment.GetResourceString("ArgumentOutOfRange_RoundingDigits")); Contract.EndContractBlock(); return InternalRound(value, digits, MidpointRounding.ToEven); } public static double Round(double value, MidpointRounding mode) { return Round(value, 0, mode); } public static double Round(double value, int digits, MidpointRounding mode) { if ((digits < 0) || (digits > maxRoundingDigits)) throw new ArgumentOutOfRangeException("digits", Environment.GetResourceString("ArgumentOutOfRange_RoundingDigits")); if (mode < MidpointRounding.ToEven || mode > MidpointRounding.AwayFromZero) { throw new ArgumentException(Environment.GetResourceString("Argument_InvalidEnumValue", mode, "MidpointRounding"), "mode"); } Contract.EndContractBlock(); return InternalRound(value, digits, mode); } public static Decimal Round(Decimal d) { return Decimal.Round(d,0); } public static Decimal Round(Decimal d, int decimals) { return Decimal.Round(d,decimals); } public static Decimal Round(Decimal d, MidpointRounding mode) { return Decimal.Round(d, 0, mode); } public static Decimal Round(Decimal d, int decimals, MidpointRounding mode) { return Decimal.Round(d, decimals, mode); } [System.Security.SecurityCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] private static unsafe extern double SplitFractionDouble(double* value); public static Decimal Truncate(Decimal d) { return Decimal.Truncate(d); } public static double Truncate(double d) { return InternalTruncate(d); } [System.Security.SecuritySafeCritical] // auto-generated [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Sqrt(double d); [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Log (double d); [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Log10(double d); [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Exp(double d); [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern double Pow(double x, double y); public static double IEEERemainder(double x, double y) { if (Double.IsNaN(x)) { return x; // IEEE 754-2008: NaN payload must be preserved } if (Double.IsNaN(y)) { return y; // IEEE 754-2008: NaN payload must be preserved } double regularMod = x % y; if (Double.IsNaN(regularMod)) { return Double.NaN; } if (regularMod == 0) { if (Double.IsNegative(x)) { return Double.NegativeZero; } } double alternativeResult; alternativeResult = regularMod - (Math.Abs(y) * Math.Sign(x)); if (Math.Abs(alternativeResult) == Math.Abs(regularMod)) { double divisionResult = x/y; double roundedResult = 
Math.Round(divisionResult); if (Math.Abs(roundedResult) > Math.Abs(divisionResult)) { return alternativeResult; } else { return regularMod; } } if (Math.Abs(alternativeResult) < Math.Abs(regularMod)) { return alternativeResult; } else { return regularMod; } } /*================================Abs========================================= **Returns the absolute value of it's argument. ============================================================================*/ [CLSCompliant(false)] public static sbyte Abs(sbyte value) { if (value >= 0) return value; else return AbsHelper(value); } private static sbyte AbsHelper(sbyte value) { Contract.Requires(value < 0, "AbsHelper should only be called for negative values! (workaround for JIT inlining)"); if (value == SByte.MinValue) throw new OverflowException(Environment.GetResourceString("Overflow_NegateTwosCompNum")); Contract.EndContractBlock(); return ((sbyte)(-value)); } public static short Abs(short value) { if (value >= 0) return value; else return AbsHelper(value); } private static short AbsHelper(short value) { Contract.Requires(value < 0, "AbsHelper should only be called for negative values! (workaround for JIT inlining)"); if (value == Int16.MinValue) throw new OverflowException(Environment.GetResourceString("Overflow_NegateTwosCompNum")); Contract.EndContractBlock(); return (short) -value; } public static int Abs(int value) { if (value >= 0) return value; else return AbsHelper(value); } private static int AbsHelper(int value) { Contract.Requires(value < 0, "AbsHelper should only be called for negative values! (workaround for JIT inlining)"); if (value == Int32.MinValue) throw new OverflowException(Environment.GetResourceString("Overflow_NegateTwosCompNum")); Contract.EndContractBlock(); return -value; } public static long Abs(long value) { if (value >= 0) return value; else return AbsHelper(value); } private static long AbsHelper(long value) { Contract.Requires(value < 0, "AbsHelper should only be called for negative values! (workaround for JIT inlining)"); if (value == Int64.MinValue) throw new OverflowException(Environment.GetResourceString("Overflow_NegateTwosCompNum")); Contract.EndContractBlock(); return -value; } [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] extern public static float Abs(float value); // This is special code to handle NaN (We need to make sure NaN's aren't // negated). In CSharp, the else clause here should always be taken if // value is NaN, since the normal case is taken if and only if value < 0. // To illustrate this completely, a compiler has translated this into: // "load value; load 0; bge; ret -value ; ret value". // The bge command branches for comparisons with the unordered NaN. So // it runs the else case, which returns +value instead of negating it. // return (value < 0) ? -value : value; [System.Security.SecuritySafeCritical] // auto-generated [MethodImplAttribute(MethodImplOptions.InternalCall)] extern public static double Abs(double value); // This is special code to handle NaN (We need to make sure NaN's aren't // negated). In CSharp, the else clause here should always be taken if // value is NaN, since the normal case is taken if and only if value < 0. // To illustrate this completely, a compiler has translated this into: // "load value; load 0; bge; ret -value ; ret value". // The bge command branches for comparisons with the unordered NaN. So // it runs the else case, which returns +value instead of negating it. // return (value < 0) ? 
-value : value; public static Decimal Abs(Decimal value) { return Decimal.Abs(value); } /*================================MAX========================================= **Returns the larger of val1 and val2 ============================================================================*/ [CLSCompliant(false)] [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static sbyte Max(sbyte val1, sbyte val2) { return (val1>=val2)?val1:val2; } [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static byte Max(byte val1, byte val2) { return (val1>=val2)?val1:val2; } [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static short Max(short val1, short val2) { return (val1>=val2)?val1:val2; } [CLSCompliant(false)] [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static ushort Max(ushort val1, ushort val2) { return (val1>=val2)?val1:val2; } [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static int Max(int val1, int val2) { return (val1>=val2)?val1:val2; } [CLSCompliant(false)] [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static uint Max(uint val1, uint val2) { return (val1>=val2)?val1:val2; } [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static long Max(long val1, long val2) { return (val1>=val2)?val1:val2; } [CLSCompliant(false)] [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static ulong Max(ulong val1, ulong val2) { return (val1>=val2)?val1:val2; } [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] public static float Max(float val1, float val2) { if (val1 > val2) return val1; if (Single.IsNaN(val1)) return val1; return val2; } [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] public static double Max(double val1, double val2) { if (val1 > val2) return val1; if (Double.IsNaN(val1)) return val1; return val2; } [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] public static Decimal Max(Decimal val1, Decimal val2) { return Decimal.Max(val1,val2); } /*================================MIN========================================= **Returns the smaller of val1 and val2. 
============================================================================*/ [CLSCompliant(false)] [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static sbyte Min(sbyte val1, sbyte val2) { return (val1<=val2)?val1:val2; } [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static byte Min(byte val1, byte val2) { return (val1<=val2)?val1:val2; } [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static short Min(short val1, short val2) { return (val1<=val2)?val1:val2; } [CLSCompliant(false)] [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static ushort Min(ushort val1, ushort val2) { return (val1<=val2)?val1:val2; } [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static int Min(int val1, int val2) { return (val1<=val2)?val1:val2; } [CLSCompliant(false)] [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static uint Min(uint val1, uint val2) { return (val1<=val2)?val1:val2; } [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static long Min(long val1, long val2) { return (val1<=val2)?val1:val2; } [CLSCompliant(false)] [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] [System.Runtime.Versioning.NonVersionable] public static ulong Min(ulong val1, ulong val2) { return (val1<=val2)?val1:val2; } [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] public static float Min(float val1, float val2) { if (val1 < val2) return val1; if (Single.IsNaN(val1)) return val1; return val2; } [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] public static double Min(double val1, double val2) { if (val1 < val2) return val1; if (Double.IsNaN(val1)) return val1; return val2; } [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] public static Decimal Min(Decimal val1, Decimal val2) { return Decimal.Min(val1,val2); } /*=====================================Clamp==================================== ** ==============================================================================*/ [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Byte Clamp(Byte value, Byte min, Byte max) { if (min > max) ThrowMinMaxException(min, max); if (value < min) return min; else if (value > max) return max; return value; } [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Decimal Clamp(Decimal value, Decimal min, Decimal max) { if (min > max) ThrowMinMaxException(min, max); if (value < min) return min; else if (value > max) return max; return value; } [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Double Clamp(Double value, Double min, Double max) { if (min > max) ThrowMinMaxException(min, max); if (value < min) return min; else if (value > max) return max; return value; } [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Int16 Clamp(Int16 value, Int16 min, Int16 max) { if (min > max) ThrowMinMaxException(min, max); if (value < min) return min; else if (value > max) return max; return value; } [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Int32 Clamp(Int32 value, Int32 min, Int32 max) { if (min > max) ThrowMinMaxException(min, 
max); if (value < min) return min; else if (value > max) return max; return value; } [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Int64 Clamp(Int64 value, Int64 min, Int64 max) { if (min > max) ThrowMinMaxException(min, max); if (value < min) return min; else if (value > max) return max; return value; } [MethodImpl(MethodImplOptions.AggressiveInlining)] [CLSCompliant(false)] public static SByte Clamp(SByte value, SByte min, SByte max) { if (min > max) ThrowMinMaxException(min, max); if (value < min) return min; else if (value > max) return max; return value; } [MethodImpl(MethodImplOptions.AggressiveInlining)] public static Single Clamp(Single value, Single min, Single max) { if (min > max) ThrowMinMaxException(min, max); if (value < min) return min; else if (value > max) return max; return value; } [MethodImpl(MethodImplOptions.AggressiveInlining)] [CLSCompliant(false)] public static UInt16 Clamp(UInt16 value, UInt16 min, UInt16 max) { if (min > max) ThrowMinMaxException(min, max); if (value < min) return min; else if (value > max) return max; return value; } [MethodImpl(MethodImplOptions.AggressiveInlining)] [CLSCompliant(false)] public static UInt32 Clamp(UInt32 value, UInt32 min, UInt32 max) { if (min > max) ThrowMinMaxException(min, max); if (value < min) return min; else if (value > max) return max; return value; } [MethodImpl(MethodImplOptions.AggressiveInlining)] [CLSCompliant(false)] public static UInt64 Clamp(UInt64 value, UInt64 min, UInt64 max) { if (min > max) ThrowMinMaxException(min, max); if (value < min) return min; else if (value > max) return max; return value; } private static void ThrowMinMaxException<T>(T min, T max) { throw new ArgumentException(Environment.GetResourceString("Argument_MinMaxValue", min, max)); } /*=====================================Log====================================== ** ==============================================================================*/ public static double Log(double a, double newBase) { if (Double.IsNaN(a)) { return a; // IEEE 754-2008: NaN payload must be preserved } if (Double.IsNaN(newBase)) { return newBase; // IEEE 754-2008: NaN payload must be preserved } if (newBase == 1) return Double.NaN; if (a != 1 && (newBase == 0 || Double.IsPositiveInfinity(newBase))) return Double.NaN; return (Log(a)/Log(newBase)); } // Sign function for VB. Returns -1, 0, or 1 if the sign of the number // is negative, 0, or positive. Throws for floating point NaN's. [CLSCompliant(false)] public static int Sign(sbyte value) { if (value < 0) return -1; else if (value > 0) return 1; else return 0; } // Sign function for VB. Returns -1, 0, or 1 if the sign of the number // is negative, 0, or positive. Throws for floating point NaN's. public static int Sign(short value) { if (value < 0) return -1; else if (value > 0) return 1; else return 0; } // Sign function for VB. Returns -1, 0, or 1 if the sign of the number // is negative, 0, or positive. Throws for floating point NaN's. 
public static int Sign(int value) { if (value < 0) return -1; else if (value > 0) return 1; else return 0; } public static int Sign(long value) { if (value < 0) return -1; else if (value > 0) return 1; else return 0; } public static int Sign (float value) { if (value < 0) return -1; else if (value > 0) return 1; else if (value == 0) return 0; throw new ArithmeticException(Environment.GetResourceString("Arithmetic_NaN")); } public static int Sign(double value) { if (value < 0) return -1; else if (value > 0) return 1; else if (value == 0) return 0; throw new ArithmeticException(Environment.GetResourceString("Arithmetic_NaN")); } public static int Sign(Decimal value) { if (value < 0) return -1; else if (value > 0) return 1; else return 0; } public static long BigMul(int a, int b) { return ((long)a) * b; } public static int DivRem(int a, int b, out int result) { result = a%b; return a/b; } public static long DivRem(long a, long b, out long result) { result = a%b; return a/b; } } }
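// ---------------------------------------------------------------------------
// Note: a short usage sketch of the public surface defined above. Round
// defaults to MidpointRounding.ToEven ("banker's rounding"), which is why a
// midpoint can round down; IEEERemainder differs from the % operator because
// it rounds the quotient to the nearest (even) integer instead of truncating.
// These are standard System.Math calls and the printed values are the
// expected results.
// ---------------------------------------------------------------------------
using System;

public static class MathUsageSketch
{
    public static void Main()
    {
        Console.WriteLine(Math.Round(2.5));                                 // 2  (ToEven: midpoint goes to the even digit)
        Console.WriteLine(Math.Round(3.5));                                 // 4
        Console.WriteLine(Math.Round(2.5, MidpointRounding.AwayFromZero));  // 3

        Console.WriteLine(Math.Clamp(12, 0, 10));                           // 10 (value pinned into [min, max])

        int quotient = Math.DivRem(7, 3, out int remainder);
        Console.WriteLine($"{quotient} r {remainder}");                     // 2 r 1

        Console.WriteLine(3.0 % 2.0);                                       // 1  (truncated quotient)
        Console.WriteLine(Math.IEEERemainder(3.0, 2.0));                    // -1 (quotient rounded to nearest even)
    }
}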
namespace Lucene.Net.QueryParsers.Classic { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ public static class RegexpToken { /// <summary>End of File. </summary> public const int EOF = 0; /// <summary>RegularExpression Id. </summary> public const int NUM_CHAR = 1; // LUCENENET specific: removed leading underscore to make CLS compliant /// <summary>RegularExpression Id. </summary> public const int ESCAPED_CHAR = 2; // LUCENENET specific: removed leading underscore to make CLS compliant /// <summary>RegularExpression Id. </summary> public const int TERM_START_CHAR = 3; // LUCENENET specific: removed leading underscore to make CLS compliant /// <summary>RegularExpression Id. </summary> public const int TERM_CHAR = 4; // LUCENENET specific: removed leading underscore to make CLS compliant /// <summary>RegularExpression Id. </summary> public const int WHITESPACE = 5; // LUCENENET specific: removed leading underscore to make CLS compliant /// <summary>RegularExpression Id. </summary> public const int QUOTED_CHAR = 6; // LUCENENET specific: removed leading underscore to make CLS compliant /// <summary>RegularExpression Id. </summary> public const int AND = 8; /// <summary>RegularExpression Id. </summary> public const int OR = 9; /// <summary>RegularExpression Id. </summary> public const int NOT = 10; /// <summary>RegularExpression Id. </summary> public const int PLUS = 11; /// <summary>RegularExpression Id. </summary> public const int MINUS = 12; /// <summary>RegularExpression Id. </summary> public const int BAREOPER = 13; /// <summary>RegularExpression Id. </summary> public const int LPAREN = 14; /// <summary>RegularExpression Id. </summary> public const int RPAREN = 15; /// <summary>RegularExpression Id. </summary> public const int COLON = 16; /// <summary>RegularExpression Id. </summary> public const int STAR = 17; /// <summary>RegularExpression Id. </summary> public const int CARAT = 18; /// <summary>RegularExpression Id. </summary> public const int QUOTED = 19; /// <summary>RegularExpression Id. </summary> public const int TERM = 20; /// <summary>RegularExpression Id. </summary> public const int FUZZY_SLOP = 21; /// <summary>RegularExpression Id. </summary> public const int PREFIXTERM = 22; /// <summary>RegularExpression Id. </summary> public const int WILDTERM = 23; /// <summary>RegularExpression Id. </summary> public const int REGEXPTERM = 24; /// <summary>RegularExpression Id. </summary> public const int RANGEIN_START = 25; /// <summary>RegularExpression Id. </summary> public const int RANGEEX_START = 26; /// <summary>RegularExpression Id. </summary> public const int NUMBER = 27; /// <summary>RegularExpression Id. </summary> public const int RANGE_TO = 28; /// <summary>RegularExpression Id. 
</summary> public const int RANGEIN_END = 29; /// <summary>RegularExpression Id. </summary> public const int RANGEEX_END = 30; /// <summary>RegularExpression Id. </summary> public const int RANGE_QUOTED = 31; /// <summary>RegularExpression Id. </summary> public const int RANGE_GOOP = 32; } public static class LexicalToken { /// <summary>Lexical state.</summary> public const int Boost = 0; /// <summary>Lexical state.</summary> public const int Range = 1; /// <summary>Lexical state.</summary> public const int DEFAULT = 2; } // NOTE: In Java, this was an interface. However, in // .NET we cannot define constants in an interface. // So, instead we are making it a static class so it // can be shared between classes with different base classes. // public interface QueryParserConstants /// <summary> Token literal values and constants. /// Generated by org.javacc.parser.OtherFilesGen#start() /// </summary> public static class QueryParserConstants { ///// <summary>End of File. </summary> //public const int EndOfFileToken = 0; ///// <summary>RegularExpression Id. </summary> //public const int NumCharToken = 1; ///// <summary>RegularExpression Id. </summary> //public const int EscapedCharToken = 2; ///// <summary>RegularExpression Id. </summary> //public const int TermStartCharToken = 3; ///// <summary>RegularExpression Id. </summary> //public const int TermCharToken = 4; ///// <summary>RegularExpression Id. </summary> //public const int WhitespaceToken = 5; ///// <summary>RegularExpression Id. </summary> //public const int QuotedCharToken = 6; ///// <summary>RegularExpression Id. </summary> //public const int AndToken = 8; ///// <summary>RegularExpression Id. </summary> //public const int OrToken = 9; ///// <summary>RegularExpression Id. </summary> //public const int NotToken = 10; ///// <summary>RegularExpression Id. </summary> //public const int PlusToken = 11; ///// <summary>RegularExpression Id. </summary> //public const int MinusToken = 12; ///// <summary>RegularExpression Id. </summary> //public const int BareOperToken = 13; ///// <summary>RegularExpression Id. </summary> //public const int LParanToken = 14; ///// <summary>RegularExpression Id. </summary> //public const int RParenToken = 15; ///// <summary>RegularExpression Id. </summary> //public const int ColonToken = 16; ///// <summary>RegularExpression Id. </summary> //public const int StarToken = 17; ///// <summary>RegularExpression Id. </summary> //public const int CaratToken = 18; ///// <summary>RegularExpression Id. </summary> //public const int QuotedToken = 19; ///// <summary>RegularExpression Id. </summary> //public const int TermToken = 20; ///// <summary>RegularExpression Id. </summary> //public const int FuzzySlopToken = 21; ///// <summary>RegularExpression Id. </summary> //public const int PrefixTermToken = 22; ///// <summary>RegularExpression Id. </summary> //public const int WildTermToken = 23; ///// <summary>RegularExpression Id. </summary> //public const int RegExpTermToken = 24; ///// <summary>RegularExpression Id. </summary> //public const int RangeInStartToken = 25; ///// <summary>RegularExpression Id. </summary> //public const int RangeExStartToken = 26; ///// <summary>RegularExpression Id. </summary> //public const int NumberToken = 27; ///// <summary>RegularExpression Id. </summary> //public const int RangeToToken = 28; ///// <summary>RegularExpression Id. </summary> //public const int RangeInEndToken = 29; ///// <summary>RegularExpression Id. 
</summary> //public const int RangeExEndToken = 30; ///// <summary>RegularExpression Id. </summary> //public const int RangeQuotedToken = 31; ///// <summary>RegularExpression Id. </summary> //public const int RangeGoopToken = 32; ///// <summary>Lexical state. </summary> //public const int BoostToken = 0; ///// <summary>Lexical state. </summary> //public const int RangeToken = 1; ///// <summary>Lexical state. </summary> //public const int DefaultToken = 2; /// <summary>Literal token values. </summary> public static string[] TokenImage = new string[] { "<EOF>", "<_NUM_CHAR>", "<_ESCAPED_CHAR>", "<_TERM_START_CHAR>", "<_TERM_CHAR>", "<_WHITESPACE>", "<_QUOTED_CHAR>", "<token of kind 7>", "<AND>", "<OR>", "<NOT>", "\"+\"", "\"-\"", "<BAREOPER>", "\"(\"", "\")\"", "\":\"", "\"*\"", "\"^\"", "<QUOTED>", "<TERM>", "<FUZZY_SLOP>", "<PREFIXTERM>", "<WILDTERM>", "<REGEXPTERM>", "\"[\"", "\"{\"", "<NUMBER>", "\"TO\"", "\"]\"", "<RANGEIN_QUOTED>", "<RANGEIN_GOOP>", "\"TO\"", "\"}\"", "<RANGE_QUOTED>", "<RANGE_GOOP>" }; } }
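// ---------------------------------------------------------------------------
// Note: as the LUCENENET comment above explains, the Java original declared
// these constants on an interface, but a C# interface cannot hold const
// fields, so they live in static classes that any type can reference
// regardless of its base class. A tiny illustrative sketch of that pattern
// (hypothetical names, not Lucene.NET types):
// ---------------------------------------------------------------------------
using System;

public static class TokenIds            // mirrors the RegexpToken pattern
{
    public const int EOF = 0;
    public const int TERM = 20;
}

public class StandardTokenSource        { public int Kind => TokenIds.TERM; }
public class LegacyTokenSource : Random { public int Kind => TokenIds.EOF; } // unrelated base class, same constants

public static class SharedConstantsSketch
{
    public static void Main()
    {
        Console.WriteLine(new StandardTokenSource().Kind); // 20
        Console.WriteLine(new LegacyTokenSource().Kind);   // 0
    }
}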
/* **************************************************************************** * * Copyright (c) Microsoft Corporation. * * This source code is subject to terms and conditions of the Apache License, Version 2.0. A * copy of the license can be found in the License.html file at the root of this distribution. If * you cannot locate the Apache License, Version 2.0, please send an email to * [email protected]. By using this source code in any fashion, you are agreeing to be bound * by the terms of the Apache License, Version 2.0. * * You must not remove this notice, or any other, from this software. * * * ***************************************************************************/ #if FEATURE_NATIVE using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics; using System.Reflection.Emit; using System.Runtime.InteropServices; using Microsoft.Scripting; using Microsoft.Scripting.Runtime; using IronPython.Runtime; using IronPython.Runtime.Operations; using IronPython.Runtime.Types; using System.Text; #if CLR2 using Microsoft.Scripting.Math; #else using System.Numerics; using Microsoft.Scripting.Utils; #endif namespace IronPython.Modules { /// <summary> /// Provides support for interop with native code from Python code. /// </summary> public static partial class CTypes { /// <summary> /// Meta class for structures. Validates _fields_ on creation, provides factory /// methods for creating instances from addresses and translating to parameters. /// </summary> [PythonType, PythonHidden] public class StructType : PythonType, INativeType { internal Field[] _fields; private int? _size, _alignment, _pack; private static readonly Field[] _emptyFields = new Field[0]; // fields were never initialized before a type was created public StructType(CodeContext/*!*/ context, string name, PythonTuple bases, PythonDictionary members) : base(context, name, bases, members) { foreach (PythonType pt in ResolutionOrder) { StructType st = pt as StructType; if (st != this && st != null) { st.EnsureFinal(); } UnionType ut = pt as UnionType; if (ut != null) { ut.EnsureFinal(); } } object pack; if (members.TryGetValue("_pack_", out pack)) { if (!(pack is int) || ((int)pack < 0)) { throw PythonOps.ValueError("pack must be a non-negative integer"); } _pack = (int)pack; } object fields; if (members.TryGetValue("_fields_", out fields)) { // When we support alternate endianness this should change to: //__setattr__(context, "_fields_", fields); SetFields(fields); } // TODO: _anonymous_ } private StructType(Type underlyingSystemType) : base(underlyingSystemType) { } public static ArrayType/*!*/ operator *(StructType type, int count) { return MakeArrayType(type, count); } public static ArrayType/*!*/ operator *(int count, StructType type) { return MakeArrayType(type, count); } public _Structure from_address(CodeContext/*!*/ context, int address) { return from_address(context, new IntPtr(address)); } public _Structure from_address(CodeContext/*!*/ context, BigInteger address) { return from_address(context, new IntPtr((long)address)); } public _Structure from_address(CodeContext/*!*/ context, IntPtr ptr) { _Structure res = (_Structure)CreateInstance(context); res.SetAddress(ptr); return res; } public _Structure from_buffer(ArrayModule.array array, [DefaultParameterValue(0)]int offset) { ValidateArraySizes(array, offset, ((INativeType)this).Size); _Structure res = (_Structure)CreateInstance(Context.SharedContext); IntPtr addr = array.GetArrayAddress(); res._memHolder = new 
MemoryHolder(addr.Add(offset), ((INativeType)this).Size); res._memHolder.AddObject("ffffffff", array); return res; } public _Structure from_buffer_copy(ArrayModule.array array, [DefaultParameterValue(0)]int offset) { ValidateArraySizes(array, offset, ((INativeType)this).Size); _Structure res = (_Structure)CreateInstance(Context.SharedContext); res._memHolder = new MemoryHolder(((INativeType)this).Size); res._memHolder.CopyFrom(array.GetArrayAddress().Add(offset), new IntPtr(((INativeType)this).Size)); GC.KeepAlive(array); return res; } /// <summary> /// Converts an object into a function call parameter. /// /// Structures just return themselves. /// </summary> public object from_param(object obj) { if (!Builtin.isinstance(obj, this)) { throw PythonOps.TypeError("expected {0} instance got {1}", Name, PythonTypeOps.GetName(obj)); } return obj; } public object in_dll(object library, string name) { throw new NotImplementedException("in dll"); } public new virtual void __setattr__(CodeContext/*!*/ context, string name, object value) { if (name == "_fields_") { lock (this) { if (_fields != null) { throw PythonOps.AttributeError("_fields_ is final"); } SetFields(value); } } base.__setattr__(context, name, value); } #region INativeType Members int INativeType.Size { get { EnsureSizeAndAlignment(); return _size.Value; } } int INativeType.Alignment { get { EnsureSizeAndAlignment(); return _alignment.Value; } } object INativeType.GetValue(MemoryHolder/*!*/ owner, object readingFrom, int offset, bool raw) { _Structure res = (_Structure)CreateInstance(this.Context.SharedContext); res._memHolder = owner.GetSubBlock(offset); return res; } object INativeType.SetValue(MemoryHolder/*!*/ address, int offset, object value) { try { return SetValueInternal(address, offset, value); } catch (ArgumentTypeException e) { throw PythonOps.RuntimeError("({0}) <type 'exceptions.TypeError'>: {1}", Name, e.Message); } catch (ArgumentException e) { throw PythonOps.RuntimeError("({0}) <type 'exceptions.ValueError'>: {1}", Name, e.Message); } } internal object SetValueInternal(MemoryHolder address, int offset, object value) { IList<object> init = value as IList<object>; if (init != null) { if (init.Count > _fields.Length) { throw PythonOps.TypeError("too many initializers"); } for (int i = 0; i < init.Count; i++) { _fields[i].SetValue(address, offset, init[i]); } } else { CData data = value as CData; if (data != null) { data._memHolder.CopyTo(address, offset, data.Size); return data._memHolder.EnsureObjects(); } else { throw new NotImplementedException("set value"); } } return null; } Type/*!*/ INativeType.GetNativeType() { EnsureFinal(); return GetMarshalTypeFromSize(_size.Value); } MarshalCleanup INativeType.EmitMarshalling(ILGenerator/*!*/ method, LocalOrArg argIndex, List<object>/*!*/ constantPool, int constantPoolArgument) { Type argumentType = argIndex.Type; argIndex.Emit(method); if (argumentType.IsValueType) { method.Emit(OpCodes.Box, argumentType); } constantPool.Add(this); method.Emit(OpCodes.Ldarg, constantPoolArgument); method.Emit(OpCodes.Ldc_I4, constantPool.Count - 1); method.Emit(OpCodes.Ldelem_Ref); method.Emit(OpCodes.Call, typeof(ModuleOps).GetMethod("CheckCDataType")); method.Emit(OpCodes.Call, typeof(CData).GetMethod("get_UnsafeAddress")); method.Emit(OpCodes.Ldobj, ((INativeType)this).GetNativeType()); return null; } Type/*!*/ INativeType.GetPythonType() { return typeof(object); } void INativeType.EmitReverseMarshalling(ILGenerator method, LocalOrArg value, List<object> constantPool, int 
constantPoolArgument) { value.Emit(method); EmitCDataCreation(this, method, constantPool, constantPoolArgument); } string INativeType.TypeFormat { get { if (_pack != null || _fields == _emptyFields || _fields == null) { return "B"; } StringBuilder res = new StringBuilder(); res.Append("T{"); foreach (Field f in _fields) { res.Append(f.NativeType.TypeFormat); res.Append(':'); res.Append(f.FieldName); res.Append(':'); } res.Append('}'); return res.ToString(); } } #endregion internal static PythonType MakeSystemType(Type underlyingSystemType) { return PythonType.SetPythonType(underlyingSystemType, new StructType(underlyingSystemType)); } private void SetFields(object fields) { lock (this) { IList<object> list = GetFieldsList(fields); int size; int alignment; int? bitCount = null; int? curBitCount = null; INativeType lastType = null; List<Field> allFields = GetBaseSizeAlignmentAndFields(out size, out alignment); IList<object> anonFields = GetAnonymousFields(this); for (int fieldIndex = 0; fieldIndex < list.Count; fieldIndex++) { object o = list[fieldIndex]; string fieldName; INativeType cdata; GetFieldInfo(this, o, out fieldName, out cdata, out bitCount); int prevSize = UpdateSizeAndAlignment(cdata, bitCount, lastType, ref size, ref alignment, ref curBitCount); Field newField = new Field(fieldName, cdata, prevSize, allFields.Count, bitCount, curBitCount - bitCount); allFields.Add(newField); AddSlot(fieldName, newField); if (anonFields != null && anonFields.Contains(fieldName)) { AddAnonymousFields(this, allFields, cdata, newField); } lastType = cdata; } CheckAnonymousFields(allFields, anonFields); if (bitCount != null) { size += lastType.Size; } _fields = allFields.ToArray(); _size = PythonStruct.Align(size, alignment); _alignment = alignment; } } internal static void CheckAnonymousFields(List<Field> allFields, IList<object> anonFields) { if (anonFields != null) { foreach (string s in anonFields) { bool found = false; foreach (Field f in allFields) { if (f.FieldName == s) { found = true; break; } } if (!found) { throw PythonOps.AttributeError("anonymous field {0} is not defined in this structure", s); } } } } internal static IList<object> GetAnonymousFields(PythonType type) { object anonymous; IList<object> anonFields = null; if (type.TryGetBoundAttr(type.Context.SharedContext, type, "_anonymous_", out anonymous)) { anonFields = anonymous as IList<object>; if (anonFields == null) { throw PythonOps.TypeError("_anonymous_ must be a sequence"); } } return anonFields; } internal static void AddAnonymousFields(PythonType type, List<Field> allFields, INativeType cdata, Field newField) { Field[] childFields; if (cdata is StructType) { childFields = ((StructType)cdata)._fields; } else if (cdata is UnionType) { childFields = ((UnionType)cdata)._fields; } else { throw PythonOps.TypeError("anonymous field must be struct or union"); } foreach (Field existingField in childFields) { Field anonField = new Field( existingField.FieldName, existingField.NativeType, checked(existingField.offset + newField.offset), allFields.Count ); type.AddSlot(existingField.FieldName, anonField); allFields.Add(anonField); } } private List<Field> GetBaseSizeAlignmentAndFields(out int size, out int alignment) { size = 0; alignment = 1; List<Field> allFields = new List<Field>(); INativeType lastType = null; int? 
totalBitCount = null; foreach (PythonType pt in BaseTypes) { StructType st = pt as StructType; if (st != null) { foreach (Field f in st._fields) { allFields.Add(f); UpdateSizeAndAlignment(f.NativeType, f.BitCount, lastType, ref size, ref alignment, ref totalBitCount); if (f.NativeType == this) { throw StructureCannotContainSelf(); } lastType = f.NativeType; } } } return allFields; } private int UpdateSizeAndAlignment(INativeType cdata, int? bitCount, INativeType lastType, ref int size, ref int alignment, ref int? totalBitCount) { int prevSize = size; if (bitCount != null) { if (lastType != null && lastType.Size != cdata.Size) { totalBitCount = null; prevSize = size += lastType.Size; } size = PythonStruct.Align(size, cdata.Alignment); if (totalBitCount != null) { if ((bitCount + totalBitCount + 7) / 8 <= cdata.Size) { totalBitCount = bitCount + totalBitCount; } else { size += lastType.Size; prevSize = size; totalBitCount = bitCount; } } else { totalBitCount = bitCount; } } else { if (totalBitCount != null) { size += lastType.Size; prevSize = size; totalBitCount = null; } if (_pack != null) { alignment = _pack.Value; prevSize = size = PythonStruct.Align(size, _pack.Value); size += cdata.Size; } else { alignment = Math.Max(alignment, cdata.Alignment); prevSize = size = PythonStruct.Align(size, cdata.Alignment); size += cdata.Size; } } return prevSize; } internal void EnsureFinal() { if (_fields == null) { SetFields(PythonTuple.EMPTY); if (_fields.Length == 0) { // track that we were initialized w/o fields. _fields = _emptyFields; } } } /// <summary> /// If our size/alignment hasn't been initialized then grabs the size/alignment /// from all of our base classes. If later new _fields_ are added we'll be /// initialized and these values will be replaced. /// </summary> private void EnsureSizeAndAlignment() { Debug.Assert(_size.HasValue == _alignment.HasValue); // these are always initialized together if (_size == null) { lock (this) { if (_size == null) { int size, alignment; GetBaseSizeAlignmentAndFields(out size, out alignment); _size = size; _alignment = alignment; } } } } } } } #endif
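// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the IronPython sources above): the SetFields /
// UpdateSizeAndAlignment logic lays structures out the way a C compiler would, aligning each
// non-bitfield member to its natural alignment and padding the final size to the largest member
// alignment. The Align helper below is a hypothetical stand-in for PythonStruct.Align, used only
// to make the arithmetic concrete.
// ---------------------------------------------------------------------------------------------
using System;

internal static class StructLayoutSketch
{
    // Rounds offset up to the next multiple of alignment.
    private static int Align(int offset, int alignment) => ((offset + alignment - 1) / alignment) * alignment;

    private static void Main()
    {
        // Layout of a structure with the fields (byte a; int b;) under natural alignment.
        int size = 0, alignment = 1;

        // byte a: size 1, alignment 1 -> occupies offset 0
        size = Align(size, 1) + 1;              // size = 1
        alignment = Math.Max(alignment, 1);

        // int b: size 4, alignment 4 -> its offset is padded from 1 up to 4
        size = Align(size, 4) + 4;              // size = 8
        alignment = Math.Max(alignment, 4);

        // The final size is padded to the structure alignment, as SetFields does above.
        size = Align(size, alignment);

        Console.WriteLine($"size={size}, alignment={alignment}");   // prints: size=8, alignment=4
    }
}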
#region Licence... /* The MIT License (MIT) Copyright (c) 2014 Oleg Shilo Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #endregion Licence... using System; namespace WixSharp { /// <summary> /// Specifies predefined values for <see cref="Project.UI"/>, /// which control type of User Interface used to interact with user during the installation. /// </summary> public enum WUI { /// <summary> /// WixUI_ProgressOnly is "no-UI" dialog set which includes only progress bar. /// </summary> WixUI_ProgressOnly, /// <summary> /// WixUI_Minimal is the simplest of the built-in WixUI dialog sets. /// </summary> WixUI_Minimal, /// <summary> /// WixUI_InstallDir does not allow the user to choose what features to install, but it adds a dialog to /// let the user choose a directory where the product will be installed. /// </summary> WixUI_InstallDir, /// <summary> /// WixUI_Common is defines "common" built-in dialog set. It is used to define additional /// custom dialogs. /// </summary> WixUI_Common, /// <summary> /// WixUI_FeatureTree built-in dialog set. /// <para>WixUI_FeatureTree is a simpler version of WixUI_Mondo that omits the setup type dialog.</para> /// </summary> WixUI_FeatureTree, /// <summary> /// WixUI_Mondo includes the full set of dialogs (hence "Mondo"). /// </summary> WixUI_Mondo, /// <summary> /// WixUI_Advanced provides the option of a one-click install like WixUI_Minimal, but it also allows directory and feature /// selection like other dialog sets if the user chooses to configure advanced options. /// </summary> WixUI_Advanced } /// <summary> /// Specifies predefined values for <see cref="Action.Return"/>, /// which controls invoking type of <c>Custom Actions</c>. /// </summary> public enum Return { /// <summary> /// Indicates that the custom action will run asynchronously but the installer will wait for the return code at sequence end. /// </summary> asyncWait, /// <summary> /// Indicates that the custom action will run asynchronously and execution may continue after the installer terminates. /// </summary> asyncNoWait, /// <summary> /// Indicates that the custom action will run synchronously and the return code will be checked for success. /// </summary> check, /// <summary> /// Indicates that the custom action will run synchronously and the return code will not be checked. 
/// </summary> ignore } //good read: http://stackoverflow.com/questions/5564619/what-is-the-purpose-of-administrative-installation-initiated-using-msiexec-a /// <summary> /// Specifies predefined values for <see cref="Action.Sequence" />, /// which controls which MSI sequence contains corresponding <c>Custom Action</c>. /// </summary> public class Sequence { /// <summary> /// <c>Custom Action</c> belongs to <c>InstallExecuteSequence</c>. /// </summary> public static Sequence InstallExecuteSequence = new Sequence("InstallExecuteSequence"); /// <summary> /// <c>Custom Action</c> belongs to <c>InstallUISequence</c>. /// </summary> public static Sequence InstallUISequence = new Sequence("InstallUISequence"); /// <summary> /// The AdminExecuteSequence table lists actions that the installer calls in sequence when the top-level ADMIN action is executed. /// </summary> public static Sequence AdminExecuteSequence = new Sequence("AdminExecuteSequence"); /// <summary> /// The AdminUISequence table lists actions that the installer calls in sequence when the top-level ADMIN action is executed and the internal user interface level is set to full UI or reduced UI. The installer skips the actions in this table if the user interface level is set to basic UI or no UI. /// </summary> public static Sequence AdminUISequence = new Sequence("AdminUISequence"); /// <summary> /// <c>Custom Action</c> does not belong to any sequence. Use this value when you need <c>Custom Action</c> /// to be invoked not from the installation sequence but from another <c>Custom Action</c>. /// </summary> public static Sequence NotInSequence = new Sequence("NotInSequence"); /// <summary> /// Initializes a new instance of the <see cref="Sequence"/> class. /// </summary> /// <param name="value">The value.</param> public Sequence(string value) { Value = value; } /// <summary> /// The string value of the Sequence object /// </summary> protected string Value; /// <summary> /// Gets the string values of the Sequence object. Note there can be more than a single value. For example /// Sequence.InstallExecuteSequence | Sequence.InstallUISequence will yield "InstallExecuteSequence" and /// "InstallUISequence" /// </summary> /// <returns></returns> public string[] GetValues() { return (Value ?? "").Split('|'); } /// <summary> /// Returns a <see cref="System.String" /> that represents this instance. /// </summary> /// <returns> /// A <see cref="System.String" /> that represents this instance. /// </returns> public override string ToString() { return Value; } /// <summary> /// Implements the operator +. /// </summary> /// <param name="first">The first.</param> /// <param name="second">The second.</param> /// <returns> /// The result of the operator. /// </returns> public static Sequence operator +(Sequence first, Sequence second) { return new Sequence(first.Value + "|" + second.Value); } /// <summary> /// Implements the operator |. /// </summary> /// <param name="first">The first.</param> /// <param name="second">The second.</param> /// <returns> /// The result of the operator. /// </returns> public static Sequence operator |(Sequence first, Sequence second) { return new Sequence(first.Value + "|" + second.Value); } } /// <summary> /// Specifies predefined values for <see cref="Action.Execute"/> attribute, /// which controls at what stage of installation script <c>Custom Action</c> will be executed.
/// </summary> public enum Execute { /// <summary> /// Indicates that the custom action will run after successful completion of the installation script (at the end of the installation). /// </summary> commit, /// <summary> /// Indicates that the custom action runs in-script (possibly with elevated privileges). /// </summary> deferred, /// <summary> /// Indicates that the custom action will only run in the first sequence that runs it. /// </summary> firstSequence, /// <summary> /// Indicates that the custom action will run during normal processing time with user privileges. This is the default. /// </summary> immediate, /// <summary> /// Indicates that the custom action will only run in the first sequence that runs it in the same process. /// </summary> oncePerProcess, /// <summary> /// Indicates that a custom action will run in the rollback sequence when a failure occurs during installation, usually to undo changes made by a deferred custom action. /// </summary> rollback, /// <summary> /// Indicates that a custom action should be run a second time if it was previously run in an earlier sequence. /// </summary> secondSequence } /// <summary> /// Specifies predefined values for <see cref="Action.When"/>, /// which controls when the <c>Custom Action</c> occurs relative to /// the order controlling <c>Action</c>. /// <para>The order-controlling <c>Action</c> is defined by <see cref="Step"/></para> /// </summary> public enum When { /// <summary> /// Execute after order controlling action. /// </summary> After, /// <summary> /// Execute before order controlling action. /// </summary> Before } /// <summary> /// Use this attribute to specify the privileges required to install the package on Windows Vista and above. /// </summary> public enum InstallPrivileges { /// <summary> /// Set this value to declare that the package does not require elevated privileges to install. /// </summary> limited, /// <summary> /// Set this value to declare that the package requires elevated privileges to install. This is the default value. /// </summary> elevated } /// <summary> /// Use this attribute to specify the priviliges required to install the package on Windows Vista and above. /// </summary> public enum InstallScope { /// <summary> /// Set this value to declare that the package is a per-machine installation and requires elevated privileges to install. Sets the ALLUSERS property to 1. /// </summary> perMachine, /// <summary> /// Set this value to declare that the package is a per-user installation and does not require elevated privileges to install. Sets the package's InstallPrivileges attribute to "limited." /// </summary> perUser } /// <summary> /// Sets the default script language (<see cref="IISVirtualDir.DefaultScript"/>) for the Web site. /// </summary> public enum DefaultScript { /// <summary> /// VBScript /// </summary> VBScript, /// <summary> /// JScript /// </summary> JScript } /// <summary> /// Sets the platform(s) for which native images will be generated. /// </summary> public class NativeImagePlatform : StringEnum<NativeImagePlatform> { /// <summary> /// Initializes a new instance of the <see cref="NativeImagePlatform"/> class. /// </summary> /// <param name="value">The value.</param> public NativeImagePlatform(string value) : base(value) { } /// <summary> /// Attempt to generate native images only for the 32-bit version of the .NET Framework on the target machine. 
If the 32-bit version of the .NET Framework 2.0 or newer is not present on the target machine, native image custom actions will not be scheduled. This is the default value. /// </summary> public static NativeImagePlatform x86 = new NativeImagePlatform("32bit"); //it's illegal to start member name from digit (e.g. NativeImagePlatform.32bit) /// <summary> /// Attempt to generate native images only for the 64-bit version of the .NET Framework on the target machine. If a 64-bit version of the .NET Framework 2.0 or newer is not present on the target machine, native image custom actions will not be scheduled. /// </summary> public static NativeImagePlatform x64 = new NativeImagePlatform("64bit"); /// <summary> /// Attempt to generate native images for the 32-bit and 64-bit versions of the .NET Framework on the target machine. If a version of the .NET Framework 2.0 or newer is not present on the target machine for a processor architecture, native image custom actions will not be scheduled for that processor architecture. /// </summary> public static NativeImagePlatform all = new NativeImagePlatform("all"); } /// <summary> /// Sets the <see cref="T:WixSharp.Project.Package.Platform"/>) for the target platform type. /// </summary> public enum Platform { /// <summary> /// Set this value to declare that the package is an x86 package. /// </summary> x86, /// <summary> /// Set this value to declare that the package is an ia64 package. This value requires that the InstallerVersion property be set to 200 or greater. /// </summary> ia64, /// <summary> /// Set this value to declare that the package is an x64 package. This value requires that the InstallerVersion property be set to 200 or greater. /// </summary> x64, /// <summary> /// Set this value to declare that the package is an arm package. This value requires that the InstallerVersion property be set to 500 or greater. /// </summary> arm, /// <summary> /// Set this value to declare that the package is an arm64 package. This value requires that the InstallerVersion property be set to 500 or greater. /// </summary> arm64, } /// <summary> /// Indicates the compression level for a cabinet. /// </summary> public enum CompressionLevel { #pragma warning disable 1591 high, medium, low, mszip, none } /// <summary> /// Sets the <see cref="T:IISVirtualDir.Certificate.StoreLocation"/> for the Web site certificate. /// </summary> public enum StoreLocation { currentUser, localMachine } public enum StoreType { file, commonName, sha1Hash } [Flags] public enum HashAlgorithmType { sha1 = 0x01, sha256 = 0x02 } #pragma warning restore 1591 /// <summary> /// Sets the (<see cref="T:IISVirtualDir.Certificate.StoreName"/>) for the Web site certificate. /// </summary> public enum StoreName { /// <summary> /// Contains the certificates of certificate authorities that the user trusts to issue certificates to others. Certificates /// in these stores are normally supplied with the operating system or by the user's network administrator. /// </summary> ca, /// <summary> /// Use the "personal" value instead. /// </summary> my, /// <summary> /// Contains personal certificates. These certificates will usually have an associated private key. This store is often referred to as the "MY" certificate store. /// </summary> personal, /// <summary> /// /// </summary> request, /// <summary> /// Contains the certificates of certificate authorities that the user trusts to issue certificates to others. 
Certificates in these stores are normally supplied with the operating system or by the user's network administrator. Certificates in this store are typically self-signed. /// </summary> root, /// <summary> /// Contains the certificates of those that the user normally sends enveloped messages to or receives signed messages from. See MSDN documentation for more information. /// </summary> otherPeople, /// <summary> /// Contains the certificates of those directly trusted people and resources. /// </summary> trustedPeople, /// <summary> /// Contains the certificates of those publishers who are trusted. /// </summary> trustedPublisher, } /// <summary> /// Values of the application isolation level of <see cref="IISVirtualDir.Isolation"/> for pre-IIS 6 applications /// </summary> public enum Isolation { /// <summary> /// Means the application executes within the IIS process. /// </summary> low, /// <summary> /// Executes pooled in a separate process. /// </summary> medium, /// <summary> /// Means execution alone in a separate process. /// </summary> high } /// <summary> /// Determines what service action should be taken on an error. /// </summary> public enum SvcErrorControl { /// <summary> ///Logs the error and continues with the startup operation. /// </summary> ignore, /// <summary> ///Logs the error, displays a message box and continues the startup operation. /// </summary> normal, /// <summary> /// Logs the error if possible and the system is restarted with the last configuration known to be good. If the last-known-good configuration is being started, the startup operation fails. /// </summary> critical } /// <summary> /// Determines when the service should be started. The Windows Installer does not support boot or system. /// </summary> public enum SvcStartType { /// <summary> /// The service will start during startup of the system. /// </summary> auto, /// <summary> ///The service will start when the service control manager calls the StartService function. /// </summary> demand, /// <summary> /// The service can no longer be started. /// </summary> disabled, /// <summary> /// The service is a device driver that will be started by the operating system boot loader. This value is not currently supported by the Windows Installer. /// </summary> boot, /// <summary> /// The service is a device driver that will be started by the IoInitSystem function. This value is not currently supported by the Windows Installer. /// </summary> system } /// <summary> /// The Windows Installer does not currently support kernelDriver or systemDriver. This attribute's value must be one of the following: /// </summary> public enum SvcType { /// <summary> /// A Win32 service that runs its own process. /// </summary> ownProcess, /// <summary> /// A Win32 service that shares a process. /// </summary> shareProcess, /// <summary> /// A kernel driver service. This value is not currently supported by the Windows Installer. /// </summary> kernelDriver, /// <summary> /// A file system driver service. This value is not currently supported by the Windows Installer. /// </summary> systemDriver } /// <summary> /// Specifies whether an action occur on install, uninstall or both. /// </summary> public enum SvcEventType { /// <summary> /// Specifies that occur on install. /// </summary> install, /// <summary> /// Specifies that occur on uninstall. /// </summary> uninstall, /// <summary> /// Specifies that occur on install and uninstall. 
/// </summary> both } /// <summary> /// Indicates how value of the environment variable should be set. /// </summary> public enum EnvVarPart { /// <summary> /// This value is the entire environmental variable. This is the default. /// </summary> all, /// <summary> ///This value is prefixed. /// </summary> first, /// <summary> ///This value is appended. /// </summary> last } /// <summary> /// Specifies whether the environmental variable should be created, set or removed when the parent component is installed. /// </summary> public enum EnvVarAction { /// <summary> /// Creates the environment variable if it does not exist, then set it during installation. This has no effect on the value of /// the environment variable if it already exists. /// </summary> create, /// <summary> /// Creates the environment variable if it does not exist, and then set it during installation. If the environment variable exists, /// set it during the installation. /// </summary> set, /// <summary> /// Removes the environment variable during an installation. The installer only removes an environment variable /// during an installation if the name and value of the variable match the entries in the Name and Value attributes. /// If you want to remove an environment variable, regardless of its value, do not set the Value attribute. /// </summary> remove } /// <summary> /// Specifies the architecture for this assembly. /// </summary> public enum ProcessorArchitecture { /// <summary> /// The file is a .NET Framework assembly that is processor-neutral. /// </summary> msil, /// <summary> /// The file is a .NET Framework assembly for the x86 processor. /// </summary> x86, /// <summary> /// The file is a .NET Framework assembly for the x64 processor. /// </summary> x64, /// <summary> /// The file is a .NET Framework assembly for the ia64 processor. /// </summary> ia64 } /// <summary> /// Specifies the architecture for the driver to be installed. /// </summary> public enum DriverArchitecture { /// <summary> /// The driver is for the x86 processor. /// </summary> x86, /// <summary> /// The driver is for the x64 processor. /// </summary> x64, /// <summary> /// The driver is for the ia64 processor. /// </summary> ia64 } /// <summary> /// Specifies what Action should be executed on the RegistryKey when un-/installing /// </summary> public enum RegistryKeyAction { /// <summary> /// Creates the key, if absent, when the parent component is installed. /// </summary> create, /// <summary> /// Creates the key, if absent, when the parent component is installed then remove the key with all its values and subkeys when the parent component is uninstalled. /// </summary> createAndRemoveOnUninstall, /// <summary> /// Does nothing; this element is used merely in WiX authoring for organization and does nothing to the final output. /// </summary> none } /// <summary> /// Determines the initial display of this feature in the feature tree. /// </summary> public enum FeatureDisplay { /// <summary> /// Initially shows the feature collapsed. This is the default value. /// </summary> collapse, /// <summary> /// Initially shows the feature expanded. /// </summary> expand, /// <summary> /// Prevents the feature from displaying in the user interface. /// </summary> hidden } /// <summary> /// Bootstrapper variable (<see cref="WixSharp.Bootstrapper.Variable"/>) type. 
/// </summary> public enum VariableType { /// <summary> /// The string type /// </summary> @string, /// <summary> /// The numeric type /// </summary> numeric, /// <summary> /// The version type /// </summary> version } /// <summary> /// Specify whether the DOM object should use XPath language or the old XSLPattern language (default) as the query language. /// </summary> public enum XmlFileSelectionLanguage { /// <summary> /// XPath language /// </summary> XPath, /// <summary> /// XSLPattern language /// </summary> XSLPattern, } /// <summary> /// The type of modification to be made to the XML file when the component is installed. /// </summary> public enum XmlFileAction { /// <summary> /// Creates a new element under the element specified in ElementPath. /// The Name attribute is required in this case and specifies the name of the new element. /// The Value attribute is not necessary when createElement is specified as the action. /// If the Value attribute is set, it will cause the new element's text value to be set. /// </summary> createElement, /// <summary> /// Deletes a value from the element specified in the ElementPath. /// If Name is specified, the attribute with that name is deleted. /// If Name is not specified, the text value of the element specified in the ElementPath is deleted. /// The Value attribute is ignored if deleteValue is the action specified. /// </summary> deleteValue, /// <summary> /// Sets a value in the element specified in the ElementPath. /// If Name is specified, and attribute with that name is set to the value specified in Value. /// If Name is not specified, the text value of the element is set. /// Value is a required attribute if setValue is the action specified. /// </summary> setValue, /// <summary> /// Sets all the values in the elements that match the ElementPath. /// If Name is specified, attributes with that name are set to the same value specified in Value. /// If Name is not specified, the text values of the elements are set. /// Value is a required attribute if setBulkValue is the action specified. /// </summary> bulkSetValue, } /// <summary> /// Rights for this ACE. /// </summary> public enum UrlReservationRights { /// <summary> /// The 'register' rights value of the child UrlAce element /// </summary> register, /// <summary> /// The 'delete' rights value of the child UrlAce element /// </summary> @delegate, /// <summary> /// The 'all' rights value of the child UrlAce element /// </summary> all, } /// <summary> /// Specifies the behavior when trying to install a URL reservation and it already exists. /// </summary> public enum UrlReservationHandleExisting { /// <summary> /// Replaces the existing URL reservation (the default). /// </summary> replace, /// <summary> /// Keeps the existing URL reservation. /// </summary> ignore, /// <summary> /// The installation fails. /// </summary> fail, } /// <summary> /// Flags for indicating when the service should be configured. 
/// </summary> [Flags] public enum ConfigureServiceTrigger { #pragma warning disable 1591 /// <summary> /// Not a valid value for ServiceConfig.On(Install, Reinstall, Uninstall) /// </summary> None = 0, Install = 1, Reinstall = 2, Uninstall = 4 #pragma warning restore 1591 } /// <summary> /// Possible values for ServiceInstall.(First|Second|Third)FailureActionType /// </summary> public enum FailureActionType { #pragma warning disable 1591 none, reboot, restart, runCommand #pragma warning restore 1591 } /// <summary> /// CA assembly validation mode /// </summary> public enum CAValidation { /// <summary> /// The CA assembly is loaded in the temporary remote AppDomain for validation. /// Assembly file is unlocked and at the end of the validation the assembly is unloaded. /// </summary> InRemoteAppDomain, /// <summary> /// The CA assembly is loaded in the current AppDomain for validation. /// Assembly file is unlocked but the assembly will not be unloaded at the end of the validation. /// This mode may lead to unpredictable behaviour. /// </summary> InCurrentAppDomain, /// <summary> /// CA assembly validation is disabled. /// </summary> Disabled } /// <summary> /// Sign Tool output level /// </summary> public enum SignOutputLevel { /// <summary> /// Displays verbose output regardless of whether the command runs successfully or fails, /// and displays warning messages. /// </summary> Verbose, /// <summary> /// Displays no output if the command runs successfully, /// and displays minimal output if the command fails. /// </summary> Minimal, /// <summary> /// Displays standard output /// </summary> Standard, /// <summary> /// Displays debugging information. /// </summary> Debug } }
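// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the WixSharp sources above): how a build script typically
// consumes these enums. Only Project.UI is confirmed by the XML docs above; the Project/Dir/File
// constructors and Compiler.BuildMsi follow the standard WixSharp samples, and the commented
// InstallScope line uses an assumed property name shown purely for illustration.
// ---------------------------------------------------------------------------------------------
using WixSharp;

internal class SetupScript
{
    private static void Main()
    {
        var project = new Project("MyProduct",
            new Dir(@"%ProgramFiles%\My Company\My Product",
                new File(@"Files\MyApp.exe")));

        // Pick one of the built-in dialog sets documented by the WUI enum.
        project.UI = WUI.WixUI_InstallDir;

        // project.InstallScope = InstallScope.perMachine;   // assumed property; sets ALLUSERS=1

        Compiler.BuildMsi(project);
    }
}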
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.IO; using System.Threading; using Xunit; public partial class RenamedTests { [Fact] public static void FileSystemWatcher_Renamed_Directory() { using (var dir = Utility.CreateTestDirectory()) using (var watcher = new FileSystemWatcher(".")) { watcher.Filter = Path.GetFileName(dir.Path); AutoResetEvent eventOccured = Utility.WatchForEvents(watcher, WatcherChangeTypes.Renamed); string newName = dir.Path + "_rename"; Utility.EnsureDelete(newName); watcher.EnableRaisingEvents = true; dir.Move(newName); Utility.ExpectEvent(eventOccured, "renamed"); } } [Fact] public static void FileSystemWatcher_Renamed_Negative() { using (var dir = Utility.CreateTestDirectory()) using (var watcher = new FileSystemWatcher()) { // put everything in our own directory to avoid collisions watcher.Path = Path.GetFullPath(dir.Path); watcher.Filter = "*.*"; AutoResetEvent eventOccured = Utility.WatchForEvents(watcher, WatcherChangeTypes.Renamed); watcher.EnableRaisingEvents = true; // run all scenarios together to avoid unnecessary waits, // assert information is verbose enough to trace to failure cause // create a file using (var testFile = new TemporaryTestFile(Path.Combine(dir.Path, "file"))) using (var testDir = new TemporaryTestDirectory(Path.Combine(dir.Path, "dir"))) { // change a file testFile.WriteByte(0xFF); testFile.Flush(); // deleting a file & directory by leaving the using block } Utility.ExpectNoEvent(eventOccured, "created"); } } [Fact] public static void FileSystemWatcher_Renamed_NestedDirectory() { Utility.TestNestedDirectoriesHelper(WatcherChangeTypes.Renamed, (AutoResetEvent are, TemporaryTestDirectory ttd) => { ttd.Move(ttd.Path + "_2"); Utility.ExpectEvent(are, "renamed"); }); } [Fact] public static void FileSystemWatcher_Renamed_FileInNestedDirectory() { Utility.TestNestedDirectoriesHelper(WatcherChangeTypes.Renamed | WatcherChangeTypes.Created, (AutoResetEvent are, TemporaryTestDirectory ttd) => { using (var nestedFile = new TemporaryTestFile(Path.Combine(ttd.Path, "nestedFile"))) { Utility.ExpectEvent(are, "file created", Utility.WaitForCreationTimeoutInMs); nestedFile.Move(nestedFile.Path + "_2"); Utility.ExpectEvent(are, "renamed"); } }); } [Fact] // Note: Can't use the TestNestedDirectoriesHelper since we need access to the root public static void FileSystemWatcher_Moved_NestedDirectoryRoot() { // Create a test root with our watch dir and a temp directory since, on the default Ubuntu install, the system // temp directory is on a different mount point and Directory.Move does not work across mount points. 
using (var root = Utility.CreateTestDirectory()) using (var dir = Utility.CreateTestDirectory(Path.Combine(root.Path, "test_root"))) using (var temp = Utility.CreateTestDirectory(Path.Combine(root.Path, "temp"))) using (var watcher = new FileSystemWatcher()) { AutoResetEvent createdOccured = Utility.WatchForEvents(watcher, WatcherChangeTypes.Created); // not "using" to avoid race conditions with FSW callbacks AutoResetEvent deletedOccured = Utility.WatchForEvents(watcher, WatcherChangeTypes.Deleted); watcher.Path = Path.GetFullPath(dir.Path); watcher.Filter = "*"; watcher.IncludeSubdirectories = true; watcher.EnableRaisingEvents = true; using (var dir1 = new TemporaryTestDirectory(Path.Combine(dir.Path, "dir1"))) { Utility.ExpectEvent(createdOccured, "dir1 created", Utility.WaitForCreationTimeoutInMs); using (var dir2 = new TemporaryTestDirectory(Path.Combine(dir1.Path, "dir2"))) { Utility.ExpectEvent(createdOccured, "dir2 created", Utility.WaitForCreationTimeoutInMs); using (var file = Utility.CreateTestFile(Path.Combine(dir2.Path, "test file"))) { }; // Move the directory out of the watched folder and expect that we get a deleted event string original = dir1.Path; string target = Path.Combine(temp.Path, Path.GetFileName(dir1.Path)); dir1.Move(target); Utility.ExpectEvent(deletedOccured, "dir1 moved out"); // Move the directory back and expect a created event dir1.Move(original); Utility.ExpectEvent(createdOccured, "dir1 moved back"); } } } } [Fact] // Note: Can't use the TestNestedDirectoriesHelper since we need access to the root public static void FileSystemWatcher_Moved_NestedDirectoryRootWithoutSubdirectoriesFlag() { // Create a test root with our watch dir and a temp directory since, on the default Ubuntu install, the system // temp directory is on a different mount point and Directory.Move does not work across mount points. 
using (var root = Utility.CreateTestDirectory()) using (var dir = Utility.CreateTestDirectory(Path.Combine(root.Path, "test_root"))) using (var temp = Utility.CreateTestDirectory(Path.Combine(root.Path, "temp"))) using (var watcher = new FileSystemWatcher()) { AutoResetEvent createdOccured = Utility.WatchForEvents(watcher, WatcherChangeTypes.Created); // not "using" to avoid race conditions with FSW callbacks AutoResetEvent deletedOccured = Utility.WatchForEvents(watcher, WatcherChangeTypes.Deleted); watcher.Path = Path.GetFullPath(dir.Path); watcher.Filter = "*"; watcher.IncludeSubdirectories = false; watcher.EnableRaisingEvents = true; using (var dir1 = new TemporaryTestDirectory(Path.Combine(dir.Path, "dir1"))) { Utility.ExpectEvent(createdOccured, "dir1 created", Utility.WaitForCreationTimeoutInMs); using (var dir2 = new TemporaryTestDirectory(Path.Combine(dir1.Path, "dir2"))) { Utility.ExpectNoEvent(createdOccured, "dir2 created"); using (var file = Utility.CreateTestFile(Path.Combine(dir2.Path, "test file"))) { }; // Move the directory out of the watched folder and expect that we get a deleted event string original = dir1.Path; string target = Path.Combine(temp.Path, Path.GetFileName(dir1.Path)); dir1.Move(target); Utility.ExpectEvent(deletedOccured, "dir1 moved out"); // Move the directory back and expect a created event dir1.Move(original); Utility.ExpectEvent(createdOccured, "dir1 moved back"); } } } } [Fact] // Note: Can't use the TestNestedDirectoriesHelper since we need access to the root public static void FileSystemWatcher_Moved_NestedDirectoryTreeMoveFileAndFolder() { using (var root = Utility.CreateTestDirectory()) using (var dir = Utility.CreateTestDirectory(Path.Combine(root.Path, "test_root"))) using (var temp = Utility.CreateTestDirectory(Path.Combine(root.Path, "temp"))) using (var dir1 = new TemporaryTestDirectory(Path.Combine(dir.Path, "dir1"))) using (var watcher = new FileSystemWatcher()) { AutoResetEvent eventOccured = Utility.WatchForEvents(watcher, WatcherChangeTypes.Created | WatcherChangeTypes.Deleted | WatcherChangeTypes.Changed); watcher.Path = Path.GetFullPath(dir.Path); watcher.Filter = "*"; watcher.IncludeSubdirectories = true; watcher.EnableRaisingEvents = true; string filePath = Path.Combine(dir1.Path, "test_file"); using (var file = File.Create(filePath)) { // Wait for the file to be created then make a change to validate that we get a change Utility.ExpectEvent(eventOccured, "test file created"); byte[] buffer = new byte[4096]; file.Write(buffer, 0, buffer.Length); file.Flush(); } Utility.ExpectEvent(eventOccured, "test file changed"); // Move the nested dir out of scope and validate that we get a single deleted event string original = dir1.Path; string target = Path.Combine(temp.Path, "dir1"); dir1.Move(target); Utility.ExpectEvent(eventOccured, "nested dir deleted"); // Move the dir (and child file) back into scope and validate that we get a created event dir1.Move(original); Utility.ExpectEvent(eventOccured, "nested dir created"); using (FileStream fs = new FileStream(filePath, FileMode.Open, FileAccess.Write)) { byte[] buffer = new byte[4096]; fs.Write(buffer, 0, buffer.Length); fs.Flush(); } Utility.ExpectEvent(eventOccured, "test file changed"); } } }
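// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not one of the test cases above): the minimal rename-watching pattern
// these tests exercise, written against the public FileSystemWatcher API only. The paths and
// the five-second timeout are arbitrary example values.
// ---------------------------------------------------------------------------------------------
using System;
using System.IO;
using System.Threading;

internal static class RenameWatchSketch
{
    private static void Main()
    {
        string dir = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
        Directory.CreateDirectory(dir);
        string oldPath = Path.Combine(dir, "before.txt");
        string newPath = Path.Combine(dir, "after.txt");
        File.WriteAllText(oldPath, "x");

        using (var watcher = new FileSystemWatcher(dir, "*.txt"))
        using (var renamed = new AutoResetEvent(false))
        {
            watcher.Renamed += (s, e) =>
            {
                Console.WriteLine($"renamed: {e.OldFullPath} -> {e.FullPath}");
                renamed.Set();
            };
            watcher.EnableRaisingEvents = true;

            File.Move(oldPath, newPath);

            // Same idea as Utility.ExpectEvent above: fail if the event does not arrive in time.
            if (!renamed.WaitOne(TimeSpan.FromSeconds(5)))
                throw new Exception("expected a Renamed event");
        }

        Directory.Delete(dir, recursive: true);
    }
}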
/* * MindTouch Core - open source enterprise collaborative networking * Copyright (c) 2006-2010 MindTouch Inc. * www.mindtouch.com [email protected] * * For community documentation and downloads visit www.opengarden.org; * please review the licensing section. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. * http://www.gnu.org/copyleft/gpl.html */ using System; using System.Collections.Generic; using System.IO; using System.Text.RegularExpressions; using MindTouch.Deki.Data; using MindTouch.Deki.Logic; using MindTouch.Dream; using MindTouch.Dream.Http; using MindTouch.Tasking; using MindTouch.Xml; namespace MindTouch.Deki { using Yield = IEnumerator<IYield>; public partial class DekiWikiService { //--- Constants --- private static readonly Regex MSIE_USER_AGENT_REGEX = new Regex("MSIE[^;8]+(;|\\))"); private static readonly Regex MSIE6_USER_AGENT_REGEX = new Regex("MSIE 6.0(|b)(;|\\))"); //--- Features --- [DreamFeature("GET:files", "Retrieve information for all attached files")] [DreamFeatureParam("skip", "int?", "Number of files to skip. Default: 0")] [DreamFeatureParam("numfiles", "int?", "Number of files to retrieve. 'ALL' for no limit. Default: 100")] [DreamFeatureParam("authenticate", "bool?", "Force authentication for request (default: false)")] [DreamFeatureStatus(DreamStatus.Ok, "The request completed successfully")] [DreamFeatureStatus(DreamStatus.BadRequest, "Invalid input parameter or request body")] [DreamFeatureStatus(DreamStatus.Forbidden, "Read access to the page sis required")] public new Yield GetFiles(DreamContext context, DreamMessage request, Result<DreamMessage> response) { PermissionsBL.CheckUserAllowed(DekiContext.Current.User, Permissions.READ); uint skip = context.GetParam<uint>("skip", 0); uint numfiles = 100; string numfilesStr = context.GetParam("numfiles", numfiles.ToString()); if (StringUtil.EqualsInvariantIgnoreCase(numfilesStr, "ALL")) { numfiles = uint.MaxValue; } else { if (!uint.TryParse(numfilesStr, out numfiles)) throw new DreamBadRequestException(DekiResources.CANNOT_PARSE_NUMFILES); } IList<AttachmentBE> files = AttachmentBL.Instance.RetrieveAttachments(skip, numfiles); XDoc ret = AttachmentBL.Instance.GetFileXml(files, false, null, null, null); response.Return(DreamMessage.Ok(ret)); yield break; } [DreamFeature("GET:pages/{pageid}/files/{filename}/info", "Retrieve file attachment information")] [DreamFeature("GET:files/{fileid}/info", "Retrieve file attachment information")] [DreamFeatureParam("{pageid}", "string", "either an integer page ID, \"home\", or \"=\" followed by a double uri-encoded page title")] [DreamFeatureParam("{filename}", "string", "\"=\" followed by a double uri-encoded file name")] [DreamFeatureParam("{fileid}", "int", "identifies a file by ID")] [DreamFeatureParam("revision", "string?", "File revision to retrieve. 'head' by default will retrieve latest revision. 
positive integer will retrieve specific revision")] [DreamFeatureParam("redirects", "int?", "If zero, do not follow page redirects (only applies when {pageid} is present).")] [DreamFeatureParam("authenticate", "bool?", "Force authentication for request (default: false)")] [DreamFeatureStatus(DreamStatus.Ok, "The request completed successfully")] [DreamFeatureStatus(DreamStatus.BadRequest, "Invalid input parameter or request body")] [DreamFeatureStatus(DreamStatus.Forbidden, "Read access to the page is required")] [DreamFeatureStatus(DreamStatus.NotFound, "Requested file could not be found")] public Yield GetFileInfo(DreamContext context, DreamMessage request, Result<DreamMessage> response) { PageBE parentPage = null; AttachmentBE fileRevision = GetAttachment(request, Permissions.READ, true, false, out parentPage); // found matching attachments. Put into the response XDoc bool? revisionInfo = null; if (!StringUtil.EqualsInvariantIgnoreCase(DreamContext.Current.GetParam("revision", "HEAD"), "HEAD")) { revisionInfo = true; } response.Return(DreamMessage.Ok(AttachmentBL.Instance.GetFileXml(fileRevision, true, null, revisionInfo))); yield break; } [DreamFeature("OPTIONS:pages/{pageid}/files/{filename}", "Retrieve available HTTP options")] [DreamFeature("OPTIONS:files/{fileid}", "Retrieve available HTTP options")] [DreamFeature("OPTIONS:files/{fileid}/{filename}", "Retrieve available HTTP options")] public Yield GetOptions(DreamContext context, DreamMessage request, Result<DreamMessage> response) { DreamMessage responseMsg = DreamMessage.Ok(); responseMsg.Headers.Add("DAV", "1"); // The WebDAV DAV header specifies whether the resource supports the WebDAV schema and protocol. responseMsg.Headers.Add("MS-Author-Via", "DAV"); //suggests to certain authoring applications the protocol mechanism to author with responseMsg.Headers.Add("Allow", "DELETE, GET, HEAD, LOCK, PUT"); response.Return(responseMsg); yield break; } [DreamFeature("LOCK:pages/{pageid}/files/{filename}", "Lock file (always returns 412 - Precondition Failed)")] [DreamFeature("LOCK:files/{fileid}", "Lock file (always returns 412 - Precondition Failed)")] [DreamFeature("LOCK:files/{fileid}/{filename}", "Lock file (always returns 412 - Precondition Failed)")] public Yield LockFile(DreamContext context, DreamMessage request, Result<DreamMessage> response) { response.Return(new DreamMessage(DreamStatus.PreconditionFailed, null)); yield break; } [DreamFeature("GET:pages/{pageid}/files/{filename}", "Retrieve file attachment content")] [DreamFeature("GET:files/{fileid}", "Retrieve file attachment content")] [DreamFeature("GET:files/{fileid}/{filename}", "Retrieve file attachment content")] [DreamFeature("HEAD:pages/{pageid}/files/{filename}", "Retrieve file attachment content (Note: image manipulation arguments are ignored for HEAD requests)")] [DreamFeature("HEAD:files/{fileid}", "Retrieve file attachment content (Note: image manipulation arguments are ignored for HEAD requests)")] [DreamFeature("HEAD:files/{fileid}/{filename}", "Retrieve file attachment content (Note: image manipulation arguments are ignored for HEAD requests)")] [DreamFeatureParam("{pageid}", "string", "either an integer page ID, \"home\", or \"=\" followed by a double uri-encoded page title")] [DreamFeatureParam("{filename}", "string", "\"=\" followed by a double uri-encoded file name")] [DreamFeatureParam("{fileid}", "int", "identifies a file by ID")] [DreamFeatureParam("height", "int?", "Height of the image")] [DreamFeatureParam("width", "int?", "Width of the image")] 
[DreamFeatureParam("ratio", "{fixed, var}?", "Fixed preserves aspect ratio by applying height and width as bounding maximums rather than absolute values. Variable will use the width and height given. Default: fixed")] [DreamFeatureParam("format", "{jpg, png, bmp, gif}?", "Convert output to given type. Default is to use original type.")] [DreamFeatureParam("size", "{original, thumb, webview, bestfit, custom}?", "Return a resized image from one of the preset cached sizes. Use 'thumb' or 'webview' to return a smaller scaled image. Use 'bestfit' along with height/width to return one of the known sizes being at least the size given. Default: original")] [DreamFeatureParam("revision", "string?", "File revision to retrieve. 'head' by default will retrieve latest revision. positive integer will retrieve specific revision")] [DreamFeatureParam("redirects", "int?", "If zero, do not follow page redirects (only applies when {pageid} is present).")] [DreamFeatureParam("authenticate", "bool?", "Force authentication for request (default: false)")] [DreamFeatureStatus(DreamStatus.Ok, "The request completed successfully")] [DreamFeatureStatus(DreamStatus.BadRequest, "Invalid input parameter or request body")] [DreamFeatureStatus(DreamStatus.Forbidden, "Read access to the page is required")] [DreamFeatureStatus(DreamStatus.NotFound, "Requested file could not be found")] [DreamFeatureStatus(DreamStatus.NotImplemented, "Requested operation is not currently supported")] public Yield GetFile(DreamContext context, DreamMessage request, Result<DreamMessage> response) { PageBE parentPage = null; DreamMessage responseMsg = null; AttachmentBE fileRevision = GetAttachment(request, Permissions.READ, true, false, out parentPage); if(fileRevision.IsHidden) { PermissionsBL.CheckUserAllowed(DekiContext.Current.User, Permissions.ADMIN); } // check if only file information is requested if(context.Verb == Verb.HEAD) { response.Return(new DreamMessage(DreamStatus.Ok, null, fileRevision.MimeType, (long)fileRevision.Size, Stream.Null)); yield break; } try { if(request.CheckCacheRevalidation(fileRevision.Timestamp)) { responseMsg = DreamMessage.NotModified(); } if (responseMsg == null) { #region Preview related parameter parsing string sFormat = context.GetParam("format", string.Empty); string sRatio = context.GetParam("ratio", string.Empty); uint height = context.GetParam<uint>("height", 0); uint width = context.GetParam<uint>("width", 0); string cachedSize = context.GetParam("size", string.Empty); // check 'ratio' parameter RatioType ratio = RatioType.UNDEFINED; if (!string.IsNullOrEmpty(sRatio)) { switch (sRatio.ToLowerInvariant().Trim()) { case "var": case "variable": ratio = RatioType.VARIABLE; break; case "fixed": ratio = RatioType.FIXED; break; default: throw new DreamBadRequestException(DekiResources.INVALID_FILE_RATIO); } } // check 'size' parameter SizeType size = SizeType.UNDEFINED; if (!string.IsNullOrEmpty(cachedSize) && !SysUtil.TryParseEnum(cachedSize.Trim(), out size)) { throw new DreamAbortException(DreamMessage.BadRequest(DekiResources.INVALID_FILE_SIZE)); } // check 'format' parameter FormatType format = FormatType.UNDEFINED; if(!string.IsNullOrEmpty(sFormat) && !SysUtil.TryParseEnum(sFormat.Trim(), out format)) { throw new DreamBadRequestException(DekiResources.INVALID_FILE_FORMAT); } #endregion //if any preview related parameters are set, do preview logic. 
Otherwise return the file StreamInfo file = null; if((size != SizeType.UNDEFINED && size != SizeType.ORIGINAL) || ratio != RatioType.UNDEFINED || format != FormatType.UNDEFINED || height != 0 || width != 0 ) { file = AttachmentPreviewBL.RetrievePreview(fileRevision, height, width, ratio, size, format); } else { file = DekiContext.Current.Instance.Storage.GetFile(fileRevision, SizeType.ORIGINAL, true); } // prepare response if(file == null) { throw new DreamInternalErrorException(string.Format(DekiResources.COULD_NOT_RETRIEVE_FILE, fileRevision.ResourceId, fileRevision.Revision)); } if(file.Uri != null) { responseMsg = DreamMessage.Redirect(file.Uri); } else { bool inline = fileRevision.ImageHeight.HasValue; // see if we can use the MimeType map for allowing inlining if(!inline) { // if IE inline security is not disabled bool isIE = false; if(!DekiContext.Current.Instance.EnableUnsafeIEContentInlining) { // check the user agent to see if we're dealing with IE isIE = MSIE_USER_AGENT_REGEX.IsMatch(request.Headers.UserAgent ?? string.Empty); } // see if the mime-type could allow inlining inline = DekiContext.Current.Instance.MimeTypeCanBeInlined(fileRevision.MimeType); if(inline && isIE) { // check whether the creator of the file had unsafecontent permission, to override IE security IList<AttachmentBE> revisions = AttachmentBL.Instance.GetResourceRevisions(fileRevision.ResourceId, ResourceBE.ChangeOperations.CONTENT, SortDirection.DESC, 1); UserBE lastContentEditor = UserBL.GetUserById(revisions[0].UserId); inline = PermissionsBL.IsUserAllowed(lastContentEditor, parentPage, Permissions.UNSAFECONTENT); } } responseMsg = DreamMessage.Ok(fileRevision.MimeType, file.Length, file.Stream); responseMsg.Headers["X-Content-Type-Options"] = "nosniff"; responseMsg.Headers.ContentDisposition = new ContentDisposition(inline, fileRevision.Timestamp, null, null, fileRevision.Name, file.Length, request.Headers.UserAgent); // MSIE6 will delete a downloaded file before the helper app trying to use it can get to it so we //have to do custom cache control headers for MSIE6 so that the file can actually be opened if(MSIE6_USER_AGENT_REGEX.IsMatch(request.Headers.UserAgent ?? string.Empty)) { responseMsg.Headers["Expires"] = "0"; responseMsg.Headers.Pragma = "cache"; responseMsg.Headers.CacheControl = "private"; } else { responseMsg.SetCacheMustRevalidate(fileRevision.Timestamp); } } } } catch { if(responseMsg != null) { responseMsg.Close(); } throw; } response.Return(responseMsg); yield break; } [DreamFeature("GET:pages/{pageid}/files/{filename}/revisions", "Retrieve file revision info")] [DreamFeature("GET:files/{fileid}/revisions", "Retrieve file revision info")] [DreamFeatureParam("{pageid}", "string", "either an integer page ID, \"home\", or \"=\" followed by a double uri-encoded page title")] [DreamFeatureParam("{filename}", "string", "\"=\" followed by a double uri-encoded file name")] [DreamFeatureParam("{fileid}", "int", "identifies a file by ID")] [DreamFeatureParam("changefilter", "string?", "Only show revisions having a user-action listed in this comma delimited list. 
Valid actions are: CONTENT, NAME, LANGUAGE, META, DELETEFLAG, PARENT (default: all actions)")] [DreamFeatureParam("redirects", "int?", "If zero, do not follow page redirects (only applies when {pageid} is present).")] [DreamFeatureParam("authenticate", "bool?", "Force authentication for request (default: false)")] [DreamFeatureStatus(DreamStatus.Ok, "The request completed successfully")] [DreamFeatureStatus(DreamStatus.BadRequest, "Invalid input parameter or request body")] [DreamFeatureStatus(DreamStatus.Forbidden, "Read access to the page is required")] [DreamFeatureStatus(DreamStatus.NotFound, "Requested file could not be found")] public Yield GetFileRevisions(DreamContext context, DreamMessage request, Result<DreamMessage> response) { CheckResponseCache(context, false); //Default change filter is CONTENT changes to preserve backwards compat string changeFilterStr = context.GetParam("changefilter", AttachmentBL.DEFAULT_REVISION_FILTER.ToString()); ResourceBE.ChangeOperations changeFilter = ResourceBE.ChangeOperations.UNDEFINED; if(!string.IsNullOrEmpty(changeFilterStr)) { if(StringUtil.EqualsInvariantIgnoreCase("all", changeFilterStr)) { changeFilter = ResourceBE.ChangeOperations.UNDEFINED; } else if(!SysUtil.TryParseEnum(changeFilterStr, out changeFilter)) { throw new DreamBadRequestException("changefilter value is invalid. Possible values are ALL, " + string.Join(",", Enum.GetNames(typeof(ResourceBE.ChangeOperations)))); } } PageBE parentPage = null; AttachmentBE fileRevision = GetAttachment(request, Permissions.READ, false, false, out parentPage); XUri listUri = AttachmentBL.Instance.GetUri(fileRevision).At("revisions").With("changefilter", changeFilterStr.ToLowerInvariant()); XDoc ret = AttachmentBL.Instance.GetFileRevisionsXml(fileRevision.ResourceId, changeFilter, listUri, fileRevision.Revision); response.Return(DreamMessage.Ok(ret)); yield break; } [DreamFeature("POST:pages/{pageid}/files/{filename}/revisions", "Performs operations such as hide/unhide for revisions of files")] [DreamFeature("POST:files/{fileid}/revisions", "Performs operations such as hide/unhide for revisions of files")] [DreamFeatureParam("{pageid}", "string", "either an integer page ID, \"home\", or \"=\" followed by a double uri-encoded page title")] [DreamFeatureParam("{filename}", "string", "\"=\" followed by a double uri-encoded file name")] [DreamFeatureParam("{fileid}", "int", "identifies a file by ID")] [DreamFeatureParam("comment", "string?", "Reason for hiding revisions")] [DreamFeatureParam("redirects", "int?", "If zero, do not follow page redirects (only applies when {pageid} is present).")] [DreamFeatureParam("authenticate", "bool?", "Force authentication for request (default: false)")] [DreamFeatureStatus(DreamStatus.Ok, "The request completed successfully")] [DreamFeatureStatus(DreamStatus.BadRequest, "Invalid input parameter or request body")] [DreamFeatureStatus(DreamStatus.Forbidden, "DELETE access is required to hide a revision and ADMIN access to unhide")] [DreamFeatureStatus(DreamStatus.NotFound, "Requested file could not be found")] public Yield PostFileRevisions(DreamContext context, DreamMessage request, Result<DreamMessage> response) { AttachmentBE file = GetAttachment(request, Permissions.DELETE); AttachmentBE[] modifiedRevs = AttachmentBL.Instance.ModifyRevisionVisibility(file, request.ToDocument(), context.GetParam("comment", string.Empty)); XDoc ret = AttachmentBL.Instance.GetAttachmentRevisionListXml(modifiedRevs); response.Return(DreamMessage.Ok(ret)); yield break; } 
[DreamFeature("PUT:pages/{pageid}/files/{filename}", "Replace an existing attachment with a new version or create a new attachment")] [DreamFeature("PUT:files/{fileid}", "Replace an existing attachment with a new version")] [DreamFeature("PUT:files/{fileid}/{filename}", "Replace an existing attachment with a new version")] [DreamFeatureParam("{pageid}", "string", "either an integer page ID, \"home\", or \"=\" followed by a double uri-encoded page title")] [DreamFeatureParam("{filename}", "string", "\"=\" followed by a double uri-encoded file name")] [DreamFeatureParam("description", "string?", "file attachment description")] [DreamFeatureParam("redirects", "int?", "If zero, do not follow page redirects.")] [DreamFeatureParam("authenticate", "bool?", "Force authentication for request (default: false)")] [DreamFeatureStatus(DreamStatus.Ok, "The request completed successfully")] [DreamFeatureStatus(DreamStatus.BadRequest, "Invalid input parameter or request body")] [DreamFeatureStatus(DreamStatus.Forbidden, "Update access to the page is required")] [DreamFeatureStatus(DreamStatus.NotFound, "Requested page could not be found")] public Yield PutFile(DreamContext context, DreamMessage request, Result<DreamMessage> response) { // Retrieve the file PageBE page; string userFileName; AttachmentBE file = GetAttachmentFromUrl(false, out page, false, false); // If the file does not exist, attempt to retrieve the page if (null == file) { if (null == page) { if (null != DreamContext.Current.GetParam<string>("fileid")) { throw new DreamAbortException(DreamMessage.NotFound(DekiResources.COULD_NOT_FIND_FILE)); } page = PageBL.GetPageFromUrl(); } userFileName = GetFilenameFromPathSegment(DreamContext.Current.GetParam<string>("filename")); } else { string fileNameParam = DreamContext.Current.GetParam("filename", null); if (fileNameParam == null) { userFileName = file.Name; } else { userFileName = GetFilenameFromPathSegment(fileNameParam); } } // Retrieve the file description string userDescription = context.GetParam("description", string.Empty); if(userDescription.Length > AttachmentBL.MAX_DESCRIPTION_LENGTH) { userDescription = userDescription.Substring(0, AttachmentBL.MAX_DESCRIPTION_LENGTH); } // Validate the page PageBL.AuthorizePage(DekiContext.Current.User, Permissions.UPDATE, page, false); // Get entire stream so it can be reused AttachmentBE savedFileRevision = AttachmentBL.Instance.AddAttachment(file, request.AsStream(), request.ContentLength, request.ContentType, page, userDescription, userFileName); // report an error on failure, and don't redirect if (savedFileRevision == null) { response.Return(DreamMessage.InternalError(DekiResources.FAILED_TO_SAVE_UPLOAD)); yield break; } response.Return(DreamMessage.Ok(AttachmentBL.Instance.GetFileXml(savedFileRevision, true, null, null))); yield break; } [DreamFeature("POST:files/{fileid}/move", "Move an attachment from one page to another and/or change the filename")] [DreamFeatureParam("{fileid}", "int", "identifies a file by ID")] [DreamFeatureParam("to", "string?", "page id of target page")] [DreamFeatureParam("name", "string?", "new filename")] [DreamFeatureParam("authenticate", "bool?", "Force authentication for request (default: false)")] [DreamFeatureStatus(DreamStatus.Ok, "The request completed successfully")] [DreamFeatureStatus(DreamStatus.BadRequest, "Invalid input parameter or request body")] [DreamFeatureStatus(DreamStatus.Forbidden, "Update access to the page is required")] [DreamFeatureStatus(DreamStatus.NotFound, "Requested file could not be 
found")] public Yield PostFileMove(DreamContext context, DreamMessage request, Result<DreamMessage> response) { PageBE sourcePage = null; PageBE destPage = null; AttachmentBE fileToMove = GetAttachmentFromUrl(true, out sourcePage, false, false); // parameter parsing string name = context.GetParam("name", null); string to = context.GetParam("to", null); if(string.IsNullOrEmpty(name) && string.IsNullOrEmpty(to)) { throw new DreamBadRequestException(DekiResources.ATTACHMENT_MOVE_INVALID_PARAM); } if(name == null) { name = fileToMove.Name; } destPage = to != null ? PageBL.GetPageFromPathSegment(true, to) : sourcePage; //Check if we're actually doing anything if(sourcePage.ID == destPage.ID && StringUtil.EqualsInvariant(fileToMove.Name, name)) { throw new DreamAbortException(DreamMessage.BadRequest(string.Format(DekiResources.ATTACHMENT_EXISTS_ON_PAGE, fileToMove.Name, destPage.Title.AsUserFriendlyName()))); } //Ensure write access to source and destination pages. IList<PageBE> pList = PermissionsBL.FilterDisallowed(DekiContext.Current.User, new PageBE[] { sourcePage, destPage }, true, Permissions.UPDATE); // perform the move AttachmentBE ret = AttachmentBL.Instance.MoveAttachment(fileToMove, sourcePage, destPage, name, true); response.Return(DreamMessage.Ok(AttachmentBL.Instance.GetFileXml(ret, true, null, false))); yield break; } [DreamFeature("DELETE:pages/{pageid}/files/{filename}", "Delete file attachment")] [DreamFeature("DELETE:files/{fileid}", "Delete file attachment")] [DreamFeature("DELETE:files/{fileid}/{filename}", "Delete file attachment")] [DreamFeatureParam("{pageid}", "string", "either an integer page ID, \"home\", or \"=\" followed by a double uri-encoded page title")] [DreamFeatureParam("{filename}", "string", "\"=\" followed by a double uri-encoded file name")] [DreamFeatureParam("{fileid}", "int", "identifies a file by ID")] [DreamFeatureParam("redirects", "int?", "If zero, do not follow page redirects (only applies when {pageid} is present).")] [DreamFeatureParam("authenticate", "bool?", "Force authentication for request (default: false)")] [DreamFeatureStatus(DreamStatus.Ok, "The request completed successfully")] [DreamFeatureStatus(DreamStatus.BadRequest, "Invalid input parameter or request body")] [DreamFeatureStatus(DreamStatus.Forbidden, "Update access to the page is required")] [DreamFeatureStatus(DreamStatus.NotFound, "The requested file could not be found")] public Yield DeleteFile(DreamContext context, DreamMessage request, Result<DreamMessage> response) { AttachmentBE fileRevision = GetAttachment(request, Permissions.UPDATE); // check if anything needs to be done if (!fileRevision.ResourceIsDeleted) { AttachmentBL.Instance.RemoveAttachments(new AttachmentBE[] { fileRevision }); response.Return(DreamMessage.Ok()); }else{ response.Return(DreamMessage.NotFound(DekiResources.FILE_ALREADY_REMOVED)); } yield break; } [DreamFeature("GET:pages/{pageid}/files/{filename}/description", "retrieves a file description")] [DreamFeature("GET:files/{fileid}/description", "retrieves a file description")] [DreamFeatureParam("{pageid}", "string", "either an integer page ID, \"home\", or \"=\" followed by a double uri-encoded page title")] [DreamFeatureParam("{filename}", "string", "\"=\" followed by a double uri-encoded file name")] [DreamFeatureParam("{fileid}", "int", "identifies a file by ID")] [DreamFeatureParam("revision", "string?", "File revision to retrieve. 'head' by default will retrieve latest revision. 
positive integer will retrieve specific revision")] [DreamFeatureParam("redirects", "int?", "If zero, do not follow page redirects (only applies when {pageid} is present).")] [DreamFeatureParam("authenticate", "bool?", "Force authentication for request (default: false)")] [DreamFeatureStatus(DreamStatus.Ok, "The request completed successfully")] [DreamFeatureStatus(DreamStatus.BadRequest, "Invalid input parameter or request body")] [DreamFeatureStatus(DreamStatus.Forbidden, "Read access to the page is required")] [DreamFeatureStatus(DreamStatus.NotFound, "Requested file could not be found")] public Yield GetFileDescription(DreamContext context, DreamMessage request, Result<DreamMessage> response) { PageBE parentPage = null; AttachmentBE file = GetAttachment(request, Permissions.READ, true, false, out parentPage); PropertyBE descriptionProperty = PropertyBL.Instance.GetResource(file.ResourceId, file.ResourceType, AttachmentBL.PROP_DESC); if(descriptionProperty == null) { // reply with no data response.Return(DreamMessage.Ok()); } else { // reply with description response.Return(DreamMessage.Ok(descriptionProperty.Content.MimeType, descriptionProperty.Content.ToText())); } yield break; } [DreamFeature("PUT:pages/{pageid}/files/{filename}/description", "Update attachment description")] [DreamFeature("PUT:files/{fileid}/description", "Update attachment description")] [DreamFeature("DELETE:files/{fileid}/description", "Reset the file description")] [DreamFeature("DELETE:pages/{pageid}/files/{filename}/description", "Reset the file description")] [DreamFeatureParam("{pageid}", "string", "either an integer page ID, \"home\", or \"=\" followed by a double uri-encoded page title")] [DreamFeatureParam("{filename}", "string", "\"=\" followed by a double uri-encoded file name")] [DreamFeatureParam("{fileid}", "int", "identifies a file by ID")] [DreamFeatureParam("redirects", "int?", "If zero, do not follow page redirects (only applies when {pageid} is present).")] [DreamFeatureParam("authenticate", "bool?", "Force authentication for request (default: false)")] [DreamFeatureStatus(DreamStatus.Ok, "Request completed successfully")] [DreamFeatureStatus(DreamStatus.BadRequest, "Invalid input parameter or request body")] [DreamFeatureStatus(DreamStatus.Forbidden, "Update access to the page is required")] [DreamFeatureStatus(DreamStatus.NotFound, "Requested file could not be found")] public Yield PutFileDescription(DreamContext context, DreamMessage request, Result<DreamMessage> response) { PageBE parentPage; AttachmentBE file = GetAttachment(request, Permissions.UPDATE, false, false, out parentPage); // determine if description needs to be set or cleared string description = StringUtil.EqualsInvariant(context.Verb, "PUT") ? 
request.AsText() : string.Empty; file = AttachmentBL.Instance.SetDescription(file, description); response.Return(DreamMessage.Ok(AttachmentBL.Instance.GetFileXml(file, true, null, null))); yield break; } [DreamFeature("POST:files/{fileid}/index", "re-index an attachment")] [DreamFeatureParam("{fileid}", "int", "identifies a file by ID")] [DreamFeatureParam("authenticate", "bool?", "Force authentication for request (default: false)")] [DreamFeatureStatus(DreamStatus.Ok, "Request completed successfully")] [DreamFeatureStatus(DreamStatus.Forbidden, "MindTouch API key or Administrator access is required.")] internal Yield IndexFile(DreamContext context, DreamMessage request, Result<DreamMessage> response) { AttachmentBE file = GetAttachment(request, Permissions.NONE); DekiContext.Current.Instance.EventSink.AttachmentPoke(context.StartTime, file); response.Return(DreamMessage.Ok()); yield break; } //--- Methods --- private AttachmentBE GetAttachment(DreamMessage request, Permissions access) { PageBE p; return GetAttachment(request, access, false, false, out p); } private AttachmentBE GetAttachment(DreamMessage request, Permissions access, bool allowRevs, bool allowDeleted, out PageBE parentPage) { AttachmentBE file = GetAttachmentFromUrl(true, out parentPage, allowRevs, allowDeleted); PageBL.AuthorizePage(DekiContext.Current.User, access, parentPage, false); DreamContext.Current.SetState<UserBE>(DekiContext.Current.User); DreamContext.Current.SetState<PageBE>(parentPage); //Identify images for upgrades List<AttachmentBE> fileList = new List<AttachmentBE>(); fileList.Add(file); AttachmentBL.Instance.IdentifyUnknownImages(fileList); return file; } private AttachmentBE GetAttachmentFromUrl(bool mustExist, out PageBE page, bool allowRevs, bool allowDeleted) { AttachmentBE file = null; int revision = AttachmentBE.HEADREVISION; page = null; string revStr = DreamContext.Current.GetParam("revision", "head").Trim(); if(allowRevs) { if(!StringUtil.EqualsInvariantIgnoreCase(revStr, "head") && !StringUtil.EqualsInvariantIgnoreCase(revStr, "0")) if(!int.TryParse(revStr, out revision)) { throw new DreamAbortException(DreamMessage.BadRequest(DekiResources.REVISION_HEAD_OR_INT)); } } else if(!StringUtil.EqualsInvariantIgnoreCase(revStr, "head") && !StringUtil.EqualsInvariantIgnoreCase(revStr, "0")) { throw new DreamAbortException(DreamMessage.BadRequest(DekiResources.REVISION_NOT_SUPPORTED)); } uint fileId = DreamContext.Current.GetParam<uint>("fileid", 0); if (fileId != 0) { uint resourceId = ResourceMapBL.GetResourceIdByFileId(fileId) ?? 0; if(resourceId > 0) { // use resourceid to retrieve attachment file = AttachmentBL.Instance.GetResource(resourceId, revision); } if(file != null) { page = PageBL.GetPageById(file.ParentPageId); } } else { // use filename to retrieve attachment string fileName = GetFilenameFromPathSegment(DreamContext.Current.GetParam<string>("filename")); page = PageBL.GetPageFromUrl(); DeletionFilter deleteFilter = allowDeleted ? DeletionFilter.ANY : DeletionFilter.ACTIVEONLY; file = AttachmentBL.Instance.GetResource(page, fileName, deleteFilter, revision); } if(file == null) { if(mustExist) { throw new DreamAbortException(DreamMessage.NotFound(DekiResources.COULD_NOT_FIND_FILE)); } } else { if(!allowDeleted ) { if(file.ResourceIsDeleted) { throw new DreamAbortException(DreamMessage.NotFound(DekiResources.FILE_HAS_BEEN_REMOVED)); } if(page == null) { //Throw a 404 status if file is not marked as deleted but the parent page cannot be found. 
//This may be caused by an unfinished page delete operation that didn't mark the file as deleted.
                    throw new DreamAbortException(DreamMessage.NotFound(DekiResources.FILE_HAS_BEEN_REMOVED));
                }
            }

            //Parent resource of a file is the page
            file.ParentResource = new PageWrapperBE(page);
        }
        return file;
    }

    public static string GetFilenameFromPathSegment(string filePathSegment) {
        string filename = null;
        if (filePathSegment.StartsWith("=")) {
            filename = XUri.Decode(filePathSegment.Substring(1));
        }
        if (string.IsNullOrEmpty(filename)) {
            throw new DreamAbortException(DreamMessage.BadRequest(DekiResources.MISSING_FILENAME));
        }
        return filename;
    }
}
}
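// ---------------------------------------------------------------------------------------------
// Illustrative client-side sketch (not part of the original source): uploading a new revision
// of an attachment through the "PUT:pages/{pageid}/files/{filename}" feature defined above.
// The host, API prefix, file names and credentials are assumptions; note that page titles and
// file names travel as "=" followed by a double uri-encoded value, matching
// GetFilenameFromPathSegment, which strips the "=" and decodes once.
// ---------------------------------------------------------------------------------------------
using System;
using System.IO;
using System.Net;

class AttachmentUploadSketch
{
    static void Main()
    {
        // "My Page" / "photo album.jpg" after double uri-encoding and the "=" prefix.
        var uri = "http://wiki.example.com/@api/deki/pages/=My%2520Page/files/=photo%2520album.jpg?description=holiday";

        var request = (HttpWebRequest)WebRequest.Create(uri);
        request.Method = "PUT";
        request.ContentType = "image/jpeg";
        request.Credentials = new NetworkCredential("admin", "password"); // assumed account

        byte[] body = File.ReadAllBytes("photo album.jpg");
        request.ContentLength = body.Length;
        using (Stream requestStream = request.GetRequestStream())
        {
            requestStream.Write(body, 0, body.Length);
        }

        using (var response = (HttpWebResponse)request.GetResponse())
        {
            Console.WriteLine(response.StatusCode); // 200 with the file XML on success
        }
    }
}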
/*============================================================================ * Copyright (C) Microsoft Corporation, All rights reserved. *============================================================================ */ #region Using directives using System; using System.Globalization; using System.Management.Automation; using System.Threading; #endregion namespace Microsoft.Management.Infrastructure.CimCmdlets { /// <summary> /// <para> /// Subscription result event args /// </para> /// </summary> internal abstract class CimSubscriptionEventArgs : EventArgs { /// <summary> /// <para> /// Returns an Object value for an operation context /// </para> /// </summary> public Object Context { get { return context; } } protected Object context; } /// <summary> /// <para> /// Subscription result event args /// </para> /// </summary> internal class CimSubscriptionResultEventArgs : CimSubscriptionEventArgs { /// <summary> /// <para> /// subscription result /// </para> /// </summary> public CimSubscriptionResult Result { get { return result; } } private CimSubscriptionResult result; /// <summary> /// <para>Constructor</para> /// </summary> /// <param name="theResult"></param> public CimSubscriptionResultEventArgs( CimSubscriptionResult theResult) { this.context = null; this.result = theResult; } } /// <summary> /// <para> /// Subscription result event args /// </para> /// </summary> internal class CimSubscriptionExceptionEventArgs : CimSubscriptionEventArgs { /// <summary> /// <para> /// subscription result /// </para> /// </summary> public Exception Exception { get { return exception; } } private Exception exception; /// <summary> /// <para>Constructor</para> /// </summary> /// <param name="theResult"></param> public CimSubscriptionExceptionEventArgs( Exception theException) { this.context = null; this.exception = theException; } } /// <summary> /// <para> /// Implements operations of register-cimindication cmdlet. /// </para> /// </summary> internal sealed class CimRegisterCimIndication : CimAsyncOperation { /// <summary> /// <para> /// New subscription result event /// </para> /// </summary> public event EventHandler<CimSubscriptionEventArgs> OnNewSubscriptionResult; /// <summary> /// <para> /// Constructor /// </para> /// </summary> public CimRegisterCimIndication() : base() { this.ackedEvent = new ManualResetEventSlim(false); } /// <summary> /// Start an indication subscription target to the given computer. /// </summary> /// <param name="computerName">null stands for localhost</param> /// <param name="nameSpace"></param> /// <param name="queryDialect"></param> /// <param name="queryExpression"></param> /// <param name="opreationTimeout"></param> public void RegisterCimIndication( string computerName, string nameSpace, string queryDialect, string queryExpression, UInt32 opreationTimeout) { DebugHelper.WriteLogEx("queryDialect = '{0}'; queryExpression = '{1}'", 0, queryDialect, queryExpression); this.TargetComputerName = computerName; CimSessionProxy proxy = CreateSessionProxy(computerName, opreationTimeout); proxy.SubscribeAsync(nameSpace, queryDialect, queryExpression); WaitForAckMessage(); } /// <summary> /// Start an indication subscription through a given <see cref="CimSession"/>. 
/// </summary> /// <param name="cimSession">Cannot be null</param> /// <param name="nameSpace"></param> /// <param name="queryDialect"></param> /// <param name="queryExpression"></param> /// <param name="opreationTimeout"></param> /// <exception cref="ArgumentNullException">throw if cimSession is null</exception> public void RegisterCimIndication( CimSession cimSession, string nameSpace, string queryDialect, string queryExpression, UInt32 opreationTimeout) { DebugHelper.WriteLogEx("queryDialect = '{0}'; queryExpression = '{1}'", 0, queryDialect, queryExpression); if (cimSession == null) { throw new ArgumentNullException(String.Format(CultureInfo.CurrentUICulture, Strings.NullArgument, @"cimSession")); } this.TargetComputerName = cimSession.ComputerName; CimSessionProxy proxy = CreateSessionProxy(cimSession, opreationTimeout); proxy.SubscribeAsync(nameSpace, queryDialect, queryExpression); WaitForAckMessage(); } #region override methods /// <summary> /// <para> /// Subscribe to the events issued by <see cref="CimSessionProxy"/>. /// </para> /// </summary> /// <param name="proxy"></param> protected override void SubscribeToCimSessionProxyEvent(CimSessionProxy proxy) { DebugHelper.WriteLog("SubscribeToCimSessionProxyEvent", 4); // Raise event instead of write object to ps proxy.OnNewCmdletAction += this.CimIndicationHandler; proxy.OnOperationCreated += this.OperationCreatedHandler; proxy.OnOperationDeleted += this.OperationDeletedHandler; proxy.EnableMethodResultStreaming = false; } /// <summary> /// <para> /// Handler used to handle new action event from /// <seealso cref="CimSessionProxy"/> object. /// </para> /// </summary> /// <param name="cimSession"> /// <seealso cref="CimSession"/> object raised the event /// </param> /// <param name="actionArgs">event argument</param> private void CimIndicationHandler(object cimSession, CmdletActionEventArgs actionArgs) { DebugHelper.WriteLogEx("action is {0}. Disposed {1}", 0, actionArgs.Action, this.Disposed); if (this.Disposed) { return; } // NOTES: should move after this.Disposed, but need to log the exception CimWriteError cimWriteError = actionArgs.Action as CimWriteError; if (cimWriteError != null) { this.exception = cimWriteError.Exception; if (!this.ackedEvent.IsSet) { // an exception happened DebugHelper.WriteLogEx("an exception happened", 0); this.ackedEvent.Set(); return; } EventHandler<CimSubscriptionEventArgs> temp = this.OnNewSubscriptionResult; if (temp != null) { DebugHelper.WriteLog("Raise an exception event", 2); temp(this, new CimSubscriptionExceptionEventArgs(this.exception)); } DebugHelper.WriteLog("Got an exception: {0}", 2, exception); } CimWriteResultObject cimWriteResultObject = actionArgs.Action as CimWriteResultObject; if (cimWriteResultObject != null) { CimSubscriptionResult result = cimWriteResultObject.Result as CimSubscriptionResult; if (result != null) { EventHandler<CimSubscriptionEventArgs> temp = this.OnNewSubscriptionResult; if (temp != null) { DebugHelper.WriteLog("Raise an result event", 2); temp(this, new CimSubscriptionResultEventArgs(result)); } } else { if (!this.ackedEvent.IsSet) { // an ACK message returned DebugHelper.WriteLogEx("an ack message happened", 0); this.ackedEvent.Set(); return; } else { DebugHelper.WriteLogEx("an ack message should not happen here", 0); } } } } /// <summary> /// block the ps thread until ACK message or Error happened. 
/// </summary> private void WaitForAckMessage() { DebugHelper.WriteLogEx(); this.ackedEvent.Wait(); if (this.exception != null) { DebugHelper.WriteLogEx("error happened", 0); if (this.Cmdlet != null) { DebugHelper.WriteLogEx("Throw Terminating error", 1); // throw terminating error ErrorRecord errorRecord = ErrorToErrorRecord.ErrorRecordFromAnyException( new InvocationContext(this.TargetComputerName, null), this.exception, null); this.Cmdlet.ThrowTerminatingError(errorRecord); } else { DebugHelper.WriteLogEx("Throw exception", 1); // throw exception out throw this.exception; } } DebugHelper.WriteLogEx("ACK happened", 0); } #endregion #region internal property /// <summary> /// The cmdlet object who issue this subscription, /// to throw ThrowTerminatingError /// in case there is a subscription failure /// </summary> /// <param name="cmdlet"></param> internal Cmdlet Cmdlet { set; get; } /// <summary> /// target computername /// </summary> internal String TargetComputerName { set; get; } #endregion #region private methods /// <summary> /// <para> /// Create <see cref="CimSessionProxy"/> and set properties /// </para> /// </summary> /// <param name="computerName"></param> /// <param name="timeout"></param> /// <returns></returns> private CimSessionProxy CreateSessionProxy( string computerName, UInt32 timeout) { CimSessionProxy proxy = CreateCimSessionProxy(computerName); proxy.OperationTimeout = timeout; return proxy; } /// <summary> /// Create <see cref="CimSessionProxy"/> and set properties /// </summary> /// <param name="session"></param> /// <param name="timeout"></param> /// <returns></returns> private CimSessionProxy CreateSessionProxy( CimSession session, UInt32 timeout) { CimSessionProxy proxy = CreateCimSessionProxy(session); proxy.OperationTimeout = timeout; return proxy; } #endregion #region private members /// <summary> /// Exception occurred while start the subscription /// </summary> internal Exception Exception { get { return exception; } } private Exception exception; #endregion }//End Class }//End namespace
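// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): how a caller inside this assembly
// might drive CimRegisterCimIndication. The namespace, query dialect and query text below are
// assumptions; RegisterCimIndication blocks until the subscription is acknowledged or fails.
// ---------------------------------------------------------------------------------------------
namespace Microsoft.Management.Infrastructure.CimCmdlets
{
    internal static class CimRegisterCimIndicationUsageSketch
    {
        internal static CimRegisterCimIndication Subscribe(string computerName)
        {
            var subscription = new CimRegisterCimIndication();

            subscription.OnNewSubscriptionResult += (sender, args) =>
            {
                var resultArgs = args as CimSubscriptionResultEventArgs;
                if (resultArgs != null)
                {
                    // A new indication instance arrived; resultArgs.Result carries it.
                }

                var errorArgs = args as CimSubscriptionExceptionEventArgs;
                if (errorArgs != null)
                {
                    // The subscription reported an error after the initial ACK.
                }
            };

            // Blocks in WaitForAckMessage until the ACK message or an error is received.
            subscription.RegisterCimIndication(
                computerName,
                @"root\cimv2",                          // assumed namespace
                "WQL",                                  // assumed query dialect
                "SELECT * FROM CIM_ProcessIndication",  // assumed query
                0);                                     // operation timeout (value assumed)
            return subscription;
        }
    }
}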
using System; using System.IO; using NUnit.Framework; using Org.BouncyCastle.Asn1.Cms; using Org.BouncyCastle.Crypto; using Org.BouncyCastle.Crypto.Generators; using Org.BouncyCastle.Crypto.Parameters; using Org.BouncyCastle.Math; using Org.BouncyCastle.Security; using Org.BouncyCastle.Utilities.Test; namespace Org.BouncyCastle.OpenSsl.Tests { /** * basic class for reading test.pem - the password is "secret" */ [TestFixture] public class ReaderTest : SimpleTest { private class Password : IPasswordFinder { private readonly char[] password; public Password( char[] word) { this.password = (char[]) word.Clone(); } public char[] GetPassword() { return (char[]) password.Clone(); } } public override string Name { get { return "PEMReaderTest"; } } public override void PerformTest() { IPasswordFinder pGet = new Password("secret".ToCharArray()); PemReader pemRd = OpenPemResource("test.pem", pGet); AsymmetricCipherKeyPair pair; object o; while ((o = pemRd.ReadObject()) != null) { // if (o is AsymmetricCipherKeyPair) // { // ackp = (AsymmetricCipherKeyPair)o; // // Console.WriteLine(ackp.Public); // Console.WriteLine(ackp.Private); // } // else // { // Console.WriteLine(o.ToString()); // } } // // pkcs 7 data // pemRd = OpenPemResource("pkcs7.pem", null); ContentInfo d = (ContentInfo)pemRd.ReadObject(); if (!d.ContentType.Equals(CmsObjectIdentifiers.EnvelopedData)) { Fail("failed envelopedData check"); } /* { // // ECKey // pemRd = OpenPemResource("eckey.pem", null); // TODO Resolve return type issue with EC keys and fix PemReader to return parameters // ECNamedCurveParameterSpec spec = (ECNamedCurveParameterSpec)pemRd.ReadObject(); pair = (AsymmetricCipherKeyPair)pemRd.ReadObject(); ISigner sgr = SignerUtilities.GetSigner("ECDSA"); sgr.Init(true, pair.Private); byte[] message = new byte[] { (byte)'a', (byte)'b', (byte)'c' }; sgr.BlockUpdate(message, 0, message.Length); byte[] sigBytes = sgr.GenerateSignature(); sgr.Init(false, pair.Public); sgr.BlockUpdate(message, 0, message.Length); if (!sgr.VerifySignature(sigBytes)) { Fail("EC verification failed"); } // TODO Resolve this issue with the algorithm name, study Java version // if (!((ECPublicKeyParameters) pair.Public).AlgorithmName.Equals("ECDSA")) // { // Fail("wrong algorithm name on public got: " + ((ECPublicKeyParameters) pair.Public).AlgorithmName); // } // // if (!((ECPrivateKeyParameters) pair.Private).AlgorithmName.Equals("ECDSA")) // { // Fail("wrong algorithm name on private got: " + ((ECPrivateKeyParameters) pair.Private).AlgorithmName); // } } */ // // writer/parser test // IAsymmetricCipherKeyPairGenerator kpGen = GeneratorUtilities.GetKeyPairGenerator("RSA"); kpGen.Init( new RsaKeyGenerationParameters( BigInteger.ValueOf(0x10001), new SecureRandom(), 768, 25)); pair = kpGen.GenerateKeyPair(); keyPairTest("RSA", pair); // kpGen = KeyPairGenerator.getInstance("DSA"); // kpGen.initialize(512, new SecureRandom()); DsaParametersGenerator pGen = new DsaParametersGenerator(); pGen.Init(512, 80, new SecureRandom()); kpGen = GeneratorUtilities.GetKeyPairGenerator("DSA"); kpGen.Init( new DsaKeyGenerationParameters( new SecureRandom(), pGen.GenerateParameters())); pair = kpGen.GenerateKeyPair(); keyPairTest("DSA", pair); // // PKCS7 // MemoryStream bOut = new MemoryStream(); PemWriter pWrt = new PemWriter(new StreamWriter(bOut)); pWrt.WriteObject(d); pWrt.Writer.Close(); pemRd = new PemReader(new StreamReader(new MemoryStream(bOut.ToArray(), false))); d = (ContentInfo)pemRd.ReadObject(); if 
(!d.ContentType.Equals(CmsObjectIdentifiers.EnvelopedData)) { Fail("failed envelopedData recode check"); } // OpenSSL test cases (as embedded resources) doOpenSslDsaTest("unencrypted"); doOpenSslRsaTest("unencrypted"); doOpenSslTests("aes128"); doOpenSslTests("aes192"); doOpenSslTests("aes256"); doOpenSslTests("blowfish"); doOpenSslTests("des1"); doOpenSslTests("des2"); doOpenSslTests("des3"); doOpenSslTests("rc2_128"); doOpenSslDsaTest("rc2_40_cbc"); doOpenSslRsaTest("rc2_40_cbc"); doOpenSslDsaTest("rc2_64_cbc"); doOpenSslRsaTest("rc2_64_cbc"); // TODO Figure out why exceptions differ for commented out cases doDudPasswordTest("7fd98", 0, "Corrupted stream - out of bounds length found"); doDudPasswordTest("ef677", 1, "Corrupted stream - out of bounds length found"); // doDudPasswordTest("800ce", 2, "cannot recognise object in stream"); doDudPasswordTest("b6cd8", 3, "DEF length 81 object truncated by 56"); doDudPasswordTest("28ce09", 4, "DEF length 110 object truncated by 28"); doDudPasswordTest("2ac3b9", 5, "DER length more than 4 bytes: 11"); doDudPasswordTest("2cba96", 6, "DEF length 100 object truncated by 35"); doDudPasswordTest("2e3354", 7, "DEF length 42 object truncated by 9"); doDudPasswordTest("2f4142", 8, "DER length more than 4 bytes: 14"); doDudPasswordTest("2fe9bb", 9, "DER length more than 4 bytes: 65"); doDudPasswordTest("3ee7a8", 10, "DER length more than 4 bytes: 57"); doDudPasswordTest("41af75", 11, "malformed sequence in DSA private key"); doDudPasswordTest("1704a5", 12, "corrupted stream detected"); // doDudPasswordTest("1c5822", 13, "corrupted stream detected"); // doDudPasswordTest("5a3d16", 14, "corrupted stream detected"); doDudPasswordTest("8d0c97", 15, "corrupted stream detected"); doDudPasswordTest("bc0daf", 16, "corrupted stream detected"); doDudPasswordTest("aaf9c4d",17, "Corrupted stream - out of bounds length found"); // encrypted private key test pGet = new Password("password".ToCharArray()); pemRd = OpenPemResource("enckey.pem", pGet); RsaPrivateCrtKeyParameters privKey = (RsaPrivateCrtKeyParameters)pemRd.ReadObject(); if (!privKey.PublicExponent.Equals(new BigInteger("10001", 16))) { Fail("decryption of private key data check failed"); } // general PKCS8 test pGet = new Password("password".ToCharArray()); pemRd = OpenPemResource("pkcs8test.pem", pGet); while ((privKey = (RsaPrivateCrtKeyParameters)pemRd.ReadObject()) != null) { if (!privKey.PublicExponent.Equals(new BigInteger("10001", 16))) { Fail("decryption of private key data check failed"); } } } private void keyPairTest( string name, AsymmetricCipherKeyPair pair) { MemoryStream bOut = new MemoryStream(); PemWriter pWrt = new PemWriter(new StreamWriter(bOut)); pWrt.WriteObject(pair.Public); pWrt.Writer.Close(); PemReader pemRd = new PemReader(new StreamReader(new MemoryStream(bOut.ToArray(), false))); AsymmetricKeyParameter pubK = (AsymmetricKeyParameter) pemRd.ReadObject(); if (!pubK.Equals(pair.Public)) { Fail("Failed public key read: " + name); } bOut = new MemoryStream(); pWrt = new PemWriter(new StreamWriter(bOut)); pWrt.WriteObject(pair.Private); pWrt.Writer.Close(); pemRd = new PemReader(new StreamReader(new MemoryStream(bOut.ToArray(), false))); AsymmetricCipherKeyPair kPair = (AsymmetricCipherKeyPair) pemRd.ReadObject(); if (!kPair.Private.Equals(pair.Private)) { Fail("Failed private key read: " + name); } if (!kPair.Public.Equals(pair.Public)) { Fail("Failed private key public read: " + name); } } private void doOpenSslTests( string baseName) { doOpenSslDsaModesTest(baseName); 
doOpenSslRsaModesTest(baseName); } private void doOpenSslDsaModesTest( string baseName) { doOpenSslDsaTest(baseName + "_cbc"); doOpenSslDsaTest(baseName + "_cfb"); doOpenSslDsaTest(baseName + "_ecb"); doOpenSslDsaTest(baseName + "_ofb"); } private void doOpenSslRsaModesTest( string baseName) { doOpenSslRsaTest(baseName + "_cbc"); doOpenSslRsaTest(baseName + "_cfb"); doOpenSslRsaTest(baseName + "_ecb"); doOpenSslRsaTest(baseName + "_ofb"); } private void doOpenSslDsaTest( string name) { string fileName = "dsa.openssl_dsa_" + name + ".pem"; doOpenSslTestFile(fileName, typeof(DsaPrivateKeyParameters)); } private void doOpenSslRsaTest( string name) { string fileName = "rsa.openssl_rsa_" + name + ".pem"; doOpenSslTestFile(fileName, typeof(RsaPrivateCrtKeyParameters)); } private void doOpenSslTestFile( string fileName, Type expectedPrivKeyType) { PemReader pr = OpenPemResource(fileName, new Password("changeit".ToCharArray())); AsymmetricCipherKeyPair kp = pr.ReadObject() as AsymmetricCipherKeyPair; pr.Reader.Close(); if (kp == null) { Fail("Didn't find OpenSSL key"); } if (!expectedPrivKeyType.IsInstanceOfType(kp.Private)) { Fail("Returned key not of correct type"); } } private void doDudPasswordTest(string password, int index, string message) { // illegal state exception check - in this case the wrong password will // cause an underlying class cast exception. try { IPasswordFinder pGet = new Password(password.ToCharArray()); PemReader pemRd = OpenPemResource("test.pem", pGet); Object o; while ((o = pemRd.ReadObject()) != null) { } Fail("issue not detected: " + index); } catch (IOException e) { if (e.Message.IndexOf(message) < 0) { Console.Error.WriteLine(message); Console.Error.WriteLine(e.Message); Fail("issue " + index + " exception thrown, but wrong message"); } } } private static PemReader OpenPemResource( string fileName, IPasswordFinder pGet) { Stream data = GetTestDataAsStream("openssl." + fileName); TextReader tr = new StreamReader(data); return new PemReader(tr, pGet); } public static void Main( string[] args) { RunTest(new ReaderTest()); } [Test] public void TestFunction() { string resultText = Perform().ToString(); Assert.AreEqual(Name + ": Okay", resultText); } } }
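// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): reading a password-protected private
// key from a PEM file with PemReader, mirroring the patterns exercised by the test above. The
// file path and password are assumptions; depending on the PEM contents, ReadObject() returns
// an AsymmetricCipherKeyPair (OpenSSL-style keys) or a bare key parameter object (e.g. the
// RsaPrivateCrtKeyParameters seen above for enckey.pem / pkcs8test.pem).
// ---------------------------------------------------------------------------------------------
using System.IO;
using Org.BouncyCastle.OpenSsl;

namespace Org.BouncyCastle.OpenSsl.Examples
{
    internal class StaticPassword : IPasswordFinder
    {
        private readonly char[] password;

        public StaticPassword(string password)
        {
            this.password = password.ToCharArray();
        }

        public char[] GetPassword()
        {
            return (char[])password.Clone();
        }
    }

    internal static class PemReadSketch
    {
        public static object ReadPrivateKey(string path, string password)
        {
            using (TextReader reader = new StreamReader(path))
            {
                var pemReader = new PemReader(reader, new StaticPassword(password));
                return pemReader.ReadObject();
            }
        }
    }
}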
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. namespace DotNetty.Transport.Channels { using System; using System.Diagnostics.Contracts; using System.Net; using System.Reflection; using System.Runtime.CompilerServices; using System.Threading.Tasks; using DotNetty.Buffers; abstract class AbstractChannelHandlerContext : IChannelHandlerContext { static readonly ConditionalWeakTable<Type, Tuple<PropagationDirections>> SkipTable = new ConditionalWeakTable<Type, Tuple<PropagationDirections>>(); protected static PropagationDirections GetSkipPropagationFlags(IChannelHandler handler) { Tuple<PropagationDirections> skipDirection = SkipTable.GetValue( handler.GetType(), handlerType => Tuple.Create(CalculateSkipPropagationFlags(handlerType))); return skipDirection == null ? PropagationDirections.None : skipDirection.Item1; } protected static PropagationDirections CalculateSkipPropagationFlags(Type handlerType) { bool skipOutbound = true; bool skipInbound = true; InterfaceMapping mapping = handlerType.GetInterfaceMap(typeof(IChannelHandler)); for (int index = 0; index < mapping.InterfaceMethods.Length && (skipInbound || skipOutbound); index++) { MethodInfo method = mapping.InterfaceMethods[index]; var propagationAttribute = method.GetCustomAttribute<PipelinePropagationAttribute>(); if (propagationAttribute == null) { continue; } MethodInfo implMethod = mapping.TargetMethods[index]; if (implMethod.GetCustomAttribute<SkipAttribute>(false) == null) { switch (propagationAttribute.Direction) { case PropagationDirections.Inbound: skipInbound = false; break; case PropagationDirections.Outbound: skipOutbound = false; break; default: throw new NotSupportedException(string.Format("PropagationDirection value of {0} is not supported.", propagationAttribute.Direction)); } } } var result = PropagationDirections.None; if (skipInbound) { result |= PropagationDirections.Inbound; } if (skipOutbound) { result |= PropagationDirections.Outbound; } return result; } internal volatile AbstractChannelHandlerContext Next; internal volatile AbstractChannelHandlerContext Prev; readonly PropagationDirections skipPropagationFlags; readonly IChannelHandlerInvoker invoker; protected AbstractChannelHandlerContext(IChannelPipeline pipeline, IChannelHandlerInvoker invoker, string name, PropagationDirections skipPropagationDirections) { Contract.Requires(pipeline != null); Contract.Requires(name != null); this.Channel = pipeline.Channel(); this.invoker = invoker; this.skipPropagationFlags = skipPropagationDirections; this.Name = name; } public IChannel Channel { get; private set; } public IByteBufferAllocator Allocator { get { return this.Channel.Allocator; } } public bool Removed { get; internal set; } public string Name { get; private set; } public IChannelHandlerInvoker Invoker { get { if (this.invoker == null) { return this.Channel.EventLoop.Invoker; } else { throw new NotImplementedException(); //return wrappedEventLoop(); } } } public IChannelHandlerContext FireChannelRegistered() { AbstractChannelHandlerContext next = this.FindContextInbound(); next.Invoker.InvokeChannelRegistered(next); return this; } public IChannelHandlerContext FireChannelUnregistered() { AbstractChannelHandlerContext next = this.FindContextInbound(); next.Invoker.InvokeChannelUnregistered(next); return this; } public IChannelHandlerContext FireChannelActive() { AbstractChannelHandlerContext target = this.FindContextInbound(); 
target.Invoker.InvokeChannelActive(target); return this; } public IChannelHandlerContext FireChannelInactive() { AbstractChannelHandlerContext target = this.FindContextInbound(); target.Invoker.InvokeChannelInactive(target); return this; } public virtual IChannelHandlerContext FireExceptionCaught(Exception cause) { AbstractChannelHandlerContext target = this.FindContextInbound(); target.Invoker.InvokeExceptionCaught(target, cause); return this; } public abstract IChannelHandler Handler { get; } public IChannelHandlerContext FireChannelRead(object msg) { AbstractChannelHandlerContext target = this.FindContextInbound(); target.Invoker.InvokeChannelRead(target, msg); return this; } public IChannelHandlerContext FireChannelReadComplete() { AbstractChannelHandlerContext target = this.FindContextInbound(); target.Invoker.InvokeChannelReadComplete(target); return this; } public IChannelHandlerContext FireChannelWritabilityChanged() { AbstractChannelHandlerContext next = this.FindContextInbound(); next.Invoker.InvokeChannelWritabilityChanged(next); return this; } public IChannelHandlerContext FireUserEventTriggered(object evt) { AbstractChannelHandlerContext target = this.FindContextInbound(); target.Invoker.InvokeUserEventTriggered(target, evt); return this; } public Task DeregisterAsync() { AbstractChannelHandlerContext next = this.FindContextOutbound(); return next.Invoker.InvokeDeregisterAsync(next); } public IChannelHandlerContext Read() { AbstractChannelHandlerContext target = this.FindContextOutbound(); target.Invoker.InvokeRead(target); return this; } public Task WriteAsync(object msg) // todo: cancellationToken? { AbstractChannelHandlerContext target = this.FindContextOutbound(); return target.Invoker.InvokeWriteAsync(target, msg); } public IChannelHandlerContext Flush() { AbstractChannelHandlerContext target = this.FindContextOutbound(); target.Invoker.InvokeFlush(target); return this; } public Task WriteAndFlushAsync(object message) // todo: cancellationToken? { AbstractChannelHandlerContext target; target = this.FindContextOutbound(); Task writeFuture = target.Invoker.InvokeWriteAsync(target, message); target = this.FindContextOutbound(); target.Invoker.InvokeFlush(target); return writeFuture; } public Task BindAsync(EndPoint localAddress) { AbstractChannelHandlerContext next = this.FindContextOutbound(); return next.Invoker.InvokeBindAsync(next, localAddress); } public Task ConnectAsync(EndPoint remoteAddress) { return this.ConnectAsync(remoteAddress, null); } public Task ConnectAsync(EndPoint remoteAddress, EndPoint localAddress) { AbstractChannelHandlerContext next = this.FindContextOutbound(); return next.Invoker.InvokeConnectAsync(next, remoteAddress, localAddress); } public Task DisconnectAsync() { if (!this.Channel.DisconnectSupported) { return this.CloseAsync(); } AbstractChannelHandlerContext next = this.FindContextOutbound(); return next.Invoker.InvokeDisconnectAsync(next); } public Task CloseAsync() // todo: cancellationToken? 
{
            AbstractChannelHandlerContext target = this.FindContextOutbound();
            return target.Invoker.InvokeCloseAsync(target);
        }

        // Follows the Next links and returns the first context whose handler does not
        // skip inbound events.
        AbstractChannelHandlerContext FindContextInbound()
        {
            AbstractChannelHandlerContext ctx = this;
            do
            {
                ctx = ctx.Next;
            }
            while ((ctx.skipPropagationFlags & PropagationDirections.Inbound) == PropagationDirections.Inbound);
            return ctx;
        }

        // Follows the Prev links and returns the first context whose handler does not
        // skip outbound operations.
        AbstractChannelHandlerContext FindContextOutbound()
        {
            AbstractChannelHandlerContext ctx = this;
            do
            {
                ctx = ctx.Prev;
            }
            while ((ctx.skipPropagationFlags & PropagationDirections.Outbound) == PropagationDirections.Outbound);
            return ctx;
        }

        public override string ToString()
        {
            return string.Format("{0} ({1}, {2})", typeof(IChannelHandlerContext).Name, this.Name, this.Channel);
        }
    }
}
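// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): the per-type metadata caching pattern
// used by GetSkipPropagationFlags above, reduced to a standalone example. ConditionalWeakTable
// keys on the Type object so cached entries can be collected along with collectible assemblies,
// and Tuple is used because the table requires a reference-type value.
// ---------------------------------------------------------------------------------------------
using System;
using System.Runtime.CompilerServices;

static class TypeMetadataCacheSketch
{
    static readonly ConditionalWeakTable<Type, Tuple<int>> Cache =
        new ConditionalWeakTable<Type, Tuple<int>>();

    public static int GetPublicMethodCount(object instance)
    {
        Tuple<int> entry = Cache.GetValue(
            instance.GetType(),
            type => Tuple.Create(type.GetMethods().Length)); // computed once per type, then cached
        return entry.Item1;
    }
}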
using UnityEngine; using UnityEditor; using System; using System.Linq; using System.Collections.Generic; using Model=UnityEngine.AssetGraph.DataModel.Version2; namespace UnityEngine.AssetGraph { [Serializable] public class ConnectionGUI : ScriptableObject { [SerializeField] private Model.ConnectionData m_data; [SerializeField] private Model.ConnectionPointData m_outputPoint; [SerializeField] private Model.ConnectionPointData m_inputPoint; [SerializeField] private string m_connectionButtonStyle; [SerializeField] private GroupViewContext m_groupViewContext; private Dictionary<string, List<AssetReference>> m_assetGroups; public string Label { get { return m_data.Label; } set { m_data.Label = value; this.name = value; } } public string Id { get { return m_data.Id; } } public string OutputNodeId { get { return m_outputPoint.NodeId; } } public string InputNodeId { get { return m_inputPoint.NodeId; } } public Model.ConnectionPointData OutputPoint { get { return m_outputPoint; } } public Model.ConnectionPointData InputPoint { get { return m_inputPoint; } } public Model.ConnectionData Data { get { return m_data; } } public bool IsSelected { get { return (Selection.activeObject == this); } } public Dictionary<string, List<AssetReference>> AssetGroups { get { return m_assetGroups; } set { m_assetGroups = value; } } public GroupViewContext AssetGroupViewContext { get { return m_groupViewContext; } } private Rect m_buttonRect; public static ConnectionGUI LoadConnection (Model.ConnectionData data, Model.ConnectionPointData output, Model.ConnectionPointData input) { var newCon = ScriptableObject.CreateInstance<ConnectionGUI> (); newCon.Init (data, output, input); return newCon; } public static ConnectionGUI CreateConnection (string label, Model.ConnectionPointData output, Model.ConnectionPointData input) { var newCon = ScriptableObject.CreateInstance<ConnectionGUI> (); newCon.Init ( new Model.ConnectionData(label, output, input), output, input ); return newCon; } private void Init (Model.ConnectionData data, Model.ConnectionPointData output, Model.ConnectionPointData input) { UnityEngine.Assertions.Assert.IsTrue(output.IsOutput, "Given Output point is not output."); UnityEngine.Assertions.Assert.IsTrue(input.IsInput, "Given Input point is not input."); m_data = data; m_outputPoint = output; m_inputPoint = input; this.name = m_data.Label; m_groupViewContext = new GroupViewContext (); m_connectionButtonStyle = "sv_label_0"; } public Rect GetRect () { return m_buttonRect; } public void DrawConnection (List<NodeGUI> nodes, Dictionary<string, List<AssetReference>> assetGroups) { var startNode = nodes.Find(node => node.Id == OutputNodeId); if (startNode == null) { return; } var endNode = nodes.Find(node => node.Id == InputNodeId); if (endNode == null) { return; } var startPoint = m_outputPoint.GetGlobalPosition(startNode); var startV3 = new Vector3(startPoint.x, startPoint.y, 0f); var endPoint = m_inputPoint.GetGlobalPosition(endNode); var endV3 = new Vector3(endPoint.x, endPoint.y, 0f); var centerPoint = startPoint + ((endPoint - startPoint) / 2); var centerPointV3 = new Vector3(centerPoint.x, centerPoint.y, 0f); var pointDistanceX = Model.Settings.GUI.CONNECTION_CURVE_LENGTH; var startTan = new Vector3(startPoint.x + pointDistanceX, centerPoint.y, 0f); var endTan = new Vector3(endPoint.x - pointDistanceX, centerPoint.y, 0f); var totalAssets = 0; var totalGroups = 0; if(assetGroups != null) { totalAssets = assetGroups.Select(v => v.Value.Count).Sum(); totalGroups = assetGroups.Keys.Count; } Color lineColor; 
var lineWidth = (totalAssets > 0) ? 3f : 2f; if(IsSelected) { lineColor = Model.Settings.GUI.COLOR_ENABLED; } else { lineColor = (totalAssets > 0) ? Model.Settings.GUI.COLOR_CONNECTED : Model.Settings.GUI.COLOR_NOT_CONNECTED; } ConnectionGUIUtility.HandleMaterial.SetPass(0); Handles.DrawBezier(startV3, endV3, startTan, endTan, lineColor, null, lineWidth); // draw connection label if connection's label is not normal. GUIStyle labelStyle = new GUIStyle("WhiteMiniLabel"); labelStyle.alignment = TextAnchor.MiddleLeft; switch (Label){ case Model.Settings.DEFAULT_OUTPUTPOINT_LABEL: { // show nothing break; } case Model.Settings.BUNDLECONFIG_BUNDLE_OUTPUTPOINT_LABEL: { var labelWidth = labelStyle.CalcSize(new GUIContent(Model.Settings.BUNDLECONFIG_BUNDLE_OUTPUTPOINT_LABEL)); var labelPointV3 = new Vector3(centerPointV3.x - (labelWidth.x / 2), centerPointV3.y - 24f, 0f) ; Handles.Label(labelPointV3, Model.Settings.BUNDLECONFIG_BUNDLE_OUTPUTPOINT_LABEL, labelStyle); break; } default: { var labelWidth = labelStyle.CalcSize(new GUIContent(Label)); var labelPointV3 = new Vector3(centerPointV3.x - (labelWidth.x / 2), centerPointV3.y - 24f, 0f) ; Handles.Label(labelPointV3, Label, labelStyle); break; } } string connectionLabel; if(totalGroups > 1) { connectionLabel = string.Format("{0}:{1}", totalAssets, totalGroups); } else { connectionLabel = string.Format("{0}", totalAssets); } var style = new GUIStyle(m_connectionButtonStyle); var labelSize = style.CalcSize(new GUIContent(connectionLabel)); m_buttonRect = new Rect(centerPointV3.x - labelSize.x/2f, centerPointV3.y - labelSize.y/2f, labelSize.x, 30f); if ( Event.current.type == EventType.ContextClick || (Event.current.type == EventType.MouseUp && Event.current.button == 1) ) { var rightClickPos = Event.current.mousePosition; if (m_buttonRect.Contains(rightClickPos)) { var menu = new GenericMenu(); menu.AddItem( new GUIContent("Delete"), false, () => { Delete(); } ); menu.ShowAsContext(); Event.current.Use(); } } if (GUI.Button(m_buttonRect, connectionLabel, style)) { this.m_assetGroups = assetGroups; ConnectionGUIUtility.ConnectionEventHandler(new ConnectionEvent(ConnectionEvent.EventType.EVENT_CONNECTION_TAPPED, this)); } } public bool IsEqual (Model.ConnectionPointData from, Model.ConnectionPointData to) { return (m_outputPoint == from && m_inputPoint == to); } public void SetActive (bool active) { if(active) { Selection.activeObject = this; m_connectionButtonStyle = "sv_label_1"; } else { m_connectionButtonStyle = "sv_label_0"; } } public void Delete () { ConnectionGUIUtility.ConnectionEventHandler(new ConnectionEvent(ConnectionEvent.EventType.EVENT_CONNECTION_DELETED, this)); } } public static class NodeEditor_ConnectionListExtension { public static bool ContainsConnection(this List<ConnectionGUI> connections, Model.ConnectionPointData output, Model.ConnectionPointData input) { foreach (var con in connections) { if (con.IsEqual(output, input)) { return true; } } return false; } } }
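// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): creating a ConnectionGUI between two
// existing connection points, guarding against duplicates with the ContainsConnection extension
// defined above. How the output/input points themselves are obtained from node data is outside
// the scope of this sketch.
// ---------------------------------------------------------------------------------------------
using System.Collections.Generic;
using Model = UnityEngine.AssetGraph.DataModel.Version2;

namespace UnityEngine.AssetGraph
{
    public static class ConnectionGUISketch
    {
        public static void AddConnection(
            List<ConnectionGUI> connections,
            Model.ConnectionPointData output,
            Model.ConnectionPointData input,
            string label)
        {
            if (connections.ContainsConnection(output, input))
            {
                return; // this pair of points is already connected
            }

            var connection = ConnectionGUI.CreateConnection(label, output, input);
            connections.Add(connection);
        }
    }
}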
#region License /* * Copyright 2002-2010 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #endregion #region Imports using System.Xml; using Spring.Data.Common; using Spring.Core.TypeResolution; using Spring.Objects; using Spring.Objects.Factory.Config; using Spring.Objects.Factory.Support; using Spring.Objects.Factory.Xml; using Spring.Util; #endregion namespace Spring.Data.Config { /// <summary> /// Implementation of the custom configuration parser for database definitions. /// </summary> /// <author>Mark Pollack</author> [ NamespaceParser( Namespace = "http://www.springframework.net/database", SchemaLocationAssemblyHint = typeof(DatabaseNamespaceParser), SchemaLocation = "/Spring.Data.Config/spring-database-1.3.xsd") ] public class DatabaseNamespaceParser : ObjectsNamespaceParser { private const string DatabaseTypePrefix = "database: "; static DatabaseNamespaceParser() { TypeRegistry.RegisterType( DatabaseTypePrefix + DbProviderConfigurerConstants.DbProviderConfigurerElement, typeof(DbProviderConfigurer)); TypeRegistry.RegisterType( DatabaseTypePrefix + DbProviderFactoryObjectConstants.DbProviderFactoryObjectElement, typeof (DbProviderFactoryObject)); } /// <summary> /// Initializes a new instance of the <see cref="DatabaseNamespaceParser"/> class. /// </summary> public DatabaseNamespaceParser() { } /// <summary> /// Parse the specified element and register any resulting /// IObjectDefinitions with the IObjectDefinitionRegistry that is /// embedded in the supplied ParserContext. /// </summary> /// <param name="element">The element to be parsed into one or more IObjectDefinitions</param> /// <param name="parserContext">The object encapsulating the current state of the parsing /// process.</param> /// <returns> /// The primary IObjectDefinition (can be null as explained above) /// </returns> /// <remarks> /// Implementations should return the primary IObjectDefinition /// that results from the parse phase if they wish to used nested /// inside (for example) a <code>&lt;property&gt;</code> tag. /// <para>Implementations may return null if they will not /// be used in a nested scenario. /// </para> /// </remarks> public override IObjectDefinition ParseElement(XmlElement element, ParserContext parserContext) { string name = element.GetAttribute(ObjectDefinitionConstants.IdAttribute); IConfigurableObjectDefinition remotingDefinition = ParseDbProviderDefinition(element, name, parserContext); if (!StringUtils.HasText(name)) { name = ObjectDefinitionReaderUtils.GenerateObjectName(remotingDefinition, parserContext.Registry); } parserContext.Registry.RegisterObjectDefinition(name, remotingDefinition); return null; } /// <summary> /// Parses database provider definitions. 
/// </summary> /// <param name="element">Validator XML element.</param> /// <param name="name">The name of the object definition.</param> /// <param name="parserContext">The parser context.</param> /// <returns>A database provider object definition.</returns> private IConfigurableObjectDefinition ParseDbProviderDefinition( XmlElement element, string name, ParserContext parserContext) { switch (element.LocalName) { case DbProviderConfigurerConstants.DbProviderConfigurerElement: return ParseDbProviderConfigurer(element, name, parserContext); case DbProviderFactoryObjectConstants.DbProviderFactoryObjectElement: return ParseDbProviderFactoryObject(element, name, parserContext); } return null; } private IConfigurableObjectDefinition ParseDbProviderConfigurer(XmlElement element, string name, ParserContext parserContext) { string typeName = GetTypeName(element); string resource = GetAttributeValue(element, DbProviderConfigurerConstants.ResourceAttribute); MutablePropertyValues propertyValues = new MutablePropertyValues(); if (StringUtils.HasText(resource)) { propertyValues.Add("ProviderResource", resource); } IConfigurableObjectDefinition cod = parserContext.ReaderContext.ObjectDefinitionFactory.CreateObjectDefinition( typeName, null, parserContext.ReaderContext.Reader.Domain); cod.PropertyValues = propertyValues; return cod; } private IConfigurableObjectDefinition ParseDbProviderFactoryObject(XmlElement element, string name, ParserContext parserContext) { string typeName = GetTypeName(element); string providerNameAttribute = GetAttributeValue(element, DbProviderFactoryObjectConstants.ProviderNameAttribute); string connectionString = GetAttributeValue(element, DbProviderFactoryObjectConstants.ConnectionStringAttribute); MutablePropertyValues propertyValues = new MutablePropertyValues(); if (StringUtils.HasText(providerNameAttribute)) { propertyValues.Add("Provider", providerNameAttribute); } if (StringUtils.HasText(connectionString)) { propertyValues.Add("ConnectionString", connectionString); } IConfigurableObjectDefinition cod = parserContext.ReaderContext.ObjectDefinitionFactory.CreateObjectDefinition( typeName, null, parserContext.ReaderContext.Reader.Domain); cod.PropertyValues = propertyValues; return cod; } /* protected override void DoParse(XmlElement element, ParserContext parserContext, ObjectDefinitionBuilder builder) { switch (element.LocalName) { case DbProviderFactoryObjectConstants.DbProviderFactoryObjectElement: { ParseDatabaseConfigurer(element, parserContext, builder); return; } } } */ /* private void ParseDatabaseConfigurer(XmlElement element, ParserContext parserContext, ObjectDefinitionBuilder builder) { string providerNameAttribute = GetAttributeValue(element, DbProviderFactoryObjectConstants.ProviderNameAttribute); string connectionString = GetAttributeValue(element, DbProviderFactoryObjectConstants.ConnectionStringAttribute); if (StringUtils.HasText(providerNameAttribute)) { builder.AddPropertyValue("Provider", providerNameAttribute); } if (StringUtils.HasText(connectionString)) { builder.AddPropertyValue("ConnectionString", connectionString); } } */ /// <summary> /// Gets the name of the object type for the specified element. 
/// </summary>
        /// <param name="element">The element.</param>
        /// <returns>The name of the object type.</returns>
        private string GetTypeName(XmlElement element)
        {
            string typeName = GetAttributeValue(element, ObjectDefinitionConstants.TypeAttribute);
            if (StringUtils.IsNullOrEmpty(typeName))
            {
                return DatabaseTypePrefix + element.LocalName;
            }
            return typeName;
        }

        /*
        protected override string GetObjectTypeName(XmlElement element)
        {
            string typeName = GetAttributeValue(element, ObjectDefinitionConstants.TypeAttribute);
            if (StringUtils.IsNullOrEmpty(typeName))
            {
                return DatabaseTypePrefix + element.LocalName;
            }
            return typeName;
        }*/

        private class DbProviderFactoryObjectConstants
        {
            public const string DbProviderFactoryObjectElement = "provider";
            public const string ProviderNameAttribute = "provider";
            public const string ConnectionStringAttribute = "connectionString";
        }

        private class DbProviderConfigurerConstants
        {
            public const string DbProviderConfigurerElement = "additionalProviders";
            public const string ResourceAttribute = "resource";
        }
    }
}
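// ---------------------------------------------------------------------------------------------
// Illustrative configuration sketch (not part of the original source): the XML shapes this
// parser handles, derived from the element/attribute constants above. The id value, provider
// name, connection string and resource URI are assumptions.
//
//   <objects xmlns="http://www.springframework.net"
//            xmlns:db="http://www.springframework.net/database">
//
//     <!-- parsed by ParseDbProviderFactoryObject into a DbProviderFactoryObject definition -->
//     <db:provider id="DbProvider"
//                  provider="SqlServer-2.0"
//                  connectionString="Data Source=(local);Database=Spring;Trusted_Connection=True"/>
//
//     <!-- parsed by ParseDbProviderConfigurer into a DbProviderConfigurer definition -->
//     <db:additionalProviders resource="assembly://MyApp/MyApp/AdditionalProviders.xml"/>
//
//   </objects>
// ---------------------------------------------------------------------------------------------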
// Copyright (c) 2010-2014 SharpDX - Alexandre Mutel // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // Extract from MonoDevelop.TextTemplating engine. // We are only using the tokenizer here and a simplified T4 implementation. // Tokeniser.cs // // Author: // Michael Hutchinson <[email protected]> // // Copyright (c) 2009 Novell, Inc. (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using System; namespace SharpGen.TextTemplating { public struct Location : IEquatable<Location> { public Location(string fileName, int line, int column) : this() { FileName = fileName; Column = column; Line = line; } public int Line { get; private set; } public int Column { get; private set; } public string FileName { get; private set; } public static Location Empty { get { return new Location(null, -1, -1); } } public Location AddLine() { return new Location(this.FileName, this.Line + 1, 1); } public Location AddCol() { return AddCols(1); } public Location AddCols(int number) { return new Location(this.FileName, this.Line, this.Column + number); } public override string ToString() { return string.Format(System.Globalization.CultureInfo.InvariantCulture, "[{0} ({1},{2})]", FileName, Line, Column); } public bool Equals(Location other) { return other.Line == Line && other.Column == Column && other.FileName == FileName; } } public class Tokeniser { string content; int position = 0; string value; State nextState = State.Content; Location nextStateLocation; Location nextStateTagStartLocation; public Tokeniser(string fileName, string content) { State = State.Content; this.content = content; this.Location = this.nextStateLocation = this.nextStateTagStartLocation = new Location(fileName, 1, 1); } public bool Advance() { value = null; State = nextState; Location = nextStateLocation; TagStartLocation = nextStateTagStartLocation; if (nextState == State.EOF) return false; nextState = GetNextStateAndCurrentValue(); return true; } State GetNextStateAndCurrentValue() { switch (State) { case State.Block: case State.Expression: case State.Helper: return GetBlockEnd(); case State.Directive: return NextStateInDirective(); case State.Content: return NextStateInContent(); case State.DirectiveName: return GetDirectiveName(); case State.DirectiveValue: return GetDirectiveValue(); default: throw new InvalidOperationException("Unexpected state '" + State.ToString() + "'"); } } State GetBlockEnd() { int start = position; for (; position < content.Length; position++) { char c = content[position]; nextStateTagStartLocation = nextStateLocation; nextStateLocation = nextStateLocation.AddCol(); if (c == '\r') { if (position + 1 < content.Length && content[position + 1] == '\n') position++; nextStateLocation = nextStateLocation.AddLine(); } else if (c == '\n') { nextStateLocation = nextStateLocation.AddLine(); } else if (c == '>' && content[position - 1] == '#' && content[position - 2] != '\\') { value = content.Substring(start, position - start - 1); position++; TagEndLocation = nextStateLocation; //skip newlines directly after blocks, unless they're expressions if (State != State.Expression && (position += IsNewLine()) > 0) { nextStateLocation = nextStateLocation.AddLine(); } return State.Content; } } throw new ParserException("Unexpected end of file.", nextStateLocation); } State GetDirectiveName() { int start = position; for (; position < content.Length; position++) { char c = content[position]; if (!Char.IsLetterOrDigit(c)) { value = content.Substring(start, position - start); return State.Directive; } else { nextStateLocation = nextStateLocation.AddCol(); } } throw new ParserException("Unexpected end of file.", nextStateLocation); } State GetDirectiveValue() { int start = position; int delimiter = '\0'; for (; position < content.Length; position++) { char c = content[position]; nextStateLocation = nextStateLocation.AddCol(); if (c == '\r') { if (position + 1 < content.Length && content[position + 1] == '\n') 
position++; nextStateLocation = nextStateLocation.AddLine(); } else if (c == '\n') nextStateLocation = nextStateLocation.AddLine(); if (delimiter == '\0') { if (c == '\'' || c == '"') { start = position; delimiter = c; } else if (!Char.IsWhiteSpace(c)) { throw new ParserException("Unexpected character '" + c + "'. Expecting attribute value.", nextStateLocation); } continue; } if (c == delimiter) { value = content.Substring(start + 1, position - start - 1); position++; return State.Directive; } } throw new ParserException("Unexpected end of file.", nextStateLocation); ; } State NextStateInContent() { int start = position; for (; position < content.Length; position++) { char c = content[position]; nextStateTagStartLocation = nextStateLocation; nextStateLocation = nextStateLocation.AddCol(); if (c == '\r') { if (position + 1 < content.Length && content[position + 1] == '\n') position++; nextStateLocation = nextStateLocation.AddLine(); } else if (c == '\n') { nextStateLocation = nextStateLocation.AddLine(); } else if (c == '<' && position + 2 < content.Length && content[position + 1] == '#') { TagEndLocation = nextStateLocation; char type = content[position + 2]; if (type == '@') { nextStateLocation = nextStateLocation.AddCols(2); value = content.Substring(start, position - start); position += 3; return State.Directive; } else if (type == '=') { nextStateLocation = nextStateLocation.AddCols(2); value = content.Substring(start, position - start); position += 3; return State.Expression; } else if (type == '+') { nextStateLocation = nextStateLocation.AddCols(2); value = content.Substring(start, position - start); position += 3; return State.Helper; } else { value = content.Substring(start, position - start); nextStateLocation = nextStateLocation.AddCol(); position += 2; return State.Block; } } } //EOF is only valid when we're in content value = content.Substring(start); return State.EOF; } int IsNewLine() { int found = 0; if (position < content.Length && content[position] == '\r') { found++; } if (position + found < content.Length && content[position + found] == '\n') { found++; } return found; } State NextStateInDirective() { for (; position < content.Length; position++) { char c = content[position]; if (c == '\r') { if (position + 1 < content.Length && content[position + 1] == '\n') position++; nextStateLocation = nextStateLocation.AddLine(); } else if (c == '\n') { nextStateLocation = nextStateLocation.AddLine(); } else if (Char.IsLetter(c)) { return State.DirectiveName; } else if (c == '=') { nextStateLocation = nextStateLocation.AddCol(); position++; return State.DirectiveValue; } else if (c == '#' && position + 1 < content.Length && content[position + 1] == '>') { position += 2; TagEndLocation = nextStateLocation.AddCols(2); nextStateLocation = nextStateLocation.AddCols(3); //skip newlines directly after directives if ((position += IsNewLine()) > 0) { nextStateLocation = nextStateLocation.AddLine(); } return State.Content; } else if (!Char.IsWhiteSpace(c)) { throw new ParserException("Directive ended unexpectedly with character '" + c + "'", nextStateLocation); } else { nextStateLocation = nextStateLocation.AddCol(); } } throw new ParserException("Unexpected end of file.", nextStateLocation); } public State State { get; private set; } public int Position { get { return position; } } public string Content { get { return content; } } public string Value { get { return value; } } public Location Location { get; private set; } public Location TagStartLocation { get; private set; } public 
Location TagEndLocation { get; private set; }
    }

    public enum State
    {
        Content = 0,
        Directive,
        Expression,
        Block,
        Helper,
        DirectiveName,
        DirectiveValue,
        Name,
        EOF
    }

    public class ParserException : Exception
    {
        public ParserException(string message, Location location)
            : base(message)
        {
            Location = location;
        }

        public Location Location { get; private set; }
    }
}
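// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): driving the Tokeniser over a template
// string. The file name and template text are assumptions supplied by the caller; each call to
// Advance() exposes the token kind via State, its text via Value and its position via Location.
// ---------------------------------------------------------------------------------------------
using System;

namespace SharpGen.TextTemplating
{
    internal static class TokeniserSketch
    {
        public static void Dump(string fileName, string templateText)
        {
            var tokeniser = new Tokeniser(fileName, templateText);
            while (tokeniser.Advance())
            {
                switch (tokeniser.State)
                {
                    case State.Content:        // literal text between tags
                    case State.Expression:     // <#= ... #>
                    case State.Block:          // <# ... #>
                    case State.Helper:         // <#+ ... #>
                    case State.DirectiveName:
                    case State.DirectiveValue:
                        Console.WriteLine("{0} at {1}: {2}", tokeniser.State, tokeniser.Location, tokeniser.Value);
                        break;
                    default:
                        Console.WriteLine("{0} at {1}", tokeniser.State, tokeniser.Location);
                        break;
                }
            }
        }
    }
}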
using System; using System.Collections; using System.IO; using Microsoft.SPOT; namespace RainMakr.Core.Web { using System.Text; using System.Threading; using System.Net; using System.Net.Sockets; using Microsoft.SPOT.Hardware; using Microsoft.SPOT.Net.NetworkInformation; using RainMakr.Core.Utilities; using SecretLabs.NETMF.Hardware.Netduino; class WebServer { private bool _cancel; private readonly Thread _serverThread; private readonly bool _enableLedStatus = true; private OutputPort _led; /// <summary> /// Instantiates a new webserver. /// </summary> /// <param name="port">Port number to listen on.</param> /// <param name="enableLedStatus"></param> public WebServer(int port, bool enableLedStatus = true) { Port = port; _serverThread = new Thread(StartServer); _enableLedStatus = enableLedStatus; Debug.Print("WebControl started on port " + port); } /// <summary> /// Delegate for the EndPointReceived event. /// </summary> public delegate void EndPointReceivedHandler(object source, EndPointEventArgs e); /// <summary> /// EndPointReceived event is triggered when a valid command (plus parameters) is received. /// Valid commands are defined in the AllowedEndPoints property. /// </summary> public event EndPointReceivedHandler EndPointReceived; /// <summary> /// Initialize the multithreaded server. /// </summary> public void Start() { // List ethernet interfaces, so we can determine the server's address ListInterfaces(); // start server _cancel = false; _serverThread.Start(); //var timer = new System.Threading.Timer((e) => //{ // this.UpdateExternalIp(); //}, null, TimeSpan.Zero, new TimeSpan(0, 0, 0, 5)); Debug.Print("Started server in thread " + _serverThread.GetHashCode()); } /// <summary> /// Parses a raw web request and filters out the command and arguments. /// </summary> /// <param name="rawData">The raw web request (including headers).</param> /// <returns>The parsed WebCommand if the request is valid, otherwise Null.</returns> private EndPoint InterpretRequest(string rawData) { if (rawData == null) { return null; } string commandData; // Remove GET/POST + Space if (rawData.Length > 5 || rawData.Length > 6) commandData = rawData.Substring(0, 3).ToLower() == "get" ? rawData.Substring(5, rawData.Length - 5) : rawData.Substring(6, rawData.Length - 6); else return null; // Remove everything after first space int idx = commandData.IndexOf("HTTP/1.1"); commandData = commandData.Substring(0, idx - 1); // Split command and arguments string[] parts = commandData.Split('/'); string command = null; if (parts.Length > 0) { // Parse first part to command command = parts[0].ToLower(); } // http://url/foo/test // Check if this is a valid command EndPoint returnEndPoint = null; foreach (EndPoint endPoint in _allowedEndPoints) { if (command != null && endPoint.Name.ToLower() == command.ToLower()) { returnEndPoint = endPoint; break; } } if (returnEndPoint == null) { return null; } var arguments = new string[parts.Length - 1]; for (int i = 1; i < parts.Length; i++) { arguments[i - 1] = parts[i]; } returnEndPoint.Arguments = arguments; return returnEndPoint; } /// <summary> /// Starts the server. /// </summary> private void StartServer() { using (var server = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp)) { server.Bind(new IPEndPoint(IPAddress.Any, Port)); server.Listen(1); while (!_cancel) { var connection = server.Accept(); if (connection.Poll(-1, SelectMode.SelectRead)) { // Create buffer and receive raw bytes. 
var bytes = new byte[connection.Available]; connection.Receive(bytes); // Convert to string, will include HTTP headers. var rawData = new string(Encoding.UTF8.GetChars(bytes)); EndPoint endPoint = InterpretRequest(rawData); if (endPoint != null) { if (_enableLedStatus) { PingLed(); } // dispatch the endpoint var e = new EndPointEventArgs(endPoint, connection); if (EndPointReceived != null) { ThreadUtil.SafeQueueWorkItem(() => { EndPointReceived(null, e); if (e.ManualSent) { // the client should close the socket } else { var response = e.ReturnString; SendResponse(response, connection); } }); } } else { SendResponse(GetApiList(), connection); } } } } } private void GetExternalIp() { var request = (HttpWebRequest)WebRequest.Create(new Uri("http://checkip.dyndns.org")); request.Method = "Get"; var streamReader = new StreamReader(request.GetResponse().GetResponseStream()); var publicIp = streamReader.ReadToEnd().Split(':')[1].Substring(1).Split('<')[0]; Debug.Print(publicIp); } private void UpdateExternalIp() { try { //WebProxy proxy = new WebProxy("10.0.0.?", 8080); Debug.Print("Updating ip address"); using (HttpWebRequest request = (HttpWebRequest)WebRequest.Create(new Uri("http://rainmakrweb-test.azurewebsites.net/api/devices/updateIpAddress"))) { string postData = "macAddress=5C-86-4A-00-CE-40"; byte[] buffer = Encoding.UTF8.GetBytes(postData); request.Method = "POST"; request.ContentType = "application/x-www-form-urlencoded"; request.ContentLength = buffer.Length; //request.KeepAlive = false; //request.Timeout = 100; //request.ReadWriteTimeout = 100; // request body using (Stream stream = request.GetRequestStream()) { stream.Write(buffer, 0, buffer.Length); } using (var response = (HttpWebResponse)request.GetResponse()) { Debug.Print("HTTP Status:" + response.StatusCode + " : " + response.StatusDescription); } } } catch (Exception ex) { Debug.Print(ex.Message); } } private string GetApiList() { try { string returnString = @" <body> <head> <style type=""text/css"">" + GetStylings() + @"</style> </head> <div class=""container""> <div class=""title"">Netduino Api List</div> <div class=""main""> <ul> "; foreach (EndPoint endpoint in _allowedEndPoints) { returnString += @" <li><a href=""" + endpoint.Name + "\">" + endpoint.Name + "</a><span class=\"description\">(" + endpoint.Description + ")</span></li>"; returnString += "\r\n"; } returnString += "</ul></body>"; return returnString; } catch (Exception) { return ""; } } private static string GetStylings() { return @" body{ } ul { list-style-type: circle; font-size:20px; } .container { height:100%; } .description{ font-size:12px; padding:10px; } .main{ height:100%; padding:5px; border-bottom-left-radius: 15px; border-bottom-right-radius: 15px; -moz-border-botom-left-radius: 15px; -moz-border-botom-right-radius: 15px; } .title{ font-size:50px; font-variant: small-caps; padding:20px; border-top-left-radius: 15px; border-top-right-radius: 15px; -moz-border-botom-left-radius: 15px; -moz-border-botom-right-radius: 15px; } a:link {color: black;} a:visited {color: #998700;} a:active {color: black;} a:hover {color: #0F00B8;} a { text-decoration: underline; font-variant:small-caps; } "; } private static void WriteBytes(byte[] bytes, Socket connection) { try { connection.Send(bytes, 0, bytes.Length, SocketFlags.None); using (connection) { } } catch (Exception) { } } private static void SendResponse(string response, Socket connection) { try { var header = "HTTP/1.1 200 OK\r\nConnection: close\r\nContent-Length: " + response.Length + "\r\nContent-Type: 
text/plain\r\n\r\n" + response; byte[] returnBytes = Encoding.UTF8.GetBytes(header); WriteBytes(returnBytes, connection); } catch (Exception) { } } private void PingLed() { if (_led == null) { _led = new OutputPort(Pins.ONBOARD_LED, false); } _led.Write(true); Thread.Sleep(50); _led.Write(false); } private static void ListInterfaces() { NetworkInterface[] ifaces = NetworkInterface.GetAllNetworkInterfaces(); Debug.Print("Number of Interfaces: " + ifaces.Length); foreach (NetworkInterface iface in ifaces) { Debug.Print("IP: " + iface.IPAddress + "/" + iface.SubnetMask); } } /// <summary> /// Gets or sets the port the server listens on. /// </summary> private int Port { get; set; } /// <summary> /// List of commands that can be handled by the server. /// </summary> private readonly ArrayList _allowedEndPoints = new ArrayList(); public void RegisterEndPoint(EndPoint endPoint) { _allowedEndPoints.Add(endPoint); } } }
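// A minimal usage sketch for the WebServer above, assuming EndPoint lives in the same namespace
// with settable Name/Description properties (as read by InterpretRequest and GetApiList) and that
// EndPointEventArgs.ReturnString is settable by the handler; the port, endpoint name and response
// text are illustrative only.
using Microsoft.SPOT;
using RainMakr.Core.Web;

public static class WebServerUsageSketch
{
    public static void Run()
    {
        var server = new WebServer(80);

        // Requests that do not match a registered endpoint name get the generated API list page.
        server.RegisterEndPoint(new EndPoint { Name = "status", Description = "Returns a status string" });

        // The handler is queued on a worker thread (ThreadUtil.SafeQueueWorkItem); setting
        // ReturnString lets SendResponse write the HTTP response once the handler returns.
        server.EndPointReceived += delegate(object source, EndPointEventArgs e)
        {
            Debug.Print("status endpoint hit");
            e.ReturnString = "OK";
        };

        server.Start();
    }
}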
using System; using System.Runtime.Serialization; using System.Text; using Newtonsoft.Json; namespace DocDBAPIRest.Models { /// <summary> /// </summary> public class Database : IEquatable<Database> { /// <summary> /// The user generated unique name for the database. This is a string that must not be more than 255 characters. /// </summary> /// <value>The user generated unique name for the database. This is a string that must not be more than 255 characters.</value> public string Id { get; set; } /// <summary> /// This is a system generated property. The resource id (_rid) is a unique identifier that is also hierarchical per /// the resource stack on the resource model. It is used internally for placement of and navigation to the database /// resource. /// </summary> /// <value> /// This is a system generated property. The resource id (_rid) is a unique identifier that is also hierarchical per /// the resource stack on the resource model. It is used internally for placement of and navigation to the database /// resource. /// </value> public string Rid { get; set; } /// <summary> /// This is a system generated property. It specifies the last updated timestamp of the resource. The value is a /// timestamp. /// </summary> /// <value> /// This is a system generated property. It specifies the last updated timestamp of the resource. The value is a /// timestamp. /// </value> public string Ts { get; set; } /// <summary> /// This is a system generated property. It is the unique addressable URI for the resource. /// </summary> /// <value>This is a system generated property. It is the unique addressable URI for the resource.</value> public string Self { get; set; } /// <summary> /// This is a system generated property representing the resource etag required for optimistic concurrency control. /// </summary> /// <value>This is a system generated property representing the resource etag required for optimistic concurrency control.</value> public string Etag { get; set; } /// <summary> /// This is a system generated property that specifies the addressable path of the collections resource. /// </summary> /// <value>This is a system generated property that specifies the addressable path of the collections resource.</value> public string Colls { get; set; } /// <summary> /// This is a system generated property that specifies the addressable path of the users resource. 
/// </summary> /// <value>This is a system generated property that specifies the addressable path of the users resource.</value> public string Users { get; set; } /// <summary> /// Returns true if Database instances are equal /// </summary> /// <param name="other">Instance of Database to be compared</param> /// <returns>Boolean</returns> public bool Equals(Database other) { // credit: http://stackoverflow.com/a/10454552/677735 if (other == null) return false; return ( Id == other.Id || Id != null && Id.Equals(other.Id) ) && ( Rid == other.Rid || Rid != null && Rid.Equals(other.Rid) ) && ( Ts == other.Ts || Ts != null && Ts.Equals(other.Ts) ) && ( Self == other.Self || Self != null && Self.Equals(other.Self) ) && ( Etag == other.Etag || Etag != null && Etag.Equals(other.Etag) ) && ( Colls == other.Colls || Colls != null && Colls.Equals(other.Colls) ) && ( Users == other.Users || Users != null && Users.Equals(other.Users) ); } /// <summary> /// Returns the string presentation of the object /// </summary> /// <returns>String presentation of the object</returns> public override string ToString() { var sb = new StringBuilder(); sb.Append("class Database {\n"); sb.Append(" Id: ").Append(Id).Append("\n"); sb.Append(" Rid: ").Append(Rid).Append("\n"); sb.Append(" Ts: ").Append(Ts).Append("\n"); sb.Append(" Self: ").Append(Self).Append("\n"); sb.Append(" Etag: ").Append(Etag).Append("\n"); sb.Append(" Colls: ").Append(Colls).Append("\n"); sb.Append(" Users: ").Append(Users).Append("\n"); sb.Append("}\n"); return sb.ToString(); } /// <summary> /// Returns the JSON string presentation of the object /// </summary> /// <returns>JSON string presentation of the object</returns> public string ToJson() { return JsonConvert.SerializeObject(this, Formatting.Indented); } /// <summary> /// Returns true if objects are equal /// </summary> /// <param name="obj">Object to be compared</param> /// <returns>Boolean</returns> public override bool Equals(object obj) { // credit: http://stackoverflow.com/a/10454552/677735 return Equals(obj as Database); } /// <summary> /// Gets the hash code /// </summary> /// <returns>Hash code</returns> public override int GetHashCode() { // credit: http://stackoverflow.com/a/263416/677735 unchecked // Overflow is fine, just wrap { var hash = 41; // Suitable nullity checks etc, of course :) if (Id != null) hash = hash*57 + Id.GetHashCode(); if (Rid != null) hash = hash*57 + Rid.GetHashCode(); if (Ts != null) hash = hash*57 + Ts.GetHashCode(); if (Self != null) hash = hash*57 + Self.GetHashCode(); if (Etag != null) hash = hash*57 + Etag.GetHashCode(); if (Colls != null) hash = hash*57 + Colls.GetHashCode(); if (Users != null) hash = hash*57 + Users.GetHashCode(); return hash; } } } }
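// A short sketch of the value-equality contract implemented above: two Database instances with
// the same property values compare equal and share a hash code, and ToJson() emits the indented
// Newtonsoft.Json rendering. The ids used here are illustrative.
using System;
using DocDBAPIRest.Models;

public static class DatabaseEqualitySketch
{
    public static void Run()
    {
        var a = new Database { Id = "orders", Rid = "rid-1" };
        var b = new Database { Id = "orders", Rid = "rid-1" };

        Console.WriteLine(a.Equals(b));                         // True
        Console.WriteLine(a.GetHashCode() == b.GetHashCode());  // True
        Console.WriteLine(a.ToJson());
    }
}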
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.IO; using System.Runtime.Serialization; using System.Runtime.Serialization.Formatters.Binary; using Xunit; namespace System.Threading.Tests { public static class ExecutionContextTests { [Fact] public static void CreateCopyTest() { ThreadTestHelpers.RunTestInBackgroundThread(() => { var asyncLocal = new AsyncLocal<int>(); ExecutionContext executionContext = ExecutionContext.Capture(); VerifyExecutionContext(executionContext, asyncLocal, 0); executionContext = ExecutionContext.Capture(); ExecutionContext executionContextCopy0 = executionContext.CreateCopy(); asyncLocal.Value = 1; executionContext = ExecutionContext.Capture(); VerifyExecutionContext(executionContext, asyncLocal, 1); VerifyExecutionContext(executionContextCopy0, asyncLocal, 0); executionContext = ExecutionContext.Capture(); ExecutionContext executionContextCopy1 = executionContext.CreateCopy(); VerifyExecutionContext(executionContextCopy1, asyncLocal, 1); }); } [Fact] public static void DisposeTest() { ExecutionContext executionContext = ExecutionContext.Capture(); executionContext.CreateCopy().Dispose(); executionContext.CreateCopy().Dispose(); } [Fact] public static void FlowTest() { ThreadTestHelpers.RunTestInBackgroundThread(() => { var asyncLocal = new AsyncLocal<int>(); asyncLocal.Value = 1; var asyncFlowControl = default(AsyncFlowControl); Action<Action, Action> verifySuppressRestore = (suppressFlow, restoreFlow) => { VerifyExecutionContextFlow(asyncLocal, 1); ExecutionContext executionContext2 = ExecutionContext.Capture(); suppressFlow(); VerifyExecutionContextFlow(asyncLocal, 0); VerifyExecutionContext(executionContext2, asyncLocal, 1); executionContext2 = ExecutionContext.Capture(); restoreFlow(); VerifyExecutionContextFlow(asyncLocal, 1); VerifyExecutionContext(executionContext2, asyncLocal, 0); }; verifySuppressRestore( () => asyncFlowControl = ExecutionContext.SuppressFlow(), () => asyncFlowControl.Undo()); verifySuppressRestore( () => asyncFlowControl = ExecutionContext.SuppressFlow(), () => asyncFlowControl.Dispose()); verifySuppressRestore( () => ExecutionContext.SuppressFlow(), () => ExecutionContext.RestoreFlow()); Assert.Throws<InvalidOperationException>(() => ExecutionContext.RestoreFlow()); asyncFlowControl = ExecutionContext.SuppressFlow(); Assert.Throws<InvalidOperationException>(() => ExecutionContext.SuppressFlow()); ThreadTestHelpers.RunTestInBackgroundThread(() => { ExecutionContext.SuppressFlow(); Assert.Throws<InvalidOperationException>(() => asyncFlowControl.Undo()); Assert.Throws<InvalidOperationException>(() => asyncFlowControl.Dispose()); ExecutionContext.RestoreFlow(); }); asyncFlowControl.Undo(); Assert.Throws<InvalidOperationException>(() => asyncFlowControl.Undo()); Assert.Throws<InvalidOperationException>(() => asyncFlowControl.Dispose()); // Changing an async local value does not prevent undoing a flow-suppressed execution context. In .NET Core, the // execution context is immutable, so changing an async local value changes the execution context instance, // contrary to the desktop framework. 
asyncFlowControl = ExecutionContext.SuppressFlow(); asyncLocal.Value = 2; asyncFlowControl.Undo(); VerifyExecutionContextFlow(asyncLocal, 2); asyncFlowControl = ExecutionContext.SuppressFlow(); asyncLocal.Value = 3; asyncFlowControl.Dispose(); VerifyExecutionContextFlow(asyncLocal, 3); ExecutionContext.SuppressFlow(); asyncLocal.Value = 4; ExecutionContext.RestoreFlow(); VerifyExecutionContextFlow(asyncLocal, 4); // An async flow control cannot be undone when a different execution context is applied. The desktop framework // mutates the execution context when its state changes, and only changes the instance when an execution context // is applied (for instance, through ExecutionContext.Run). The framework prevents a suppressed-flow execution // context from being applied by returning null from ExecutionContext.Capture, so the only type of execution // context that can be applied is one whose flow is not suppressed. After suppressing flow and changing an async // local's value, the desktop framework verifies that a different execution context has not been applied by // checking the execution context instance against the one saved from when flow was suppressed. In .NET Core, // since the execution context instance will change after changing the async local's value, it verifies that a // different execution context has not been applied, by instead ensuring that the current execution context's // flow is suppressed. { ExecutionContext executionContext = null; Action verifyCannotUndoAsyncFlowControlAfterChangingExecutionContext = () => { ExecutionContext.Run( executionContext, state => { Assert.Throws<InvalidOperationException>(() => asyncFlowControl.Undo()); Assert.Throws<InvalidOperationException>(() => asyncFlowControl.Dispose()); }, null); }; executionContext = ExecutionContext.Capture(); asyncFlowControl = ExecutionContext.SuppressFlow(); verifyCannotUndoAsyncFlowControlAfterChangingExecutionContext(); asyncFlowControl.Undo(); executionContext = ExecutionContext.Capture(); asyncFlowControl = ExecutionContext.SuppressFlow(); asyncLocal.Value = 5; verifyCannotUndoAsyncFlowControlAfterChangingExecutionContext(); asyncFlowControl.Undo(); VerifyExecutionContextFlow(asyncLocal, 5); } }); } [Fact] [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework)] // desktop framework has a bug [SkipOnTargetFramework(TargetFrameworkMonikers.Mono)] public static void CaptureThenSuppressThenRunFlowTest() { ThreadTestHelpers.RunTestInBackgroundThread(() => { var asyncLocal = new AsyncLocal<int>(); asyncLocal.Value = 1; ExecutionContext executionContext = ExecutionContext.Capture(); ExecutionContext.SuppressFlow(); ExecutionContext.Run( executionContext, state => { Assert.Equal(1, asyncLocal.Value); VerifyExecutionContextFlow(asyncLocal, 1); }, null); Assert.Equal(1, asyncLocal.Value); VerifyExecutionContextFlow(asyncLocal, 0); ExecutionContext.RestoreFlow(); VerifyExecutionContextFlow(asyncLocal, 1); executionContext = ExecutionContext.Capture(); asyncLocal.Value = 2; ExecutionContext.SuppressFlow(); Assert.True(ExecutionContext.IsFlowSuppressed()); ExecutionContext.Run( executionContext, state => { Assert.Equal(1, asyncLocal.Value); VerifyExecutionContextFlow(asyncLocal, 1); }, null); Assert.Equal(2, asyncLocal.Value); VerifyExecutionContextFlow(asyncLocal, 0); ExecutionContext.RestoreFlow(); VerifyExecutionContextFlow(asyncLocal, 2); }); } private static void VerifyExecutionContext( ExecutionContext executionContext, AsyncLocal<int> asyncLocal, int expectedValue) { int actualValue = 0; Action run = () 
=> ExecutionContext.Run(executionContext, state => actualValue = asyncLocal.Value, null); if (executionContext == null) { Assert.Throws<InvalidOperationException>(() => run()); } else { run(); } Assert.Equal(expectedValue, actualValue); } private static void VerifyExecutionContextFlow(AsyncLocal<int> asyncLocal, int expectedValue) { Assert.Equal(expectedValue == 0, ExecutionContext.IsFlowSuppressed()); if (ExecutionContext.IsFlowSuppressed()) { Assert.Null(ExecutionContext.Capture()); } VerifyExecutionContext(ExecutionContext.Capture(), asyncLocal, expectedValue); int asyncLocalValue = -1; var done = new ManualResetEvent(false); ThreadPool.QueueUserWorkItem( state => { asyncLocalValue = asyncLocal.Value; done.Set(); }); done.CheckedWait(); Assert.Equal(expectedValue, asyncLocalValue); } [Fact] public static void AsyncFlowControlTest() { ThreadTestHelpers.RunTestInBackgroundThread(() => { Action<AsyncFlowControl, AsyncFlowControl, bool> verifyEquality = (afc0, afc1, areExpectedToBeEqual) => { Assert.Equal(areExpectedToBeEqual, afc0.Equals(afc1)); Assert.Equal(areExpectedToBeEqual, afc0.Equals((object)afc1)); Assert.Equal(areExpectedToBeEqual, afc0 == afc1); Assert.NotEqual(areExpectedToBeEqual, afc0 != afc1); }; AsyncFlowControl asyncFlowControl0 = ExecutionContext.SuppressFlow(); ExecutionContext.RestoreFlow(); AsyncFlowControl asyncFlowControl1 = ExecutionContext.SuppressFlow(); ExecutionContext.RestoreFlow(); verifyEquality(asyncFlowControl0, asyncFlowControl1, true); verifyEquality(asyncFlowControl1, asyncFlowControl0, true); var asyncLocal = new AsyncLocal<int>(); asyncLocal.Value = 1; asyncFlowControl1 = ExecutionContext.SuppressFlow(); ExecutionContext.RestoreFlow(); verifyEquality(asyncFlowControl0, asyncFlowControl1, true); verifyEquality(asyncFlowControl1, asyncFlowControl0, true); asyncFlowControl1 = new AsyncFlowControl(); verifyEquality(asyncFlowControl0, asyncFlowControl1, false); verifyEquality(asyncFlowControl1, asyncFlowControl0, false); ThreadTestHelpers.RunTestInBackgroundThread(() => asyncFlowControl1 = ExecutionContext.SuppressFlow()); verifyEquality(asyncFlowControl0, asyncFlowControl1, false); verifyEquality(asyncFlowControl1, asyncFlowControl0, false); }); } } }
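// A compact standalone illustration of the behaviour exercised by FlowTest and
// CaptureThenSuppressThenRunFlowTest above: while flow is suppressed, Capture() returns null,
// and running a previously captured context replays the async-local value it was captured with.
using System;
using System.Threading;

public static class ExecutionContextFlowSketch
{
    public static void Run()
    {
        var asyncLocal = new AsyncLocal<int>();
        asyncLocal.Value = 1;

        ExecutionContext captured = ExecutionContext.Capture();

        using (ExecutionContext.SuppressFlow())
        {
            Console.WriteLine(ExecutionContext.IsFlowSuppressed()); // True
            Console.WriteLine(ExecutionContext.Capture() == null);  // True
        }

        asyncLocal.Value = 2;
        ExecutionContext.Run(captured, _ => Console.WriteLine(asyncLocal.Value), null); // prints 1
        Console.WriteLine(asyncLocal.Value); // prints 2 again outside the captured context
    }
}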
#region Copyright and license information // Copyright 2001-2009 Stephen Colebourne // Copyright 2009-2011 Jon Skeet // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #endregion using System; using NodaTime.Utility; namespace NodaTime.TimeZones { /// <summary> /// Provides a basic daylight savings time zone. A DST time zone has a simple recurrence /// where an extra offset is applied between two dates of a year. /// </summary> /// <remarks> /// IMPORTANT: This class *accepts* recurrences which start from a particular year /// rather than being infinite back to the start of time, but *treats* them as if /// they were infinite. This makes various calculations easier, but this zone should /// only be used as part of a PrecalculatedDateTimeZone which will only ask it for /// values within the right portion of the timeline. /// </remarks> [Serializable] internal class DaylightSavingsTimeZone : DateTimeZone, IEquatable<DaylightSavingsTimeZone> { private readonly ZoneRecurrence standardRecurrence; private readonly Offset standardOffset; private readonly ZoneRecurrence dstRecurrence; /// <summary> /// Initializes a new instance of the <see cref="DaylightSavingsTimeZone"/> class. /// </summary> /// <remarks> /// At least one of the recurrences (it doesn't matter which) must be a "standard", i.e. not have any savings /// applied. The other may still not have any savings (e.g. for America/Resolute) but any savings must be /// non-negative. /// </remarks> /// <param name="id">The id.</param> /// <param name="standardOffset">The standard offset.</param> /// <param name="startRecurrence">The start recurrence.</param> /// <param name="endRecurrence">The end recurrence.</param> internal DaylightSavingsTimeZone(String id, Offset standardOffset, ZoneRecurrence startRecurrence, ZoneRecurrence endRecurrence) : base(id, false, standardOffset + Offset.Min(startRecurrence.Savings, endRecurrence.Savings), standardOffset + Offset.Max(startRecurrence.Savings, endRecurrence.Savings)) { this.standardOffset = standardOffset; // Treat the recurrences as if they extended to the start of time. startRecurrence = startRecurrence.ToStartOfTime(); endRecurrence = endRecurrence.ToStartOfTime(); var dst = startRecurrence; var standard = endRecurrence; if (!dst.IsInfinite || !standard.IsInfinite) { throw new ArgumentException("Both recurrences must extend to the end of time"); } if (startRecurrence.Savings == Offset.Zero) { dst = endRecurrence; standard = startRecurrence; } if (dst.Savings < Offset.Zero) { // Not necessarily positive... America/Resolute ends up switching // between two different zone names, neither of which has daylight savings... 
throw new ArgumentException("Daylight savings must be non-negative"); } if (standard.Savings != Offset.Zero) { throw new ArgumentException("At least one recurrence must not have savings applied"); } if (dst.Name == standard.Name) { dst = dst.RenameAppend("-Summer"); } dstRecurrence = dst; standardRecurrence = standard; } #region IEquatable<DaylightSavingsTimeZone> Members /// <summary> /// Indicates whether the current object is equal to another object of the same type. /// </summary> /// <returns> /// true if the current object is equal to the <paramref name="other"/> parameter; otherwise, false. /// </returns> /// <param name="other">An object to compare with this object.</param> public bool Equals(DaylightSavingsTimeZone other) { if (ReferenceEquals(null, other)) { return false; } if (ReferenceEquals(this, other)) { return true; } return Id == other.Id && standardOffset == other.standardOffset && dstRecurrence.Equals(other.dstRecurrence) && standardRecurrence.Equals(other.standardRecurrence); } #endregion #region Object overrides /// <summary> /// Determines whether the specified <see cref="T:System.Object"/> is equal to the current <see cref="T:System.Object"/>. /// </summary> /// <returns> /// true if the specified <see cref="T:System.Object"/> is equal to the current <see cref="T:System.Object"/>; otherwise, false. /// </returns> /// <param name="obj">The <see cref="T:System.Object"/> to compare with the current <see cref="T:System.Object"/>. /// </param><exception cref="T:System.NullReferenceException">The <paramref name="obj"/> parameter is null. /// </exception><filterpriority>2</filterpriority> public override bool Equals(Object obj) { return Equals(obj as DaylightSavingsTimeZone); } /// <summary> /// Serves as a hash function for a particular type. /// </summary> /// <returns> /// A hash code for the current <see cref="T:System.Object"/>. /// </returns> /// <filterpriority>2</filterpriority> public override int GetHashCode() { int hashCode = HashCodeHelper.Initialize(); hashCode = HashCodeHelper.Hash(hashCode, Id); hashCode = HashCodeHelper.Hash(hashCode, standardOffset); hashCode = HashCodeHelper.Hash(hashCode, dstRecurrence); hashCode = HashCodeHelper.Hash(hashCode, standardRecurrence); return hashCode; } #endregion // Object overrides /// <summary> /// Gets the zone interval for the given instant. /// </summary> /// <param name="instant">The Instant to test.</param> /// <returns>The ZoneInterval in effect at the given instant.</returns> /// <exception cref="ArgumentOutOfRangeException">The instant falls outside the bounds /// of the recurrence rules of the zone.</exception> public override ZoneInterval GetZoneInterval(Instant instant) { var previous = PreviousTransition(instant + Duration.One); var next = NextTransition(instant); var recurrence = FindMatchingRecurrence(instant); return new ZoneInterval(recurrence.Name, previous.Instant, next.Instant, standardOffset + recurrence.Savings, recurrence.Savings); } /// <summary> /// Finds the recurrence containing the given instant, if any. /// </summary> /// <returns>The recurrence containing the given instant, or null if /// the instant occurs before the start of the earlier recurrence.</returns> private ZoneRecurrence FindMatchingRecurrence(Instant instant) { // Find the transitions which start *after* the one we're currently in - then // pick the later of them, which will be the same "polarity" as the one we're currently // in. Transition? 
nextDstStart = dstRecurrence.Next(instant, standardOffset, standardRecurrence.Savings); Transition? nextStandardStart = standardRecurrence.Next(instant, standardOffset, dstRecurrence.Savings); // Both transitions must be non-null, as our recurrences are infinite. return nextDstStart.Value.Instant > nextStandardStart.Value.Instant ? dstRecurrence : standardRecurrence; } /// <summary> /// Returns the transition occurring strictly after the specified instant /// </summary> /// <param name="instant">The instant after which to consider transitions.</param> private Transition NextTransition(Instant instant) { Transition? dstTransition = dstRecurrence.Next(instant, standardOffset, standardRecurrence.Savings); Transition? standardTransition = standardRecurrence.Next(instant, standardOffset, dstRecurrence.Savings); if (dstTransition.HasValue) { if (standardTransition.HasValue) { return (dstTransition.Value.Instant > standardTransition.Value.Instant) ? standardTransition.Value : dstTransition.Value; } return dstTransition.Value; } if (standardTransition.HasValue) { return standardTransition.Value; } throw new ArgumentOutOfRangeException("instant", "Infinite recurrences should always have a next transition"); } /// <summary> /// Returns the transition occurring strictly before the specified instant. /// </summary> /// <param name="instant">The instant before which to consider transitions.</param> /// <returns> /// The instant of the previous transition, or null if there are no further transitions. /// </returns> private Transition PreviousTransition(Instant instant) { Transition? dstTransition = dstRecurrence.Previous(instant, standardOffset, standardRecurrence.Savings); Transition? standardTransition = standardRecurrence.Previous(instant, standardOffset, dstRecurrence.Savings); if (dstTransition.HasValue) { if (standardTransition.HasValue) { return (dstTransition.Value.Instant > standardTransition.Value.Instant) ? dstTransition.Value : standardTransition.Value; } else { return dstTransition.Value; } } if (standardTransition.HasValue) { return standardTransition.Value; } throw new ArgumentOutOfRangeException("instant", "Infinite (start of time) recurrences should always have a previous transition"); } /// <summary> /// Returns the offset from UTC, where a positive duration indicates that local time is later /// than UTC. In other words, local time = UTC + offset. /// </summary> /// <param name="instant">The instant for which to calculate the offset.</param> /// <returns> /// The offset from UTC at the specified instant. /// </returns> public override Offset GetOffsetFromUtc(Instant instant) { return FindMatchingRecurrence(instant).Savings + standardOffset; } /// <summary> /// Writes the time zone to the specified writer. /// </summary> /// <param name="writer">The writer to write to.</param> internal override void Write(DateTimeZoneWriter writer) { if (writer == null) { throw new ArgumentNullException("writer"); } writer.WriteOffset(standardOffset); dstRecurrence.Write(writer); standardRecurrence.Write(writer); } internal static DateTimeZone Read(DateTimeZoneReader reader, string id) { if (reader == null) { throw new ArgumentNullException("reader"); } Offset offset = reader.ReadOffset(); ZoneRecurrence start = ZoneRecurrence.Read(reader); ZoneRecurrence end = ZoneRecurrence.Read(reader); return new DaylightSavingsTimeZone(id, offset, start, end); } } }
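// A small sketch of how the zone above is queried, assuming an existing DaylightSavingsTimeZone
// instance and Instant obtained elsewhere (the constructor is internal and is normally invoked
// while building a PrecalculatedDateTimeZone, so this helper would live inside the NodaTime assembly).
using System;
using NodaTime;
using NodaTime.TimeZones;

internal static class DaylightSavingsTimeZoneSketch
{
    internal static void Describe(DaylightSavingsTimeZone zone, Instant instant)
    {
        // The interval names the recurrence (standard or DST) in force at the instant; the UTC
        // offset is the standard offset plus that recurrence's savings.
        ZoneInterval interval = zone.GetZoneInterval(instant);
        Offset offset = zone.GetOffsetFromUtc(instant);
        Console.WriteLine(interval + " / " + offset);
    }
}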
#region License /* Copyright (c) 2010-2014 Danko Kozar Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #endregion License using System; using System.Collections.Generic; using UnityEngine; namespace eDriven.Core.Events { /// <summary> /// The EventDispatcher /// Base class for all classes that dispatch events /// </summary> /// <remarks>Coded by Danko Kozar</remarks> public class EventDispatcher : IEventDispatcher, IEventQueue, IDisposable { /// <summary> /// Constructor /// </summary> public EventDispatcher() { _eventDispatcherTarget = this; } /// <summary> /// Constructor /// </summary> public EventDispatcher(object target) { if (null == target) throw new Exception("Target cannot be null"); _eventDispatcherTarget = target; } #region Properties #if DEBUG /// <summary> /// Debug mode /// </summary> public static bool DebugMode; #endif #endregion #region Members private readonly object _eventDispatcherTarget; /// <summary> /// A dictionary holding the references to event listeners /// For each event type, more that one event listener could be subscribed (one-to-many relationship) /// </summary> private readonly Dictionary<EventTypePhase, List<PriorityGroup>> _eventHandlerDict = new Dictionary<EventTypePhase, List<PriorityGroup>>(); /// <summary> /// The queue used for delayed processing /// </summary> private readonly List<Event> _queue = new List<Event>(); #endregion #region Subscribing / unsubscribing /// <summary> /// Adds the event listener /// </summary> /// <param name="eventType">Event type</param> /// <param name="handler">Event handler</param> /// <param name="phases">Event bubbling phases that we listen to</param> /// <param name="priority">Event priority</param> public virtual void AddEventListener(string eventType, EventHandler handler, EventPhase phases, int priority) { var arr = EventPhaseHelper.BreakUpPhases(phases); foreach (EventPhase phase in arr) { EventTypePhase key = new EventTypePhase(eventType, phase); if (!_eventHandlerDict.ContainsKey(key)) // avoid key duplication _eventHandlerDict.Add(key, new List<PriorityGroup>()); var group = _eventHandlerDict[key].Find(delegate(PriorityGroup g) { return g.Priority == priority; }); if (null == group) { //if (0 != priority) // Debug.Log("Creating new group with priority " + priority); group = new PriorityGroup { Priority = priority }; // add and sort _eventHandlerDict[key].Add(group); // if having multiple priorities, sort now if (_eventHandlerDict[key].Count > 0) _eventHandlerDict[key].Sort(PriorityComparer); } if (!group.Contains(handler)) // avoid key + value 
duplication group.Add(handler); } } /// <summary> /// Adds the event listener /// </summary> /// <param name="eventType">Event type</param> /// <param name="handler">Event handler</param> /// <param name="phases">Event bubbling phases that we listen to</param> public virtual void AddEventListener(string eventType, EventHandler handler, EventPhase phases) { AddEventListener(eventType, handler, phases, 0); } /// <summary> /// AddEventListener Overload /// Assumes that useCapturePhase is false /// </summary> /// <param name="eventType">Event type</param> /// <param name="handler">Event handler (function)</param> public virtual void AddEventListener(string eventType, EventHandler handler) { AddEventListener(eventType, handler, EventPhase.Target | EventPhase.Bubbling); // | EventPhase.Bubbling added back 20121216 } /// <summary> /// Adds the event listener /// </summary> /// <param name="eventType">Event type</param> /// <param name="handler">Event handler</param> /// <param name="priority">Event priority</param> public virtual void AddEventListener(string eventType, EventHandler handler, int priority) { AddEventListener(eventType, handler, EventPhase.Target | EventPhase.Bubbling, priority); } private List<PriorityGroup> _tempList; /// <summary> /// Removes the event listener /// </summary> /// <param name="eventType">Event type</param> /// <param name="handler">Event handler (function)</param> /// <param name="phases">Event bubbling phases that we listen to</param> public virtual void RemoveEventListener(string eventType, EventHandler handler, EventPhase phases) { var arr = EventPhaseHelper.BreakUpPhases(phases); foreach (EventPhase phase in arr) { EventTypePhase key = new EventTypePhase(eventType, phase); if (_eventHandlerDict.ContainsKey(key)) { foreach (PriorityGroup group in _eventHandlerDict[key]) { if (group.Contains(handler)) group.Remove(handler); if (group.Count == 0) { if (null == _tempList) _tempList = new List<PriorityGroup>(); _tempList.Add(group); } } if (null != _tempList) { foreach (PriorityGroup @group in _tempList) { _eventHandlerDict[key].Remove(@group); // cleanup } _tempList.Clear(); } if (_eventHandlerDict[key].Count == 0) _eventHandlerDict.Remove(key); // cleanup } } } /// <summary> /// Removes the event listener /// </summary> /// <param name="eventType">Event type</param> /// <param name="handler">Event handler (function)</param> public virtual void RemoveEventListener(string eventType, EventHandler handler) { RemoveEventListener(eventType, handler, EventPhase.Target | EventPhase.Bubbling); // this is the default // | EventPhase.Bubbling added back 20121216 } /// <summary> /// Returns true if handler is mapped to any of the specified phases /// </summary> /// <param name="eventType"></param> /// <param name="handler"></param> /// <param name="phases"></param> /// <returns></returns> public virtual bool MappedToAnyPhase(string eventType, EventHandler handler, EventPhase phases) { var arr = EventPhaseHelper.BreakUpPhases(phases); foreach (EventPhase phase in arr) { EventTypePhase key = new EventTypePhase(eventType, phase); //if (_eventHandlerDict.ContainsKey(key) && _eventHandlerDict[key].Contains(handler)) if (_eventHandlerDict.ContainsKey(key)) { var exists = _eventHandlerDict[key].Exists(delegate(PriorityGroup group) { return group.Contains(handler); }); //foreach (PriorityGroup group in _eventHandlerDict[key]) //{ // //if (_eventHandlerDict[key].Contains(handler)) // // return true; //} return exists; } } return false; } /// <summary> /// Removes all listeners for 
the spacified event type (both capture and bubbling phase) /// </summary> /// <param name="eventType">Event type</param> public virtual void RemoveAllListeners(string eventType) { RemoveAllListeners(eventType, EventPhase.Bubbling); RemoveAllListeners(eventType, EventPhase.Target); RemoveAllListeners(eventType, EventPhase.Capture); } /// <summary> /// Removes all listeners for the spacified event type and phases /// </summary> /// <param name="eventType">Event type</param> /// /// <param name="phases"></param> public virtual void RemoveAllListeners(string eventType, EventPhase phases) { var arr = EventPhaseHelper.BreakUpPhases(phases); foreach (EventPhase phase in arr) { EventTypePhase key = new EventTypePhase(eventType, phase); if (_eventHandlerDict.ContainsKey(key)) { _eventHandlerDict[key].Clear(); _eventHandlerDict.Remove(key); } } } /// <summary> /// Returns true if EventDispatcher has any registered listeners for a specific type and phase /// </summary> /// <param name="eventType"></param> /// <returns></returns> public bool HasEventListener(string eventType) { return _eventHandlerDict.ContainsKey(new EventTypePhase(eventType, EventPhase.Target)); // TODO: Optimize } /// <summary> /// Returns true if there are any subscribers in bubbling hierarchy<br/> /// Override in superclass /// </summary> /// <param name="eventType"></param> /// <returns></returns> public virtual bool HasBubblingEventListener(string eventType) { return HasEventListener(eventType); } #endregion #region Dispatching /// <summary> /// Dispatches an event with the option of late processing (immediate = TRUE/FALSE) /// </summary> /// <param name="e">Event</param> /// <param name="immediate">Process immediatelly or delayed?</param> /// <returns>If after the event object finishes propagating through the DOM event flow its Event.DefaultPrevented attribute is false, then this method returns true. 
Otherwise this method returns false.</returns> public virtual void DispatchEvent(Event e, bool immediate) { #if DEBUG if (DebugMode) Debug.Log(string.Format("Dispatching event [{0}]", e)); #endif // do nothing if event has already been canceled if (e.Canceled) return; // set target if not already set if (null == e.Target) e.Target = _eventDispatcherTarget; // set current target if not already set if (null == e.CurrentTarget) e.CurrentTarget = e.Target; if (immediate) { /** * 1) Immediate dispatching * The code from the event listener is being run just NOW * */ ProcessEvent(e); } else { /** * 2) Delayed dispatching * Processed when ProcessQueue is called by the external code * */ EnqueueEvent(e); } } /// <summary> /// Dispatches an event immediatelly /// </summary> /// <param name="e">Event</param> public virtual void DispatchEvent(Event e) { DispatchEvent(e, true); } #endregion #region Event processing /// <summary> /// Could be overriden in a subclass (for instance to implement event bubbling) /// </summary> /// <param name="e">Event to dispatch</param> protected virtual void ProcessEvent(Event e) { ExecuteListeners(e); } /// <summary> /// Executes event handlers listening for a particular event type /// </summary> /// <param name="e">Event to dispatch</param> /// <remarks>NOTE: Public by purpose</remarks> public void ExecuteListeners(Event e) { // return if event canceled if (e.Canceled) return; EventTypePhase key = new EventTypePhase(e.Type, e.Phase); // find event handlers subscribed for this event if (_eventHandlerDict.ContainsKey(key) && null != _eventHandlerDict[key]) { _eventHandlerDict[key].ForEach( delegate(PriorityGroup group) { group.ForEach(delegate (EventHandler handler) { if (e.Canceled) // the event might have been canceled by the previous listener in the collection return; handler(e); // execute the handler with an event as argument }); } ); } } #endregion #region Queued processing /// <summary> /// Adds an event to the queue /// The queue will be processed when ProcessQueue() manually executed /// </summary> /// <param name="e"></param> public virtual void EnqueueEvent(Event e) { #if DEBUG if (DebugMode) Debug.Log(string.Format("Enqueueing event [{0}]. Queue count: {1}", e, _queue.Count)); #endif _queue.Add(e); } /// <summary> /// If events are added to queue, they are waiting to be fired<br/> /// in the same order they are added /// </summary> public virtual void ProcessQueue() { // return if nothing to process if (0 == _queue.Count) { #if DEBUG if (DebugMode) Debug.Log(string.Format("No enqueued events to process")); #endif return; } // dispatch each event in the queue now _queue.ForEach(delegate(Event e) { ExecuteListeners(e); }); #if DEBUG if (DebugMode) Debug.Log(string.Format("Processed {0} enqueued events", _queue.Count)); #endif // empty the queue _queue.Clear(); } #endregion #region Implementation of IDisposable /// <summary> /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. 
/// </summary> public virtual void Dispose() { List<EventTypePhase> keysToRemove = new List<EventTypePhase>(); foreach (EventTypePhase key in _eventHandlerDict.Keys) { keysToRemove.Add(key); } keysToRemove.ForEach(key => RemoveAllListeners(key.EventType, key.Phase)); _eventHandlerDict.Clear(); } #endregion #region Preventing default /// <summary> /// Exposes the cancelable event to the outside if there are listeners for that event type /// If default prevented, returns false /// If not, returns true /// </summary> /// <param name="eventType"></param> /// <param name="bubbles"></param> /// <returns></returns> public bool IsDefaultPrevented(string eventType, bool bubbles) { // prevent removing if (HasEventListener(eventType)) { var e = new Event(eventType, bubbles, true); DispatchEvent(e); return e.DefaultPrevented; } return false; // return false because there are no listeners to prevent default } /// <summary> /// No-bubbling version /// </summary> /// <param name="eventType"></param> /// <returns></returns> public bool IsDefaultPrevented(string eventType) { return IsDefaultPrevented(eventType, false); } #endregion #region Helper /// <summary> /// /// </summary> private static readonly Comparison<PriorityGroup> PriorityComparer = delegate(PriorityGroup group1, PriorityGroup group2) { if (group1.Priority > group2.Priority) return -1; if (group1.Priority < group2.Priority) return 1; return 0; }; #endregion } internal class PriorityGroup : List<EventHandler> { public int Priority; } }
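// A minimal usage sketch for the dispatcher above. The event type string and handler bodies are
// illustrative, and Event is assumed to offer a constructor taking just the type name (only the
// (type, bubbles, cancelable) overload is visible in this file).
using UnityEngine;
using eDriven.Core.Events;

public static class EventDispatcherSketch
{
    public static void Run()
    {
        var dispatcher = new EventDispatcher();

        // Higher-priority groups run first (PriorityComparer sorts descending).
        dispatcher.AddEventListener("change", delegate(Event e) { Debug.Log("first: " + e.Type); }, 1);
        dispatcher.AddEventListener("change", delegate(Event e) { Debug.Log("second: " + e.Type); });

        // Immediate dispatch executes the listeners now; passing false enqueues the event
        // until ProcessQueue() is called.
        dispatcher.DispatchEvent(new Event("change"));
        dispatcher.DispatchEvent(new Event("change"), false);
        dispatcher.ProcessQueue();
    }
}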
using System; using System.Collections.Generic; using System.Linq; using IdeaFactory.Util; using Powercards.Core.Cards; namespace Powercards.Core { public class Player { #region fields private readonly string name; private readonly Random random; private readonly Deck deck; private readonly PlayerOwnedZone discardArea; private readonly PlayerOwnedZone hand; private readonly PlayerOwnedZone playArea; private readonly PlayerOwnedZone setAsideArea; private readonly PlayerOwnedZone nativeVillageMat; private readonly List<IDurationEffect> durationEffects; private readonly HashSet<string> recentGainedCardNamesOnMyTurn; #endregion #region properties public string Name { get { return name; } } public ICardZone Deck { get { return deck; } } public int DeckCardCount { get { return deck.CardCount; } } public PlayerOwnedZone DiscardArea { get { return discardArea; } } public PlayerOwnedZone Hand { get { return hand; } } public PlayerOwnedZone PlayArea { get { return playArea; } } /// <summary> /// Short-term set aside should use TransitionalZone, this is for long-term set aside such as Haven, Island, etc /// </summary> public PlayerOwnedZone SetAsideArea { get { return setAsideArea; } } public PlayerOwnedZone NativeVillageMat { get { return nativeVillageMat; } } public int TurnCount { get; private set; } public int CoinTokens { get; set; } public int VPTokens { get; set; } #endregion #region constructors public Player(string name) { Enforce.ArgumentNotEmptyOrNull(name); this.name = name; this.random = new Random(Maths.RandomInt32()); this.discardArea = new PlayerOwnedZone(this); this.deck = new Deck(this); this.hand = new PlayerOwnedZone(this); this.playArea = new PlayerOwnedZone(this); this.setAsideArea = new PlayerOwnedZone(this); this.nativeVillageMat = new PlayerOwnedZone(this); this.durationEffects = new List<IDurationEffect>(); this.recentGainedCardNamesOnMyTurn = new HashSet<string>(StringComparer.Ordinal); } #endregion #region methods public void Init() { InitDeckAndHand(); } private void InitDeckAndHand() { var cardTypes = new Type[10]; var index = 0; for (int i = 0; i < 3; i++) { cardTypes[index++] = typeof(Estate); } for (int i = 0; i < 7; i++) { cardTypes[index++] = typeof(Copper); } Enforce.IsTrue(index == cardTypes.Length); Shuffle(cardTypes); index = 0; for (int i = 0; i < 5; i++) { CardCreator.Create(cardTypes[index++], this.Deck); } for (int i = 0; i < 5; i++) { CardCreator.Create(cardTypes[index++], this.Hand); } Enforce.IsTrue(index == cardTypes.Length); } public void BeginTurn(TurnContext context) { this.TurnCount++; this.recentGainedCardNamesOnMyTurn.Clear(); foreach (var effect in this.durationEffects) { effect.OnTurnStarting(context); } this.durationEffects.Clear(); } public void DrawCards(int numberOfCards, TurnContext context) { MoveFromTopDeck(numberOfCards, this.Hand, CardMovementVerb.Draw, context); } public void DrawCardsTill(int handCardCountUpTo, TurnContext context) { MoveFromTopDeckTill(new NullValidator<ICard>(), handCardCountUpTo, this.Hand, CardMovementVerb.Draw, context); } public ICard MoveOneFromTopDeck(ICardZone zone, CardMovementVerb verb, TurnContext context) { ICard movedCard; TryMoveFromDeckImpl(true, zone, verb, context, out movedCard); return movedCard; } public void MoveFromTopDeck(int numberOfCards, ICardZone targetZone, CardMovementVerb verb, TurnContext context) { for (int i = 0; i < numberOfCards; i++) { ICard movedCard; if (!TryMoveFromDeckImpl(true, targetZone, verb, context, out movedCard)) break; } } public void MoveFromBottomDeck(int 
numberOfCards, ICardZone targetZone, CardMovementVerb verb, TurnContext context) { for (int i = 0; i < numberOfCards; i++) { ICard movedCard; if (!TryMoveFromDeckImpl(false, targetZone, verb, context, out movedCard)) break; } } public void MoveFromTopDeckTill<TZone>(IValidator<ICard> validator, int validatedCountUpTo, TZone targetZone, CardMovementVerb verb, TurnContext context) where TZone : ICardZone, IEnumerable<ICard> { while (targetZone.Count(validator.Validate) < validatedCountUpTo) { ICard movedCard; if (!TryMoveFromDeckImpl(true, targetZone, verb, context, out movedCard)) break; } } /// <returns>return false if the move failed. It also means that the deck cannot be drawn anymore</returns> private bool TryMoveFromDeckImpl(bool trueForTopCardFalseForButtom, ICardZone targetZone, CardMovementVerb verb, TurnContext context, out ICard movedCard) { RecycleDeckIfEmpty(context); if (this.deck.IsEmpty) { movedCard = null; return false; } movedCard = (trueForTopCardFalseForButtom ? this.deck.TopCard : this.deck.BottomCard); movedCard.MoveTo(this.Deck, targetZone, verb, context); return true; } private void RecycleDeckIfEmpty(TurnContext context) { if (this.deck.IsEmpty) { if (!this.DiscardArea.IsEmpty) { var discardCards = this.DiscardArea.ToArray(); Shuffle(discardCards); discardCards.MoveAll(this.DiscardArea, this.Deck, CardMovementVerb.Shuffle, context); Enforce.IsTrue(this.DiscardArea.IsEmpty); } } } public void DiscardDeck(TurnContext context) { this.deck.MoveAll(this.Deck, this.DiscardArea, CardMovementVerb.Discard, context); } /// <returns>Can continue the attack?</returns> public bool OnAttack(TurnContext context) { var continueAttack = true; HashSet<IDefenceCard> resolvedCards = null; while (true) { List<IDefenceCard> cardsToSelect = null; IDefenceCard cardToResolve = null; foreach (var card in this.Hand.Concat(this.PlayArea).OfType<IDefenceCard>()) { if (!card.IsDefenceUsable(this)) continue; if (resolvedCards == null) resolvedCards = new HashSet<IDefenceCard>(); if (resolvedCards.Contains(card)) continue; if (!card.IsDefenceOptional) { cardToResolve = card; break; } if (cardsToSelect == null) cardsToSelect = new List<IDefenceCard>(1); cardsToSelect.Add(card); } if (cardToResolve == null) { if (cardsToSelect != null) { cardToResolve = (IDefenceCard)context.Game.Dialog.Select(context, this, cardsToSelect.ToArray(), new CountValidator<ICard>(0, 1), "Select a reaction card to use. 
None to skip all").SingleOrDefault(); } } if (cardToResolve == null) break; continueAttack = continueAttack && cardToResolve.ResolveAttack(context, this); resolvedCards.Add(cardToResolve); } return continueAttack; } public void AfterBuy(TurnContext context, ICard boughtCard) { foreach (var card in this.PlayArea.OfType<IPostBuyOtherCardEventCard>()) { card.AfterBuy(context, this, boughtCard); } } public void Cleanup(TurnContext context) { List<IPreCleanupSelfMovementCard> cardsToMove = null; foreach (var card in context.ActivePlayer.PlayArea.OfType<IPreCleanupSelfMovementCard>()) { if (card.ShouldMove(context, context.ActivePlayer)) { if (cardsToMove == null) cardsToMove = new List<IPreCleanupSelfMovementCard>(1); cardsToMove.Add(card); } } if (cardsToMove != null) { foreach (var card in cardsToMove) { card.MoveBeforeCleanup(context, context.ActivePlayer); } } this.PlayArea.Where(context.ShouldNotRemainInPlay).MoveAll(this.PlayArea, this.DiscardArea, CardMovementVerb.Discard, context); this.Hand.MoveAll(this.Hand, this.DiscardArea, CardMovementVerb.Discard, context); if (context.IsNextTurnExtraTurnForActivePlayer && this == context.ActivePlayer) { this.DrawCards(3, context); } else { this.DrawCards(5, context); } } public void AddDurationEffect(IDurationEffect effect) { this.durationEffects.Add(effect); } public void AddGainedCardName(string cardName, TurnContext context) { if (context.ActivePlayer == this) { this.recentGainedCardNamesOnMyTurn.Add(cardName); } } public bool HasRecentlyGainedOnSelfTurn(CardSupplyPile pile) { return this.recentGainedCardNamesOnMyTurn.Contains(pile.Name); } public int Score() { var allCards = this.deck .Concat(this.Hand) .Concat(this.PlayArea) .Concat(this.DiscardArea) .Concat(this.SetAsideArea) .Concat(this.NativeVillageMat); return allCards.OfType<IScoringCard>().Sum(x => x.Score(allCards)) + this.VPTokens; } private void Shuffle<T>(T[] array) { for (int i = 0; i < 3; i++) { CollectionUtil.Shuffle(array, random); } } #endregion } }
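// A small sketch of the Player lifecycle implied above, assuming a TurnContext supplied by the
// surrounding game engine (its construction is not shown in this file).
using System;
using Powercards.Core;

public static class PlayerSketch
{
    public static void RunTurn(Player player, TurnContext context)
    {
        // Init builds the starting ten cards (3 Estates, 7 Coppers), shuffled and split five
        // into the deck and five into the hand.
        player.Init();

        player.BeginTurn(context);
        player.DrawCards(1, context);

        // Cleanup discards the hand and the non-persistent play area, then draws the next hand
        // (five cards, or three when the next turn is an extra turn for the active player).
        player.Cleanup(context);

        Console.WriteLine(player.Name + " score: " + player.Score());
    }
}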
// SPDX-License-Identifier: MIT // Copyright [email protected] // Copyright iced contributors using System; using System.Collections.Generic; using System.IO; using System.Text; using Iced.Intel; namespace Iced.UnitTests.Intel.DecoderTests.MemoryTestGenImpl { readonly struct MemInfo : IEquatable<MemInfo> { public readonly string HexBytes; public readonly int Length; public readonly string Code; public readonly Register Register; public readonly Register PrefixSeg; public readonly Register Segment; public readonly Register BaseReg; public readonly Register IndexReg; public readonly int Scale; public readonly int DisplSize; public readonly uint Displ; public readonly bool IsInvalid; public MemInfo(string hexBytes, int length, string code, Register register, Register prefixSeg, Register baseReg, Register indexReg, int scale, int displSize, uint displ, bool isInvalid) { HexBytes = hexBytes; Length = length; Code = code; Register = register; PrefixSeg = prefixSeg; BaseReg = baseReg; IndexReg = indexReg; Scale = scale; DisplSize = displSize; Displ = displ; IsInvalid = isInvalid; if (prefixSeg != Register.None) Segment = prefixSeg; else if (baseReg == Register.BP || baseReg == Register.ESP || baseReg == Register.RSP || baseReg == Register.EBP || baseReg == Register.RBP) Segment = Register.SS; else Segment = Register.DS; } public bool Equals(MemInfo other) { if (HexBytes != other.HexBytes) return false; if (Length != other.Length || Code != other.Code || Register != other.Register || PrefixSeg != other.PrefixSeg || Segment != other.Segment || BaseReg != other.BaseReg || IndexReg != other.IndexReg || Scale != other.Scale || DisplSize != other.DisplSize || Displ != other.Displ || IsInvalid != other.IsInvalid) throw new InvalidOperationException(); return true; } public override bool Equals(object obj) => obj is MemInfo other && Equals(other); public override int GetHashCode() => HexBytes.GetHashCode(); } class Program { static void Main2(string[] args) => new Program().DoIt(); const string Legacy_Code_16 = nameof(Code.Add_rm16_r16); const string Legacy_Code_32 = nameof(Code.Add_rm32_r32); const string Legacy_Code_64 = nameof(Code.Add_rm32_r32); const Register Legacy_Register_16 = Register.AX; const Register Legacy_Register_32 = Register.EAX; const Register Legacy_Register_64 = Register.EAX; const byte Legacy_OpCode = 0x01; #if !NO_EVEX const string EVEX_Code = nameof(Code.EVEX_Vpscatterdd_vm32x_k1_xmm); #else const string EVEX_Code = "EVEX_Vpscatterdd_vm32x_k1_xmm"; #endif const int EVEX_LL = 0; const int EVEX_pp = 1; const int EVEX_mm = 2; const int EVEX_W = 0; const int EVEX_aaa = 1; const byte EVEX_OpCode = 0xA0; const int EVEX_Displ8N = 4; const Register EVEX_Register = Register.XMM0; readonly StringBuilder sb = new StringBuilder(); int length; string GetCodeValue(int bitness, bool isVsib) => isVsib ? 
EVEX_Code : bitness switch { 16 => Legacy_Code_16, 32 => Legacy_Code_32, 64 => Legacy_Code_64, _ => throw new InvalidOperationException(), }; string GetHexBytes(out int length) { length = this.length; this.length = 0; var res = sb.ToString(); sb.Clear(); return res; } void AddSpace() => sb.Append(" "); void AddByteSpace(byte value) { AddByte(value); AddSpace(); } void AddByte(byte value) { sb.Append(value.ToString("X2")); length++; } void AddUInt16(ushort value) { AddByte((byte)value); AddByte((byte)(value >> 8)); } void AddUInt32(uint value) { AddByte((byte)value); AddByte((byte)(value >> 8)); AddByte((byte)(value >> 16)); AddByte((byte)(value >> 24)); } void EvexEncode(bool addrSizePrefix, byte modRM, int sib, int displSize, uint displ, int regNum, int baseRegNum, int indexRegNum) { if (addrSizePrefix) AddByteSpace(0x67); AddByteSpace(0x62); byte p0 = EVEX_mm; byte p1 = 4 | EVEX_pp | (EVEX_W << 7); byte p2 = EVEX_aaa | (EVEX_LL << 5); if (baseRegNum >= 0) { if ((uint)baseRegNum > 31) throw new InvalidOperationException(); if ((baseRegNum & 8) != 0) p0 |= 0x20; if ((baseRegNum & 0x10) != 0) p0 |= 0x10; } if (indexRegNum >= 0) { if ((uint)indexRegNum > 31) throw new InvalidOperationException(); if ((indexRegNum & 8) != 0) p0 |= 0x40; if ((indexRegNum & 0x10) != 0) p2 |= 8; } if ((uint)regNum > 15) throw new InvalidOperationException(); if ((regNum & 8) != 0) p0 |= 0x80; p0 ^= 0xF0; p1 ^= 0x78; p2 ^= 8; AddByte(p0); AddByte(p1); AddByteSpace(p2); AddByteSpace(EVEX_OpCode); AddByte(modRM); if (sib >= 0) { AddSpace(); AddByte((byte)sib); } EncodeDispl(displSize, displ); } void LegacyEncode(bool addrSizePrefix, int rex, byte modRM, int sib, int displSize, uint displ) { if (addrSizePrefix) AddByteSpace(0x67); if (rex != 0) { if (!(0x40 <= rex && rex <= 0x4F)) throw new InvalidOperationException(); AddByteSpace((byte)rex); } AddByteSpace(Legacy_OpCode); AddByte(modRM); if (sib >= 0) { AddSpace(); AddByte((byte)sib); } EncodeDispl(displSize, displ); } void EncodeDispl(int displSize, uint displ) { switch (displSize) { case 0: break; case 1: AddSpace(); AddByte((byte)displ); break; case 2: AddSpace(); AddUInt16((ushort)displ); break; case 4: case 8: AddSpace(); AddUInt32(displ); break; default: throw new InvalidOperationException(); } } Register GetLegacyRegister(int bitness) => bitness switch { 16 => Legacy_Register_16, 32 => Legacy_Register_32, 64 => Legacy_Register_64, _ => throw new InvalidOperationException(), }; uint GetDispl(int addrSize, int displSize, byte rand) { bool b = ((rand & 8) != 0); switch (displSize) { case 0: return 0; case 1: if (b) return 0x5A; else { if (addrSize == 16) return 0xFFA5; return 0xFFFFFFA5; } case 2: return b ? 0x5AA5U : 0xA55A; case 4: case 8: return b ? 
0x5AA56789 : 0xA55A1234; default: throw new InvalidOperationException(); } } void DoIt() { (int bitness, int addrSize, bool isVsib)[] memInfos = new(int bitness, int addrSize, bool isVsib)[] { (16, 16, false), (16, 32, false), (16, 32, true), (32, 16, false), (32, 32, false), (32, 32, true), (64, 32, false), (64, 64, false), (64, 32, true), (64, 64, true), }; foreach (var memInfo in memInfos) { Console.WriteLine($"Bitness: {memInfo.bitness}, AddrSize: {memInfo.addrSize}, VSIB={memInfo.isVsib}"); var filename = $@"C:\memtestgen\out_cs{memInfo.bitness}_as{memInfo.addrSize}"; if (memInfo.isVsib) filename += "-vsib"; filename += ".bin"; Console.WriteLine($"Filename: {filename}"); if (File.Exists(filename)) File.Delete(filename); using (var file = File.OpenWrite(filename)) { foreach (var info in GetMemInfo(memInfo.bitness, memInfo.addrSize, memInfo.isVsib)) { string displString; switch (info.DisplSize) { case 0: displString = "0"; break; case 1: if (memInfo.addrSize == 16) displString = $"0x{info.Displ:X4}"; else displString = $"0x{info.Displ:X8}"; break; case 2: displString = $"0x{info.Displ:X4}"; break; case 4: case 8: displString = $"0x{info.Displ:X8}"; break; default: throw new InvalidOperationException(); } Console.WriteLine($"{info.HexBytes}, {info.Length}, {info.Code}, {ToString(info.Register)}, {ToString(info.PrefixSeg)}, {ToString(info.Segment)}, {ToString(info.BaseReg)}, {ToString(info.IndexReg)}, {info.Scale}, {displString}, {info.DisplSize}"); var data = HexUtils.ToByteArray(info.HexBytes); file.Write(data, 0, data.Length); } } Console.WriteLine(); } } static string ToString(Register register) => register.ToString().ToLowerInvariant(); IEnumerable<MemInfo> GetMemInfo(int bitness, int addrSize, bool isVsib) { var hash = new HashSet<MemInfo>(); if (addrSize == 16) { foreach (var info in GetMemInfo16(bitness, addrSize, isVsib)) { if (!hash.Add(info)) continue; yield return info; } } else { foreach (var info in GetMemInfo3264(bitness, addrSize, isVsib)) { if (!hash.Add(info)) continue; yield return info; } } } IEnumerable<MemInfo> GetMemInfo16(int bitness, int addrSize, bool isVsib) { if (isVsib) throw new InvalidOperationException(); for (uint i = 0; i < 0x100; i++) { byte modRM = (byte)i; var mod = (modRM >> 6) & 3; if (mod == 3) continue; int reg = (int)((modRM >> 3) & 7); int rm = (int)(modRM & 7); var prefixSeg = Register.None; int scale = 0; var register = GetLegacyRegister(bitness) + reg; Register baseReg, indexReg; switch (rm) { case 0: baseReg = Register.BX; indexReg = Register.SI; break; case 1: baseReg = Register.BX; indexReg = Register.DI; break; case 2: baseReg = Register.BP; indexReg = Register.SI; break; case 3: baseReg = Register.BP; indexReg = Register.DI; break; case 4: baseReg = Register.SI; indexReg = Register.None; break; case 5: baseReg = Register.DI; indexReg = Register.None; break; case 6: baseReg = Register.BP; indexReg = Register.None; break; case 7: baseReg = Register.BX; indexReg = Register.None; break; default: throw new InvalidOperationException(); } int displSize; switch (mod) { case 0: if (rm == 6) { displSize = 2; baseReg = Register.None; indexReg = Register.None; } else displSize = 0; break; case 1: displSize = 1; break; case 2: displSize = 2; break; default: throw new InvalidOperationException(); } uint displ = GetDispl(addrSize, displSize, modRM); LegacyEncode(bitness != addrSize, 0, modRM, -1, displSize, displ); var hexBytes = GetHexBytes(out int length); yield return new MemInfo(hexBytes, length, GetCodeValue(bitness, isVsib), register, prefixSeg, 
baseReg, indexReg, scale, displSize, displ, isInvalid: false); } } IEnumerable<MemInfo> GetMemInfo3264(int bitness, int addrSize, bool isVsib) { if (isVsib) { foreach (var info in GetMemInfo3264(bitness, addrSize, isVsib, onlySib: true)) yield return info; } else { foreach (var info in GetMemInfo3264(bitness, addrSize, isVsib, onlySib: false)) yield return info; foreach (var info in GetMemInfo3264(bitness, addrSize, isVsib, onlySib: true)) yield return info; } } IEnumerable<MemInfo> GetMemInfo3264(int bitness, int addrSize, bool isVsib, bool onlySib) { Register defaultBaseReg; if (addrSize == 32) defaultBaseReg = Register.EAX; else defaultBaseReg = Register.RAX; var defaultIndexReg = isVsib ? Register.XMM0 : defaultBaseReg; uint max; // [7:0] = modRM // [8] = 'register' register bit 3 (bit 4 isn't used if it's XMM) // [9] = 'baseReg' register bit 3 // [11:10] = 'indexReg' register bits [4:3] if (bitness == 64) { if (isVsib) max = 0x100 * 16; else max = 0x100 * 8; } else max = 0x100; for (uint i = 0; i < max; i++) { byte modRM = (byte)i; var mod = (modRM >> 6) & 3; if (mod == 3) continue; int reg = (int)((modRM >> 3) & 7); int rm = (int)(modRM & 7); bool hasSib = rm == 4; if (hasSib) { if (!onlySib) continue; } else { if (onlySib) continue; } uint extraRegister = ((i >> 8) & 1) << 3; uint extraBaseReg = ((i >> 9) & 1) << 3; uint extraIndexReg = ((i >> 10) & 3) << 3; int regNum = (int)extraRegister + reg; var register = isVsib ? regNum + EVEX_Register : GetLegacyRegister(bitness) + regNum; var baseReg = defaultBaseReg + (int)extraBaseReg + rm; // Only test the memory operand if (regNum != 6) continue; int displSize; switch (mod) { case 0: displSize = 0; if (rm == 5) { displSize = addrSize == 64 ? 8 : 4; if (bitness == 64) baseReg = addrSize == 32 ? Register.EIP : Register.RIP; else baseReg = Register.None; } break; case 1: displSize = 1; break; case 2: displSize = addrSize == 64 ? 8 : 4; break; default: throw new InvalidOperationException(); } if (!hasSib) yield return GetMemInfo3264(bitness, addrSize, isVsib, modRM, -1, defaultBaseReg, defaultIndexReg, register, baseReg, extraBaseReg, extraIndexReg, displSize); else { for (int sib = 0; sib < 0x100; sib++) yield return GetMemInfo3264(bitness, addrSize, isVsib, modRM, sib, defaultBaseReg, defaultIndexReg, register, baseReg, extraBaseReg, extraIndexReg, displSize); } } } MemInfo GetMemInfo3264(int bitness, int addrSize, bool isVsib, byte modRM, int sib, Register defaultBaseReg, Register defaultIndexReg, Register register, Register baseReg, uint extraBaseReg, uint extraIndexReg, int displSize) { var mod = (modRM >> 6) & 3; Register indexReg; int scale; if (sib >= 0) { scale = (sib >> 6) & 3; int baseNum = (int)extraBaseReg + (sib & 7); baseReg = defaultBaseReg + baseNum; int indexNum = (int)extraIndexReg + ((sib >> 3) & 7); indexReg = defaultIndexReg + indexNum; if (!isVsib && indexNum == 4) indexReg = Register.None; if (mod == 0 && (baseNum & 7) == 5) { baseReg = Register.None; displSize = addrSize == 64 ? 8 : 4; } } else { scale = 0; indexReg = Register.None; } bool isInvalid = false; int baseRegNum = baseReg == Register.None || baseReg == Register.RIP || baseReg == Register.EIP ? -1 : baseReg - defaultBaseReg; int indexRegNum = indexReg == Register.None ? -1 : indexReg - defaultIndexReg; var regBase = isVsib ? EVEX_Register : GetLegacyRegister(bitness); int regNum = register - regBase; var prefixSeg = Register.None; uint displ = GetDispl(addrSize, displSize, sib >= 0 ? 
(byte)sib : modRM); if (isVsib) { EvexEncode(bitness != addrSize, modRM, sib, displSize, displ, regNum, baseRegNum, indexRegNum); if (displSize == 1) displ = (uint)((int)(sbyte)displ * EVEX_Displ8N); } else { int rex = 0; if (baseRegNum >= 0) { if ((uint)baseRegNum > 15) throw new InvalidOperationException(); if (baseRegNum >= 8) rex |= 1; } if (indexRegNum >= 0) { if ((uint)indexRegNum > 15) throw new InvalidOperationException(); if (indexRegNum >= 8) rex |= 2; } if ((uint)regNum > 15) throw new InvalidOperationException(); if (regNum >= 8) rex |= 4; if (rex != 0) rex |= 0x40; LegacyEncode(bitness != addrSize, rex, modRM, sib, displSize, displ); } var hexBytes = GetHexBytes(out int length); return new MemInfo(hexBytes, length, GetCodeValue(bitness, isVsib), register, prefixSeg, baseReg, indexReg, scale, displSize, displ, isInvalid); } } }
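// The generator above enumerates every ModRM/SIB/displacement combination and writes the
// resulting byte sequences to .bin files. As an illustrative aside (not part of the
// generator), the sketch below packs a single 32-bit memory operand the same way the
// LegacyEncode path does: mod/reg/rm in the ModRM byte, scale/index/base in the SIB byte,
// then the little-endian displacement. The names Mem32Sketch and EncodeMem32 are
// hypothetical; only standard x86 encoding rules are assumed.
using System;
using System.Collections.Generic;

static class Mem32Sketch {
    // Encodes "opcode /r" with a [base + index*scale + disp32] operand (32-bit addressing).
    static byte[] EncodeMem32(byte opcode, int reg, int baseReg, int indexReg, int scaleShift, uint disp32) {
        var bytes = new List<byte> { opcode };
        // mod=10 selects a 32-bit displacement, rm=100 selects the SIB byte.
        bytes.Add((byte)((2 << 6) | ((reg & 7) << 3) | 4));
        // SIB: scale (log2 of the multiplier) in bits 7:6, index in 5:3, base in 2:0.
        bytes.Add((byte)(((scaleShift & 3) << 6) | ((indexReg & 7) << 3) | (baseReg & 7)));
        for (int i = 0; i < 4; i++)
            bytes.Add((byte)(disp32 >> (8 * i)));
        return bytes.ToArray();
    }

    static void Main() {
        // add ecx, [eax + edx*4 + 0x12345678]  (opcode 0x03, reg=ecx=1, base=eax=0, index=edx=2)
        Console.WriteLine(BitConverter.ToString(EncodeMem32(0x03, 1, 0, 2, 2, 0x12345678)));
    }
}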
// // Copyright (c) 2004-2016 Jaroslaw Kowalski <[email protected]>, Kim Christensen, Julian Verdurmen // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. // namespace NLog.UnitTests.Targets { using System; using System.Collections.Generic; using NLog.Common; using NLog.LogReceiverService; using NLog.Config; using NLog.Targets; using Xunit; using NLog.Targets.Wrappers; using System.Threading; public class LogReceiverWebServiceTargetTests : NLogTestBase { [Fact] public void LogReceiverWebServiceTargetSingleEventTest() { var logger = LogManager.GetLogger("loggerName"); var target = new MyLogReceiverWebServiceTarget(); target.EndpointAddress = "http://notimportant:9999/"; target.Parameters.Add(new MethodCallParameter("message", "${message}")); target.Parameters.Add(new MethodCallParameter("lvl", "${level}")); SimpleConfigurator.ConfigureForTargetLogging(target); logger.Info("message text"); var payload = target.LastPayload; Assert.Equal(2, payload.LayoutNames.Count); Assert.Equal("message", payload.LayoutNames[0]); Assert.Equal("lvl", payload.LayoutNames[1]); Assert.Equal(3, payload.Strings.Count); Assert.Equal(1, payload.Events.Length); Assert.Equal("message text", payload.Strings[payload.Events[0].ValueIndexes[0]]); Assert.Equal("Info", payload.Strings[payload.Events[0].ValueIndexes[1]]); Assert.Equal("loggerName", payload.Strings[payload.Events[0].LoggerOrdinal]); } [Fact] public void LogReceiverWebServiceTargetMultipleEventTest() { var target = new MyLogReceiverWebServiceTarget(); target.EndpointAddress = "http://notimportant:9999/"; target.Parameters.Add(new MethodCallParameter("message", "${message}")); target.Parameters.Add(new MethodCallParameter("lvl", "${level}")); var exceptions = new List<Exception>(); var events = new[] { LogEventInfo.Create(LogLevel.Info, "logger1", "message1").WithContinuation(exceptions.Add), LogEventInfo.Create(LogLevel.Debug, "logger2", "message2").WithContinuation(exceptions.Add), LogEventInfo.Create(LogLevel.Fatal, "logger1", "message2").WithContinuation(exceptions.Add), }; var configuration = new 
LoggingConfiguration(); target.Initialize(configuration); target.WriteAsyncLogEvents(events); // with multiple events, we should get string caching var payload = target.LastPayload; Assert.Equal(2, payload.LayoutNames.Count); Assert.Equal("message", payload.LayoutNames[0]); Assert.Equal("lvl", payload.LayoutNames[1]); // 7 strings instead of 9 since 'logger1' and 'message2' are being reused Assert.Equal(7, payload.Strings.Count); Assert.Equal(3, payload.Events.Length); Assert.Equal("message1", payload.Strings[payload.Events[0].ValueIndexes[0]]); Assert.Equal("message2", payload.Strings[payload.Events[1].ValueIndexes[0]]); Assert.Equal("message2", payload.Strings[payload.Events[2].ValueIndexes[0]]); Assert.Equal("Info", payload.Strings[payload.Events[0].ValueIndexes[1]]); Assert.Equal("Debug", payload.Strings[payload.Events[1].ValueIndexes[1]]); Assert.Equal("Fatal", payload.Strings[payload.Events[2].ValueIndexes[1]]); Assert.Equal("logger1", payload.Strings[payload.Events[0].LoggerOrdinal]); Assert.Equal("logger2", payload.Strings[payload.Events[1].LoggerOrdinal]); Assert.Equal("logger1", payload.Strings[payload.Events[2].LoggerOrdinal]); Assert.Equal(payload.Events[0].LoggerOrdinal, payload.Events[2].LoggerOrdinal); } [Fact] public void LogReceiverWebServiceTargetMultipleEventWithPerEventPropertiesTest() { var target = new MyLogReceiverWebServiceTarget(); target.IncludeEventProperties = true; target.EndpointAddress = "http://notimportant:9999/"; target.Parameters.Add(new MethodCallParameter("message", "${message}")); target.Parameters.Add(new MethodCallParameter("lvl", "${level}")); var exceptions = new List<Exception>(); var events = new[] { LogEventInfo.Create(LogLevel.Info, "logger1", "message1").WithContinuation(exceptions.Add), LogEventInfo.Create(LogLevel.Debug, "logger2", "message2").WithContinuation(exceptions.Add), LogEventInfo.Create(LogLevel.Fatal, "logger1", "message2").WithContinuation(exceptions.Add), }; events[0].LogEvent.Properties["prop1"] = "value1"; events[1].LogEvent.Properties["prop1"] = "value2"; events[2].LogEvent.Properties["prop1"] = "value3"; events[0].LogEvent.Properties["prop2"] = "value2a"; var configuration = new LoggingConfiguration(); target.Initialize(configuration); target.WriteAsyncLogEvents(events); // with multiple events, we should get string caching var payload = target.LastPayload; // 4 layout names - 2 from Parameters, 2 from unique properties in events Assert.Equal(4, payload.LayoutNames.Count); Assert.Equal("message", payload.LayoutNames[0]); Assert.Equal("lvl", payload.LayoutNames[1]); Assert.Equal("prop1", payload.LayoutNames[2]); Assert.Equal("prop2", payload.LayoutNames[3]); Assert.Equal(12, payload.Strings.Count); Assert.Equal(3, payload.Events.Length); Assert.Equal("message1", payload.Strings[payload.Events[0].ValueIndexes[0]]); Assert.Equal("message2", payload.Strings[payload.Events[1].ValueIndexes[0]]); Assert.Equal("message2", payload.Strings[payload.Events[2].ValueIndexes[0]]); Assert.Equal("Info", payload.Strings[payload.Events[0].ValueIndexes[1]]); Assert.Equal("Debug", payload.Strings[payload.Events[1].ValueIndexes[1]]); Assert.Equal("Fatal", payload.Strings[payload.Events[2].ValueIndexes[1]]); Assert.Equal("value1", payload.Strings[payload.Events[0].ValueIndexes[2]]); Assert.Equal("value2", payload.Strings[payload.Events[1].ValueIndexes[2]]); Assert.Equal("value3", payload.Strings[payload.Events[2].ValueIndexes[2]]); Assert.Equal("value2a", payload.Strings[payload.Events[0].ValueIndexes[3]]); Assert.Equal("", 
payload.Strings[payload.Events[1].ValueIndexes[3]]); Assert.Equal("", payload.Strings[payload.Events[2].ValueIndexes[3]]); Assert.Equal("logger1", payload.Strings[payload.Events[0].LoggerOrdinal]); Assert.Equal("logger2", payload.Strings[payload.Events[1].LoggerOrdinal]); Assert.Equal("logger1", payload.Strings[payload.Events[2].LoggerOrdinal]); Assert.Equal(payload.Events[0].LoggerOrdinal, payload.Events[2].LoggerOrdinal); } [Fact] public void NoEmptyEventLists() { var configuration = new LoggingConfiguration(); var target = new MyLogReceiverWebServiceTarget(); target.EndpointAddress = "http://notimportant:9999/"; target.Initialize(configuration); var asyncTarget = new AsyncTargetWrapper(target) { Name = "NoEmptyEventLists_wrapper" }; try { asyncTarget.Initialize(configuration); asyncTarget.WriteAsyncLogEvents(new[] { LogEventInfo.Create(LogLevel.Info, "logger1", "message1").WithContinuation(ex => { }) }); Thread.Sleep(1000); Assert.Equal(1, target.SendCount); } finally { asyncTarget.Close(); target.Close(); } } public class MyLogReceiverWebServiceTarget : LogReceiverWebServiceTarget { public NLogEvents LastPayload; public int SendCount; public MyLogReceiverWebServiceTarget() : base() { } public MyLogReceiverWebServiceTarget(string name) : base(name) { } protected internal override bool OnSend(NLogEvents events, IEnumerable<AsyncLogEventInfo> asyncContinuations) { this.LastPayload = events; ++this.SendCount; foreach (var ac in asyncContinuations) { ac.Continuation(null); } return false; } } } }
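// A minimal usage sketch for the target exercised by the tests above: it wires a
// LogReceiverWebServiceTarget into a configuration and emits one event, mirroring what
// LogReceiverWebServiceTargetSingleEventTest does without the assertions. The endpoint URL
// is a placeholder; only members already used in the tests (EndpointAddress, Parameters,
// MethodCallParameter, SimpleConfigurator, LogManager) are relied upon.
namespace NLog.UnitTests.Targets.Sketches
{
    using NLog;
    using NLog.Config;
    using NLog.LogReceiverService;
    using NLog.Targets;

    static class LogReceiverUsageSketch
    {
        public static void Run()
        {
            var target = new LogReceiverWebServiceTarget
            {
                EndpointAddress = "http://localhost:9999/", // placeholder endpoint
            };
            target.Parameters.Add(new MethodCallParameter("message", "${message}"));
            target.Parameters.Add(new MethodCallParameter("lvl", "${level}"));

            // Route all loggers to this target for the purposes of the sketch.
            SimpleConfigurator.ConfigureForTargetLogging(target);

            LogManager.GetLogger("sketchLogger").Info("hello from the sketch");
        }
    }
}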
using System; using System.Collections.Generic; namespace Algorithms.Utils { /// <summary> /// Visit the node /// </summary> /// <param name="sourceLabel">Node label</param> public delegate void VisitNode(int sourceLabel); /// <summary> /// Visit the edge /// </summary> /// <param name="source">Source node label</param> /// <param name="target">Target node label</param> /// <param name="start">Start of the suffix</param> /// <param name="end">End of the suffix</param> public delegate void VisitEdge(int source, int target, int start, int end); public class SuffixTree { Node _activeNode; // keep track of last branch nodes to add suffix pointers Node _lastBranchNode; int _activeLength = 0; int _lastBranchIndex = 0; uint _currentNodeNumber = 0; int _minDistance = 0; /// <summary> /// Root node for walking the tree /// </summary> internal Node RootNode { get; private set; } /// <summary> /// Original text /// </summary> public static string Text { get; private set; } /// <summary> /// Try finding the suffix in the tree /// </summary> /// <param name="suffix"></param> /// <returns></returns> public bool TryFind(string suffix, ref int start, ref int end) { if (string.IsNullOrWhiteSpace(suffix)) { return false; } Node current = this.RootNode; Edge edge = null; bool setEnd = true; for (int i = 0; i < suffix.Length; ) { edge = current.FindEdgeByChar(suffix[i]); if (edge == null) { return false; } // we skip one character in the edge as well as the // suffix since it was already picked up by the call above int j = edge.Walk(suffix, i, 1); i += j; // if we have walked the edge... if (i >= suffix.Length) { // ... and terminated before reaching a node if (j < edge.Route.Length) { end = NormalizeEndValue(edge.End) - (edge.Route.Length - j); setEnd = false; } break; } if (j < edge.Route.Length) { return false; } current = edge.EndNode; } if (setEnd) { end = NormalizeEndValue(edge.End); } start = end - suffix.Length + 1; return true; } /// <summary> /// Construct the tree for a given string of text /// </summary> /// <param name="text">text from which the tree is constructed</param> public SuffixTree(string text) { if (string.IsNullOrWhiteSpace(text)) { throw new ArgumentNullException(text); } Text = text; _activeNode = new Node(_currentNodeNumber); RootNode = _activeNode; } /// <summary> /// Creates the actual suffix tree /// </summary> public void Create() { // some of our loop iterations actually constitute one route // in that case we should not chose to accidentally slip and // follow the suffix node bool followSuffixNode = false; for (int i=0; i < Text.Length;) { // make sure the lower bound remains within its boundaries ValidateAndUpdateMinDistance(i); var nodeEdge = _activeNode.FindNextRoute(i + _activeLength, followSuffixNode); //if we have terminated in a non-leaf node we are done if (i + _activeLength >= Text.Length && nodeEdge == null) { break; } // we could not find anything, add to the tree if (nodeEdge == null) { _activeNode.AddNode(++_currentNodeNumber, i + _activeLength); _lastBranchIndex = i + _activeLength; i++; followSuffixNode = true; continue; } var node = nodeEdge.Item1; var edge = nodeEdge.Item2; if (edge == null) { //we found a suffix node _activeNode = node; _activeLength--; followSuffixNode = false; continue; } else if(node != null) { //we found a new active _activeNode = node; _activeLength++; followSuffixNode = false; continue; } // now walk the chosen path and see where the current suffix diverges var edgePosTuple = edge.WalkTheEdge(i, ref _activeLength, ref _minDistance, ref 
_activeNode); edge = edgePosTuple.Item1; int j = edgePosTuple.Item2; if (j == edge.Route.Length) { _activeNode = edge.EndNode; _activeLength += edge.Route.Length; followSuffixNode = false; continue; } // we now need to insert a new branch node _minDistance = j; _lastBranchIndex = i + j + _activeLength; if (_lastBranchIndex >= Text.Length) { i++; followSuffixNode = true; continue; } // we are inserting a new branch node var newBranchNode = edge.Split(edge.Start + j - 1, ++_currentNodeNumber); // if we have reached this branch node through a route of just // one character - the last branch node should be set as the if (edge.Route.Length == 1) { newBranchNode.SuffixPointer = _activeNode; } // the second check is because of the root-node suffix pointer special case // above if (null != _lastBranchNode && _lastBranchNode.SuffixPointer == null) { _lastBranchNode.SuffixPointer = newBranchNode; } newBranchNode.AddNode(++_currentNodeNumber, _lastBranchIndex); _lastBranchNode = newBranchNode; i++; followSuffixNode = true; } } /// <summary> /// Breadth-first walk of the tree /// </summary> /// <param name="visit">Visit type delegate</param> public void WalkTree(VisitNode visitNode, VisitEdge visitEdge) { Queue<Node> walkingQueue = new Queue<Node>(); for (walkingQueue.Enqueue(this.RootNode); walkingQueue.Count > 0; ) { var currentNode = walkingQueue.Dequeue(); visitNode((int)currentNode.Label); foreach (var edge in currentNode.Edges) { walkingQueue.Enqueue(edge.Value.EndNode); visitEdge((int)currentNode.Label, (int)edge.Value.EndNode.Label, edge.Value.Start, NormalizeEndValue(edge.Value.End)); } } } private int NormalizeEndValue(int end) { if (end < 0) { return Text.Length - 1; } return end; } // makes sure that minDistance remains a lower bound // in the equation lastBranchIndex >= i + activeLength + minDistance private void ValidateAndUpdateMinDistance(int index) { if (_lastBranchIndex < _activeLength + _minDistance + index) { _minDistance = Math.Max(0, _lastBranchIndex - _activeLength - index); } } } class Edge { internal Node EndNode { get; private set; } internal int Start { get; private set; } internal int End { get; private set; } internal string Route { get { return GetSubstring(); } } /// <summary> /// constructor that takes relative text position /// </summary> /// <param name="start"></param> /// <param name="end"></param> public Edge(Node node, int start, int end = -1) { if (node == null) { throw new ArgumentNullException("node"); } if (start < 0) { throw new ArgumentOutOfRangeException("start", "start cannot be negative"); } // pretend that "end" can be infinite, and then compare with start if (start > (uint)end) { throw new ArgumentOutOfRangeException("start", "cannot start the string after its end"); } // infinity is just -1 if (end < 0) { end = -1; } this.Start = start; this.End = end; this.EndNode = node; } private int GetLength() { return this.End < 0 ? SuffixTree.Text.Length - this.Start : this.End - this.Start + 1; } private string GetSubstring() { return SuffixTree.Text.Substring(this.Start, GetLength()); } /// <summary> /// Splits the edge into two new edges. 
/// </summary> /// <param name="end">Index of the end of the old edge</param> /// <returns></returns> internal Node Split(int end, uint currentNodeNumber) { int nextStart = end + 1; var oldNode = this.EndNode; var newEdge = new Edge(oldNode, nextStart, this.End); Node newNode = new Node(currentNodeNumber); this.End = end; this.EndNode = newNode; newNode.Edges.Add(newEdge.Route[0], newEdge); return newNode; } /// <summary> /// Keep comparing original text from position i /// with what is in the edge /// </summary> /// <param name="i">Index of comparison start in the original text</param> /// <param name="skipCharacters"> How many characters are guaranteed equal</param> /// <returns>(edge, index) - the edje the character in it where the walk ended</returns> internal Tuple<Edge,int> WalkTheEdge(int i, ref int activeLength, ref int minDistance, ref Node activeNode) { string text = SuffixTree.Text; int skipCharacters = minDistance; int index = i + activeLength; // we know we do not need any comparisons on this edge if (skipCharacters >= this.Route.Length) { var edge = this.EndNode.FindEdgeByChar(i + this.Route.Length); activeLength += this.Route.Length; minDistance -= this.Route.Length; activeNode = this.EndNode; return edge.WalkTheEdge(i, ref activeLength, ref minDistance, ref activeNode); } int j = Walk(text, index, skipCharacters); return new Tuple<Edge, int>(this, j); } /// <summary> /// Walk this single edge to see whether it matches the substring /// </summary> /// <param name="suffix">Search string</param> /// <param name="i">Starting index</param> /// <returns></returns> internal int Walk(string suffix, int i, int skip = 0) { int j; for (j = skip, i += j; j < Route.Length && i < suffix.Length; j++, i++) { if (Route[j] != suffix[i]) { break; } } return j; } } class Node { internal uint Label { get; set; } internal Dictionary<char, Edge> Edges { get; private set; } internal Node SuffixPointer { get; set; } public Node(uint label) { this.Label = label; this.Edges = new Dictionary<char, Edge>(); this.SuffixPointer = null; } /// <summary> /// finds next route starting from the current node /// </summary> /// <param name="start"></param> /// <returns></returns> internal Tuple<Node, Edge> FindNextRoute(int start, bool followSuffixNode) { if (followSuffixNode && null != SuffixPointer) { return new Tuple<Node,Edge>(SuffixPointer, null); } var edge = FindEdgeByChar(start); if (null == edge) { return null; } // search terminated in a node if (edge.Route.Length == 1) { return new Tuple<Node, Edge>(edge.EndNode, edge); } //search did not terminate in a node return new Tuple<Node, Edge>(null, edge); } /// <summary> /// Adds a new node to the tree /// </summary> /// <param name="label">Node label</param> /// <param name="start">Start position in the text</param> /// <param name="end">End position in the text</param> internal void AddNode(uint label, int start, int end = -1) { var newNode = new Node(label); var newEdge = new Edge(newNode, start, end); this.Edges.Add(newEdge.Route[0], newEdge); } internal Edge FindEdgeByChar(int start) { //we have reached the end of the string if (start >= SuffixTree.Text.Length) { return null; } return FindEdgeByChar(SuffixTree.Text[start]); } internal Edge FindEdgeByChar(char c) { if (!this.Edges.ContainsKey(c)) { return null; } return this.Edges[c]; } } }
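// A short usage sketch for the suffix tree above: build it, search for a substring, and
// dump the structure with the two visitor delegates. Only members defined in this file
// (the constructor, Create, TryFind, WalkTree) are used; the sample text is arbitrary.
namespace Algorithms.Utils.Sketches
{
    using System;
    using Algorithms.Utils;

    static class SuffixTreeUsageSketch
    {
        public static void Run()
        {
            var tree = new SuffixTree("bananas");
            tree.Create();

            int start = 0, end = 0;
            if (tree.TryFind("nan", ref start, ref end))
                Console.WriteLine($"'nan' found at [{start}..{end}]");

            // Breadth-first dump of nodes and edges.
            tree.WalkTree(
                node => Console.WriteLine($"node {node}"),
                (source, target, s, e) => Console.WriteLine($"edge {source} -> {target}: [{s}..{e}]"));
        }
    }
}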
// // AddinScanFolderInfo.cs // // Author: // Lluis Sanchez Gual // // Copyright (C) 2007 Novell, Inc (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using System.IO; using System.Collections; using System.Collections.Specialized; using Mono.Addins.Serialization; namespace Mono.Addins.Database { class AddinScanFolderInfo: IBinaryXmlElement { Hashtable files = new Hashtable (); string folder; string fileName; string domain; bool sharedFolder = true; static BinaryXmlTypeMap typeMap = new BinaryXmlTypeMap ( typeof(AddinScanFolderInfo), typeof(AddinFileInfo) ); internal AddinScanFolderInfo () { } public AddinScanFolderInfo (string folder) { this.folder = folder; } public string FileName { get { return fileName; } } public static AddinScanFolderInfo Read (FileDatabase filedb, string file) { AddinScanFolderInfo finfo = (AddinScanFolderInfo) filedb.ReadSharedObject (file, typeMap); if (finfo != null) finfo.fileName = file; return finfo; } public static AddinScanFolderInfo Read (FileDatabase filedb, string basePath, string folderPath) { string fileName; AddinScanFolderInfo finfo = (AddinScanFolderInfo) filedb.ReadSharedObject (basePath, GetDomain (folderPath), ".data", Path.GetFullPath (folderPath), typeMap, out fileName); if (finfo != null) finfo.fileName = fileName; return finfo; } static string GetDomain (string path) { path = Path.GetFullPath (path); string s = path.Replace (Path.DirectorySeparatorChar, '_'); s = s.Replace (Path.AltDirectorySeparatorChar, '_'); s = s.Replace (Path.VolumeSeparatorChar, '_'); s = s.Trim ('_'); return s; } public void Write (FileDatabase filedb, string basePath) { filedb.WriteSharedObject (basePath, GetDomain (folder), ".data", Path.GetFullPath (folder), fileName, typeMap, this); } public string GetExistingLocalDomain () { foreach (AddinFileInfo info in files.Values) { if (info.Domain != null && info.Domain != AddinDatabase.GlobalDomain) return info.Domain; } return AddinDatabase.GlobalDomain; } public string Folder { get { return folder; } } public string Domain { get { if (sharedFolder) return AddinDatabase.GlobalDomain; else return domain; } set { domain = value; sharedFolder = true; } } public string RootsDomain { get { return domain; } set { domain = value; } } public string GetDomain (bool isRoot) { if (isRoot) return RootsDomain; else return Domain; } public bool SharedFolder { get { return sharedFolder; } set { sharedFolder = value; } } public DateTime GetLastScanTime (string file) { AddinFileInfo info = 
(AddinFileInfo) files [file]; if (info == null) return DateTime.MinValue; else return info.LastScan; } public AddinFileInfo GetAddinFileInfo (string file) { return (AddinFileInfo) files [file]; } public AddinFileInfo SetLastScanTime (string file, string addinId, bool isRoot, DateTime time, bool scanError) { AddinFileInfo info = (AddinFileInfo) files [file]; if (info == null) { info = new AddinFileInfo (); info.File = file; files [file] = info; } info.LastScan = time; info.AddinId = addinId; info.IsRoot = isRoot; info.ScanError = scanError; if (addinId != null) info.Domain = GetDomain (isRoot); else info.Domain = null; return info; } public ArrayList GetMissingAddins (AddinFileSystemExtension fs) { ArrayList missing = new ArrayList (); if (!fs.DirectoryExists (folder)) { // All deleted foreach (AddinFileInfo info in files.Values) { if (info.IsAddin) missing.Add (info); } files.Clear (); return missing; } ArrayList toDelete = new ArrayList (); foreach (AddinFileInfo info in files.Values) { if (!fs.FileExists (info.File)) { if (info.IsAddin) missing.Add (info); toDelete.Add (info.File); } else if (info.IsAddin && info.Domain != GetDomain (info.IsRoot)) { missing.Add (info); } } foreach (string file in toDelete) files.Remove (file); return missing; } void IBinaryXmlElement.Write (BinaryXmlWriter writer) { if (files.Count == 0) { domain = null; sharedFolder = true; } writer.WriteValue ("folder", folder); writer.WriteValue ("files", files); writer.WriteValue ("domain", domain); writer.WriteValue ("sharedFolder", sharedFolder); } void IBinaryXmlElement.Read (BinaryXmlReader reader) { folder = reader.ReadStringValue ("folder"); reader.ReadValue ("files", files); domain = reader.ReadStringValue ("domain"); sharedFolder = reader.ReadBooleanValue ("sharedFolder"); } } class AddinFileInfo: IBinaryXmlElement { public string File; public DateTime LastScan; public string AddinId; public bool IsRoot; public bool ScanError; public string Domain; public StringCollection IgnorePaths; public bool IsAddin { get { return AddinId != null && AddinId.Length != 0; } } public void AddPathToIgnore (string path) { if (IgnorePaths == null) IgnorePaths = new StringCollection (); IgnorePaths.Add (path); } void IBinaryXmlElement.Write (BinaryXmlWriter writer) { writer.WriteValue ("File", File); writer.WriteValue ("LastScan", LastScan); writer.WriteValue ("AddinId", AddinId); writer.WriteValue ("IsRoot", IsRoot); writer.WriteValue ("ScanError", ScanError); writer.WriteValue ("Domain", Domain); if (IgnorePaths != null && IgnorePaths.Count > 0) writer.WriteValue ("IgnorePaths", IgnorePaths); } void IBinaryXmlElement.Read (BinaryXmlReader reader) { File = reader.ReadStringValue ("File"); LastScan = reader.ReadDateTimeValue ("LastScan"); AddinId = reader.ReadStringValue ("AddinId"); IsRoot = reader.ReadBooleanValue ("IsRoot"); ScanError = reader.ReadBooleanValue ("ScanError"); Domain = reader.ReadStringValue ("Domain"); IgnorePaths = (StringCollection) reader.ReadValue ("IgnorePaths", new StringCollection ()); } } }
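// An illustrative sketch (which would have to live in the same assembly, since the types
// above are internal) of how the scan-state bookkeeping is used: record a scan result per
// file, then query it back on a later pass to decide whether a rescan is needed. The paths
// and the add-in id below are placeholders.
namespace Mono.Addins.Database.Sketches
{
    using System;
    using Mono.Addins.Database;

    static class ScanFolderInfoSketch
    {
        public static void Run()
        {
            var folderInfo = new AddinScanFolderInfo("/addins/folder");

            // Record that an add-in file was scanned successfully just now.
            folderInfo.SetLastScanTime("/addins/folder/MyAddin.dll", "MyAddin,1.0",
                isRoot: false, time: DateTime.Now, scanError: false);

            // A later scan can compare time stamps and inspect the stored file info.
            DateTime last = folderInfo.GetLastScanTime("/addins/folder/MyAddin.dll");
            var fileInfo = folderInfo.GetAddinFileInfo("/addins/folder/MyAddin.dll");
            Console.WriteLine($"last scan: {last}, is add-in: {fileInfo.IsAddin}");
        }
    }
}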
// // Copyright 2014 Gustavo J Knuppe (https://github.com/knuppe) // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // - May you do good and not evil. - // - May you find forgiveness for yourself and forgive others. - // - May you share freely, never taking more than you give. - // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // /* Copyright (c) 2001, Dr Martin Porter Copyright (c) 2002, Richard Boulton All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * Neither the name of the copyright holders nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ // This file was generated automatically by the Snowball to OpenNLP and // ported to SharpNL namespace SharpNL.Stemmer.Snowball { public class HungarianStemmer : SnowballStemmer { private static HungarianStemmer instance; /// <summary> /// Gets the <see cref="HungarianStemmer"/> instance. /// </summary> /// <value>The <see cref="HungarianStemmer"/> instance.</value> public static HungarianStemmer Instance => instance ?? (instance = new HungarianStemmer()); private HungarianStemmer() { } /// <summary> /// Reduces the given word into its stem. 
/// </summary> /// <param name="word">The word.</param> /// <returns>The stemmed word.</returns> public override string Stem(string word) { Current = word.ToLowerInvariant(); CanStem(); return Current; } private static readonly Among[] a_0 = { new Among("cs", -1, -1, null), new Among("dzs", -1, -1, null), new Among("gy", -1, -1, null), new Among("ly", -1, -1, null), new Among("ny", -1, -1, null), new Among("sz", -1, -1, null), new Among("ty", -1, -1, null), new Among("zs", -1, -1, null) }; private static readonly Among[] a_1 = { new Among("\u00E1", -1, 1, null), new Among("\u00E9", -1, 2, null) }; private static readonly Among[] a_2 = { new Among("bb", -1, -1, null), new Among("cc", -1, -1, null), new Among("dd", -1, -1, null), new Among("ff", -1, -1, null), new Among("gg", -1, -1, null), new Among("jj", -1, -1, null), new Among("kk", -1, -1, null), new Among("ll", -1, -1, null), new Among("mm", -1, -1, null), new Among("nn", -1, -1, null), new Among("pp", -1, -1, null), new Among("rr", -1, -1, null), new Among("ccs", -1, -1, null), new Among("ss", -1, -1, null), new Among("zzs", -1, -1, null), new Among("tt", -1, -1, null), new Among("vv", -1, -1, null), new Among("ggy", -1, -1, null), new Among("lly", -1, -1, null), new Among("nny", -1, -1, null), new Among("tty", -1, -1, null), new Among("ssz", -1, -1, null), new Among("zz", -1, -1, null) }; private static readonly Among[] a_3 = { new Among("al", -1, 1, null), new Among("el", -1, 2, null) }; private static readonly Among[] a_4 = { new Among("ba", -1, -1, null), new Among("ra", -1, -1, null), new Among("be", -1, -1, null), new Among("re", -1, -1, null), new Among("ig", -1, -1, null), new Among("nak", -1, -1, null), new Among("nek", -1, -1, null), new Among("val", -1, -1, null), new Among("vel", -1, -1, null), new Among("ul", -1, -1, null), new Among("n\u00E1l", -1, -1, null), new Among("n\u00E9l", -1, -1, null), new Among("b\u00F3l", -1, -1, null), new Among("r\u00F3l", -1, -1, null), new Among("t\u00F3l", -1, -1, null), new Among("b\u00F5l", -1, -1, null), new Among("r\u00F5l", -1, -1, null), new Among("t\u00F5l", -1, -1, null), new Among("\u00FCl", -1, -1, null), new Among("n", -1, -1, null), new Among("an", 19, -1, null), new Among("ban", 20, -1, null), new Among("en", 19, -1, null), new Among("ben", 22, -1, null), new Among("k\u00E9ppen", 22, -1, null), new Among("on", 19, -1, null), new Among("\u00F6n", 19, -1, null), new Among("k\u00E9pp", -1, -1, null), new Among("kor", -1, -1, null), new Among("t", -1, -1, null), new Among("at", 29, -1, null), new Among("et", 29, -1, null), new Among("k\u00E9nt", 29, -1, null), new Among("ank\u00E9nt", 32, -1, null), new Among("enk\u00E9nt", 32, -1, null), new Among("onk\u00E9nt", 32, -1, null), new Among("ot", 29, -1, null), new Among("\u00E9rt", 29, -1, null), new Among("\u00F6t", 29, -1, null), new Among("hez", -1, -1, null), new Among("hoz", -1, -1, null), new Among("h\u00F6z", -1, -1, null), new Among("v\u00E1", -1, -1, null), new Among("v\u00E9", -1, -1, null) }; private static readonly Among[] a_5 = { new Among("\u00E1n", -1, 2, null), new Among("\u00E9n", -1, 1, null), new Among("\u00E1nk\u00E9nt", -1, 3, null) }; private static readonly Among[] a_6 = { new Among("stul", -1, 2, null), new Among("astul", 0, 1, null), new Among("\u00E1stul", 0, 3, null), new Among("st\u00FCl", -1, 2, null), new Among("est\u00FCl", 3, 1, null), new Among("\u00E9st\u00FCl", 3, 4, null) }; private static readonly Among[] a_7 = { new Among("\u00E1", -1, 1, null), new Among("\u00E9", -1, 2, null) }; private 
static readonly Among[] a_8 = { new Among("k", -1, 7, null), new Among("ak", 0, 4, null), new Among("ek", 0, 6, null), new Among("ok", 0, 5, null), new Among("\u00E1k", 0, 1, null), new Among("\u00E9k", 0, 2, null), new Among("\u00F6k", 0, 3, null) }; private static readonly Among[] a_9 = { new Among("\u00E9i", -1, 7, null), new Among("\u00E1\u00E9i", 0, 6, null), new Among("\u00E9\u00E9i", 0, 5, null), new Among("\u00E9", -1, 9, null), new Among("k\u00E9", 3, 4, null), new Among("ak\u00E9", 4, 1, null), new Among("ek\u00E9", 4, 1, null), new Among("ok\u00E9", 4, 1, null), new Among("\u00E1k\u00E9", 4, 3, null), new Among("\u00E9k\u00E9", 4, 2, null), new Among("\u00F6k\u00E9", 4, 1, null), new Among("\u00E9\u00E9", 3, 8, null) }; private static readonly Among[] a_10 = { new Among("a", -1, 18, null), new Among("ja", 0, 17, null), new Among("d", -1, 16, null), new Among("ad", 2, 13, null), new Among("ed", 2, 13, null), new Among("od", 2, 13, null), new Among("\u00E1d", 2, 14, null), new Among("\u00E9d", 2, 15, null), new Among("\u00F6d", 2, 13, null), new Among("e", -1, 18, null), new Among("je", 9, 17, null), new Among("nk", -1, 4, null), new Among("unk", 11, 1, null), new Among("\u00E1nk", 11, 2, null), new Among("\u00E9nk", 11, 3, null), new Among("\u00FCnk", 11, 1, null), new Among("uk", -1, 8, null), new Among("juk", 16, 7, null), new Among("\u00E1juk", 17, 5, null), new Among("\u00FCk", -1, 8, null), new Among("j\u00FCk", 19, 7, null), new Among("\u00E9j\u00FCk", 20, 6, null), new Among("m", -1, 12, null), new Among("am", 22, 9, null), new Among("em", 22, 9, null), new Among("om", 22, 9, null), new Among("\u00E1m", 22, 10, null), new Among("\u00E9m", 22, 11, null), new Among("o", -1, 18, null), new Among("\u00E1", -1, 19, null), new Among("\u00E9", -1, 20, null) }; private static readonly Among[] a_11 = { new Among("id", -1, 10, null), new Among("aid", 0, 9, null), new Among("jaid", 1, 6, null), new Among("eid", 0, 9, null), new Among("jeid", 3, 6, null), new Among("\u00E1id", 0, 7, null), new Among("\u00E9id", 0, 8, null), new Among("i", -1, 15, null), new Among("ai", 7, 14, null), new Among("jai", 8, 11, null), new Among("ei", 7, 14, null), new Among("jei", 10, 11, null), new Among("\u00E1i", 7, 12, null), new Among("\u00E9i", 7, 13, null), new Among("itek", -1, 24, null), new Among("eitek", 14, 21, null), new Among("jeitek", 15, 20, null), new Among("\u00E9itek", 14, 23, null), new Among("ik", -1, 29, null), new Among("aik", 18, 26, null), new Among("jaik", 19, 25, null), new Among("eik", 18, 26, null), new Among("jeik", 21, 25, null), new Among("\u00E1ik", 18, 27, null), new Among("\u00E9ik", 18, 28, null), new Among("ink", -1, 20, null), new Among("aink", 25, 17, null), new Among("jaink", 26, 16, null), new Among("eink", 25, 17, null), new Among("jeink", 28, 16, null), new Among("\u00E1ink", 25, 18, null), new Among("\u00E9ink", 25, 19, null), new Among("aitok", -1, 21, null), new Among("jaitok", 32, 20, null), new Among("\u00E1itok", -1, 22, null), new Among("im", -1, 5, null), new Among("aim", 35, 4, null), new Among("jaim", 36, 1, null), new Among("eim", 35, 4, null), new Among("jeim", 38, 1, null), new Among("\u00E1im", 35, 2, null), new Among("\u00E9im", 35, 3, null) }; private static readonly char[] g_v = { (char) 17, (char) 65, (char) 16, (char) 0, (char) 0, (char) 0, (char) 0, (char) 0, (char) 0, (char) 0, (char) 0, (char) 0, (char) 0, (char) 0, (char) 0, (char) 0, (char) 1, (char) 17, (char) 52, (char) 14 }; private int I_p1; private void copy_from(HungarianStemmer 
other) { I_p1 = other.I_p1; base.copy_from(other); } private bool r_mark_regions() { bool subroot = false; int v_1; int v_2; int v_3; // (, line 44 I_p1 = limit; // or, line 51 do { v_1 = cursor; do { // (, line 48 if (!(in_grouping(g_v, 97, 252))) { break; } // goto, line 48 while (true) { v_2 = cursor; do { if (!(out_grouping(g_v, 97, 252))) { break; } cursor = v_2; subroot = true; if (subroot) break; } while (false); if (subroot) { subroot = false; break; } cursor = v_2; if (cursor >= limit) { subroot = true; break; } cursor++; } if (subroot) { subroot = false; break; } // or, line 49 do { v_3 = cursor; do { // among, line 49 if (find_among(a_0, 8) == 0) { break; } subroot = true; if (subroot) break; } while (false); if (subroot) { subroot = false; break; } cursor = v_3; // next, line 49 if (cursor >= limit) { subroot = true; break; } cursor++; } while (false); if (subroot) { subroot = false; break; } // setmark p1, line 50 I_p1 = cursor; subroot = true; if (subroot) break; } while (false); if (subroot) { subroot = false; break; } cursor = v_1; // (, line 53 if (!(out_grouping(g_v, 97, 252))) { return false; } // gopast, line 53 while (true) { do { if (!(in_grouping(g_v, 97, 252))) { break; } subroot = true; if (subroot) break; } while (false); if (subroot) { subroot = false; break; } if (cursor >= limit) { return false; } cursor++; } // setmark p1, line 53 I_p1 = cursor; } while (false); return true; } private bool r_R1() { if (!(I_p1 <= cursor)) { return false; } return true; } private bool r_v_ending() { int among_var; // (, line 60 // [, line 61 ket = cursor; // substring, line 61 among_var = find_among_b(a_1, 2); if (among_var == 0) { return false; } // ], line 61 bra = cursor; // call R1, line 61 if (!r_R1()) { return false; } switch (among_var) { case 0: return false; case 1: // (, line 62 // <-, line 62 slice_from("a"); break; case 2: // (, line 63 // <-, line 63 slice_from("e"); break; } return true; } private bool r_double() { int v_1; // (, line 67 // test, line 68 v_1 = limit - cursor; // among, line 68 if (find_among_b(a_2, 23) == 0) { return false; } cursor = limit - v_1; return true; } private bool r_undouble() { // (, line 72 // next, line 73 if (cursor <= limit_backward) { return false; } cursor--; // [, line 73 ket = cursor; // hop, line 73 { int c = cursor - 1; if (limit_backward > c || c > limit) { return false; } cursor = c; } // ], line 73 bra = cursor; // delete, line 73 slice_del(); return true; } private bool r_instrum() { int among_var; // (, line 76 // [, line 77 ket = cursor; // substring, line 77 among_var = find_among_b(a_3, 2); if (among_var == 0) { return false; } // ], line 77 bra = cursor; // call R1, line 77 if (!r_R1()) { return false; } switch (among_var) { case 0: return false; case 1: // (, line 78 // call double, line 78 if (!r_double()) { return false; } break; case 2: // (, line 79 // call double, line 79 if (!r_double()) { return false; } break; } // delete, line 81 slice_del(); // call undouble, line 82 if (!r_undouble()) { return false; } return true; } private bool r_case() { // (, line 86 // [, line 87 ket = cursor; // substring, line 87 if (find_among_b(a_4, 44) == 0) { return false; } // ], line 87 bra = cursor; // call R1, line 87 if (!r_R1()) { return false; } // delete, line 111 slice_del(); // call v_ending, line 112 if (!r_v_ending()) { return false; } return true; } private bool r_case_special() { int among_var; // (, line 115 // [, line 116 ket = cursor; // substring, line 116 among_var = find_among_b(a_5, 3); if (among_var == 0) { 
return false; } // ], line 116 bra = cursor; // call R1, line 116 if (!r_R1()) { return false; } switch (among_var) { case 0: return false; case 1: // (, line 117 // <-, line 117 slice_from("e"); break; case 2: // (, line 118 // <-, line 118 slice_from("a"); break; case 3: // (, line 119 // <-, line 119 slice_from("a"); break; } return true; } private bool r_case_other() { int among_var; // (, line 123 // [, line 124 ket = cursor; // substring, line 124 among_var = find_among_b(a_6, 6); if (among_var == 0) { return false; } // ], line 124 bra = cursor; // call R1, line 124 if (!r_R1()) { return false; } switch (among_var) { case 0: return false; case 1: // (, line 125 // delete, line 125 slice_del(); break; case 2: // (, line 126 // delete, line 126 slice_del(); break; case 3: // (, line 127 // <-, line 127 slice_from("a"); break; case 4: // (, line 128 // <-, line 128 slice_from("e"); break; } return true; } private bool r_factive() { int among_var; // (, line 132 // [, line 133 ket = cursor; // substring, line 133 among_var = find_among_b(a_7, 2); if (among_var == 0) { return false; } // ], line 133 bra = cursor; // call R1, line 133 if (!r_R1()) { return false; } switch (among_var) { case 0: return false; case 1: // (, line 134 // call double, line 134 if (!r_double()) { return false; } break; case 2: // (, line 135 // call double, line 135 if (!r_double()) { return false; } break; } // delete, line 137 slice_del(); // call undouble, line 138 if (!r_undouble()) { return false; } return true; } private bool r_plural() { int among_var; // (, line 141 // [, line 142 ket = cursor; // substring, line 142 among_var = find_among_b(a_8, 7); if (among_var == 0) { return false; } // ], line 142 bra = cursor; // call R1, line 142 if (!r_R1()) { return false; } switch (among_var) { case 0: return false; case 1: // (, line 143 // <-, line 143 slice_from("a"); break; case 2: // (, line 144 // <-, line 144 slice_from("e"); break; case 3: // (, line 145 // delete, line 145 slice_del(); break; case 4: // (, line 146 // delete, line 146 slice_del(); break; case 5: // (, line 147 // delete, line 147 slice_del(); break; case 6: // (, line 148 // delete, line 148 slice_del(); break; case 7: // (, line 149 // delete, line 149 slice_del(); break; } return true; } private bool r_owned() { int among_var; // (, line 153 // [, line 154 ket = cursor; // substring, line 154 among_var = find_among_b(a_9, 12); if (among_var == 0) { return false; } // ], line 154 bra = cursor; // call R1, line 154 if (!r_R1()) { return false; } switch (among_var) { case 0: return false; case 1: // (, line 155 // delete, line 155 slice_del(); break; case 2: // (, line 156 // <-, line 156 slice_from("e"); break; case 3: // (, line 157 // <-, line 157 slice_from("a"); break; case 4: // (, line 158 // delete, line 158 slice_del(); break; case 5: // (, line 159 // <-, line 159 slice_from("e"); break; case 6: // (, line 160 // <-, line 160 slice_from("a"); break; case 7: // (, line 161 // delete, line 161 slice_del(); break; case 8: // (, line 162 // <-, line 162 slice_from("e"); break; case 9: // (, line 163 // delete, line 163 slice_del(); break; } return true; } private bool r_sing_owner() { int among_var; // (, line 167 // [, line 168 ket = cursor; // substring, line 168 among_var = find_among_b(a_10, 31); if (among_var == 0) { return false; } // ], line 168 bra = cursor; // call R1, line 168 if (!r_R1()) { return false; } switch (among_var) { case 0: return false; case 1: // (, line 169 // delete, line 169 slice_del(); break; case 2: 
// (, line 170 // <-, line 170 slice_from("a"); break; case 3: // (, line 171 // <-, line 171 slice_from("e"); break; case 4: // (, line 172 // delete, line 172 slice_del(); break; case 5: // (, line 173 // <-, line 173 slice_from("a"); break; case 6: // (, line 174 // <-, line 174 slice_from("e"); break; case 7: // (, line 175 // delete, line 175 slice_del(); break; case 8: // (, line 176 // delete, line 176 slice_del(); break; case 9: // (, line 177 // delete, line 177 slice_del(); break; case 10: // (, line 178 // <-, line 178 slice_from("a"); break; case 11: // (, line 179 // <-, line 179 slice_from("e"); break; case 12: // (, line 180 // delete, line 180 slice_del(); break; case 13: // (, line 181 // delete, line 181 slice_del(); break; case 14: // (, line 182 // <-, line 182 slice_from("a"); break; case 15: // (, line 183 // <-, line 183 slice_from("e"); break; case 16: // (, line 184 // delete, line 184 slice_del(); break; case 17: // (, line 185 // delete, line 185 slice_del(); break; case 18: // (, line 186 // delete, line 186 slice_del(); break; case 19: // (, line 187 // <-, line 187 slice_from("a"); break; case 20: // (, line 188 // <-, line 188 slice_from("e"); break; } return true; } private bool r_plur_owner() { int among_var; // (, line 192 // [, line 193 ket = cursor; // substring, line 193 among_var = find_among_b(a_11, 42); if (among_var == 0) { return false; } // ], line 193 bra = cursor; // call R1, line 193 if (!r_R1()) { return false; } switch (among_var) { case 0: return false; case 1: // (, line 194 // delete, line 194 slice_del(); break; case 2: // (, line 195 // <-, line 195 slice_from("a"); break; case 3: // (, line 196 // <-, line 196 slice_from("e"); break; case 4: // (, line 197 // delete, line 197 slice_del(); break; case 5: // (, line 198 // delete, line 198 slice_del(); break; case 6: // (, line 199 // delete, line 199 slice_del(); break; case 7: // (, line 200 // <-, line 200 slice_from("a"); break; case 8: // (, line 201 // <-, line 201 slice_from("e"); break; case 9: // (, line 202 // delete, line 202 slice_del(); break; case 10: // (, line 203 // delete, line 203 slice_del(); break; case 11: // (, line 204 // delete, line 204 slice_del(); break; case 12: // (, line 205 // <-, line 205 slice_from("a"); break; case 13: // (, line 206 // <-, line 206 slice_from("e"); break; case 14: // (, line 207 // delete, line 207 slice_del(); break; case 15: // (, line 208 // delete, line 208 slice_del(); break; case 16: // (, line 209 // delete, line 209 slice_del(); break; case 17: // (, line 210 // delete, line 210 slice_del(); break; case 18: // (, line 211 // <-, line 211 slice_from("a"); break; case 19: // (, line 212 // <-, line 212 slice_from("e"); break; case 20: // (, line 214 // delete, line 214 slice_del(); break; case 21: // (, line 215 // delete, line 215 slice_del(); break; case 22: // (, line 216 // <-, line 216 slice_from("a"); break; case 23: // (, line 217 // <-, line 217 slice_from("e"); break; case 24: // (, line 218 // delete, line 218 slice_del(); break; case 25: // (, line 219 // delete, line 219 slice_del(); break; case 26: // (, line 220 // delete, line 220 slice_del(); break; case 27: // (, line 221 // <-, line 221 slice_from("a"); break; case 28: // (, line 222 // <-, line 222 slice_from("e"); break; case 29: // (, line 223 // delete, line 223 slice_del(); break; } return true; } public bool CanStem() { int v_1; int v_2; int v_3; int v_4; int v_5; int v_6; int v_7; int v_8; int v_9; int v_10; // (, line 228 // do, line 229 v_1 = cursor; do 
{ // call mark_regions, line 229 if (!r_mark_regions()) { break; } } while (false); cursor = v_1; // backwards, line 230 limit_backward = cursor; cursor = limit; // (, line 230 // do, line 231 v_2 = limit - cursor; do { // call instrum, line 231 if (!r_instrum()) { break; } } while (false); cursor = limit - v_2; // do, line 232 v_3 = limit - cursor; do { // call case, line 232 if (!r_case()) { // break lab2; break; } } while (false); cursor = limit - v_3; // do, line 233 v_4 = limit - cursor; do { // call case_special, line 233 if (!r_case_special()) { // break lab3; break; } } while (false); cursor = limit - v_4; // do, line 234 v_5 = limit - cursor; do { // call case_other, line 234 if (!r_case_other()) { break; } } while (false); cursor = limit - v_5; // do, line 235 v_6 = limit - cursor; do { // call factive, line 235 if (!r_factive()) { break; } } while (false); cursor = limit - v_6; // do, line 236 v_7 = limit - cursor; do { // call owned, line 236 if (!r_owned()) { break; } } while (false); cursor = limit - v_7; // do, line 237 v_8 = limit - cursor; do { // call sing_owner, line 237 if (!r_sing_owner()) { break; } } while (false); cursor = limit - v_8; // do, line 238 v_9 = limit - cursor; do { // call plur_owner, line 238 if (!r_plur_owner()) { break; } } while (false); cursor = limit - v_9; // do, line 239 v_10 = limit - cursor; do { // call plural, line 239 if (!r_plural()) { break; } } while (false); cursor = limit - v_10; cursor = limit_backward; return true; } } }
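// A minimal usage sketch for the generated stemmer above: its public surface is just the
// Instance singleton and Stem(word). The sample words are arbitrary Hungarian forms chosen
// only to illustrate the call shape.
namespace SharpNL.Stemmer.Snowball.Sketches
{
    using System;
    using SharpNL.Stemmer.Snowball;

    static class HungarianStemmerSketch
    {
        public static void Run()
        {
            var stemmer = HungarianStemmer.Instance;
            foreach (var word in new[] { "almák", "házakban", "könyvei" })
                Console.WriteLine($"{word} -> {stemmer.Stem(word)}");
        }
    }
}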
namespace VersionOne.ServiceHost.ConfigurationTool.UI.Controls { partial class V1SettingsPageControl { /// <summary> /// Required designer variable. /// </summary> private System.ComponentModel.IContainer components = null; /// <summary> /// Clean up any resources being used. /// </summary> /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param> protected override void Dispose(bool disposing) { if (disposing && (components != null)) { components.Dispose(); } base.Dispose(disposing); } #region Component Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. /// </summary> private void InitializeComponent() { this.lblV1ConnectionValidationResult = new System.Windows.Forms.Label(); this.btnVerifyV1Connection = new System.Windows.Forms.Button(); this.txtPassword = new System.Windows.Forms.TextBox(); this.lblPassword = new System.Windows.Forms.Label(); this.txtUsername = new System.Windows.Forms.TextBox(); this.lblUsername = new System.Windows.Forms.Label(); this.txtServerUrl = new System.Windows.Forms.TextBox(); this.lblServerUrl = new System.Windows.Forms.Label(); this.chkUseProxy = new System.Windows.Forms.CheckBox(); this.lblProxyUri = new System.Windows.Forms.Label(); this.lblProxyUserName = new System.Windows.Forms.Label(); this.lblProxyPassword = new System.Windows.Forms.Label(); this.txtProxyUri = new System.Windows.Forms.TextBox(); this.txtProxyUsername = new System.Windows.Forms.TextBox(); this.txtProxyPassword = new System.Windows.Forms.TextBox(); this.lblProxyDomain = new System.Windows.Forms.Label(); this.txtProxyDomain = new System.Windows.Forms.TextBox(); this.groupBox1 = new System.Windows.Forms.GroupBox(); this.rbtnIntegratedWithCredentialsAuth = new System.Windows.Forms.RadioButton(); this.rbtnIntegratedAuth = new System.Windows.Forms.RadioButton(); this.rbtnBasicAuth = new System.Windows.Forms.RadioButton(); this.rbtnAccessTokenAuth = new System.Windows.Forms.RadioButton(); this.txtAccessToken = new System.Windows.Forms.TextBox(); this.lblAccessToken = new System.Windows.Forms.Label(); this.groupBox1.SuspendLayout(); this.SuspendLayout(); // // lblV1ConnectionValidationResult // this.lblV1ConnectionValidationResult.AutoSize = true; this.lblV1ConnectionValidationResult.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Italic, System.Drawing.GraphicsUnit.Point, ((byte)(204))); this.lblV1ConnectionValidationResult.Location = new System.Drawing.Point(17, 452); this.lblV1ConnectionValidationResult.Name = "lblV1ConnectionValidationResult"; this.lblV1ConnectionValidationResult.Size = new System.Drawing.Size(153, 13); this.lblV1ConnectionValidationResult.TabIndex = 16; this.lblV1ConnectionValidationResult.Text = "V1 Connection validation result"; this.lblV1ConnectionValidationResult.Visible = false; // // btnVerifyV1Connection // this.btnVerifyV1Connection.Location = new System.Drawing.Point(394, 438); this.btnVerifyV1Connection.Name = "btnVerifyV1Connection"; this.btnVerifyV1Connection.Size = new System.Drawing.Size(87, 27); this.btnVerifyV1Connection.TabIndex = 17; this.btnVerifyV1Connection.Text = "Validate"; this.btnVerifyV1Connection.UseVisualStyleBackColor = true; // // txtPassword // this.txtPassword.Location = new System.Drawing.Point(106, 215); this.txtPassword.Name = "txtPassword"; this.txtPassword.Size = new System.Drawing.Size(375, 20); this.txtPassword.TabIndex = 6; // // lblPassword // 
this.lblPassword.AutoSize = true; this.lblPassword.Location = new System.Drawing.Point(17, 218); this.lblPassword.Name = "lblPassword"; this.lblPassword.Size = new System.Drawing.Size(53, 13); this.lblPassword.TabIndex = 5; this.lblPassword.Text = "Password"; // // txtUsername // this.txtUsername.Location = new System.Drawing.Point(106, 175); this.txtUsername.Name = "txtUsername"; this.txtUsername.Size = new System.Drawing.Size(375, 20); this.txtUsername.TabIndex = 4; // // lblUsername // this.lblUsername.AutoSize = true; this.lblUsername.Location = new System.Drawing.Point(17, 178); this.lblUsername.Name = "lblUsername"; this.lblUsername.Size = new System.Drawing.Size(55, 13); this.lblUsername.TabIndex = 3; this.lblUsername.Text = "Username"; // // txtServerUrl // this.txtServerUrl.Location = new System.Drawing.Point(106, 135); this.txtServerUrl.Name = "txtServerUrl"; this.txtServerUrl.Size = new System.Drawing.Size(375, 20); this.txtServerUrl.TabIndex = 2; // // lblServerUrl // this.lblServerUrl.AutoSize = true; this.lblServerUrl.Location = new System.Drawing.Point(17, 138); this.lblServerUrl.Name = "lblServerUrl"; this.lblServerUrl.Size = new System.Drawing.Size(63, 13); this.lblServerUrl.TabIndex = 1; this.lblServerUrl.Text = "Server URL"; // // chkUseProxy // this.chkUseProxy.AutoSize = true; this.chkUseProxy.CheckAlign = System.Drawing.ContentAlignment.MiddleRight; this.chkUseProxy.Location = new System.Drawing.Point(332, 255); this.chkUseProxy.Name = "chkUseProxy"; this.chkUseProxy.Size = new System.Drawing.Size(149, 17); this.chkUseProxy.TabIndex = 7; this.chkUseProxy.Text = "Use Proxy For Connection"; this.chkUseProxy.UseVisualStyleBackColor = true; // // lblProxyUri // this.lblProxyUri.AutoSize = true; this.lblProxyUri.Location = new System.Drawing.Point(17, 281); this.lblProxyUri.Name = "lblProxyUri"; this.lblProxyUri.Size = new System.Drawing.Size(58, 13); this.lblProxyUri.TabIndex = 8; this.lblProxyUri.Text = "Proxy URL"; // // lblProxyUserName // this.lblProxyUserName.AutoSize = true; this.lblProxyUserName.Location = new System.Drawing.Point(17, 319); this.lblProxyUserName.Name = "lblProxyUserName"; this.lblProxyUserName.Size = new System.Drawing.Size(84, 13); this.lblProxyUserName.TabIndex = 10; this.lblProxyUserName.Text = "Proxy Username"; // // lblProxyPassword // this.lblProxyPassword.AutoSize = true; this.lblProxyPassword.Location = new System.Drawing.Point(17, 355); this.lblProxyPassword.Name = "lblProxyPassword"; this.lblProxyPassword.Size = new System.Drawing.Size(82, 13); this.lblProxyPassword.TabIndex = 12; this.lblProxyPassword.Text = "Proxy Password"; // // txtProxyUri // this.txtProxyUri.Location = new System.Drawing.Point(106, 278); this.txtProxyUri.Name = "txtProxyUri"; this.txtProxyUri.Size = new System.Drawing.Size(375, 20); this.txtProxyUri.TabIndex = 9; // // txtProxyUsername // this.txtProxyUsername.Location = new System.Drawing.Point(106, 316); this.txtProxyUsername.Name = "txtProxyUsername"; this.txtProxyUsername.Size = new System.Drawing.Size(375, 20); this.txtProxyUsername.TabIndex = 11; // // txtProxyPassword // this.txtProxyPassword.Location = new System.Drawing.Point(106, 352); this.txtProxyPassword.Name = "txtProxyPassword"; this.txtProxyPassword.Size = new System.Drawing.Size(375, 20); this.txtProxyPassword.TabIndex = 13; // // lblProxyDomain // this.lblProxyDomain.AutoSize = true; this.lblProxyDomain.Location = new System.Drawing.Point(17, 390); this.lblProxyDomain.Name = "lblProxyDomain"; this.lblProxyDomain.Size = new System.Drawing.Size(72, 
13); this.lblProxyDomain.TabIndex = 14; this.lblProxyDomain.Text = "Proxy Domain"; // // txtProxyDomain // this.txtProxyDomain.Location = new System.Drawing.Point(106, 387); this.txtProxyDomain.Name = "txtProxyDomain"; this.txtProxyDomain.Size = new System.Drawing.Size(375, 20); this.txtProxyDomain.TabIndex = 15; // // groupBox1 // this.groupBox1.Controls.Add(this.rbtnIntegratedWithCredentialsAuth); this.groupBox1.Controls.Add(this.rbtnIntegratedAuth); this.groupBox1.Controls.Add(this.rbtnBasicAuth); this.groupBox1.Controls.Add(this.rbtnAccessTokenAuth); this.groupBox1.Location = new System.Drawing.Point(20, 18); this.groupBox1.Name = "groupBox1"; this.groupBox1.Size = new System.Drawing.Size(461, 54); this.groupBox1.TabIndex = 18; this.groupBox1.TabStop = false; this.groupBox1.Text = "Authentication"; // // rbtnIntegratedWithCredentialsAuth // this.rbtnIntegratedWithCredentialsAuth.AutoSize = true; this.rbtnIntegratedWithCredentialsAuth.Location = new System.Drawing.Point(321, 19); this.rbtnIntegratedWithCredentialsAuth.Name = "rbtnIntegratedWithCredentialsAuth"; this.rbtnIntegratedWithCredentialsAuth.Size = new System.Drawing.Size(132, 17); this.rbtnIntegratedWithCredentialsAuth.TabIndex = 3; this.rbtnIntegratedWithCredentialsAuth.TabStop = true; this.rbtnIntegratedWithCredentialsAuth.Text = "NTLM with Credentials"; this.rbtnIntegratedWithCredentialsAuth.UseVisualStyleBackColor = true; // // rbtnIntegratedAuth // this.rbtnIntegratedAuth.AutoSize = true; this.rbtnIntegratedAuth.Location = new System.Drawing.Point(231, 19); this.rbtnIntegratedAuth.Name = "rbtnIntegratedAuth"; this.rbtnIntegratedAuth.Size = new System.Drawing.Size(55, 17); this.rbtnIntegratedAuth.TabIndex = 2; this.rbtnIntegratedAuth.TabStop = true; this.rbtnIntegratedAuth.Text = "NTLM"; this.rbtnIntegratedAuth.UseVisualStyleBackColor = true; // // rbtnBasicAuth // this.rbtnBasicAuth.AutoSize = true; this.rbtnBasicAuth.Location = new System.Drawing.Point(140, 19); this.rbtnBasicAuth.Name = "rbtnBasicAuth"; this.rbtnBasicAuth.Size = new System.Drawing.Size(51, 17); this.rbtnBasicAuth.TabIndex = 1; this.rbtnBasicAuth.TabStop = true; this.rbtnBasicAuth.Text = "Basic"; this.rbtnBasicAuth.UseVisualStyleBackColor = true; // // rbtnAccessTokenAuth // this.rbtnAccessTokenAuth.AutoSize = true; this.rbtnAccessTokenAuth.Location = new System.Drawing.Point(18, 19); this.rbtnAccessTokenAuth.Name = "rbtnAccessTokenAuth"; this.rbtnAccessTokenAuth.Size = new System.Drawing.Size(94, 17); this.rbtnAccessTokenAuth.TabIndex = 0; this.rbtnAccessTokenAuth.TabStop = true; this.rbtnAccessTokenAuth.Text = "Access Token"; this.rbtnAccessTokenAuth.UseVisualStyleBackColor = true; // // txtAccessToken // this.txtAccessToken.Location = new System.Drawing.Point(106, 98); this.txtAccessToken.Name = "txtAccessToken"; this.txtAccessToken.Size = new System.Drawing.Size(375, 20); this.txtAccessToken.TabIndex = 20; // // lblAccessToken // this.lblAccessToken.AutoSize = true; this.lblAccessToken.Location = new System.Drawing.Point(17, 101); this.lblAccessToken.Name = "lblAccessToken"; this.lblAccessToken.Size = new System.Drawing.Size(76, 13); this.lblAccessToken.TabIndex = 19; this.lblAccessToken.Text = "Access Token"; // // V1SettingsPageControl // this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F); this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; this.Controls.Add(this.txtAccessToken); this.Controls.Add(this.lblAccessToken); this.Controls.Add(this.groupBox1); this.Controls.Add(this.txtProxyDomain); 
this.Controls.Add(this.lblProxyDomain); this.Controls.Add(this.txtProxyPassword); this.Controls.Add(this.txtProxyUsername); this.Controls.Add(this.txtProxyUri); this.Controls.Add(this.lblProxyPassword); this.Controls.Add(this.lblProxyUserName); this.Controls.Add(this.lblProxyUri); this.Controls.Add(this.chkUseProxy); this.Controls.Add(this.lblV1ConnectionValidationResult); this.Controls.Add(this.btnVerifyV1Connection); this.Controls.Add(this.txtPassword); this.Controls.Add(this.lblPassword); this.Controls.Add(this.txtUsername); this.Controls.Add(this.lblUsername); this.Controls.Add(this.txtServerUrl); this.Controls.Add(this.lblServerUrl); this.Name = "V1SettingsPageControl"; this.Size = new System.Drawing.Size(500, 490); this.groupBox1.ResumeLayout(false); this.groupBox1.PerformLayout(); this.ResumeLayout(false); this.PerformLayout(); } #endregion private System.Windows.Forms.Label lblV1ConnectionValidationResult; private System.Windows.Forms.Button btnVerifyV1Connection; private System.Windows.Forms.TextBox txtPassword; private System.Windows.Forms.Label lblPassword; private System.Windows.Forms.TextBox txtUsername; private System.Windows.Forms.Label lblUsername; private System.Windows.Forms.TextBox txtServerUrl; private System.Windows.Forms.Label lblServerUrl; private System.Windows.Forms.CheckBox chkUseProxy; private System.Windows.Forms.Label lblProxyUri; private System.Windows.Forms.Label lblProxyUserName; private System.Windows.Forms.Label lblProxyPassword; private System.Windows.Forms.TextBox txtProxyUri; private System.Windows.Forms.TextBox txtProxyUsername; private System.Windows.Forms.TextBox txtProxyPassword; private System.Windows.Forms.Label lblProxyDomain; private System.Windows.Forms.TextBox txtProxyDomain; private System.Windows.Forms.GroupBox groupBox1; private System.Windows.Forms.RadioButton rbtnIntegratedWithCredentialsAuth; private System.Windows.Forms.RadioButton rbtnIntegratedAuth; private System.Windows.Forms.RadioButton rbtnBasicAuth; private System.Windows.Forms.RadioButton rbtnAccessTokenAuth; private System.Windows.Forms.TextBox txtAccessToken; private System.Windows.Forms.Label lblAccessToken; } }
using UnityEngine; using System.Collections; using System.Collections.Generic; using UnityEditor; using Poly2Tri; //using Poly2Tri.Triangulation; //using Poly2Tri.Triangulation.Delaunay; //using Poly2Tri.Triangulation.Delaunay.Sweep; //using Poly2Tri.Triangulation.Polygon; using System.Linq; public class Puppet2D_CreatePolygonFromSprite : Editor { private GameObject MeshedSprite; private MeshFilter mf; private MeshRenderer mr; private Mesh mesh; public Sprite mysprite; private Vector3[] finalVertices = {}; private int[] finalTriangles = {}; private Vector2[] finalUvs = {}; private Vector3[] finalNormals = {}; List<Vector3> results = new List<Vector3>(); List<int> resultsTriIndexes = new List<int>(); List<int> resultsTriIndexesReversed = new List<int>(); List<Vector2> uvs = new List<Vector2>(); List<Vector3> normals = new List<Vector3>(); //public bool ReverseNormals; public GameObject Run (Transform transform,bool ReverseNormals, int triangleIndex) { PolygonCollider2D polygonCollider = transform.GetComponent<PolygonCollider2D>(); //for(int path =0;path<polygonCollider.pathCount;path++) //{ int path =0; bool overwrite = false; MeshedSprite = new GameObject(); Undo.RegisterCreatedObjectUndo (MeshedSprite, "Created Mesh"); mf = MeshedSprite.AddComponent<MeshFilter>(); mr = MeshedSprite.AddComponent<MeshRenderer>(); mesh = new Mesh(); if(AssetDatabase.LoadAssetAtPath(Puppet2D_Editor._puppet2DPath+"/Models/"+transform.name+"_MESH.asset",typeof(Mesh))) { if(EditorUtility.DisplayDialog("Overwrite Asset?","Do you want to overwrite the current Mesh & Material?","Yes, Overwrite","No, Create New Mesh & Material")) { //mf.mesh = AssetDatabase.LoadAssetAtPath(Puppet2D_Editor._puppet2DPath+"/Models/"+transform.name+"_MESH.asset",typeof(Mesh))as Mesh; string meshPath = (Puppet2D_Editor._puppet2DPath+"/Models/"+transform.name+"_MESH.asset"); AssetDatabase.CreateAsset(mesh,meshPath); overwrite = true; } else { string meshPath = AssetDatabase.GenerateUniqueAssetPath(Puppet2D_Editor._puppet2DPath+"/Models/"+transform.name+"_MESH.asset"); AssetDatabase.CreateAsset(mesh,meshPath); } } else { string meshPath = AssetDatabase.GenerateUniqueAssetPath(Puppet2D_Editor._puppet2DPath+"/Models/"+transform.name+"_MESH.asset"); AssetDatabase.CreateAsset(mesh,meshPath); } Vector2[] vertsToCopy = polygonCollider.GetPath(path); CreateMesh(vertsToCopy, transform,triangleIndex ); mesh.vertices = finalVertices; mesh.uv = finalUvs; mesh.normals = finalNormals; mesh.triangles = finalTriangles; mesh.RecalculateBounds(); mesh = calculateMeshTangents (mesh); mf.mesh = mesh; results.Clear(); resultsTriIndexes.Clear(); resultsTriIndexesReversed.Clear(); uvs.Clear(); normals.Clear(); if(overwrite) { mr.material = AssetDatabase.LoadAssetAtPath(Puppet2D_Editor._puppet2DPath+"/Models/Materials/"+transform.name+"_MAT.mat",typeof(Material)) as Material; } else { Material newMat = new Material(Shader.Find("Unlit/Transparent")); string materialPath = AssetDatabase.GenerateUniqueAssetPath(Puppet2D_Editor._puppet2DPath+"/Models/Materials/"+transform.name+"_MAT.mat"); AssetDatabase.CreateAsset(newMat, materialPath); mr.material = newMat; } return MeshedSprite; } public void CreateMesh(Vector2[] vertsToCopy, Transform transform,int triangleIndex) { List<Vector3> resultsLocal = new List<Vector3>(); List<int> resultsTriIndexesLocal = new List<int>(); List<int> resultsTriIndexesReversedLocal = new List<int>(); List<Vector2> uvsLocal = new List<Vector2>(); List<Vector3> normalsLocal = new List<Vector3>(); Sprite spr = 
transform.GetComponent<SpriteRenderer>().sprite; Rect rec = spr.rect; Vector3 bound = transform.GetComponent<Renderer>().bounds.max- transform.GetComponent<Renderer>().bounds.min ; TextureImporter textureImporter = AssetImporter.GetAtPath(AssetDatabase.GetAssetPath(spr)) as TextureImporter; List<PolygonPoint> p2 = new List<PolygonPoint>(); if (triangleIndex > 0) { vertsToCopy = CreateSubVertPoints (spr.bounds,vertsToCopy.ToList(), triangleIndex).ToArray(); } int i = 0; for (i = 0; i < vertsToCopy.Count(); i ++) { p2.Add(new PolygonPoint(vertsToCopy [i].x, vertsToCopy [i].y)); } Polygon _polygon = new Polygon(p2); if (triangleIndex > 0) { List<TriangulationPoint> triPoints = GenerateGridPoints (spr.bounds, triangleIndex, _polygon); _polygon.AddSteinerPoints (triPoints); } P2T.Triangulate(_polygon); int idx = 0; foreach (DelaunayTriangle triangle in _polygon.Triangles) { Vector3 v = new Vector3(); foreach (TriangulationPoint p in triangle.Points) { v = new Vector3((float)p.X, (float)p.Y,0); if(!resultsLocal.Contains(v)) { resultsLocal.Add(v); resultsTriIndexesLocal.Add(idx); Vector2 newUv = new Vector2((v.x/bound.x) + 0.5f, (v.y /bound.y) + 0.5f); newUv.x *= rec.width/ spr.texture.width; newUv.y *= rec.height/ spr.texture.height; newUv.x += (rec.x)/ spr.texture.width; newUv.y += (rec.y) / spr.texture.height; SpriteMetaData[] smdArray = textureImporter.spritesheet; Vector2 pivot = new Vector2(.0f,.0f);; for (int k = 0; k < smdArray.Length; k++) { if (smdArray[k].name == spr.name) { switch(smdArray[k].alignment) { case(0): smdArray[k].pivot = Vector2.zero; break; case(1): smdArray[k].pivot = new Vector2(0f,1f) -new Vector2(.5f,.5f); break; case(2): smdArray[k].pivot = new Vector2(0.5f,1f) -new Vector2(.5f,.5f); break; case(3): smdArray[k].pivot = new Vector2(1f,1f) -new Vector2(.5f,.5f); break; case(4): smdArray[k].pivot = new Vector2(0f,.5f) -new Vector2(.5f,.5f); break; case(5): smdArray[k].pivot = new Vector2(1f,.5f) -new Vector2(.5f,.5f); break; case(6): smdArray[k].pivot = new Vector2(0f,0f) -new Vector2(.5f,.5f); break; case(7): smdArray[k].pivot = new Vector2(0.5f,0f) -new Vector2(.5f,.5f); break; case(8): smdArray[k].pivot = new Vector2(1f,0f) -new Vector2(.5f,.5f); break; case(9): smdArray[k].pivot -= new Vector2(.5f,.5f); break; } pivot = smdArray[k].pivot ; } } if(textureImporter.spriteImportMode == SpriteImportMode.Single) pivot = textureImporter.spritePivot-new Vector2(.5f,.5f); newUv.x += ((pivot.x)*rec.width)/ spr.texture.width; newUv.y += ((pivot.y)*rec.height)/ spr.texture.height; uvsLocal.Add(newUv); normalsLocal.Add(new Vector3(0,0,-1)); idx++; } else { resultsTriIndexesLocal.Add(resultsLocal.LastIndexOf(v)); } } } for (int j = resultsTriIndexesLocal.Count-1; j >=0; j--) { resultsTriIndexesReversedLocal.Add(resultsTriIndexesLocal[j]); } results.AddRange(resultsLocal); resultsTriIndexes.AddRange(resultsTriIndexesLocal); resultsTriIndexesReversed.AddRange(resultsTriIndexesReversedLocal); uvs.AddRange(uvsLocal); normals.AddRange(normalsLocal); resultsLocal.Clear(); resultsTriIndexesLocal.Clear(); resultsTriIndexesReversedLocal.Clear(); uvsLocal.Clear(); normalsLocal.Clear(); finalVertices = results.ToArray(); finalNormals = normals.ToArray(); finalUvs= uvs.ToArray(); finalTriangles = resultsTriIndexesReversed.ToArray(); } public List<Vector2> CreateSubVertPoints(Bounds bounds, List<Vector2> vertsToCopy, float subdivLevel) { List<Vector2> returnList = new List<Vector2> (); float numberDivisions = 6; float width = bounds.max.x - bounds.min.x; float subdivWidth = width / 
(subdivLevel*numberDivisions); vertsToCopy.Add (vertsToCopy [0]); returnList.Add (vertsToCopy [0]); for (int i = 1; i < vertsToCopy.Count; i++) { float distanceBetweenVerts = Vector2.Distance (vertsToCopy [i], vertsToCopy [i - 1]); int numberOfNewVerts =Mathf.RoundToInt(distanceBetweenVerts / subdivWidth); // add new verts if(numberOfNewVerts>=1) { for (int j=1; j<numberOfNewVerts; j++) { Vector2 newLengthVector = (vertsToCopy [i] - vertsToCopy [i - 1]) / numberOfNewVerts; Vector2 vert = vertsToCopy [i-1] + newLengthVector*j ; returnList.Add (vert); } } if(i<vertsToCopy.Count-1) returnList.Add (vertsToCopy [i]); } return returnList; } public List<TriangulationPoint> GenerateGridPoints(Bounds bounds, float subdivLevel, Polygon _polygon) { List<TriangulationPoint> GridPoints = new List<TriangulationPoint> (); float numberDivisions = 6; float width = bounds.max.x - bounds.min.x; float height = bounds.max.y - bounds.min.y; float subdivWidth = width / (subdivLevel*numberDivisions); float subdivHeight = height / ((subdivLevel*numberDivisions) ); float averagedLength = (subdivWidth + subdivHeight) / 2; float widthHeight = (width + height) / 2; for(int i=1;i<(subdivLevel*numberDivisions/widthHeight)*width;i++) { for(int j=1;j<(subdivLevel*numberDivisions/widthHeight)*height;j++) { float xPos = (i*averagedLength) + bounds.min.x; float yPos = (j*averagedLength) + bounds.min.y; TriangulationPoint t = new TriangulationPoint (xPos, yPos); if(_polygon.IsPointInside(t)) GridPoints.Add(t); } } return GridPoints; } public GameObject MakeFromVerts (bool ReverseNormals ,Vector3[] vertsToCopy, List<int> pathSplitIds, GameObject FFDGameObject) { bool overwrite = false; MeshedSprite =new GameObject(); Undo.RegisterCreatedObjectUndo (MeshedSprite, "Created Mesh"); mf = MeshedSprite.AddComponent<MeshFilter>(); mr = MeshedSprite.AddComponent<MeshRenderer>(); mesh = new Mesh(); if(AssetDatabase.LoadAssetAtPath(Puppet2D_Editor._puppet2DPath+"/Models/"+FFDGameObject.transform.name+"_MESH.asset",typeof(Mesh))) { if(EditorUtility.DisplayDialog("Overwrite Asset?","Do you want to overwrite the current Mesh & Material?","Yes, Overwrite","No, Create New Mesh & Material")) { //mf.mesh = AssetDatabase.LoadAssetAtPath(Puppet2D_Editor._puppet2DPath+"/Models/"+transform.name+"_MESH.asset",typeof(Mesh))as Mesh; string meshPath = (Puppet2D_Editor._puppet2DPath+"/Models/"+FFDGameObject.transform.name+"_MESH.asset"); AssetDatabase.CreateAsset(mesh,meshPath); overwrite = true; } else { string meshPath = AssetDatabase.GenerateUniqueAssetPath(Puppet2D_Editor._puppet2DPath+"/Models/"+FFDGameObject.transform.name+"_MESH.asset"); AssetDatabase.CreateAsset(mesh,meshPath); } } else { string meshPath = AssetDatabase.GenerateUniqueAssetPath(Puppet2D_Editor._puppet2DPath+"/Models/"+FFDGameObject.transform.name+"_MESH.asset"); AssetDatabase.CreateAsset(mesh,meshPath); } mesh = CreateMeshFromVerts(vertsToCopy, mesh, pathSplitIds, FFDGameObject.transform); mf.mesh = mesh; results.Clear(); resultsTriIndexes.Clear(); resultsTriIndexesReversed.Clear(); uvs.Clear(); normals.Clear(); if(overwrite) { mr.material = AssetDatabase.LoadAssetAtPath(Puppet2D_Editor._puppet2DPath+"/Models/Materials/"+FFDGameObject.transform.name+"_MAT.mat",typeof(Material)) as Material; } else { Material newMat = new Material(Shader.Find("Unlit/Transparent")); string materialPath = AssetDatabase.GenerateUniqueAssetPath(Puppet2D_Editor._puppet2DPath+"/Models/Materials/"+FFDGameObject.transform.name+"_MAT.mat"); AssetDatabase.CreateAsset(newMat, materialPath); mr.material = 
newMat; } return MeshedSprite; } public Mesh CreateMeshFromVerts(Vector3[] vertsToCopy, Mesh mesh, List<int> pathSplitIds, Transform SpriteGO = null) { List<Vector3> resultsLocal = new List<Vector3>(); List<int> resultsTriIndexesLocal = new List<int>(); List<int> resultsTriIndexesReversedLocal = new List<int>(); List<Vector2> uvsLocal = new List<Vector2>(); List<Vector3> normalsLocal = new List<Vector3>(); Sprite spr = new Sprite(); Rect rec = new Rect(); Vector3 bound = Vector3.zero; TextureImporter textureImporter = new TextureImporter(); if(SpriteGO !=null && SpriteGO.GetComponent<SpriteRenderer>() && SpriteGO.GetComponent<SpriteRenderer>().sprite) { spr = SpriteGO.GetComponent<SpriteRenderer>().sprite; rec = spr.rect; bound = SpriteGO.GetComponent<Renderer>().bounds.max- SpriteGO.GetComponent<Renderer>().bounds.min ; textureImporter = AssetImporter.GetAtPath(AssetDatabase.GetAssetPath(spr)) as TextureImporter; } List<PolygonPoint> p2 = new List<PolygonPoint>(); List<TriangulationPoint> extraPoints = new List<TriangulationPoint>(); int i = 0; for (i = 0; i < vertsToCopy.Count(); i ++) { if(i<pathSplitIds[0]) p2.Add(new PolygonPoint(vertsToCopy [i].x, vertsToCopy [i].y)); else extraPoints.Add(new TriangulationPoint(vertsToCopy [i].x, vertsToCopy [i].y)); } Polygon _polygon = new Polygon(p2); // this is how to add more points _polygon.AddSteinerPoints (extraPoints); P2T.Triangulate(_polygon); if (spr == null) { bound = new Vector3((float)(_polygon.Bounds.MaxX - _polygon.Bounds.MinX),(float)(_polygon.Bounds.MaxY - _polygon.Bounds.MinY),0 ) ; } int idx = 0; foreach (DelaunayTriangle triangle in _polygon.Triangles) { Vector3 v = new Vector3(); foreach (TriangulationPoint p in triangle.Points) { v = new Vector3((float)p.X, (float)p.Y,0); if(!resultsLocal.Contains(v)) { resultsLocal.Add(v); resultsTriIndexesLocal.Add(idx); Vector2 newUv = new Vector2(((v.x-(float)_polygon.Bounds.MinX) /bound.x) , ((v.y-(float)_polygon.Bounds.MinY) /bound.y) ); if (spr != null) { newUv = new Vector2 ((v.x / bound.x) + 0.5f, (v.y / bound.y) + 0.5f); newUv.x *= rec.width/ spr.texture.width; newUv.y *= rec.height/ spr.texture.height; newUv.x += (rec.x)/ spr.texture.width; newUv.y += (rec.y) / spr.texture.height; SpriteMetaData[] smdArray = textureImporter.spritesheet; Vector2 pivot = new Vector2(.0f,.0f);; for (int k = 0; k < smdArray.Length; k++) { if (smdArray[k].name == spr.name) { switch(smdArray[k].alignment) { case(0): smdArray[k].pivot = Vector2.zero; break; case(1): smdArray[k].pivot = new Vector2(0f,1f) -new Vector2(.5f,.5f); break; case(2): smdArray[k].pivot = new Vector2(0.5f,1f) -new Vector2(.5f,.5f); break; case(3): smdArray[k].pivot = new Vector2(1f,1f) -new Vector2(.5f,.5f); break; case(4): smdArray[k].pivot = new Vector2(0f,.5f) -new Vector2(.5f,.5f); break; case(5): smdArray[k].pivot = new Vector2(1f,.5f) -new Vector2(.5f,.5f); break; case(6): smdArray[k].pivot = new Vector2(0f,0f) -new Vector2(.5f,.5f); break; case(7): smdArray[k].pivot = new Vector2(0.5f,0f) -new Vector2(.5f,.5f); break; case(8): smdArray[k].pivot = new Vector2(1f,0f) -new Vector2(.5f,.5f); break; case(9): smdArray[k].pivot -= new Vector2(.5f,.5f); break; } pivot = smdArray[k].pivot ; } } if(textureImporter.spriteImportMode == SpriteImportMode.Single) pivot = textureImporter.spritePivot-new Vector2(.5f,.5f); newUv.x += ((pivot.x)*rec.width)/ spr.texture.width; newUv.y += ((pivot.y)*rec.height)/ spr.texture.height; } uvsLocal.Add(newUv); normalsLocal.Add(new Vector3(0,0,-1)); idx++; } else { 
resultsTriIndexesLocal.Add(resultsLocal.LastIndexOf(v)); } } } for (int j = resultsTriIndexesLocal.Count-1; j >=0; j--) { resultsTriIndexesReversedLocal.Add(resultsTriIndexesLocal[j]); } results.AddRange(resultsLocal); resultsTriIndexes.AddRange(resultsTriIndexesLocal); resultsTriIndexesReversed.AddRange(resultsTriIndexesReversedLocal); uvs.AddRange(uvsLocal); normals.AddRange(normalsLocal); resultsLocal.Clear(); resultsTriIndexesLocal.Clear(); resultsTriIndexesReversedLocal.Clear(); uvsLocal.Clear(); normalsLocal.Clear(); finalVertices = results.ToArray(); finalNormals = normals.ToArray(); finalUvs= uvs.ToArray(); finalTriangles = resultsTriIndexesReversed.ToArray(); mesh.vertices = finalVertices; mesh.triangles = finalTriangles; mesh.uv = finalUvs; mesh.normals = finalNormals; mesh = calculateMeshTangents (mesh); return mesh; } List<Vector2> randomizeArray(List<Vector2> arr ) { int counter = arr.Count; List <Vector2> reArr = new List <Vector2>(); while (counter-- >= 1) { int rndM = Random.Range(0, arr.Count-1); reArr.Add(arr[rndM]); arr.RemoveAt(rndM); } return reArr; } public static Mesh calculateMeshTangents(Mesh mesh) { //speed up math by copying the mesh arrays int[] triangles = mesh.triangles; Vector3[] vertices = mesh.vertices; Vector2[] uv = mesh.uv; Vector3[] normals = mesh.normals; //variable definitions int triangleCount = triangles.Length; int vertexCount = vertices.Length; Vector3[] tan1 = new Vector3[vertexCount]; Vector3[] tan2 = new Vector3[vertexCount]; Vector4[] tangents = new Vector4[vertexCount]; for (long a = 0; a < triangleCount; a += 3) { long i1 = triangles[a + 0]; long i2 = triangles[a + 1]; long i3 = triangles[a + 2]; Vector3 v1 = vertices[i1]; Vector3 v2 = vertices[i2]; Vector3 v3 = vertices[i3]; Vector2 w1 = uv[i1]; Vector2 w2 = uv[i2]; Vector2 w3 = uv[i3]; float x1 = v2.x - v1.x; float x2 = v3.x - v1.x; float y1 = v2.y - v1.y; float y2 = v3.y - v1.y; float z1 = v2.z - v1.z; float z2 = v3.z - v1.z; float s1 = w2.x - w1.x; float s2 = w3.x - w1.x; float t1 = w2.y - w1.y; float t2 = w3.y - w1.y; float r = 1.0f / (s1 * t2 - s2 * t1); Vector3 sdir = new Vector3((t2 * x1 - t1 * x2) * r, (t2 * y1 - t1 * y2) * r, (t2 * z1 - t1 * z2) * r); Vector3 tdir = new Vector3((s1 * x2 - s2 * x1) * r, (s1 * y2 - s2 * y1) * r, (s1 * z2 - s2 * z1) * r); tan1[i1] += sdir; tan1[i2] += sdir; tan1[i3] += sdir; tan2[i1] += tdir; tan2[i2] += tdir; tan2[i3] += tdir; } for (long a = 0; a < vertexCount; ++a) { Vector3 n = normals[a]; Vector3 t = tan1[a]; //Vector3 tmp = (t - n * Vector3.Dot(n, t)).normalized; //tangents[a] = new Vector4(tmp.x, tmp.y, tmp.z); Vector3.OrthoNormalize(ref n, ref t); tangents[a].x = t.x; tangents[a].y = t.y; tangents[a].z = t.z; tangents[a].w = (Vector3.Dot(Vector3.Cross(n, t), tan2[a]) < 0.0f) ? -1.0f : 1.0f; } mesh.tangents = tangents; return mesh; } }
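// ---------------------------------------------------------------------------
// Example (not part of Puppet2D): a minimal, hypothetical editor menu item that
// exercises the static calculateMeshTangents helper above on a hand-built quad.
// It assumes it runs inside the Unity editor; the class name and menu path are
// illustrative only.
// ---------------------------------------------------------------------------
using UnityEngine;
using UnityEditor;

public static class Puppet2D_TangentExample
{
    [MenuItem("Examples/Puppet2D/Calculate Quad Tangents")]
    private static void Run()
    {
        // A unit quad with planar UVs and -Z normals, matching the conventions
        // used by CreateMesh/CreateMeshFromVerts above.
        var quad = new Mesh
        {
            vertices = new[]
            {
                new Vector3(0f, 0f, 0f), new Vector3(1f, 0f, 0f),
                new Vector3(1f, 1f, 0f), new Vector3(0f, 1f, 0f)
            },
            uv = new[]
            {
                new Vector2(0f, 0f), new Vector2(1f, 0f),
                new Vector2(1f, 1f), new Vector2(0f, 1f)
            },
            normals = new[]
            {
                new Vector3(0f, 0f, -1f), new Vector3(0f, 0f, -1f),
                new Vector3(0f, 0f, -1f), new Vector3(0f, 0f, -1f)
            },
            triangles = new[] { 0, 2, 1, 0, 3, 2 }
        };

        quad = Puppet2D_CreatePolygonFromSprite.calculateMeshTangents(quad);

        // Each tangent is a Vector4 whose w component stores the bitangent sign.
        Debug.Log("First tangent: " + quad.tangents[0]);
    }
}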
//------------------------------------------------------------------------------ // <copyright file="ControlAdapter.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> //------------------------------------------------------------------------------ using System; using System.Globalization; using System.IO; using System.Web; using System.Web.UI; using System.Web.UI.HtmlControls; using System.Web.Mobile; using RootMobile = System.Web.Mobile; using System.Web.UI.MobileControls; using System.Collections; using System.Collections.Specialized; using System.Text; using System.Security.Permissions; // We don't recompile this base class in the shipped source samples, as it // accesses some internal functionality and is a core utility (rather than an // extension itself). #if !COMPILING_FOR_SHIPPED_SOURCE namespace System.Web.UI.MobileControls.Adapters { /* * ControlAdapter base class. * * Copyright (c) 2000 Microsoft Corporation */ /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter"]/*' /> [AspNetHostingPermission(SecurityAction.LinkDemand, Level=AspNetHostingPermissionLevel.Minimal)] [AspNetHostingPermission(SecurityAction.InheritanceDemand, Level=AspNetHostingPermissionLevel.Minimal)] [Obsolete("The System.Web.Mobile.dll assembly has been deprecated and should no longer be used. For information about how to develop ASP.NET mobile applications, see http://go.microsoft.com/fwlink/?LinkId=157231.")] public abstract class ControlAdapter : IControlAdapter { private static readonly String[] LabelIDs = new String[] { RootMobile.SR.ControlAdapter_BackLabel, RootMobile.SR.ControlAdapter_GoLabel, RootMobile.SR.ControlAdapter_OKLabel, RootMobile.SR.ControlAdapter_MoreLabel, RootMobile.SR.ControlAdapter_OptionsLabel, RootMobile.SR.ControlAdapter_NextLabel, RootMobile.SR.ControlAdapter_PreviousLabel, RootMobile.SR.ControlAdapter_LinkLabel, RootMobile.SR.ControlAdapter_PhoneCallLabel }; /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.BackLabel"]/*' /> protected static readonly int BackLabel = 0; /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.GoLabel"]/*' /> protected static readonly int GoLabel = 1; /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.OKLabel"]/*' /> protected static readonly int OKLabel = 2; /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.MoreLabel"]/*' /> protected static readonly int MoreLabel = 3; /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.OptionsLabel"]/*' /> protected static readonly int OptionsLabel = 4; /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.NextLabel"]/*' /> protected static readonly int NextLabel = 5; /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.PreviousLabel"]/*' /> protected static readonly int PreviousLabel = 6; /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.LinkLabel"]/*' /> protected static readonly int LinkLabel = 7; /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.CallLabel"]/*' /> protected static readonly int CallLabel = 8; private MobileControl _control; /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.Control"]/*' /> public MobileControl Control { get { return _control; } set { _control = value; } } /// <include file='doc\ControlAdapter.uex' 
path='docs/doc[@for="ControlAdapter.Page"]/*' /> public virtual MobilePage Page { get { return Control.MobilePage; } set { // Do not expect to be called directly. Subclasses should // override this when needed. throw new Exception( SR.GetString( SR.ControlAdapterBasePagePropertyShouldNotBeSet)); } } /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.Device"]/*' /> public virtual MobileCapabilities Device { get { return (MobileCapabilities)Page.Request.Browser; } } /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.OnInit"]/*' /> public virtual void OnInit(EventArgs e){} /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.OnLoad"]/*' /> public virtual void OnLoad(EventArgs e){} /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.OnPreRender"]/*' /> public virtual void OnPreRender(EventArgs e){} /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.Render"]/*' /> public virtual void Render(HtmlTextWriter writer) { RenderChildren(writer); } /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.OnUnload"]/*' /> public virtual void OnUnload(EventArgs e){} /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.HandlePostBackEvent"]/*' /> public virtual bool HandlePostBackEvent(String eventArgument) { return false; } // By default, always return false, so the control itself will handle // it. /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.LoadPostData"]/*' /> public virtual bool LoadPostData(String key, NameValueCollection data, Object controlPrivateData, out bool dataChanged) { dataChanged = false; return false; } /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.LoadAdapterState"]/*' /> public virtual void LoadAdapterState(Object state) { } /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.SaveAdapterState"]/*' /> public virtual Object SaveAdapterState() { return null; } /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.CreateTemplatedUI"]/*' /> public virtual void CreateTemplatedUI(bool doDataBind) { // No device specific templated UI to create. Control.CreateDefaultTemplatedUI(doDataBind); } // convenience methods here /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.Style"]/*' /> public Style Style { get { return Control.Style; } } /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.RenderChildren"]/*' /> protected void RenderChildren(HtmlTextWriter writer) { if (Control.HasControls()) { foreach (Control child in Control.Controls) { child.RenderControl(writer); } } } /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.VisibleWeight"]/*' /> public virtual int VisibleWeight { get { return ControlPager.UseDefaultWeight; } } /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.ItemWeight"]/*' /> public virtual int ItemWeight { get { return ControlPager.UseDefaultWeight; } } // The following method is used by PageAdapter subclasses of // ControlAdapter for determining the optimum page weight for // a given device. Algorithm is as follows: // 1) First look for the "optimumPageWeight" parameter set // for the device. If it exists, and can be converted // to an integer, use it. // 2) Otherwise, look for the "screenCharactersHeight" parameter. 
// If it exists, and can be converted to an integer, multiply // it by 100 and use the result. // 3) As a last resort, use the default provided by the calling // PageAdapter. /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.CalculateOptimumPageWeight"]/*' /> protected virtual int CalculateOptimumPageWeight(int defaultPageWeight) { int optimumPageWeight = 0; // Pull OptimumPageWeight from the web.config parameter of the same // name, when present. String pageWeight = Device[Constants.OptimumPageWeightParameter]; if (pageWeight != null) { try { optimumPageWeight = Convert.ToInt32(pageWeight, CultureInfo.InvariantCulture); } catch { optimumPageWeight = 0; } } if (optimumPageWeight <= 0) { // If OptimumPageWeight isn't established explicitly, attempt to // construct it as 100 * number of lines of characters. String numLinesStr = Device[Constants.ScreenCharactersHeightParameter]; if (numLinesStr != null) { try { int numLines = Convert.ToInt32(numLinesStr, CultureInfo.InvariantCulture); optimumPageWeight = 100 * numLines; } catch { optimumPageWeight = 0; } } } if (optimumPageWeight <= 0) { optimumPageWeight = defaultPageWeight; } return optimumPageWeight; } private String[] _defaultLabels = null; /// <include file='doc\ControlAdapter.uex' path='docs/doc[@for="ControlAdapter.GetDefaultLabel"]/*' /> protected String GetDefaultLabel(int labelID) { if ((labelID < 0) || (labelID >= LabelIDs.Length)) { throw new ArgumentException(System.Web.Mobile.SR.GetString( System.Web.Mobile.SR.ControlAdapter_InvalidDefaultLabel)); } MobilePage page = Page; if (page != null) { ControlAdapter pageAdapter = (ControlAdapter)page.Adapter; if (pageAdapter._defaultLabels == null) { pageAdapter._defaultLabels = new String[LabelIDs.Length]; } String labelValue = pageAdapter._defaultLabels[labelID]; if (labelValue == null) { labelValue = System.Web.Mobile.SR.GetString(LabelIDs[labelID]); pageAdapter._defaultLabels[labelID] = labelValue; } return labelValue; } else { return System.Web.Mobile.SR.GetString(LabelIDs[labelID]); } } } [Obsolete("The System.Web.Mobile.dll assembly has been deprecated and should no longer be used. For information about how to develop ASP.NET mobile applications, see http://go.microsoft.com/fwlink/?LinkId=157231.")] internal class EmptyControlAdapter : ControlAdapter { internal EmptyControlAdapter() {} } } #endif
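// Example (hypothetical, not one of the shipped adapters): a minimal concrete
// ControlAdapter showing the intended override pattern — emit device markup,
// delegate to RenderChildren for contained controls, and resolve a soft-key
// caption through the shared GetDefaultLabel table. It assumes it is compiled
// alongside the adapter assembly above; the class name is illustrative only.
using System.Web.UI;

namespace System.Web.UI.MobileControls.Adapters
{
    internal class SampleDivControlAdapter : ControlAdapter
    {
        public override void Render(HtmlTextWriter writer)
        {
            writer.WriteBeginTag("div");
            writer.Write(HtmlTextWriter.TagRightChar);

            // Same default behaviour as the base Render: let children render themselves.
            RenderChildren(writer);

            // BackLabel indexes the LabelIDs table defined on the base class.
            writer.Write(GetDefaultLabel(BackLabel));

            writer.WriteEndTag("div");
        }
    }
}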
using System; using Eto.Forms; using Ninject; using NLog; using SharpFlame.Core; using SharpFlame.Core.Domain; using SharpFlame.Core.Extensions; using SharpFlame.Domain; using SharpFlame.Gui.Dialogs; using SharpFlame.Gui.Forms; using SharpFlame.Mapping; using SharpFlame.Mapping.IO.Wz; using SharpFlame.Mapping.Objects; using SharpFlame.Maths; namespace SharpFlame.Gui.Actions { public class CompileMapCommand : Command { private static readonly Logger logger = LogManager.GetCurrentClassLogger(); private readonly MainForm form; //hack to get reference. private Map Map { get { return form.MapPanel.MainMap; } } public CompileMapCommand(MainForm form) { this.form = form; ID = "compile"; MenuText = "&Compile Map ..."; ToolBarText = "&Compile Map ..."; ToolTip = "Compile Map"; Shortcut = Keys.C | Application.Instance.CommonModifier; } protected override void OnExecuted(EventArgs e) { base.OnExecuted(e); if( Map == null ) return; if( Map.CompileScreen != null ) return; var compileMap = new CompileMapDialog(); Map.CompileScreen = compileMap; compileMap.DataContext = Map.InterfaceOptions; var options = compileMap.ShowModal(); Map.SetChanged(); if( options == null ) { Map.CompileScreen = null; //canceled. return; } if( options.CompileType == CompileType.Campaign ) { CompileCampaign(); } else { CompileMultiPlayer(); } } private Result ValidateMap_UnitPositions() { var Result = new Result("Validate unit positions", false); logger.Info("Validate unit positions"); //check unit positions var TileHasUnit = new bool[Map.Terrain.TileSize.X, Map.Terrain.TileSize.Y]; var tileStructureTypeBase = new StructureTypeBase[Map.Terrain.TileSize.X, Map.Terrain.TileSize.Y]; var tileFeatureTypeBase = new FeatureTypeBase[Map.Terrain.TileSize.X, Map.Terrain.TileSize.Y]; var TileObjectGroup = new clsUnitGroup[Map.Terrain.TileSize.X, Map.Terrain.TileSize.Y]; var StartPos = new XYInt(); var FinishPos = new XYInt(); var CentrePos = new XYInt(); StructureType StructureTypeType; StructureTypeBase structureTypeBase; var Footprint = new XYInt(); var UnitIsStructureModule = new bool[Map.Units.Count]; bool IsValid; foreach( var unit in Map.Units ) { if( unit.TypeBase.Type == UnitType.PlayerStructure ) { structureTypeBase = (StructureTypeBase)unit.TypeBase; StructureTypeType = structureTypeBase.StructureType; UnitIsStructureModule[unit.MapLink.Position] = structureTypeBase.IsModule() | StructureTypeType == StructureType.ResourceExtractor; } } //check and store non-module units first. modules need to check for the underlying unit. 
foreach( var unit in Map.Units ) { if( !UnitIsStructureModule[unit.MapLink.Position] ) { Footprint = unit.TypeBase.GetGetFootprintSelected(unit.Rotation); Map.GetFootprintTileRange(unit.Pos.Horizontal, Footprint, ref StartPos, ref FinishPos); if( StartPos.X < 0 | FinishPos.X >= Map.Terrain.TileSize.X | StartPos.Y < 0 | FinishPos.Y >= Map.Terrain.TileSize.Y ) { var resultItem = MapErrorHelper.CreateResultProblemGotoForObject(unit, form.ViewInfo); resultItem.Text = string.Format("Unit off map at position {0}.", unit.GetPosText()); Result.ItemAdd(resultItem); } else { for( var y = StartPos.Y; y <= FinishPos.Y; y++ ) { for( var x = StartPos.X; x <= FinishPos.X; x++ ) { if( TileHasUnit[x, y] ) { var resultItem = MapErrorHelper.CreateResultProblemGotoForObject(unit, form.ViewInfo); logger.Info("Bad overlap of {0} on tile {1}, {2}.", unit.TypeBase.GetDisplayTextName(), x, y); resultItem.Text = string.Format("Bad unit overlap of {0} on tile {1}, {2}.", unit.TypeBase.GetDisplayTextName(), x, y); Result.ItemAdd(resultItem); } else { logger.Debug("{0} on X:{1}, Y:{2} tile.", unit.TypeBase.GetDisplayTextName(), x, y); TileHasUnit[x, y] = true; if( unit.TypeBase.Type == UnitType.PlayerStructure ) { tileStructureTypeBase[x, y] = (StructureTypeBase)unit.TypeBase; } else if( unit.TypeBase.Type == UnitType.Feature ) { tileFeatureTypeBase[x, y] = (FeatureTypeBase)unit.TypeBase; } TileObjectGroup[x, y] = unit.UnitGroup; } } } } } } //check modules and extractors foreach( var unit in Map.Units ) { if( UnitIsStructureModule[unit.MapLink.Position] ) { StructureTypeType = ( (StructureTypeBase)unit.TypeBase ).StructureType; CentrePos.X = ( unit.Pos.Horizontal.X / Constants.TerrainGridSpacing ); CentrePos.Y = unit.Pos.Horizontal.Y / Constants.TerrainGridSpacing; if( CentrePos.X < 0 | CentrePos.X >= Map.Terrain.TileSize.X | CentrePos.Y < 0 | CentrePos.Y >= Map.Terrain.TileSize.Y ) { var resultItem = MapErrorHelper.CreateResultProblemGotoForObject(unit, form.ViewInfo); resultItem.Text = "Module off map at position " + unit.GetPosText() + "."; Result.ItemAdd(resultItem); } else { if( tileStructureTypeBase[CentrePos.X, CentrePos.Y] != null ) { if( TileObjectGroup[CentrePos.X, CentrePos.Y] == unit.UnitGroup ) { if( StructureTypeType == StructureType.FactoryModule ) { if( tileStructureTypeBase[CentrePos.X, CentrePos.Y].StructureType == StructureType.Factory | tileStructureTypeBase[CentrePos.X, CentrePos.Y].StructureType == StructureType.VTOLFactory ) { IsValid = true; } else { IsValid = false; } } else if( StructureTypeType == StructureType.PowerModule ) { if( tileStructureTypeBase[CentrePos.X, CentrePos.Y].StructureType == StructureType.PowerGenerator ) { IsValid = true; } else { IsValid = false; } } else if( StructureTypeType == StructureType.ResearchModule ) { if( tileStructureTypeBase[CentrePos.X, CentrePos.Y].StructureType == StructureType.Research ) { IsValid = true; } else { IsValid = false; } } else { IsValid = false; } } else { IsValid = false; } } else if( tileFeatureTypeBase[CentrePos.X, CentrePos.Y] != null ) { if( StructureTypeType == StructureType.ResourceExtractor ) { if( tileFeatureTypeBase[CentrePos.X, CentrePos.Y].FeatureType == FeatureType.OilResource ) { IsValid = true; } else { IsValid = false; } } else { IsValid = false; } } else if( StructureTypeType == StructureType.ResourceExtractor ) { IsValid = true; } else { IsValid = false; } if( !IsValid ) { var resultItem = MapErrorHelper.CreateResultProblemGotoForObject(unit, form.ViewInfo); resultItem.Text = "Bad module on tile " + 
Convert.ToString(CentrePos.X) + ", " + Convert.ToString(CentrePos.Y) + "."; Result.ItemAdd(resultItem); } } } } return Result; } private Result ValidateMap_Multiplayer(int PlayerCount) { var Result = new Result("Validate for multiplayer", false); logger.Info("Validate for multiplayer"); if( PlayerCount < 2 | PlayerCount > Constants.PlayerCountMax ) { Result.ProblemAdd("Unable to evaluate for multiplayer due to bad number of players."); return Result; } //check HQs, Trucks and unit counts var PlayerHQCount = new int[Constants.PlayerCountMax]; var Player23TruckCount = new int[Constants.PlayerCountMax]; var PlayerMasterTruckCount = new int[Constants.PlayerCountMax]; var DroidType = default(DroidDesign); StructureTypeBase structureTypeBase; var UnusedPlayerUnitWarningCount = 0; var Unit = default(Unit); foreach( var tempLoopVar_Unit in Map.Units ) { Unit = tempLoopVar_Unit; if( Unit.UnitGroup == Map.ScavengerUnitGroup ) { } else { if( Unit.TypeBase.Type == UnitType.PlayerDroid ) { DroidType = (DroidDesign)Unit.TypeBase; if( DroidType.Body != null && DroidType.Propulsion != null && DroidType.Turret1 != null && DroidType.TurretCount == 1 ) { if( DroidType.Turret1.TurretType == TurretType.Construct ) { PlayerMasterTruckCount[Unit.UnitGroup.WZ_StartPos]++; if( DroidType.IsTemplate ) { Player23TruckCount[Unit.UnitGroup.WZ_StartPos]++; } } } } else if( Unit.TypeBase.Type == UnitType.PlayerStructure ) { structureTypeBase = (StructureTypeBase)Unit.TypeBase; if( structureTypeBase.Code == "A0CommandCentre" ) { PlayerHQCount[Unit.UnitGroup.WZ_StartPos]++; } } } if( Unit.TypeBase.Type != UnitType.Feature ) { if( Unit.UnitGroup.WZ_StartPos >= PlayerCount ) { if( UnusedPlayerUnitWarningCount < 32 ) { UnusedPlayerUnitWarningCount++; var resultItem = MapErrorHelper.CreateResultProblemGotoForObject(Unit, form.ViewInfo); resultItem.Text = string.Format("An unused player ({0}) has a unit at {1}.", Unit.UnitGroup.WZ_StartPos, Unit.GetPosText()); Result.ItemAdd(resultItem); } } } } for( var A = 0; A <= PlayerCount - 1; A++ ) { if( PlayerHQCount[A] == 0 ) { Result.ProblemAdd("There is no Command Centre for player " + Convert.ToString(A) + "."); } if( PlayerMasterTruckCount[A] == 0 ) { Result.ProblemAdd("There are no constructor units for player " + Convert.ToString(A) + "."); } else if( Player23TruckCount[A] == 0 ) { Result.WarningAdd("All constructor units for player " + Convert.ToString(A) + " will only exist in master."); } } return Result; } private Result ValidateMap() { var ReturnResult = new Result("Validate map", false); logger.Info("Validate map"); if( Map.Terrain.TileSize.X > Constants.WzMapMaxSize ) { ReturnResult.WarningAdd("Map width is too large. The maximum is " + Convert.ToString(Constants.WzMapMaxSize) + "."); } if( Map.Terrain.TileSize.Y > Constants.WzMapMaxSize ) { ReturnResult.WarningAdd("Map height is too large. 
The maximum is " + Convert.ToString(Constants.WzMapMaxSize) + "."); } if( Map.Tileset == null ) { ReturnResult.ProblemAdd("No tileset selected."); } var PlayerStructureTypeCount = new int[Constants.PlayerCountMax, App.ObjectData.StructureTypes.Count]; var ScavStructureTypeCount = new int[App.ObjectData.StructureTypes.Count]; var structureTypeBase = default(StructureTypeBase); var Unit = default(Unit); foreach( var tempLoopVar_Unit in Map.Units ) { Unit = tempLoopVar_Unit; if( Unit.TypeBase.Type == UnitType.PlayerStructure ) { structureTypeBase = (StructureTypeBase)Unit.TypeBase; if( Unit.UnitGroup == Map.ScavengerUnitGroup ) { ScavStructureTypeCount[structureTypeBase.StructureType_ObjectDataLink.Position]++; } else { PlayerStructureTypeCount[Unit.UnitGroup.WZ_StartPos, structureTypeBase.StructureType_ObjectDataLink.Position]++; } } } foreach( var tempLoopVar_StructureType in App.ObjectData.StructureTypes ) { structureTypeBase = tempLoopVar_StructureType; var StructureTypeNum = structureTypeBase.StructureType_ObjectDataLink.Position; var PlayerNum = 0; for( PlayerNum = 0; PlayerNum <= Constants.PlayerCountMax - 1; PlayerNum++ ) { if( PlayerStructureTypeCount[PlayerNum, StructureTypeNum] > 255 ) { ReturnResult.ProblemAdd("Player {0} has to many ({1}) of structure \"{2}\"" + ". The limit is 255 of any one structure type.".Format2( PlayerNum, PlayerStructureTypeCount[PlayerNum, StructureTypeNum], structureTypeBase.Code) ); } } if( ScavStructureTypeCount[StructureTypeNum] > 255 ) { ReturnResult.ProblemAdd("Scavengers have to many ({0}) of structure \"{1}\"" + ". The limit is 255 of any one structure type.".Format2 (ScavStructureTypeCount[StructureTypeNum], structureTypeBase.Code) ); } } return ReturnResult; } private int ValidateMap_WaterTris() { var X = 0; var Y = 0; var Count = 0; if( Map.Tileset == null ) { return 0; } for( Y = 0; Y <= Map.Terrain.TileSize.Y - 1; Y++ ) { for( X = 0; X <= Map.Terrain.TileSize.X - 1; X++ ) { if( Map.Terrain.Tiles[X, Y].Tri ) { if( Map.Terrain.Tiles[X, Y].Texture.TextureNum >= 0 && Map.Terrain.Tiles[X, Y].Texture.TextureNum < Map.Tileset.Tiles.Count ) { if( Map.Tileset.Tiles[Map.Terrain.Tiles[X, Y].Texture.TextureNum].DefaultType == Constants.TileTypeNumWater ) { Count++; } } } } } return Count; } private void CompileMultiPlayer() { var options = Map.InterfaceOptions; var result = new Result("Compile multiplayer", false); logger.Info("Compile multiplayer"); var license = options.CompileMultiLicense; var playerCount = options.CompileMultiPlayers; if( playerCount < 2 | playerCount > Constants.PlayerCountMax ) { result.ProblemAdd(string.Format("The number of players must be from 2 to {0}", Constants.PlayerCountMax)); } var waterErrors = ValidateMap_WaterTris(); if( waterErrors > 0 ) { result.WarningAdd(string.Format("{0} water tiles have an incorrect triangle direction. 
There might be in-game graphical glitches on those tiles.", waterErrors)); } result.Add(ValidateMap()); result.Add(ValidateMap_UnitPositions()); result.Add(ValidateMap_Multiplayer(playerCount)); var mapName = options.CompileName; for( waterErrors = 0; waterErrors <= mapName.Length - 1; waterErrors++ ) { var currentChar = mapName[waterErrors]; if( !( ( currentChar >= 'a' && currentChar <= 'z' ) || ( currentChar >= 'A' && currentChar <= 'Z' ) || ( waterErrors >= 1 && ( ( currentChar >= '0' && currentChar <= '9' ) || currentChar == '-' || currentChar == '_' ) ) ) ) { break; } } if( waterErrors < mapName.Length ) { result.ProblemAdd("The map\'s name must contain only letters, numbers, underscores and hyphens, and must begin with a letter."); } if( mapName.Length < 1 | mapName.Length > 16 ) { result.ProblemAdd("Map name must be from 1 to 16 characters."); } if( string.IsNullOrEmpty(license) ) { result.ProblemAdd("Enter a valid license."); } if( result.HasProblems ) { App.ShowWarnings(result); return; } var saveFileDialog = new SaveFileDialog() { Filters = { new FileDialogFilter("WZ Files", ".wz") } }; if( Map.PathInfo != null ) { saveFileDialog.Directory = new Uri(Map.PathInfo.Path); } saveFileDialog.FileName = playerCount + "c-" + mapName; if( saveFileDialog.ShowDialog(Application.Instance.MainForm) != DialogResult.Ok ) { return; } if( options.AutoScrollLimits ) { SetScrollLimits(ref Map.InterfaceOptions.ScrollMin, ref Map.InterfaceOptions.ScrollMax); } var wzFormat = App.Kernel.Get<WzSaver>(); result.Add(wzFormat.Save(saveFileDialog.FileName, Map, true, true)); App.ShowWarnings(result); } private void SetScrollLimits(ref XYInt min, ref sXY_uint max) { min.X = 0; min.Y = 0; max.X = (uint)Map.Terrain.TileSize.X; max.Y = (uint)Map.Terrain.TileSize.Y; } private void CompileCampaign() { var options = Map.InterfaceOptions; var ReturnResult = new Result("Compile campaign", false); logger.Info("Compile campaign"); var waterTileErrors = 0; waterTileErrors = ValidateMap_WaterTris(); if( waterTileErrors > 0 ) { ReturnResult.WarningAdd(waterTileErrors + " water tiles have an incorrect triangle direction. There might be in-game graphical glitches on those tiles."); } ReturnResult.Add(ValidateMap()); ReturnResult.Add(ValidateMap_UnitPositions()); var MapName = ""; var TypeNum = 0; MapName = options.CompileName; if( MapName.Length < 1 ) { ReturnResult.ProblemAdd("Enter a name for the campaign files."); } TypeNum = (int)options.CampaignGameType; if( TypeNum < 0 | TypeNum > 2 ) { ReturnResult.ProblemAdd("Select a campaign type."); } if( ReturnResult.HasProblems ) { App.ShowWarnings(ReturnResult); return; } var CompileCampDialog = new SelectFolderDialog(); if( CompileCampDialog.ShowDialog(Application.Instance.MainForm) != DialogResult.Ok ) { return; } if( options.AutoScrollLimits ) { SetScrollLimits(ref Map.InterfaceOptions.ScrollMin, ref Map.InterfaceOptions.ScrollMax); } var wzFormat = App.Kernel.Get<WzSaver>(); ReturnResult.Add(wzFormat.Save(CompileCampDialog.Directory, Map, false, true)); App.ShowWarnings(ReturnResult); } } }
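// The multiplayer name check inside CompileMultiPlayer above reuses the
// waterErrors variable as a character index, which makes the rule easy to miss.
// The helper below is a hypothetical restatement of that same rule (1-16
// characters, starting with a letter, then letters, digits, '-' or '_'); it is
// an illustration only and not part of SharpFlame itself.
namespace SharpFlame.Gui.Actions
{
    internal static class MapNameRules
    {
        public static bool IsValidMultiplayerMapName(string name)
        {
            if (string.IsNullOrEmpty(name) || name.Length > 16)
                return false;

            for (var i = 0; i < name.Length; i++)
            {
                var c = name[i];
                var isLetter = (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z');
                var isTailChar = (c >= '0' && c <= '9') || c == '-' || c == '_';

                // The first character must be a letter; later characters may also
                // be digits, hyphens or underscores, mirroring the loop above.
                if (i == 0 ? !isLetter : !(isLetter || isTailChar))
                    return false;
            }

            return true;
        }
    }
}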
/* * DateTimeFormatter.cs - Implementation of the * "System.Private.DateTimeFormat.DateTimeFormatter" class. * * Copyright (C) 2003 Southern Storm Software, Pty Ltd. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ namespace System.Private.DateTimeFormat { using System.Globalization; using System.Text; internal sealed class DateTimeFormatter { // Format a date value as a string using a particular pattern format. public static String Format(String format, DateTime date, DateTimeFormatInfo info) { // Format the date/time value. StringBuilder builder = new StringBuilder(); int posn = 0; char ch; int count, value; while(posn < format.Length) { // Extract the next format character plus its count. ch = format[posn++]; count = 1; switch(ch) { case 'd': case 'm': case 'M': case 'y': case 'g': case 'h': case 'H': case 's': case 'f': case 't': case 'z': { while(posn < format.Length && format[posn] == ch) { ++posn; ++count; } } break; case ':': { builder.Append(info.TimeSeparator); continue; } // Not reached. case '/': { builder.Append(info.DateSeparator); continue; } // Not reached. case '%': { // Used to escape custom patterns that would // otherwise look like single-letter formats. continue; } // Not reached. case '\\': { // Escape the next character. if(posn < format.Length) { builder.Append(format[posn++]); } continue; } // Not reached. case '\'': { // Quoted text. while(posn < format.Length) { ch = format[posn++]; if(ch == '\'') { break; } builder.Append(ch); } continue; } // Not reached. default: { // Literal character. builder.Append(ch); continue; } // Not reached. } // Process the format character. switch(ch) { case 'd': { // Output the day or weekday. if(count == 1) { value = date.Day; if(value < 10) { builder.Append((char)('0' + value)); } else { builder.Append((char)('0' + (value / 10))); builder.Append((char)('0' + (value % 10))); } } else if(count == 2) { value = date.Day; builder.Append((char)('0' + (value / 10))); builder.Append((char)('0' + (value % 10))); } else if(count == 3) { builder.Append (info.AbbreviatedDayNames [(int)(date.DayOfWeek)]); } else { builder.Append (info.DayNames[(int)(date.DayOfWeek)]); } } break; case 'M': { // Output the month. value = date.Month; if(count == 1) { if(value < 10) { builder.Append((char)('0' + value)); } else { builder.Append((char)('0' + (value / 10))); builder.Append((char)('0' + (value % 10))); } } else if(count == 2) { builder.Append((char)('0' + (value / 10))); builder.Append((char)('0' + (value % 10))); } else if(count == 3) { builder.Append (info.AbbreviatedMonthNames[value - 1]); } else { builder.Append(info.MonthNames[value - 1]); } } break; case 'y': { // Output the year. 
value = date.Year; if(count == 1) { value %= 100; if(value < 10) { builder.Append((char)('0' + value)); } else { builder.Append((char)('0' + (value / 10))); builder.Append((char)('0' + (value % 10))); } } else if(count == 2) { value %= 100; builder.Append((char)('0' + (value / 10))); builder.Append((char)('0' + (value % 10))); } else { builder.Append((char)('0' + (value / 1000))); builder.Append ((char)('0' + ((value / 100 % 10)))); builder.Append ((char)('0' + ((value / 10 % 10)))); builder.Append((char)('0' + (value % 10))); } } break; case 'g': { // Output the era name. try { int era = info.Calendar.GetEra(date); builder.Append(info.GetEraName(era)); } catch(ArgumentException) { // The date does not have an era. } } break; case 'h': { // Output the hour in 12-hour format. value = date.Hour; if(value == 0) { value = 12; } else if(value > 12) { value -= 12; } if(count == 1) { if(value < 10) { builder.Append((char)('0' + value)); } else { builder.Append((char)('0' + (value / 10))); builder.Append((char)('0' + (value % 10))); } } else { builder.Append((char)('0' + (value / 10))); builder.Append((char)('0' + (value % 10))); } } break; case 'H': { // Output the hour in 24-hour format. value = date.Hour; if(count == 1) { if(value < 10) { builder.Append((char)('0' + value)); } else { builder.Append((char)('0' + (value / 10))); builder.Append((char)('0' + (value % 10))); } } else { builder.Append((char)('0' + (value / 10))); builder.Append((char)('0' + (value % 10))); } } break; case 'm': { // Output the minute. value = date.Minute; if(count == 1) { if(value < 10) { builder.Append((char)('0' + value)); } else { builder.Append((char)('0' + (value / 10))); builder.Append((char)('0' + (value % 10))); } } else { builder.Append((char)('0' + (value / 10))); builder.Append((char)('0' + (value % 10))); } } break; case 's': { // Output the second. value = date.Second; if(count == 1) { if(value < 10) { builder.Append((char)('0' + value)); } else { builder.Append((char)('0' + (value / 10))); builder.Append((char)('0' + (value % 10))); } } else { builder.Append((char)('0' + (value / 10))); builder.Append((char)('0' + (value % 10))); } } break; case 'f': { // Output fractions of a second. 
if(count > 7) { count = 7; } long frac = date.Ticks; long divisor = TimeSpan.TicksPerSecond; while(count > 0) { divisor /= 10; value = (int)((frac / divisor) % 10); builder.Append((char)('0' + value)); frac %= divisor; --count; } } break; case 't': { value = date.Hour; if(count == 1) { if(value < 12) { builder.Append(info.AMDesignator[0]); } else { builder.Append(info.PMDesignator[0]); } } else { if(value < 12) { builder.Append(info.AMDesignator); } else { builder.Append(info.PMDesignator); } } } break; #if !ECMA_COMPAT case 'z': { long offset = TimeZone.CurrentTimeZone .GetUtcOffset(date).Ticks; int hour, min; if(offset >= 0) { builder.Append('+'); } else { builder.Append('-'); offset = -offset; } hour = (int)(offset / TimeSpan.TicksPerHour); offset %= TimeSpan.TicksPerHour; min = (int)(offset / TimeSpan.TicksPerMinute); if(count == 1) { if(hour < 10) { builder.Append((char)('0' + hour)); } else { builder.Append((char)('0' + (hour / 10))); builder.Append((char)('0' + (hour % 10))); } } else if(count == 2) { builder.Append((char)('0' + (hour / 10))); builder.Append((char)('0' + (hour % 10))); } else { builder.Append((char)('0' + (hour / 10))); builder.Append((char)('0' + (hour % 10))); builder.Append(':'); builder.Append((char)('0' + (min / 10))); builder.Append((char)('0' + (min % 10))); } } break; #endif } } // Return the formatted string to the caller. return builder.ToString(); } // Format a date value as a string. public static String Format(DateTime date, String format, IFormatProvider provider) { DateTimeFormatInfo info; // Get the date/time formatting information to use. info = DateTimeFormatInfo.GetInstance(provider); // Validate the format string. if(format == null || format == String.Empty) { format = "G"; } if(format.Length == 1) { // Resolve the format code to a custom format string. switch(format) { case "d": format = info.ShortDatePattern; break; case "D": format = info.LongDatePattern; break; case "f": format = info.LongDatePattern + " " + info.ShortTimePattern; break; case "F": format = info.FullDateTimePattern; break; case "g": format = info.ShortDatePattern + " " + info.ShortTimePattern; break; case "G": format = info.ShortDatePattern + " " + info.LongTimePattern; break; case "m": case "M": format = info.MonthDayPattern; break; #if !ECMA_COMPAT case "r": case "R": format = info.RFC1123Pattern; break; case "s": format = info.SortableDateTimePattern; break; case "u": format = info.UniversalSortableDateTimePattern; break; #else case "r": case "R": format = "ddd, dd MMM yyyy HH':'mm':'ss 'GMT'"; break; case "s": format = "yyyy'-'MM'-'dd'T'HH':'mm':'ss"; break; case "u": format = "yyyy'-'MM'-'dd HH':'mm':'ss'Z'"; break; #endif case "t": format = info.ShortTimePattern; break; case "T": format = info.LongTimePattern; break; case "U": date = date.ToUniversalTime(); format = info.FullDateTimePattern; break; case "y": case "Y": format = info.YearMonthPattern; break; default: { throw new FormatException (_("Format_FormatString")); } // Not reached. } } return Format(format, date, info); } }; // class DateTimeFormatter }; // namespace System.Private.DateTimeFormat
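// DateTimeFormatter is internal to this library, so user code cannot call it
// directly. The sketch below only demonstrates the same single-letter expansion
// the Format(DateTime, String, IFormatProvider) overload performs
// ("G" -> ShortDatePattern + " " + LongTimePattern, and so on), using the
// public DateTimeFormatInfo properties; it is an illustration rather than part
// of this class library, and the class name is made up.
using System;
using System.Globalization;

internal static class DateTimeFormatterExample
{
    private static void Main()
    {
        DateTimeFormatInfo info = DateTimeFormatInfo.InvariantInfo;
        DateTime date = new DateTime(2003, 7, 14, 16, 5, 9);

        // Resolve "G" by hand, exactly as the formatter does before handing the
        // resulting custom pattern to its pattern-driven Format overload.
        string custom = info.ShortDatePattern + " " + info.LongTimePattern;

        Console.WriteLine(date.ToString("G", info));    // standard format code
        Console.WriteLine(date.ToString(custom, info)); // equivalent custom pattern
    }
}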
// Copyright (c) Microsoft Corporation. All rights reserved. // // Licensed under the MIT License. See LICENSE.txt in the project root for license information. using System.Globalization; using SM = System.Math; namespace System.Numerics { public struct Matrix3x2 : IEquatable<Matrix3x2> { public float M11; public float M12; public float M21; public float M22; public float M31; public float M32; private static Matrix3x2 _identity = new Matrix3x2 ( 1f, 0f, 0f, 1f, 0f, 0f ); public static Matrix3x2 Identity { get { return _identity; } } public bool IsIdentity { get { return M11 == 1f && M22 == 1f && // Check diagonal element first for early out. M12 == 0f && M21 == 0f && M31 == 0f && M32 == 0f; } } public Vector2 Translation { get { Vector2 ans; ans.X = M31; ans.Y = M32; return ans; } set { M31 = value.X; M32 = value.Y; } } public Matrix3x2(float m11, float m12, float m21, float m22, float m31, float m32) { M11 = m11; M12 = m12; M21 = m21; M22 = m22; M31 = m31; M32 = m32; } public static Matrix3x2 CreateTranslation(Vector2 position) { Matrix3x2 result; result.M11 = 1.0f; result.M12 = 0.0f; result.M21 = 0.0f; result.M22 = 1.0f; result.M31 = position.X; result.M32 = position.Y; return result; } public static Matrix3x2 CreateTranslation(float xPosition, float yPosition) { Matrix3x2 result; result.M11 = 1.0f; result.M12 = 0.0f; result.M21 = 0.0f; result.M22 = 1.0f; result.M31 = xPosition; result.M32 = yPosition; return result; } public static Matrix3x2 CreateScale(float xScale, float yScale) { Matrix3x2 result; result.M11 = xScale; result.M12 = 0.0f; result.M21 = 0.0f; result.M22 = yScale; result.M31 = 0.0f; result.M32 = 0.0f; return result; } public static Matrix3x2 CreateScale(float xScale, float yScale, Vector2 centerPoint) { Matrix3x2 result; float tx = centerPoint.X * (1 - xScale); float ty = centerPoint.Y * (1 - yScale); result.M11 = xScale; result.M12 = 0.0f; result.M21 = 0.0f; result.M22 = yScale; result.M31 = tx; result.M32 = ty; return result; } public static Matrix3x2 CreateScale(Vector2 scales) { Matrix3x2 result; result.M11 = scales.X; result.M12 = 0.0f; result.M21 = 0.0f; result.M22 = scales.Y; result.M31 = 0.0f; result.M32 = 0.0f; return result; } public static Matrix3x2 CreateScale(Vector2 scales, Vector2 centerPoint) { Matrix3x2 result; float tx = centerPoint.X * (1 - scales.X); float ty = centerPoint.Y * (1 - scales.Y); result.M11 = scales.X; result.M12 = 0.0f; result.M21 = 0.0f; result.M22 = scales.Y; result.M31 = tx; result.M32 = ty; return result; } public static Matrix3x2 CreateScale(float scale) { Matrix3x2 result; result.M11 = scale; result.M12 = 0.0f; result.M21 = 0.0f; result.M22 = scale; result.M31 = 0.0f; result.M32 = 0.0f; return result; } public static Matrix3x2 CreateScale(float scale, Vector2 centerPoint) { Matrix3x2 result; float tx = centerPoint.X * (1 - scale); float ty = centerPoint.Y * (1 - scale); result.M11 = scale; result.M12 = 0.0f; result.M21 = 0.0f; result.M22 = scale; result.M31 = tx; result.M32 = ty; return result; } public static Matrix3x2 CreateSkew(float radiansX, float radiansY) { Matrix3x2 result; float xTan = (float)SM.Tan(radiansX); float yTan = (float)SM.Tan(radiansY); result.M11 = 1.0f; result.M12 = yTan; result.M21 = xTan; result.M22 = 1.0f; result.M31 = 0.0f; result.M32 = 0.0f; return result; } public static Matrix3x2 CreateSkew(float radiansX, float radiansY, Vector2 centerPoint) { Matrix3x2 result; float xTan = (float)SM.Tan(radiansX); float yTan = (float)SM.Tan(radiansY); float tx = -centerPoint.Y * xTan; float ty = -centerPoint.X * yTan; 
result.M11 = 1.0f; result.M12 = yTan; result.M21 = xTan; result.M22 = 1.0f; result.M31 = tx; result.M32 = ty; return result; } public static Matrix3x2 CreateRotation(float radians) { Matrix3x2 result; radians = (float)SM.IEEERemainder(radians, SM.PI * 2); float c, s; const float epsilon = 0.001f * (float)SM.PI / 180f; // 0.1% of a degree if (radians > -epsilon && radians < epsilon) { // Exact case for zero rotation. c = 1; s = 0; } else if (radians > SM.PI / 2 - epsilon && radians < SM.PI / 2 + epsilon) { // Exact case for 90 degree rotation. c = 0; s = 1; } else if (radians < -SM.PI + epsilon || radians > SM.PI - epsilon) { // Exact case for 180 degree rotation. c = -1; s = 0; } else if (radians > -SM.PI / 2 - epsilon && radians < -SM.PI / 2 + epsilon) { // Exact case for 270 degree rotation. c = 0; s = -1; } else { // Arbitrary rotation. c = (float)SM.Cos(radians); s = (float)SM.Sin(radians); } // [ c s ] // [ -s c ] // [ 0 0 ] result.M11 = c; result.M12 = s; result.M21 = -s; result.M22 = c; result.M31 = 0.0f; result.M32 = 0.0f; return result; } public static Matrix3x2 CreateRotation(float radians, Vector2 centerPoint) { Matrix3x2 result; radians = (float)SM.IEEERemainder(radians, SM.PI * 2); float c, s; const float epsilon = 0.001f * (float)SM.PI / 180f; // 0.1% of a degree if (radians > -epsilon && radians < epsilon) { // Exact case for zero rotation. c = 1; s = 0; } else if (radians > SM.PI / 2 - epsilon && radians < SM.PI / 2 + epsilon) { // Exact case for 90 degree rotation. c = 0; s = 1; } else if (radians < -SM.PI + epsilon || radians > SM.PI - epsilon) { // Exact case for 180 degree rotation. c = -1; s = 0; } else if (radians > -SM.PI / 2 - epsilon && radians < -SM.PI / 2 + epsilon) { // Exact case for 270 degree rotation. c = 0; s = -1; } else { // Arbitrary rotation. c = (float)SM.Cos(radians); s = (float)SM.Sin(radians); } float x = centerPoint.X * (1 - c) + centerPoint.Y * s; float y = centerPoint.Y * (1 - c) - centerPoint.X * s; // [ c s ] // [ -s c ] // [ x y ] result.M11 = c; result.M12 = s; result.M21 = -s; result.M22 = c; result.M31 = x; result.M32 = y; return result; } public float GetDeterminant() { // There isn't actually any such thing as a determinant for a non-square matrix, // but this 3x2 type is really just an optimization of a 3x3 where we happen to // know the rightmost column is always (0, 0, 1). So we expand to 3x3 format: // // [ M11, M12, 0 ] // [ M21, M22, 0 ] // [ M31, M32, 1 ] // // Sum the diagnonal products: // (M11 * M22 * 1) + (M12 * 0 * M31) + (0 * M21 * M32) // // Subtract the opposite diagonal products: // (M31 * M22 * 0) + (M32 * 0 * M11) + (1 * M21 * M12) // // Collapse out the constants and oh look, this is just a 2x2 determinant! 
return (M11 * M22) - (M21 * M12); } public static bool Invert(Matrix3x2 matrix, out Matrix3x2 result) { float det = (matrix.M11 * matrix.M22) - (matrix.M21 * matrix.M12); if (SM.Abs(det) < float.Epsilon) { result = new Matrix3x2(float.NaN, float.NaN, float.NaN, float.NaN, float.NaN, float.NaN); return false; } float invDet = 1.0f / det; result.M11 = matrix.M22 * invDet; result.M12 = -matrix.M12 * invDet; result.M21 = -matrix.M21 * invDet; result.M22 = matrix.M11 * invDet; result.M31 = (matrix.M21 * matrix.M32 - matrix.M31 * matrix.M22) * invDet; result.M32 = (matrix.M31 * matrix.M12 - matrix.M11 * matrix.M32) * invDet; return true; } public static Matrix3x2 Lerp(Matrix3x2 matrix1, Matrix3x2 matrix2, float amount) { Matrix3x2 result; // First row result.M11 = matrix1.M11 + (matrix2.M11 - matrix1.M11) * amount; result.M12 = matrix1.M12 + (matrix2.M12 - matrix1.M12) * amount; // Second row result.M21 = matrix1.M21 + (matrix2.M21 - matrix1.M21) * amount; result.M22 = matrix1.M22 + (matrix2.M22 - matrix1.M22) * amount; // Third row result.M31 = matrix1.M31 + (matrix2.M31 - matrix1.M31) * amount; result.M32 = matrix1.M32 + (matrix2.M32 - matrix1.M32) * amount; return result; } public static Matrix3x2 Negate(Matrix3x2 value) { Matrix3x2 result; result.M11 = -value.M11; result.M12 = -value.M12; result.M21 = -value.M21; result.M22 = -value.M22; result.M31 = -value.M31; result.M32 = -value.M32; return result; } public static Matrix3x2 Add(Matrix3x2 value1, Matrix3x2 value2) { Matrix3x2 result; result.M11 = value1.M11 + value2.M11; result.M12 = value1.M12 + value2.M12; result.M21 = value1.M21 + value2.M21; result.M22 = value1.M22 + value2.M22; result.M31 = value1.M31 + value2.M31; result.M32 = value1.M32 + value2.M32; return result; } public static Matrix3x2 Subtract(Matrix3x2 value1, Matrix3x2 value2) { Matrix3x2 result; result.M11 = value1.M11 - value2.M11; result.M12 = value1.M12 - value2.M12; result.M21 = value1.M21 - value2.M21; result.M22 = value1.M22 - value2.M22; result.M31 = value1.M31 - value2.M31; result.M32 = value1.M32 - value2.M32; return result; } public static Matrix3x2 Multiply(Matrix3x2 value1, Matrix3x2 value2) { Matrix3x2 result; // First row result.M11 = value1.M11 * value2.M11 + value1.M12 * value2.M21; result.M12 = value1.M11 * value2.M12 + value1.M12 * value2.M22; // Second row result.M21 = value1.M21 * value2.M11 + value1.M22 * value2.M21; result.M22 = value1.M21 * value2.M12 + value1.M22 * value2.M22; // Third row result.M31 = value1.M31 * value2.M11 + value1.M32 * value2.M21 + value2.M31; result.M32 = value1.M31 * value2.M12 + value1.M32 * value2.M22 + value2.M32; return result; } public static Matrix3x2 Multiply(Matrix3x2 value1, float value2) { Matrix3x2 result; result.M11 = value1.M11 * value2; result.M12 = value1.M12 * value2; result.M21 = value1.M21 * value2; result.M22 = value1.M22 * value2; result.M31 = value1.M31 * value2; result.M32 = value1.M32 * value2; return result; } public static Matrix3x2 operator -(Matrix3x2 value) { Matrix3x2 m; m.M11 = -value.M11; m.M12 = -value.M12; m.M21 = -value.M21; m.M22 = -value.M22; m.M31 = -value.M31; m.M32 = -value.M32; return m; } public static Matrix3x2 operator +(Matrix3x2 value1, Matrix3x2 value2) { Matrix3x2 m; m.M11 = value1.M11 + value2.M11; m.M12 = value1.M12 + value2.M12; m.M21 = value1.M21 + value2.M21; m.M22 = value1.M22 + value2.M22; m.M31 = value1.M31 + value2.M31; m.M32 = value1.M32 + value2.M32; return m; } public static Matrix3x2 operator -(Matrix3x2 value1, Matrix3x2 value2) { Matrix3x2 m; m.M11 = value1.M11 - 
value2.M11; m.M12 = value1.M12 - value2.M12; m.M21 = value1.M21 - value2.M21; m.M22 = value1.M22 - value2.M22; m.M31 = value1.M31 - value2.M31; m.M32 = value1.M32 - value2.M32; return m; } public static Matrix3x2 operator *(Matrix3x2 value1, Matrix3x2 value2) { Matrix3x2 m; // First row m.M11 = value1.M11 * value2.M11 + value1.M12 * value2.M21; m.M12 = value1.M11 * value2.M12 + value1.M12 * value2.M22; // Second row m.M21 = value1.M21 * value2.M11 + value1.M22 * value2.M21; m.M22 = value1.M21 * value2.M12 + value1.M22 * value2.M22; // Third row m.M31 = value1.M31 * value2.M11 + value1.M32 * value2.M21 + value2.M31; m.M32 = value1.M31 * value2.M12 + value1.M32 * value2.M22 + value2.M32; return m; } public static Matrix3x2 operator *(Matrix3x2 value1, float value2) { Matrix3x2 m; m.M11 = value1.M11 * value2; m.M12 = value1.M12 * value2; m.M21 = value1.M21 * value2; m.M22 = value1.M22 * value2; m.M31 = value1.M31 * value2; m.M32 = value1.M32 * value2; return m; } public static bool operator ==(Matrix3x2 value1, Matrix3x2 value2) { return (value1.M11 == value2.M11 && value1.M22 == value2.M22 && // Check diagonal element first for early out. value1.M12 == value2.M12 && value1.M21 == value2.M21 && value1.M31 == value2.M31 && value1.M32 == value2.M32); } public static bool operator !=(Matrix3x2 value1, Matrix3x2 value2) { return (value1.M11 != value2.M11 || value1.M12 != value2.M12 || value1.M21 != value2.M21 || value1.M22 != value2.M22 || value1.M31 != value2.M31 || value1.M32 != value2.M32); } public bool Equals(Matrix3x2 other) { return (M11 == other.M11 && M22 == other.M22 && // Check diagonal element first for early out. M12 == other.M12 && M21 == other.M21 && M31 == other.M31 && M32 == other.M32); } public override bool Equals(object obj) { if (obj is Matrix3x2) { return Equals((Matrix3x2)obj); } return false; } public override string ToString() { CultureInfo ci = CultureInfo.CurrentCulture; return String.Format(ci, "{{ {{M11:{0} M12:{1}}} {{M21:{2} M22:{3}}} {{M31:{4} M32:{5}}} }}", M11.ToString(ci), M12.ToString(ci), M21.ToString(ci), M22.ToString(ci), M31.ToString(ci), M32.ToString(ci)); } public override int GetHashCode() { return M11.GetHashCode() + M12.GetHashCode() + M21.GetHashCode() + M22.GetHashCode() + M31.GetHashCode() + M32.GetHashCode(); } #if INCLUDE_WINRT_CANVAS_INTEROP public static unsafe implicit operator Microsoft.Graphics.Canvas.Numerics.Matrix3x2(Matrix3x2 value) { return *(Microsoft.Graphics.Canvas.Numerics.Matrix3x2*)&value; } public static unsafe implicit operator Matrix3x2(Microsoft.Graphics.Canvas.Numerics.Matrix3x2 value) { return *(Matrix3x2*)&value; } #endif } }
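// Editor's note: the following is a minimal usage sketch, not part of the original source.
// It assumes the accompanying Vector2 type from this numerics library and illustrates two
// properties of the Matrix3x2 API above: CreateRotation(radians, centerPoint) matches the
// composition translate(-center) * rotate * translate(center) under operator *, and Invert
// produces a matrix that multiplies back to (approximately) Identity. The type and method
// names below (Matrix3x2UsageSketch, Demo) are illustrative only.
namespace System.Numerics
{
    internal static class Matrix3x2UsageSketch
    {
        internal static void Demo()
        {
            var center = new Vector2(10f, 5f);
            const float angle = 0.75f;

            // Rotation about a center point...
            Matrix3x2 direct = Matrix3x2.CreateRotation(angle, center);

            // ...is equivalent (up to floating-point rounding) to translating the center to the
            // origin, rotating, then translating back. Row-vector convention: the first transform
            // applied is the leftmost factor.
            Matrix3x2 composed = Matrix3x2.CreateTranslation(-center.X, -center.Y)
                                 * Matrix3x2.CreateRotation(angle)
                                 * Matrix3x2.CreateTranslation(center.X, center.Y);

            // Inverting and multiplying back gives (approximately) the identity matrix.
            Matrix3x2 inverse;
            if (Matrix3x2.Invert(direct, out inverse))
            {
                Matrix3x2 roundTrip = direct * inverse; // ~= Matrix3x2.Identity
            }
        }
    }
}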
#region S# License /****************************************************************************************** NOTICE!!! This program and source code is owned and licensed by StockSharp, LLC, www.stocksharp.com Viewing or use of this code requires your acceptance of the license agreement found at https://github.com/StockSharp/StockSharp/blob/master/LICENSE Removal of this comment is a violation of the license agreement. Project: StockSharp.Algo.Storages.Algo File: SecurityList.cs Created: 2015, 11, 11, 2:32 PM Copyright 2010 by StockSharp, LLC *******************************************************************************************/ #endregion S# License namespace StockSharp.Algo.Storages { using System; using System.Collections.Generic; using System.Data; using System.Linq; using Ecng.Collections; using Ecng.Common; using Ecng.Data; using Ecng.Data.Sql; using Ecng.Serialization; using MoreLinq; using StockSharp.BusinessEntities; /// <summary> /// The class for representation in the form of list of instruments, stored in external storage. /// </summary> public class SecurityList : BaseStorageEntityList<Security>, IStorageSecurityList { private readonly EntityRegistry _registry; private readonly DatabaseCommand _readAllByCodeAndType; private readonly DatabaseCommand _readAllByCodeAndTypeAndExpiryDate; private readonly DatabaseCommand _readAllByType; private readonly DatabaseCommand _readAllByBoardAndType; private readonly DatabaseCommand _readAllByTypeAndExpiryDate; private readonly DatabaseCommand _readSecurityIds; /// <summary> /// Initializes a new instance of the <see cref="SecurityList"/>. /// </summary> /// <param name="registry">The storage of trade objects.</param> public SecurityList(EntityRegistry registry) : base(registry.Storage) { _registry = registry; var database = Storage as Database; if (database == null) return; var readAllByCodeAndType = database.CommandType == CommandType.StoredProcedure ? Query.Execute(Schema, SqlCommandTypes.ReadAll, string.Empty, "CodeAndType") : Query .Select(Schema) .From(Schema) .Where() .Like(Schema.Fields["Code"]) .And() .OpenBracket() .IsParamNull(Schema.Fields["Type"]) .Or() .Equals(Schema.Fields["Type"]) .CloseBracket(); _readAllByCodeAndType = database.GetCommand(readAllByCodeAndType, Schema, new FieldList(Schema.Fields["Code"], Schema.Fields["Type"]), new FieldList()); var readAllByCodeAndTypeAndExpiryDate = database.CommandType == CommandType.StoredProcedure ? 
Query.Execute(Schema, SqlCommandTypes.ReadAll, string.Empty, "CodeAndTypeAndExpiryDate") : Query .Select(Schema) .From(Schema) .Where() .Like(Schema.Fields["Code"]) .And() .OpenBracket() .IsParamNull(Schema.Fields["Type"]) .Or() .Equals(Schema.Fields["Type"]) .CloseBracket() .And() .OpenBracket() .IsNull(Schema.Fields["ExpiryDate"]) .Or() .Equals(Schema.Fields["ExpiryDate"]) .CloseBracket(); _readAllByCodeAndTypeAndExpiryDate = database.GetCommand(readAllByCodeAndTypeAndExpiryDate, Schema, new FieldList(Schema.Fields["Code"], Schema.Fields["Type"], Schema.Fields["ExpiryDate"]), new FieldList()); if (database.CommandType == CommandType.Text) { var readSecurityIds = Query .Execute("SELECT group_concat(Id, ',') FROM Security"); _readSecurityIds = database.GetCommand(readSecurityIds, null, new FieldList(), new FieldList()); var readAllByBoardAndType = Query .Select(Schema) .From(Schema) .Where() .Equals(Schema.Fields["Board"]) .And() .OpenBracket() .IsParamNull(Schema.Fields["Type"]) .Or() .Equals(Schema.Fields["Type"]) .CloseBracket(); _readAllByBoardAndType = database.GetCommand(readAllByBoardAndType, Schema, new FieldList(Schema.Fields["Board"], Schema.Fields["Type"]), new FieldList()); var readAllByTypeAndExpiryDate = Query .Select(Schema) .From(Schema) .Where() .Equals(Schema.Fields["Type"]) .And() .OpenBracket() .IsNull(Schema.Fields["ExpiryDate"]) .Or() .Equals(Schema.Fields["ExpiryDate"]) .CloseBracket(); _readAllByTypeAndExpiryDate = database.GetCommand(readAllByTypeAndExpiryDate, Schema, new FieldList(Schema.Fields["Type"], Schema.Fields["ExpiryDate"]), new FieldList()); var readAllByType = Query .Select(Schema) .From(Schema) .Where() .Equals(Schema.Fields["Type"]); _readAllByType = database.GetCommand(readAllByType, Schema, new FieldList(Schema.Fields["Type"]), new FieldList()); } ((ICollectionEx<Security>)this).AddedRange += s => _added.SafeInvoke(s); ((ICollectionEx<Security>)this).RemovedRange += s => _removed.SafeInvoke(s); } private Action<IEnumerable<Security>> _added; event Action<IEnumerable<Security>> ISecurityProvider.Added { add { _added += value; } remove { _added -= value; } } private Action<IEnumerable<Security>> _removed; event Action<IEnumerable<Security>> ISecurityProvider.Removed { add { _removed += value; } remove { _removed -= value; } } /// <summary> /// Lookup securities by criteria <paramref name="criteria" />. /// </summary> /// <param name="criteria">The instrument whose fields will be used as a filter.</param> /// <returns>Found instruments.</returns> public IEnumerable<Security> Lookup(Security criteria) { if (criteria.IsLookupAll()) return this.ToArray(); if (!criteria.Id.IsEmpty()) { var security = ReadById(criteria.Id); return security == null ? Enumerable.Empty<Security>() : new[] { security }; } if (!criteria.Code.IsEmpty() && _readAllByCodeAndType != null) { return criteria.ExpiryDate == null ? ReadAllByCodeAndType(criteria) : ReadAllByCodeAndTypeAndExpiryDate(criteria); } if (criteria.Board != null && _readAllByBoardAndType != null) { return ReadAllByBoardAndType(criteria); } if (criteria.Type != null && _readAllByTypeAndExpiryDate != null) { return criteria.ExpiryDate == null ? 
ReadAllByType(criteria) : ReadAllByTypeAndExpiryDate(criteria); } return this.Filter(criteria); } object ISecurityProvider.GetNativeId(Security security) { return null; } private IEnumerable<Security> ReadAllByCodeAndType(Security criteria) { var fields = new[] { new SerializationItem(Schema.Fields["Code"], "%" + criteria.Code + "%"), new SerializationItem(Schema.Fields["Type"], criteria.Type) }; return Database.ReadAll<Security>(_readAllByCodeAndType, new SerializationItemCollection(fields)); } private IEnumerable<Security> ReadAllByCodeAndTypeAndExpiryDate(Security criteria) { if (criteria.ExpiryDate == null) throw new ArgumentNullException(nameof(criteria), "ExpiryDate == null"); var fields = new[] { new SerializationItem(Schema.Fields["Code"], "%" + criteria.Code + "%"), new SerializationItem(Schema.Fields["Type"], criteria.Type), new SerializationItem(Schema.Fields["ExpiryDate"], criteria.ExpiryDate.Value) }; return Database.ReadAll<Security>(_readAllByCodeAndTypeAndExpiryDate, new SerializationItemCollection(fields)); } private IEnumerable<Security> ReadAllByBoardAndType(Security criteria) { var fields = new[] { new SerializationItem(Schema.Fields["Board"], criteria.Board.Code), new SerializationItem(Schema.Fields["Type"], criteria.Type) }; return Database.ReadAll<Security>(_readAllByBoardAndType, new SerializationItemCollection(fields)); } private IEnumerable<Security> ReadAllByTypeAndExpiryDate(Security criteria) { if (criteria.ExpiryDate == null) throw new ArgumentNullException(nameof(criteria), "ExpiryDate == null"); var fields = new[] { new SerializationItem(Schema.Fields["Type"], criteria.Type), new SerializationItem(Schema.Fields["ExpiryDate"], criteria.ExpiryDate.Value) }; return Database.ReadAll<Security>(_readAllByTypeAndExpiryDate, new SerializationItemCollection(fields)); } private IEnumerable<Security> ReadAllByType(Security criteria) { var fields = new[] { new SerializationItem(Schema.Fields["Type"], criteria.Type) }; return Database.ReadAll<Security>(_readAllByType, new SerializationItemCollection(fields)); } /// <summary> /// To save the trading object. /// </summary> /// <param name="entity">The trading object.</param> public override void Save(Security entity) { _registry.Exchanges.Save(entity.Board.Exchange); _registry.ExchangeBoards.Save(entity.Board); base.Save(entity); } /// <summary> /// To get identifiers of saved instruments. /// </summary> /// <returns>IDs of securities.</returns> public IEnumerable<string> GetSecurityIds() { if (_readSecurityIds == null) return this.Select(s => s.Id); var str = _readSecurityIds.ExecuteScalar<string>(new SerializationItemCollection()); return str.SplitByComma(",", true); } /// <summary> /// It is called when adding an element to the storage. /// </summary> /// <param name="entity">The trading object.</param> protected override void OnAdd(Security entity) { _registry.Exchanges.Save(entity.Board.Exchange); _registry.ExchangeBoards.Save(entity.Board); base.OnAdd(entity); } /// <summary> /// Delete security. /// </summary> /// <param name="security">Security.</param> public void Delete(Security security) { Remove(security); } /// <summary> /// To delete instruments by the criterion. /// </summary> /// <param name="criteria">The criterion.</param> public void DeleteBy(Security criteria) { this.Filter(criteria).ForEach(s => Remove(s)); } void IDisposable.Dispose() { } } }
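// Editor's note: the following is a minimal usage sketch, not part of the original source.
// It assumes an EntityRegistry has already been configured elsewhere in the application;
// the type and member names below (SecurityListUsageSketch, FindByCode) are illustrative only.
namespace StockSharp.Algo.Storages
{
    using System.Collections.Generic;

    using StockSharp.BusinessEntities;

    internal static class SecurityListUsageSketch
    {
        internal static IEnumerable<Security> FindByCode(EntityRegistry registry, string code)
        {
            var securities = new SecurityList(registry);

            // Only the filled-in fields of the criteria instrument act as a filter;
            // with just Code set, Lookup falls through to the "by code and type" query.
            return securities.Lookup(new Security { Code = code });
        }
    }
}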
// // PcFileCache.cs // // Author: // Lluis Sanchez Gual <[email protected]> // // Copyright (c) 2009 Novell, Inc (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. using System; using System.Text; using System.Xml; using System.IO; using System.Collections.Generic; namespace Mono.PkgConfig { internal interface IPcFileCacheContext<TP> where TP:PackageInfo, new() { // In the implementation of this method, the host application can extract // information from the pc file and store it in the PackageInfo object void StoreCustomData (PcFile pcfile, TP pkg); // Should return false if the provided package does not have required // custom data bool IsCustomDataComplete (string pcfile, TP pkg); // Called to report errors void ReportError (string message, Exception ex); } internal interface IPcFileCacheContext: IPcFileCacheContext<PackageInfo> { } internal abstract class PcFileCache: PcFileCache<PackageInfo> { public PcFileCache (IPcFileCacheContext ctx): base (ctx) { } } internal abstract class PcFileCache<TP> where TP:PackageInfo, new() { const string CACHE_VERSION = "2"; Dictionary<string, TP> infos = new Dictionary<string, TP> (); Dictionary<string, List<TP>> filesByFolder = new Dictionary<string, List<TP>> (); string cacheFile; bool hasChanges; IPcFileCacheContext<TP> ctx; IEnumerable<string> defaultPaths; public PcFileCache (IPcFileCacheContext<TP> ctx) { this.ctx = ctx; try { string path = CacheDirectory; if (!Directory.Exists (path)) Directory.CreateDirectory (path); cacheFile = Path.Combine (path, "pkgconfig-cache-" + CACHE_VERSION + ".xml"); if (File.Exists (cacheFile)) Load (); } catch (Exception ex) { ctx.ReportError ("pc file cache could not be loaded.", ex); } } protected abstract string CacheDirectory { get; } // Updates the pkg-config index, using the default search directories public void Update () { Update (GetDefaultPaths ()); } // Updates the pkg-config index, looking for .pc files in the provided directories public void Update (IEnumerable<string> pkgConfigDirs) { foreach (string pcdir in pkgConfigDirs) { foreach (string pcfile in Directory.GetFiles (pcdir, "*.pc")) GetPackageInfo (pcfile); } Save (); } public IEnumerable<TP> GetPackages () { return GetPackages (null); } public IEnumerable<TP> GetPackages (IEnumerable<string> pkgConfigDirs) { if (pkgConfigDirs == null) pkgConfigDirs = GetDefaultPaths (); foreach (string sp in pkgConfigDirs) { List<TP> list; if (filesByFolder.TryGetValue (Path.GetFullPath (sp), out list)) { foreach (TP p in list) yield 
return p; } } } public TP GetPackageInfoByName (string name) { return GetPackageInfoByName (name, null); } public TP GetPackageInfoByName (string name, IEnumerable<string> pkgConfigDirs) { foreach (TP p in GetPackages (pkgConfigDirs)) if (p.Name == name) return p; return null; } // Returns information about a .pc file public TP GetPackageInfo (string file) { TP info, oldInfo = null; file = Path.GetFullPath (file); DateTime wtime = File.GetLastWriteTime (file); lock (infos) { if (infos.TryGetValue (file, out info)) { if (info.LastWriteTime == wtime) return info; oldInfo = info; } } try { info = ParsePackageInfo (file); } catch (Exception ex) { ctx.ReportError ("Error while parsing .pc file", ex); info = new TP (); } lock (infos) { if (!info.IsValidPackage) info = new TP (); // Create a default empty instance info.LastWriteTime = wtime; Add (file, info, oldInfo); hasChanges = true; } return info; } void Add (string file, TP info, TP replacedInfo) { infos [file] = info; string dir = Path.GetFullPath (Path.GetDirectoryName (file)); List<TP> list; if (!filesByFolder.TryGetValue (dir, out list)) { list = new List<TP> (); filesByFolder [dir] = list; } if (replacedInfo != null) { int i = list.IndexOf (replacedInfo); if (i != -1) { list [i] = info; return; } } list.Add (info); } FileStream OpenFile (FileAccess access) { int retries = 6; FileMode mode = access == FileAccess.Read ? FileMode.Open : FileMode.Create; Exception lastException = null; while (retries > 0) { try { return new FileStream (cacheFile, mode, access, FileShare.None); } catch (Exception ex) { // the file may be locked by another app. Wait a bit and try again lastException = ex; System.Threading.Thread.Sleep (200); retries--; } } ctx.ReportError ("File could not be opened: " + cacheFile, lastException); return null; } void Load () { // The serializer can't be used because this file is reused in xbuild using (FileStream fs = OpenFile (FileAccess.Read)) { if (fs == null) return; XmlTextReader xr = new XmlTextReader (fs); xr.MoveToContent (); xr.ReadStartElement (); xr.MoveToContent (); while (xr.NodeType == XmlNodeType.Element) ReadPackage (xr); } } public void Save () { // The serializer can't be used because this file is reused in xbuild lock (infos) { if (!hasChanges) return; using (FileStream fs = OpenFile (FileAccess.Write)) { if (fs == null) return; XmlTextWriter tw = new XmlTextWriter (new StreamWriter (fs)); tw.Formatting = Formatting.Indented; tw.WriteStartElement ("PcFileCache"); foreach (KeyValuePair<string,TP> file in infos) { WritePackage (tw, file.Key, file.Value); } tw.WriteEndElement (); // PcFileCache tw.Flush (); hasChanges = false; } } } void WritePackage (XmlTextWriter tw, string file, TP pinfo) { tw.WriteStartElement ("File"); tw.WriteAttributeString ("path", file); tw.WriteAttributeString ("lastWriteTime", XmlConvert.ToString (pinfo.LastWriteTime, XmlDateTimeSerializationMode.Local)); if (pinfo.IsValidPackage) { if (pinfo.Name != null) tw.WriteAttributeString ("name", pinfo.Name); if (pinfo.Version != null) tw.WriteAttributeString ("version", pinfo.Version); if (!string.IsNullOrEmpty (pinfo.Description)) tw.WriteAttributeString ("description", pinfo.Description); if (pinfo.CustomData != null) { foreach (KeyValuePair<string,string> cd in pinfo.CustomData) tw.WriteAttributeString (cd.Key, cd.Value); } WritePackageContent (tw, file, pinfo); } tw.WriteEndElement (); // File } protected virtual void WritePackageContent (XmlTextWriter tw, string file, TP pinfo) { } void ReadPackage (XmlReader tr) { TP pinfo = new TP 
(); string file = null; tr.MoveToFirstAttribute (); do { switch (tr.LocalName) { case "path": file = tr.Value; break; case "lastWriteTime": pinfo.LastWriteTime = XmlConvert.ToDateTime (tr.Value, XmlDateTimeSerializationMode.Local); break; case "name": pinfo.Name = tr.Value; break; case "version": pinfo.Version = tr.Value; break; case "description": pinfo.Description = tr.Value; break; default: pinfo.SetData (tr.LocalName, tr.Value); break; } } while (tr.MoveToNextAttribute ()); tr.MoveToElement (); if (!tr.IsEmptyElement) { tr.ReadStartElement (); tr.MoveToContent (); ReadPackageContent (tr, pinfo); tr.MoveToContent (); tr.ReadEndElement (); } else tr.Read (); tr.MoveToContent (); if (!pinfo.IsValidPackage || ctx.IsCustomDataComplete (file, pinfo)) Add (file, pinfo, null); } protected virtual void ReadPackageContent (XmlReader tr, TP pinfo) { } public object SyncRoot { get { return infos; } } TP ParsePackageInfo (string pcfile) { PcFile file = new PcFile (); file.Load (pcfile); TP pinfo = new TP (); pinfo.Name = Path.GetFileNameWithoutExtension (file.FilePath); if (!file.HasErrors) { pinfo.Version = file.Version; pinfo.Description = file.Description; ParsePackageInfo (file, pinfo); ctx.StoreCustomData (file, pinfo); } return pinfo; } protected virtual void ParsePackageInfo (PcFile file, TP pinfo) { } IEnumerable<string> GetDefaultPaths () { if (defaultPaths == null) { string pkgConfigPath = Environment.GetEnvironmentVariable ("PKG_CONFIG_PATH"); string pkgConfigDir = Environment.GetEnvironmentVariable ("PKG_CONFIG_LIBDIR"); defaultPaths = GetPkgconfigPaths (null, pkgConfigPath, pkgConfigDir); } return defaultPaths; } public IEnumerable<string> GetPkgconfigPaths (string prefix, string pkgConfigPath, string pkgConfigLibdir) { char[] sep = new char[] { Path.PathSeparator }; string[] pkgConfigPaths = null; if (!String.IsNullOrEmpty (pkgConfigPath)) { pkgConfigPaths = pkgConfigPath.Split (sep, StringSplitOptions.RemoveEmptyEntries); if (pkgConfigPaths.Length == 0) pkgConfigPaths = null; } string[] pkgConfigLibdirs = null; if (!String.IsNullOrEmpty (pkgConfigLibdir)) { pkgConfigLibdirs = pkgConfigLibdir.Split (sep, StringSplitOptions.RemoveEmptyEntries); if (pkgConfigLibdirs.Length == 0) pkgConfigLibdirs = null; } if (prefix == null) prefix = PathUp (typeof (int).Assembly.Location, 4); IEnumerable<string> paths = GetUnfilteredPkgConfigDirs (pkgConfigPaths, pkgConfigLibdirs, new string [] { prefix }); return NormaliseAndFilterPaths (paths, Environment.CurrentDirectory); } IEnumerable<string> GetUnfilteredPkgConfigDirs (IEnumerable<string> pkgConfigPaths, IEnumerable<string> pkgConfigLibdirs, IEnumerable<string> systemPrefixes) { if (pkgConfigPaths != null) { foreach (string dir in pkgConfigPaths) yield return dir; } if (pkgConfigLibdirs != null) { foreach (string dir in pkgConfigLibdirs) yield return dir; } else if (systemPrefixes != null) { string[] suffixes = new string [] { Path.Combine ("lib", "pkgconfig"), Path.Combine ("lib64", "pkgconfig"), Path.Combine ("libdata", "pkgconfig"), Path.Combine ("share", "pkgconfig"), }; foreach (string prefix in systemPrefixes) foreach (string suffix in suffixes) yield return Path.Combine (prefix, suffix); } } IEnumerable<string> NormaliseAndFilterPaths (IEnumerable<string> paths, string workingDirectory) { Dictionary<string,string> filtered = new Dictionary<string,string> (); foreach (string p in paths) { string path = p; if (!Path.IsPathRooted (path)) path = Path.Combine (workingDirectory, path); path = Path.GetFullPath (path); if (filtered.ContainsKey 
(path)) continue; filtered.Add (path,path); try { if (!Directory.Exists (path)) continue; } catch (IOException ex) { ctx.ReportError ("Error checking for directory '" + path + "'.", ex); } yield return path; } } static string PathUp (string path, int up) { if (up == 0) return path; for (int i = path.Length -1; i >= 0; i--) { if (path[i] == Path.DirectorySeparatorChar) { up--; if (up == 0) return path.Substring (0, i); } } return null; } } internal class PcFile { Dictionary<string,string> variables = new Dictionary<string, string> (); string filePath; string name; string description; string version; string libs; bool hasErrors; public string Description { get { return description; } set { description = value; } } public string FilePath { get { return filePath; } set { filePath = value; } } public bool HasErrors { get { return hasErrors; } set { hasErrors = value; } } public string Libs { get { return libs; } set { libs = value; } } public string Name { get { return name; } set { name = value; } } public string Version { get { return version; } set { version = value; } } public string GetVariable (string varName) { string val; variables.TryGetValue (varName, out val); return val; } public void Load (string pcfile) { FilePath = pcfile; variables.Add ("pcfiledir", Path.GetDirectoryName (pcfile)); using (StreamReader reader = new StreamReader (pcfile)) { string line; while ((line = reader.ReadLine ()) != null) { int i = line.IndexOf (':'); int j = line.IndexOf ('='); int k = System.Math.Min (i != -1 ? i : int.MaxValue, j != -1 ? j : int.MaxValue); if (k == int.MaxValue) continue; string var = line.Substring (0, k).Trim (); string value = line.Substring (k + 1).Trim (); value = Evaluate (value); if (k == j) { // Is variable variables [var] = value; } else { switch (var) { case "Name": Name = value; break; case "Description": Description = value; break; case "Version": Version = value; break; case "Libs": Libs = value; break; } } } } } string Evaluate (string value) { int i = value.IndexOf ("${"); if (i == -1) return value; StringBuilder sb = new StringBuilder (); int last = 0; while (i != -1 && i < value.Length) { sb.Append (value.Substring (last, i - last)); if (i == 0 || value [i - 1] != '$') { // Evaluate if var is not escaped i += 2; int n = value.IndexOf ('}', i); if (n == -1 || n == i) { // Closing bracket not found or empty name HasErrors = true; return value; } string rname = value.Substring (i, n - i); string rval; if (variables.TryGetValue (rname, out rval)) sb.Append (rval); else { HasErrors = true; return value; } i = n + 1; last = i; } else last = i++; if (i < value.Length - 1) i = value.IndexOf ("${", i); } sb.Append (value.Substring (last, value.Length - last)); return sb.ToString (); } } internal class PackageInfo { Dictionary<string,string> customData; string name; string version; string description; DateTime lastWriteTime; public string Name { get { return name; } set { name = value; } } public string Version { get { return version; } set { version = value; } } public string Description { get { return description; } set { description = value; } } public string GetData (string name) { if (customData == null) return null; string res; customData.TryGetValue (name, out res); return res; } public void SetData (string name, string value) { if (customData == null) customData = new Dictionary<string, string> (); customData [name] = value; } public void RemoveData (string name) { if (customData != null) customData.Remove (name); } internal Dictionary<string,string> CustomData { get { 
return customData; } } internal DateTime LastWriteTime { get { return lastWriteTime; } set { lastWriteTime = value; } } internal bool HasCustomData { get { return customData != null && customData.Count > 0; } } internal protected virtual bool IsValidPackage { get { return HasCustomData; } } } }
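// Editor's note: the following is a minimal usage sketch, not part of the original source.
// It shows one way a host application might implement IPcFileCacheContext and the abstract
// CacheDirectory member of PcFileCache defined above. The type names (ConsoleCacheContext,
// LocalPcFileCache) and the cache directory location are illustrative assumptions only.
namespace Mono.PkgConfig
{
	internal class ConsoleCacheContext : IPcFileCacheContext
	{
		// No custom data is extracted from .pc files in this sketch.
		public void StoreCustomData (PcFile pcfile, PackageInfo pkg)
		{
		}

		// Since no custom data is required, every cached entry is considered complete.
		public bool IsCustomDataComplete (string pcfile, PackageInfo pkg)
		{
			return true;
		}

		public void ReportError (string message, Exception ex)
		{
			Console.Error.WriteLine (message + ": " + ex.Message);
		}
	}

	internal class LocalPcFileCache : PcFileCache
	{
		public LocalPcFileCache (IPcFileCacheContext ctx): base (ctx)
		{
		}

		// The cache file is kept under the user's application data folder in this sketch.
		protected override string CacheDirectory {
			get {
				string appData = Environment.GetFolderPath (Environment.SpecialFolder.ApplicationData);
				return Path.Combine (appData, "pc-file-cache-sketch");
			}
		}
	}

	// Typical use (also illustrative):
	//   var cache = new LocalPcFileCache (new ConsoleCacheContext ());
	//   cache.Update ();                                   // scan the default pkg-config paths
	//   foreach (PackageInfo p in cache.GetPackages ()) {  // inspect p.Name, p.Version, ...
	//   }
}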
using System; using System.Collections.ObjectModel; namespace Simple.Owin { /// <summary> /// Represents the HTTP Status Code returned by a Handler. /// </summary> /// <remarks>Has an implicit cast from <see cref="int"/>.</remarks> internal class Status : IEquatable<Status> { private readonly int _httpStatusCode; private readonly string _httpStatusDescription; private readonly string _locationHeader; /// <summary> /// Initializes a new instance of the <see cref="Status"/> struct. /// </summary> /// <param name="httpStatusCode">The HTTP status code.</param> /// <param name="httpStatusDescription">The HTTP status description.</param> /// <param name="locationHeader">Redirection Url</param> public Status(int httpStatusCode, string httpStatusDescription = null, string locationHeader = null) { _httpStatusCode = httpStatusCode; _httpStatusDescription = httpStatusDescription; _locationHeader = locationHeader; } /// <summary> /// Gets the HTTP status code. /// </summary> public int Code { get { return _httpStatusCode; } } /// <summary> /// Gets the HTTP status description. /// </summary> public string Description { get { return _httpStatusDescription; } } public bool IsError { get { return _httpStatusCode >= 400 && _httpStatusCode <= 599; } } /// <summary> /// Gets a value indicating whether this Status represents success. /// </summary> /// <value> /// <c>true</c> if this Status represents success; otherwise, <c>false</c>. /// </value> public bool IsSuccess { get { return _httpStatusCode >= 200 && _httpStatusCode <= 299; } } public string LocationHeader { get { return _locationHeader; } } /// <summary> /// Indicates whether the current object is equal to another object of the same type. /// </summary> /// <param name="other">An object to compare with this object.</param> /// <returns> /// true if the current object is equal to the <paramref name="other"/> parameter; otherwise, false. /// </returns> public bool Equals(Status other) { if (other == null) { return false; } return _httpStatusCode == other._httpStatusCode; } /// <summary> /// Determines whether the specified <see cref="System.Object"/> is equal to this instance. /// </summary> /// <param name="obj">The <see cref="System.Object"/> to compare with this instance.</param> /// <returns> /// <c>true</c> if the specified <see cref="System.Object"/> is equal to this instance; otherwise, <c>false</c>. /// </returns> public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) { return false; } if (obj.GetType() != typeof(Status)) { return false; } return Equals((Status)obj); } /// <summary> /// Returns a hash code for this instance. /// </summary> /// <returns> /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table. /// </returns> public override int GetHashCode() { return _httpStatusCode; } public string ToHttp11StatusLine() { return string.Format("HTTP/1.1 {0} {1}\r\n", _httpStatusCode, _httpStatusDescription); } /// <summary> /// Returns an HTTP formatted representation of the <see cref="Status"/>. /// </summary> /// <returns> /// E.g. <c>200 OK</c> or <c>404 Not Found</c>. 
/// </returns> public override string ToString() { return string.Format("{0} {1}", Code, Description); } private static readonly StatusLookup Lookup = new StatusLookup(); public static class Is { static Is() { //200s Lookup.Add(OK); Lookup.Add(Created); Lookup.Add(Accepted); Lookup.Add(NonAuthoritativeInformation); Lookup.Add(NoContent); Lookup.Add(ResetContent); Lookup.Add(PartialContent); //300s Lookup.Add(NotModified); //400s Lookup.Add(BadRequest); Lookup.Add(Conflict); Lookup.Add(Forbidden); Lookup.Add(NotFound); Lookup.Add(Gone); Lookup.Add(UnsupportedMediaType); //500s Lookup.Add(InternalServerError); Lookup.Add(NotImplemented); } /// <summary> /// Indicates the request was accepted for processing, but the processing has not been completed. /// </summary> public static readonly Status Accepted = new Status(202, "Accepted"); /// <summary> /// Indicates that the request cannot be fulfilled due to bad syntax. /// </summary> public static readonly Status BadRequest = new Status(400, "Bad Request"); /// <summary> /// Indicates that a PUT or POST request conflicted with an existing resource. /// </summary> public static readonly Status Conflict = new Status(409, "Conflict"); /// <summary> /// Indicates that a request was processed successfully and a new resource was created. /// </summary> public static readonly Status Created = new Status(201, "Created"); /// <summary> /// Indicates that the request was a valid request, but the server is refusing to respond to it. /// </summary> public static readonly Status Forbidden = new Status(403, "Forbidden"); /// <summary> /// Indicates that the resource requested is no longer available and will not be available again. /// </summary> public static readonly Status Gone = new Status(410, "Gone"); /// <summary> /// Indicates that everything is horrible, and you should hide in a cupboard until it's all over. /// </summary> public static readonly Status InternalServerError = new Status(500, "Internal Server Error"); /// <summary> /// Nothing to see here. /// </summary> public static readonly Status NoContent = new Status(204, "No Content"); public static readonly Status NonAuthoritativeInformation = new Status(203, "Non-Authoritative Information"); /// <summary> /// Indicates that the requested resource could not be found. /// </summary> public static readonly Status NotFound = new Status(404, "Not Found"); public static readonly Status NotImplemented = new Status(501, "Not Implemented"); /// <summary> /// Not modified since the last request, based on the If-Modified-Since or If-None-Match headers. /// </summary> public static readonly Status NotModified = new Status(304, "Not Modified"); /// <summary> /// The basic "everything's OK" status. /// </summary> public static readonly Status OK = new Status(200, "OK"); public static readonly Status PartialContent = new Status(206, "Partial Content"); public static readonly Status ResetContent = new Status(205, "Reset Content"); /// <summary> /// Indicates that the request entity has a media type which the server or resource does not support. /// </summary> public static readonly Status UnsupportedMediaType = new Status(415, "Unsupported Media Type"); /// <summary> /// Indicates the request was accepted for processing, but the processing has not been completed. The /// location is the URL used to check its status. /// </summary> public static Status AcceptedRedirect(string location) { return new Status(202, "Accepted", location); } /// <summary> /// Indicates that a request was processed successfully and a new resource was created.
/// </summary> /// <param name="location">The redirect location.</param> /// <returns></returns> public static Status CreatedRedirect(string location) { return new Status(201, "Created", location); } /// <summary> /// A redirect to another resource, but telling the client to continue to use this URI for future requests. /// </summary> public static Status Found(string location) { return new Status(302, "Found", location); } /// <summary> /// A redirect to another resource, telling the client to use the new URI for all future requests. /// </summary> public static Status MovedPermanently(string location) { return new Status(301, "Moved Permanently", location); } /// <summary> /// A redirect to another resource, commonly used after a POST operation to prevent refreshes. /// </summary> public static Status SeeOther(string location) { return new Status(303, "See Other", location); } /// <summary> /// A Temporary redirect, e.g. for a login page. /// </summary> public static Status TemporaryRedirect(string location) { return new Status(307, "Temporary Redirect", location); } public static Status UseProxy(string location) { return new Status(305, "Use Proxy", location); } } /// <summary> /// Implements the operator ==. /// </summary> /// <param name="left">The left.</param> /// <param name="right">The right.</param> /// <returns> /// The result of the operator. /// </returns> public static bool operator ==(Status left, Status right) { return ReferenceEquals(left, null) ? ReferenceEquals(right, null) : left.Equals(right); } /// <summary> /// Implements the operator !=. /// </summary> /// <param name="left">The left.</param> /// <param name="right">The right.</param> /// <returns> /// The result of the operator. /// </returns> public static bool operator !=(Status left, Status right) { return !(left == right); } /// <summary> /// Performs an implicit conversion from <see cref="System.Int32"/> to <see cref="Status"/>. /// </summary> /// <param name="httpStatus">The HTTP status code.</param> /// <returns> /// The result of the conversion. /// </returns> public static implicit operator Status(int httpStatus) { return Lookup.Contains(httpStatus) ? Lookup[httpStatus] : new Status(httpStatus); } /// <summary> /// Performs an implicit conversion from <see cref="System.Int32" /> to <see cref="Status" />. /// </summary> /// <param name="source">The string source.</param> /// <returns>A <see cref="Status"/> object for the specified status.</returns> /// <example> /// Status status = 404 + "Not Found"; /// </example> /// <exception cref="System.InvalidCastException"></exception> public static implicit operator Status(string source) { try { return new Status(int.Parse(source.Substring(0, 3)), source.Substring(3) .Trim()); } catch (Exception) { throw new InvalidCastException( "Status can only be implicitly cast from an integer, or a string of the format 'nnnSss...s', e.g. '404Not Found'."); } } private class StatusLookup : KeyedCollection<int, Status> { protected override int GetKeyForItem(Status item) { return item.Code; } } } }
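// Editor's note: the following is a minimal usage sketch, not part of the original source.
// It exercises the implicit conversions and helper members documented on the Status type
// above; the type and member names (StatusUsageSketch, Demo) are illustrative only.
namespace Simple.Owin
{
    internal static class StatusUsageSketch
    {
        internal static void Demo()
        {
            // Touching the Is class first populates the shared lookup table,
            // so known codes convert back to the predefined instances.
            Status ok = Status.Is.OK;
            Status alsoOk = 200;                          // resolved through the lookup table
            bool sameInstance = ReferenceEquals(ok, alsoOk);

            // A string of the form "nnnSss...s" is split into code and description.
            Status notFound = "404Not Found";
            bool isError = notFound.IsError;              // true for 4xx/5xx codes
            string line = notFound.ToHttp11StatusLine();  // "HTTP/1.1 404 Not Found\r\n"

            // Redirect-style statuses carry a Location header value.
            Status redirect = Status.Is.SeeOther("/new-resource");
            string location = redirect.LocationHeader;
        }
    }
}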
#if UNITY_EDITOR using System; using System.Collections.Generic; using System.Reflection; using UnityEditor; using UnityEngine; using UMA.CharacterSystem; namespace UMA.Editors { //unfortunately this needs to be here if we are going to make it possible to have 'Backewards Compatible' DCA recipes (i.e. saved as 'Standard' but with a wardrobeSet) //if we removed that functionality this could all go into UMADynamicCharacterAvatarRecipeEditor public partial class RecipeEditor { /// <summary> /// Draws a popup containing the available Wardrobe recipes for a particular race for a particular wardrobe slot /// </summary> public class WardrobeSlotRecipePopup { private string _wsRace; private string _wsSlot; private string _wsRecipeName; Texture warningIcon; public string RecipeName { get { return _wsRecipeName; } } public WardrobeSlotRecipePopup(string race, string slot, string recipeName) { _wsRace = race; _wsSlot = slot; _wsRecipeName = recipeName; } public bool OnGUI() { if (warningIcon == null) { warningIcon = EditorGUIUtility.FindTexture("console.warnicon.sml"); } bool changed = false; var context = UMAContext.FindInstance(); if (context == null) { var _errorMessage = "Editing a recipe requires a loaded scene with a valid UMAContext."; Debug.LogWarning(_errorMessage); } var recipesForRaceSlot = context.dynamicCharacterSystem.GetRecipeNamesForRaceSlot(_wsRace, _wsSlot); List<string> thisPopupVals = new List<string>(); thisPopupVals.Add("None"); thisPopupVals.AddRange(recipesForRaceSlot); var selected = 0; var recipeIsLive = true; Rect valRBut = new Rect(); var warningStyle = new GUIStyle(EditorStyles.label); warningStyle.fixedHeight = warningIcon.height + 4f; warningStyle.contentOffset = new Vector2(0, -2f); if (_wsRecipeName != "") { recipeIsLive = context.dynamicCharacterSystem.CheckRecipeAvailability(_wsRecipeName); selected = thisPopupVals.IndexOf(_wsRecipeName); if (selected == -1) { selected = thisPopupVals.Count; string missingOrIncompatible = "missing"; if (context.dynamicCharacterSystem.GetBaseRecipe(_wsRecipeName, false) != null) missingOrIncompatible = "incompatible"; thisPopupVals.Add(_wsRecipeName + " (" + missingOrIncompatible + ")"); } } var newSelected = selected; if (!recipeIsLive) EditorGUI.indentLevel++; var label = _wsSlot == "WardrobeCollection" ? " " : _wsSlot; EditorGUI.BeginChangeCheck(); newSelected = EditorGUILayout.Popup(label, selected, thisPopupVals.ToArray()); if (!recipeIsLive) { EditorGUI.indentLevel--; valRBut = GUILayoutUtility.GetLastRect(); } if (EditorGUI.EndChangeCheck()) { if (newSelected != selected) { changed = true; _wsRecipeName = (thisPopupVals[newSelected].IndexOf("(missing)") == -1 && thisPopupVals[newSelected].IndexOf("(incompatible)") == -1) ? (thisPopupVals[newSelected] != "None" ? thisPopupVals[newSelected] : "") : _wsRecipeName.Replace("(missing)", "").Replace("(incompatible)", ""); } } if (!recipeIsLive) { var warningRect = new Rect((valRBut.xMin - 5f), valRBut.yMin, 20f, valRBut.height); var warningGUIContent = new GUIContent("", _wsRecipeName + " was not Live. 
You can make it live by adding it to the UMA/UMA Global Library."); warningGUIContent.image = warningIcon; GUI.Button(warningRect, warningGUIContent, warningStyle); //TODO we can probably use AssetIndexer.AddEvilAsset here so it gets added without having to go there //Id like this to be a button that opens the window, opens the recipe section and ideally highlights the asset that needs to be made live /*if(GUI.Button(warningRect, warningGUIContent, warningStyle)) { UMAAssetIndexWindow.Init(); }*/ } return changed; } } /// <summary> /// Draws an editor for a Wardrobe set which displays a list of popups listing all the possible recipes that could be set for any given wardrobe slot for the given race /// </summary> public class WardrobeSetEditor { private readonly UMAData.UMARecipe _recipe; private readonly List<WardrobeSettings> _wardrobeSet; private readonly RaceData _race; private readonly bool _allowWardrobeCollectionSlot = true; public List<WardrobeSettings> WardrobeSet { get { return _wardrobeSet; } } public WardrobeSetEditor(RaceData race, List<WardrobeSettings> wardrobeSet, UMAData.UMARecipe recipe, bool allowWardrobeCollectionSlot) { _recipe = recipe; _wardrobeSet = wardrobeSet; _race = race; _allowWardrobeCollectionSlot = allowWardrobeCollectionSlot; } public bool OnGUI() { bool changed = false; if (_race != null) if (_race.wardrobeSlots.Count > 0) { var context = UMAContext.FindInstance(); if (context == null) { var _errorMessage = "Editing a recipe requires a loaded scene with a valid UMAContext."; Debug.LogWarning(_errorMessage); } if (_wardrobeSet == null || context == null) return false; GUIHelper.BeginVerticalPadded(10, new Color(0.75f, 0.875f, 1f)); if (_allowWardrobeCollectionSlot) { var wcRecipesForRace = context.dynamicCharacterSystem.GetRecipesForRaceSlot(_race.raceName, "WardrobeCollection"); var wcGroupDict = new Dictionary<string, List<string>>(); //for 'Standard Assets' we need to do some kind of get Types thing I think because we then need to use reflection to get the wardrobeSlot field //how can we get what we want here when WardrobeCollections dont exist in Standard Assets (if 'StandardAssets' has been moved there) for (int i = 0; i < wcRecipesForRace.Count; i++) { Type wcType = wcRecipesForRace[i].GetType(); if (wcType.ToString().Replace(wcType.Namespace+".", "") == "UMAWardrobeCollection") { FieldInfo wcRecipeSlotField = wcType.GetField("wardrobeSlot", BindingFlags.Public | BindingFlags.Instance); var wcRecipeSlot = (string)wcRecipeSlotField.GetValue(wcRecipesForRace[i]); if (!wcGroupDict.ContainsKey(wcRecipeSlot)) { wcGroupDict.Add(wcRecipeSlot, new List<string>()); } wcGroupDict[wcRecipeSlot].Add(wcRecipesForRace[i].name); } } if (wcGroupDict.Count > 0) { EditorGUILayout.LabelField("WardrobeCollections"); EditorGUI.indentLevel++; foreach (KeyValuePair<string, List<string>> kp in wcGroupDict) { var thisPopupVals = new List<string>(); thisPopupVals.Add("None"); thisPopupVals.AddRange(kp.Value); var selected = 0; var prevRecipe = ""; //if one of the recipes in the wardrobe set is one of these then its selected for (int pvi = 0; pvi < thisPopupVals.Count; pvi++) { for (int wsi = 0; wsi < _wardrobeSet.Count; wsi++) { if (thisPopupVals[pvi] == _wardrobeSet[wsi].recipe) { prevRecipe = _wardrobeSet[wsi].recipe; selected = pvi; break; } } } EditorGUI.BeginChangeCheck(); var newSelected = EditorGUILayout.Popup(kp.Key, selected, thisPopupVals.ToArray()); if (EditorGUI.EndChangeCheck()) { for (int wsi = 0; wsi < _wardrobeSet.Count; wsi++) { if (_wardrobeSet[wsi].recipe == 
prevRecipe) { //we need to remove the wardrobeSettings that has prevRecipe as its value from _wardrobeSettings if (newSelected == 0) { _wardrobeSet.RemoveAt(wsi); } else { //we need to make wardrobeSettings that has prevRecipe have the new value _wardrobeSet[wsi].recipe = thisPopupVals[newSelected]; } } } changed = true; } } EditorGUI.indentLevel--; EditorGUILayout.Space(); EditorGUILayout.LabelField("WardrobeSlots"); EditorGUI.indentLevel++; } } foreach (string wsl in _race.wardrobeSlots) { if (wsl == "None") continue; if (wsl == "FullOutfit" && _allowWardrobeCollectionSlot == false) continue; WardrobeSlotRecipePopup thisPicker = null; bool assignedPicker = false; for (int wsi = 0; wsi < _wardrobeSet.Count; wsi++) { if (_wardrobeSet[wsi].slot == wsl) { thisPicker = new WardrobeSlotRecipePopup(_race.raceName, wsl, _wardrobeSet[wsi].recipe); assignedPicker = true; break; } } if (!assignedPicker)//means there was nothing in the wardrobe set for it { thisPicker = new WardrobeSlotRecipePopup(_race.raceName, wsl, ""); } if (thisPicker.OnGUI()) { changed = true; if (thisPicker.RecipeName != "None" && thisPicker.RecipeName != "") { bool contained = false; for (int i = 0; i < _wardrobeSet.Count; i++) { if (_wardrobeSet[i].slot == wsl) { _wardrobeSet[i].recipe = thisPicker.RecipeName; contained = true; break; } } if (!contained) _wardrobeSet.Add(new WardrobeSettings(wsl, thisPicker.RecipeName)); } else { for (int i = 0; i < _wardrobeSet.Count; i++) { if (_wardrobeSet[i].slot == wsl) { _wardrobeSet.RemoveAt(i); break; } } } } } if (_allowWardrobeCollectionSlot) { EditorGUI.indentLevel--; } if (WardrobeSet.Count > 0) { EditorGUILayout.Space(); if (GUILayout.Button(new GUIContent("UpdateSharedColors", "Automatically adds any shared colors defined in the selected recipes to this recipes SharedColors"))) { for (int i = 0; i < _wardrobeSet.Count; i++) { changed = AddSharedColorsFromRecipe(_wardrobeSet[i].recipe, _recipe) == true ? 
true : changed; } } } GUIHelper.EndVerticalPadded(10); } return changed; } /// <summary> /// Adds the shared colors from a given recipe name into the target recipe /// </summary> /// <param name="sourceRecipeName"></param> protected virtual bool AddSharedColorsFromRecipe(string sourceRecipeName, UMAData.UMARecipe targetRecipe) { bool changed = false; var thisUmaDataRecipe = new UMAData.UMARecipe(); var context = UMAContext.FindInstance(); if (context == null) return false; var thisWardrobeRecipe = context.dynamicCharacterSystem.GetBaseRecipe(sourceRecipeName); if (thisWardrobeRecipe == null) return false; try { thisWardrobeRecipe.Load(thisUmaDataRecipe, context); } catch { return false; } if (thisUmaDataRecipe.sharedColors.Length > 0) { List<OverlayColorData> newSharedColors = new List<OverlayColorData>(); newSharedColors.AddRange(targetRecipe.sharedColors); for (int i = 0; i < thisUmaDataRecipe.sharedColors.Length; i++) { bool existed = false; for (int ii = 0; ii < newSharedColors.Count; ii++) if (newSharedColors[ii].name == thisUmaDataRecipe.sharedColors[i].name) { existed = true; break; } if (!existed) { newSharedColors.Add(thisUmaDataRecipe.sharedColors[i]); changed = true; } } if (changed) targetRecipe.sharedColors = newSharedColors.ToArray(); } return changed; } } /// <summary> /// Replaces the standard 'Slot' editor in a DynamicCharacterAvatar type of recipe with one that shows the assigned wardrobe recipes in its wardrobe set /// </summary> public class WardrobeSetMasterEditor : SlotMasterEditor { private List<WardrobeSettings> _wardrobeSet; //private bool _foldout = true; public WardrobeSetMasterEditor(UMAData.UMARecipe recipe, List<WardrobeSettings> wardrobeSet) : base(recipe) { _wardrobeSet = wardrobeSet; } public override bool OnGUI(string targetName, ref bool _dnaDirty, ref bool _textureDirty, ref bool _meshDirty) { bool changed = false; if (!OpenSlots.ContainsKey("wardrobeSet")) OpenSlots.Add("wardrobeSet", true); if (_sharedColorsEditor.OnGUI(_recipe)) { changed = true; } //if this is a backwards compatible DCS recipe (i.e. has SlotData AND a Wardrobe set) we need to show BOTH things //for this to really work youd need to be able to edit the WardrobeSet and have that modify the slotDataList //Hence the epic UpdateBackwardsCompatibleData method if (_recipe.slotDataList.Length > 0) { EditorGUILayout.HelpBox("This is a 'Backwards Compatible' DynamicCharacterAvatar recipe. 
The slots and overlays in the 'BackwardsCompatibleData' section will update as you change the items in the WardrobeSet.", MessageType.Info); } if (DrawWardrobeSetUI()) { changed = true; if (_recipe.slotDataList.Length > 0) { UpdateBackwardsCompatibleData(); } } if (_recipe.slotDataList.Length > 0) { if (!OpenSlots.ContainsKey("backwardsCompatibleData")) OpenSlots.Add("backwardsCompatibleData", false); GUILayout.BeginHorizontal(EditorStyles.toolbarButton); GUILayout.Space(10); bool bcdfoldoutOpen = OpenSlots["backwardsCompatibleData"]; bcdfoldoutOpen = EditorGUILayout.Foldout(OpenSlots["backwardsCompatibleData"], "Backwards Compatible Data"); OpenSlots["backwardsCompatibleData"] = bcdfoldoutOpen; GUILayout.EndHorizontal(); if (bcdfoldoutOpen) { EditorGUI.BeginDisabledGroup(true); GUIHelper.BeginVerticalPadded(10, new Color(0.75f, 0.875f, 1f)); for (int i = 0; i < _slotEditors.Count; i++) { var editor = _slotEditors[i]; if (editor == null) { GUILayout.Label("Empty Slot"); continue; } changed |= editor.OnGUI(ref _dnaDirty, ref _textureDirty, ref _meshDirty); if (editor.Delete) { _dnaDirty = true; _textureDirty = true; _meshDirty = true; _slotEditors.RemoveAt(i); _recipe.SetSlot(editor.idx, null); i--; changed = true; } } GUIHelper.EndVerticalPadded(10); EditorGUI.EndDisabledGroup(); } } return changed; } private bool DrawWardrobeSetUI() { bool changed = false; if (_recipe.raceData != null) { if (_recipe.raceData.wardrobeSlots.Count > 0) { GUILayout.BeginHorizontal(EditorStyles.toolbarButton); GUILayout.Space(10); bool wsfoldoutOpen = OpenSlots["wardrobeSet"]; wsfoldoutOpen = EditorGUILayout.Foldout(OpenSlots["wardrobeSet"], "Wardrobe Set"); OpenSlots["wardrobeSet"] = wsfoldoutOpen; GUILayout.EndHorizontal(); if (wsfoldoutOpen) { if (_wardrobeSet == null) return false; //if this is a 'backwards compatible' recipe dont show the 'wardrobeCollections' bit bool showWardrobeCollections = _recipe.slotDataList.Length > 0 ? 
false : true; var thisWardrobeSetEditor = new WardrobeSetEditor(_recipe.raceData, _wardrobeSet, _recipe, showWardrobeCollections); if (thisWardrobeSetEditor.OnGUI()) { _wardrobeSet = thisWardrobeSetEditor.WardrobeSet; changed = true; } } } } return changed; } private void UpdateBackwardsCompatibleData() { var context = UMAContext.FindInstance(); if (context == null) { var _errorMessage = "Editing a recipe requires a loaded scene with a valid UMAContext."; Debug.LogWarning(_errorMessage); } //reset the recipe to the raceBase recipe var thisBaseRecipe = _recipe.raceData.baseRaceRecipe; thisBaseRecipe.Load(_recipe, context); if (_wardrobeSet.Count > 0) { var thisDCS = context.dynamicCharacterSystem; if (thisDCS == null) { var _errorMessage = "Editing a recipe requires a loaded scene with a valid UMAContext."; Debug.LogWarning(_errorMessage); } List<UMARecipeBase> Recipes = new List<UMARecipeBase>(); List<string> SuppressSlotsStrings = new List<string>(); List<string> HiddenSlots = new List<string>(); var wardrobeRecipesToRender = new Dictionary<string, UMARecipeBase>(); var activeRace = _recipe.raceData.raceName; //Dont add the WardrobeCollection to the recipes to render- they doesn't render directly and will have already set their actual wardrobeRecipe slots SetSlot foreach (WardrobeSettings set in _wardrobeSet) { var thisRecipe = thisDCS.GetBaseRecipe(set.recipe); if (thisRecipe == null) { continue; } if (thisRecipe.GetType().ToString() == "UMAWardrobeCollection") { var TargetType = thisRecipe.GetType(); FieldInfo WardrobeCollectionField = TargetType.GetField("wardrobeCollection", BindingFlags.Public | BindingFlags.Instance); WardrobeCollectionList wardrobeCollection = (WardrobeCollectionList)WardrobeCollectionField.GetValue(thisRecipe); if (wardrobeCollection[activeRace] != null) { foreach (WardrobeSettings ws in wardrobeCollection[activeRace]) { var wsRecipe = thisDCS.GetBaseRecipe(ws.recipe); if (wsRecipe != null) { if (wardrobeRecipesToRender.ContainsKey(ws.slot)) wardrobeRecipesToRender[ws.slot] = wsRecipe; else wardrobeRecipesToRender.Add(ws.slot, wsRecipe); } } } } else { //_recipe.Merge(thisRecipe.GetCachedRecipe(context), true); if (wardrobeRecipesToRender.ContainsKey(set.slot)) wardrobeRecipesToRender[set.slot] = thisRecipe; else wardrobeRecipesToRender.Add(set.slot, thisRecipe); } } if (wardrobeRecipesToRender.Count > 0) { foreach (UMARecipeBase utr in wardrobeRecipesToRender.Values) { var TargetType = utr.GetType(); FieldInfo CompatibleRacesField = TargetType.GetField("compatibleRaces", BindingFlags.Public | BindingFlags.Instance); FieldInfo WardrobeSlotField = TargetType.GetField("wardrobeSlot", BindingFlags.Public | BindingFlags.Instance); FieldInfo SuppressWardrobeSlotField = TargetType.GetField("suppressWardrobeSlots", BindingFlags.Public | BindingFlags.Instance); //field values List<string> compatibleRaces = (List<string>)CompatibleRacesField.GetValue(utr); string wardrobeSlot = (string)WardrobeSlotField.GetValue(utr); List<string> suppressWardrobeSlot = (List<string>)SuppressWardrobeSlotField.GetValue(utr); if (suppressWardrobeSlot != null) { if (activeRace == "" || ((compatibleRaces.Count == 0 || compatibleRaces.Contains(activeRace)) || (_recipe.raceData.IsCrossCompatibleWith(compatibleRaces) && _recipe.raceData.wardrobeSlots.Contains(wardrobeSlot)))) { if (!SuppressSlotsStrings.Contains(wardrobeSlot)) { foreach (string suppressedSlot in suppressWardrobeSlot) { SuppressSlotsStrings.Add(suppressedSlot); } } } } } } foreach (string ws in _recipe.raceData.wardrobeSlots) { if 
(SuppressSlotsStrings.Contains(ws)) { continue; } if (wardrobeRecipesToRender.ContainsKey(ws)) { UMARecipeBase utr = wardrobeRecipesToRender[ws]; var TargetType = wardrobeRecipesToRender[ws].GetType(); FieldInfo CompatibleRacesField = TargetType.GetField("compatibleRaces", BindingFlags.Public | BindingFlags.Instance); FieldInfo WardrobeSlotField = TargetType.GetField("wardrobeSlot", BindingFlags.Public | BindingFlags.Instance); FieldInfo HidesField = TargetType.GetField("Hides", BindingFlags.Public | BindingFlags.Instance); //field values List<string> compatibleRaces = (List<string>)CompatibleRacesField.GetValue(utr); string wardrobeSlot = (string)WardrobeSlotField.GetValue(utr); List<string> hides = (List<string>)HidesField.GetValue(utr); if (activeRace == "" || ((compatibleRaces.Count == 0 || compatibleRaces.Contains(activeRace)) || (_recipe.raceData.IsCrossCompatibleWith(compatibleRaces) && _recipe.raceData.wardrobeSlots.Contains(wardrobeSlot)))) { Recipes.Add(utr); if (hides.Count > 0) { foreach (string s in hides) { HiddenSlots.Add(s); } } } } } //merge them in foreach (var additionalRecipe in Recipes) { _recipe.Merge(additionalRecipe.GetCachedRecipe(context), true); } if (HiddenSlots.Count > 0) { List<SlotData> NewSlots = new List<SlotData>(); foreach (SlotData sd in _recipe.slotDataList) { if (sd == null) continue; if (!HiddenSlots.Contains(sd.asset.slotName)) { NewSlots.Add(sd); } } _recipe.slotDataList = NewSlots.ToArray(); } ResetSlotEditors(); } } private void ResetSlotEditors() { if (_recipe.slotDataList == null) { _recipe.slotDataList = new SlotData[0]; } for (int i = 0; i < _recipe.slotDataList.Length; i++) { var slot = _recipe.slotDataList[i]; if (slot == null) continue; _slotEditors.Add(new SlotEditor(_recipe, slot, i)); } if (_slotEditors.Count > 1) { // Don't juggle the order - this way, they're in the order they're in the file, or dropped in. List<SlotEditor> sortedSlots = new List<SlotEditor>(_slotEditors); sortedSlots.Sort(SlotEditor.comparer); var overlays1 = sortedSlots[0].GetOverlays(); var overlays2 = sortedSlots[1].GetOverlays(); for (int i = 0; i < sortedSlots.Count - 2; i++) { if (overlays1 == overlays2) sortedSlots[i].sharedOverlays = true; overlays1 = overlays2; overlays2 = sortedSlots[i + 2].GetOverlays(); } } } } } } #endif
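// Illustrative sketch, not part of the original editor file above. UpdateBackwardsCompatibleData
// reads fields such as "Hides" and "wardrobeSlot" from recipe assets via reflection and then
// rebuilds slotDataList without the hidden slots. The same read-field-then-filter pattern,
// reduced to a self-contained console example (RecipeStub is a hypothetical stand-in for a
// UMARecipeBase-derived asset), could look like this:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;

static class HiddenSlotFilterSketch
{
    private class RecipeStub // hypothetical stand-in for a wardrobe recipe asset
    {
        public string wardrobeSlot;
        public List<string> Hides = new List<string>();
    }

    private static void Main()
    {
        var recipes = new List<RecipeStub>
        {
            new RecipeStub { wardrobeSlot = "Chest", Hides = { "Underwear" } },
            new RecipeStub { wardrobeSlot = "Legs" }
        };

        // Collect every slot hidden by any recipe, reading the field by name as the editor does.
        var hiddenSlots = new HashSet<string>();
        foreach (var recipe in recipes)
        {
            FieldInfo hidesField = recipe.GetType().GetField("Hides", BindingFlags.Public | BindingFlags.Instance);
            foreach (string slot in (List<string>)hidesField.GetValue(recipe))
            {
                hiddenSlots.Add(slot);
            }
        }

        // Keep only slots that are not hidden, mirroring the slotDataList rebuild above.
        string[] slotNames = { "Chest", "Legs", "Underwear" };
        string[] visibleSlots = slotNames.Where(s => !hiddenSlots.Contains(s)).ToArray();
        Console.WriteLine(string.Join(", ", visibleSlots)); // prints: Chest, Legs
    }
}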
#region License // // CompositeInlineList.cs July 2006 // // Copyright (C) 2006, Niall Gallagher <[email protected]> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or // implied. See the License for the specific language governing // permissions and limitations under the License. // #endregion #region Using directives using SimpleFramework.Xml.Strategy; using SimpleFramework.Xml.Stream; using System.Collections.Generic; using System; #endregion namespace SimpleFramework.Xml.Core { /// <summary> /// The <c>CompositeInlineList</c> object is used to convert an /// group of elements in to a collection of element entries. This is /// used when a containing element for a list is not required. It /// extracts the elements by matching elements to name of the type /// that the annotated field or method requires. This enables these /// element entries to exist as siblings to other objects within the /// object. One restriction is that the <c>Root</c> annotation /// for each of the types within the list must be the same. /// </code> /// &lt;entry attribute="one"&gt; /// &lt;text&gt;example text value&lt;/text&gt; /// &lt;/entry&gt; /// &lt;entry attribute="two"&gt; /// &lt;text&gt;some other example&lt;/text&gt; /// &lt;/entry&gt; /// &lt;entry attribute="three"&gt; /// &lt;text&gt;yet another example&lt;/text&gt; /// &lt;/entry&gt; /// </code> /// For the above XML element list the element <c>entry</c> is /// contained within the list. Each entry element is thus deserialized /// as a root element and then inserted into the list. This enables /// lists to be composed from XML documents. For serialization the /// reverse is done, each element taken from the collection is written /// as a root element to the owning element to create the list. /// Entry objects do not need to be of the same type. /// </summary> /// <seealso> /// SimpleFramework.Xml.Core.Traverser /// </seealso> /// <seealso> /// SimpleFramework.Xml.ElementList /// </seealso> class CompositeInlineList : Repeater { /// <summary> /// This factory is used to create a suitable collection list. /// </summary> private readonly CollectionFactory factory; /// <summary> /// This performs the traversal used for object serialization. /// </summary> private readonly Traverser root; /// <summary> /// This represents the name of the entry elements to write. /// </summary> private readonly String name; /// <summary> /// This is the entry type for elements within the list. /// </summary> private readonly Type entry; /// <summary> /// Constructor for the <c>CompositeInlineList</c> object. /// This is given the list type and entry type to be used. The list /// type is the <c>Collection</c> implementation that is used /// to collect the deserialized entry objects from the XML source. 
/// </summary> /// <param name="context"> /// this is the context object used for serialization /// </param> /// <param name="type"> /// this is the collection type for the list used /// </param> /// <param name="entry"> /// the entry type to be stored within the list /// </param> /// <param name="name"> /// this is the name of the entries used for this list /// </param> public CompositeInlineList(Context context, Type type, Type entry, String name) { this.factory = new CollectionFactory(context, type); this.root = new Traverser(context); this.entry = entry; this.name = name; } /// <summary> /// This <c>read</c> method wll read the XML element list from /// the provided node and deserialize its children as entry types. /// This will each entry type is deserialized as a root type, that /// is, its <c>Root</c> annotation must be present and the /// name of the entry element must match that root element name. /// </summary> /// <param name="node"> /// this is the XML element that is to be deserialized /// </param> /// <returns> /// this returns the item to attach to the object contact /// </returns> public Object Read(InputNode node) { Object value = factory.Instance; Collection list = (Collection) value; if(list != null) { return Read(node, list); } return null; } /// <summary> /// This <c>read</c> method will read the XML element list from /// the provided node and deserialize its children as entry types. /// This will each entry type is deserialized as a root type, that /// is, its <c>Root</c> annotation must be present and the /// name of the entry element must match that root element name. /// </summary> /// <param name="node"> /// this is the XML element that is to be deserialized /// </param> /// <returns> /// this returns the item to attach to the object contact /// </returns> public Object Read(InputNode node, Object value) { Collection list = (Collection) value; if(list != null) { return Read(node, list); } return Read(node); } /// <summary> /// This <c>read</c> method wll read the XML element list from /// the provided node and deserialize its children as entry types. /// This will each entry type is deserialized as a root type, that /// is, its <c>Root</c> annotation must be present and the /// name of the entry element must match that root element name. /// </summary> /// <param name="node"> /// this is the XML element that is to be deserialized /// </param> /// <param name="list"> /// this is the collection that is to be populated /// </param> /// <returns> /// this returns the item to attach to the object contact /// </returns> public Object Read(InputNode node, Collection list) { InputNode from = node.getParent(); String name = node.GetName(); while(node != null) { Class type = entry.Type; Object item = Read(node, type); if(item != null) { list.add(item); } node = from.getNext(name); } return list; } /// <summary> /// This <c>read</c> method will read the XML element from the /// provided node. This checks to ensure that the deserialized type /// is the same as the entry type provided. If the types are not /// the same then an exception is thrown. This is done to ensure /// each node in the collection contain the same root annotation. 
/// </summary> /// <param name="node"> /// this is the XML element that is to be deserialized /// </param> /// <param name="expect"> /// this is the type expected of the deserialized type /// </param> /// <returns> /// this returns the item to attach to the object contact /// </returns> public Object Read(InputNode node, Class expect) { Object item = root.Read(node, expect); Class result = item.getClass(); Class type = entry.Type; if(!type.isAssignableFrom(result)) { throw new PersistenceException("Entry %s does not match %s", result, entry); } return item; } /// <summary> /// This <c>read</c> method wll read the XML element list from /// the provided node and deserialize its children as entry types. /// This will each entry type is deserialized as a root type, that /// is, its <c>Root</c> annotation must be present and the /// name of the entry element must match that root element name. /// </summary> /// <param name="node"> /// this is the XML element that is to be deserialized /// </param> /// <returns> /// this returns the item to attach to the object contact /// </returns> public bool Validate(InputNode node) { InputNode from = node.getParent(); Class type = entry.Type; String name = node.GetName(); while(node != null) { bool valid = root.Validate(node, type); if(valid == false) { return false; } node = from.getNext(name); } return true; } /// <summary> /// This <c>write</c> method will write the specified object /// to the given XML element as as list entries. Each entry within /// the given collection must be assignable from the annotated /// type specified within the <c>ElementList</c> annotation. /// Each entry is serialized as a root element, that is, its /// <c>Root</c> annotation is used to extract the name. /// </summary> /// <param name="source"> /// this is the source collection to be serialized /// </param> /// <param name="node"> /// this is the XML element container to be populated /// </param> public void Write(OutputNode node, Object source) { Collection list = (Collection) source; OutputNode parent = node.getParent(); if(!node.isCommitted()) { node.remove(); } Write(parent, list); } /// <summary> /// This <c>write</c> method will write the specified object /// to the given XML element as as list entries. Each entry within /// the given collection must be assignable from the annotated /// type specified within the <c>ElementList</c> annotation. /// Each entry is serialized as a root element, that is, its /// <c>Root</c> annotation is used to extract the name. /// </summary> /// <param name="list"> /// this is the source collection to be serialized /// </param> /// <param name="node"> /// this is the XML element container to be populated /// </param> public void Write(OutputNode node, Collection list) { for(Object item : list) { if(item != null) { Class type = entry.Type; Class result = item.getClass(); if(!type.isAssignableFrom(result)) { throw new PersistenceException("Entry %s does not match %s", result, type); } root.Write(node, item, type, name); } } } } }
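// Illustrative sketch, not part of the SimpleFramework.Xml port above. CompositeInlineList
// reads consecutive sibling elements that all carry the entry's root name ("entry" in the
// class comment) and appends each one to the collection, with no wrapping list element.
// The same idea expressed with plain System.Xml.Linq, outside the framework, looks roughly
// like this:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Xml.Linq;

static class InlineListSketch
{
    private static void Main()
    {
        var document = XDocument.Parse(
            "<root>" +
            "<entry attribute=\"one\"><text>example text value</text></entry>" +
            "<entry attribute=\"two\"><text>some other example</text></entry>" +
            "<other>unrelated sibling content</other>" +
            "</root>");

        // Matching is done purely by element name, so the entries can sit next to other
        // siblings; only the <entry> elements are collected into the list.
        List<string> entries = document.Root
            .Elements("entry")
            .Select(e => (string)e.Element("text"))
            .ToList();

        Console.WriteLine(string.Join(" | ", entries));
        // prints: example text value | some other example
    }
}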
//--------------------------------------------------------------------------- // // <copyright file="Visual3DCollection.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> // // History: // 6/9/2005 : [....] - Created // //--------------------------------------------------------------------------- #pragma warning disable 1634, 1691 // suppressing PreSharp warnings using MS.Utility; using MS.Internal; using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics; using System.Globalization; using System.Windows; using MS.Internal.PresentationCore; using SR=MS.Internal.PresentationCore.SR; using SRID=MS.Internal.PresentationCore.SRID; namespace System.Windows.Media.Media3D { /// <summary> /// A collection of Visual3D objects. /// </summary> public sealed class Visual3DCollection : IList, IList<Visual3D> { //------------------------------------------------------ // // Constructors // //------------------------------------------------------ #region Constructors internal Visual3DCollection(IVisual3DContainer owner) { _owner = owner; } #endregion Constructors //------------------------------------------------------ // // Public Methods // //------------------------------------------------------ #region Public Methods /// <summary> /// Adds the value to the collection. /// </summary> public void Add(Visual3D value) { VerifyAPIForAdd(value); int addedPosition = InternalCount; _collection.Add(value); InvalidateEnumerators(); // NOTE: The collection must be updated before notifying the Visual. ConnectChild(addedPosition, value); Debug_ICC(); } private void ConnectChild(int index, Visual3D value) { value.ParentIndex = index; _owner.AddChild(value); } /// <summary> /// Inserts the value into the list at the specified position /// </summary> public void Insert(int index, Visual3D value) { VerifyAPIForAdd(value); InternalInsert(index, value); } /// <summary> /// Removes the value from the collection. /// </summary> public bool Remove(Visual3D value) { VerifyAPIReadWrite(value); if (!_collection.Contains(value)) { return false; } InternalRemoveAt(value.ParentIndex); return true; } /// <summary> /// Removes the value at the specified index. /// </summary> public void RemoveAt(int index) { if (index < 0 || index >= InternalCount) { throw new ArgumentOutOfRangeException("index"); } VerifyAPIReadWrite(_collection[index]); InternalRemoveAt(index); } /// <summary> /// Removes all IElements from the collection. /// </summary> public void Clear() { VerifyAPIReadWrite(); // Rather than clear, we swap out the FrugalStructList because // we need to keep the old values around to notify the parent // they were removed. FrugalStructList<Visual3D> oldCollection = _collection; _collection = new FrugalStructList<Visual3D>(); InvalidateEnumerators(); // NOTE: The collection must be updated before notifying the Visual. 
for (int i = oldCollection.Count - 1; i >= 0; i--) { _owner.RemoveChild(oldCollection[i]); } Debug_ICC(); } /// <summary> /// Copies the IElements of the collection into "array" starting at "index" /// </summary> public void CopyTo(Visual3D[] array, int index) { VerifyAPIReadOnly(); if (array == null) { throw new ArgumentNullException("array"); } // The extra "index >= array.Length" check in because even if _collection.Count // is 0 the index is not allowed to be equal or greater than the length // (from the MSDN ICollection docs) if (index < 0 || index >= array.Length || (index + _collection.Count) > array.Length) { throw new ArgumentOutOfRangeException("index"); } _collection.CopyTo(array, index); } void ICollection.CopyTo(Array array, int index) { VerifyAPIReadOnly(); if (array == null) { throw new ArgumentNullException("array"); } // The extra "index >= array.Length" check in because even if _collection.Count // is 0 the index is not allowed to be equal or greater than the length // (from the MSDN ICollection docs) if (index < 0 || index >= array.Length || (index + _collection.Count) > array.Length) { throw new ArgumentOutOfRangeException("index"); } if (array.Rank != 1) { throw new ArgumentException(SR.Get(SRID.Collection_BadRank)); } // Elsewhere in the collection we throw an AE when the type is // bad so we do it here as well to be consistent try { int count = _collection.Count; for (int i = 0; i < count; i++) { array.SetValue(_collection[i], index + i); } } catch (InvalidCastException e) { throw new ArgumentException(SR.Get(SRID.Collection_BadDestArray, "Visual3DCollection"), e); } } /// <summary> /// Determines if the list contains "value" /// </summary> public bool Contains(Visual3D value) { VerifyAPIReadOnly(value); return (value != null && (value.InternalVisualParent == _owner)); } /// <summary> /// Returns the index of value in the list /// </summary> public int IndexOf(Visual3D value) { VerifyAPIReadOnly(value); if (value == null || (value.InternalVisualParent != _owner)) { return -1; } #pragma warning disable 56506 // Suppress presharp warning: Parameter 'value' to this public method must be validated: A null-dereference can occur here. return value.ParentIndex; #pragma warning restore 56506 } /// <summary> /// Returns an Enumerator for the collection. /// </summary> public Enumerator GetEnumerator() { VerifyAPIReadOnly(); return new Enumerator(this); } IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); } IEnumerator<Visual3D> IEnumerable<Visual3D>.GetEnumerator() { return GetEnumerator(); } #endregion Public Methods //------------------------------------------------------ // // Public Properties // //------------------------------------------------------ #region Public Properties /// <summary> /// Returns the IElement at the given index in the collection. /// </summary> public Visual3D this[int index] { get { VerifyAPIReadOnly(); return InternalGetItem(index); } set { if (index < 0 || index >= InternalCount) { throw new ArgumentOutOfRangeException("index"); } VerifyAPIForAdd(value); InternalRemoveAt(index); InternalInsert(index, value); } } /// <summary> /// The number of IElements contained in the collection. 
/// </summary> public int Count { get { VerifyAPIReadOnly(); return InternalCount; } } bool ICollection.IsSynchronized { get { VerifyAPIReadOnly(); // True because we force single thread access via VerifyAccess() return true; } } object ICollection.SyncRoot { get { VerifyAPIReadOnly(); return _owner; } } bool ICollection<Visual3D>.IsReadOnly { get { VerifyAPIReadOnly(); return false; } } #region IList Members /// <summary> /// Adds an element to the Visual3DCollection /// </summary> int IList.Add(object value) { Add(Cast(value)); return InternalCount - 1; } /// <summary> /// Determines whether an element is in the Visual3DCollection. /// </summary> bool IList.Contains(object value) { return Contains(value as Visual3D); } /// <summary> /// Returns the index of the element in the Visual3DCollection /// </summary> int IList.IndexOf(object value) { return IndexOf(value as Visual3D); } /// <summary> /// Inserts an element into the Visual3DCollection /// </summary> void IList.Insert(int index, object value) { Insert(index, Cast(value)); } /// <summary> /// </summary> bool IList.IsFixedSize { get { return false; } } /// <summary> /// </summary> bool IList.IsReadOnly { get { return false; } } /// <summary> /// Removes an element from the Visual3DCollection /// </summary> void IList.Remove(object value) { Remove(value as Visual3D); } /// <summary> /// For more details, see <see cref="System.Windows.Media.VisualCollection" /> /// </summary> object IList.this[int index] { get { return this[index]; } set { this[index] = Cast(value); } } #endregion #endregion Public Properties //------------------------------------------------------ // // Public Events // //------------------------------------------------------ //------------------------------------------------------ // // Internal Methods // //------------------------------------------------------ #region Internal Methods internal Visual3D InternalGetItem(int index) { return _collection[index]; } #endregion Internal Methods //------------------------------------------------------ // // Internal Properties // //------------------------------------------------------ #region Internal Properties internal int InternalCount { get { return _collection.Count; } } #endregion Internal Properties //------------------------------------------------------ // // Private Methods // //------------------------------------------------------ #region Private Methods private void VerifyAPIReadOnly() { Debug_ICC(); _owner.VerifyAPIReadOnly(); } private void VerifyAPIReadOnly(Visual3D other) { Debug_ICC(); _owner.VerifyAPIReadOnly(other); } private void VerifyAPIReadWrite() { Debug_ICC(); _owner.VerifyAPIReadWrite(); } private void VerifyAPIReadWrite(Visual3D other) { Debug_ICC(); _owner.VerifyAPIReadWrite(other); } private Visual3D Cast(object value) { if( value == null ) { throw new System.ArgumentNullException("value"); } if (!(value is Visual3D)) { throw new System.ArgumentException(SR.Get(SRID.Collection_BadType, this.GetType().Name, value.GetType().Name, "Visual3D")); } return (Visual3D) value; } private void VerifyAPIForAdd(Visual3D value) { if (value == null) { throw new System.ArgumentException(SR.Get(SRID.Collection_NoNull)); } VerifyAPIReadWrite(value); if (value.InternalVisualParent != null) { throw new System.ArgumentException(SR.Get(SRID.VisualCollection_VisualHasParent)); } } private void InternalInsert(int index, Visual3D value) { _collection.Insert(index, value); // Update ParentIndex value on each Visual3D. Run through them // and increment. 
Note that this means that Inserting/Removal from a // Visual3DCollection can be O(n^2) if done in the wrong order. for (int i = index + 1, count = InternalCount; i < count; i++) { Debug.Assert(InternalGetItem(i).ParentIndex == i - 1, "ParentIndex has been corrupted."); InternalGetItem(i).ParentIndex = i; } InvalidateEnumerators(); // NOTE: The collection must be updated before notifying the Visual. ConnectChild(index, value); Debug_ICC(); } private void InternalRemoveAt(int index) { Visual3D value = _collection[index]; _collection.RemoveAt(index); // Update ParentIndices after the modified index are now invalid. Run through them // and decrement. Note that this means that Inserting/Removal from a // Visual3DCollection can be O(n^2) if done in the wrong order. for (int i = index; i < InternalCount; i++) { Debug.Assert(InternalGetItem(i).ParentIndex == i + 1, "ParentIndex has been corrupted."); InternalGetItem(i).ParentIndex = i; } InvalidateEnumerators(); // NOTE: The collection must be updated before notifying the Visual. _owner.RemoveChild(value); Debug_ICC(); } // Each member which modifies the collection should call this method to // invalidate any enumerators which might have been handed out. private void InvalidateEnumerators() { _version++; } #endregion Private Methods //------------------------------------------------------ // // DEBUG // //------------------------------------------------------ #region DEBUG [Conditional("DEBUG")] private void Debug_ICC() { Debug.Assert(_owner != null, "How did an Visual3DCollection get constructed without an owner?"); Dictionary<Visual3D, string> duplicates = new Dictionary<Visual3D, string>(); for (int i = 0; i < _collection.Count; i++) { Visual3D visual = _collection[i]; Debug.Assert(!duplicates.ContainsKey(visual), "How did the visual get re-inserted?"); duplicates.Add(visual, String.Empty); Debug.Assert(visual.InternalVisualParent == _owner, "Why isn't our child's parent pointer the same as the collection owner?"); Debug.Assert(visual.ParentIndex == i, String.Format( CultureInfo.InvariantCulture, "Child's ParentIndex does not match the child's actual position in the collection. Expected='{0}' Actual='{1}'", i, visual.ParentIndex)); // If the Visual3D is being added to the collection via a resource reference // its inheritance context will be the owner of the ResourceDictionary in which // it was declared. (For more info, see Windows OS Bugs #1614016) // // Debug.Assert(visual.InheritanceContext == _inheritanceContext, // "How did a Visual3D get inserted without updating it's InheritanceContext?"); } } #endregion DEBUG //------------------------------------------------------ // // Private Fields // //------------------------------------------------------ #region Private Fields private IVisual3DContainer _owner = null; private FrugalStructList<Visual3D> _collection = new FrugalStructList<Visual3D>(); private int _version = 0; #endregion Private Fields //------------------------------------------------------ // // Enumerator // //------------------------------------------------------ #region Enumerator /// <summary> /// VisualCollection Enumerator. /// </summary> public struct Enumerator : IEnumerator<Visual3D>, IEnumerator { #region Constructors internal Enumerator(Visual3DCollection list) { Debug.Assert(list != null, "list may not be null."); _list = list; _index = -1; _version = _list._version; } #endregion Constructors #region Public Methods /// <summary> /// Advances the enumerator to the next IElement of the collection. 
/// </summary> public bool MoveNext() { if (_list._version != _version) { throw new InvalidOperationException(SR.Get(SRID.Enumerator_CollectionChanged)); } int count = _list.Count; if (_index < count) { _index++; } return _index < count; } /// <summary> /// Resets the enumerator to its initial position. /// </summary> public void Reset() { if (_list._version != _version) { throw new InvalidOperationException(SR.Get(SRID.Enumerator_CollectionChanged)); } _index = -1; } void IDisposable.Dispose() { // Do nothing - Required by the IEnumeable contract. } #endregion Public Methods #region Public Properties object IEnumerator.Current { get { return this.Current; } } /// <summary> /// Returns the current IElement. /// </summary> public Visual3D Current { #pragma warning disable 1634, 1691 #pragma warning disable 6503 get { if ((_index < 0) || (_index >= _list.Count)) { throw new InvalidOperationException(SR.Get(SRID.Enumerator_VerifyContext)); } return _list[_index]; } #pragma warning restore 6503 #pragma warning restore 1634, 1691 } #endregion Public Methods #region Private Fields private Visual3DCollection _list; private int _index; private int _version; #endregion Private Fields } #endregion Enumerator } }
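// Illustrative sketch, not part of the WPF source above. Visual3DCollection protects its
// enumerators with a version counter: InvalidateEnumerators bumps _version on every mutation,
// and Enumerator.MoveNext/Reset compare their stored snapshot against it and throw when they
// differ. The same pattern in a tiny framework-free collection (all names here are invented
// for the example) looks like this:
using System;
using System.Collections.Generic;

class VersionedBag<T>
{
    private readonly List<T> _items = new List<T>();
    private int _version;

    public void Add(T item)
    {
        _items.Add(item);
        _version++; // every mutation invalidates outstanding enumerators
    }

    public IEnumerable<T> Enumerate()
    {
        int snapshot = _version; // like Enumerator storing _list._version at construction
        for (int i = 0; i < _items.Count; i++)
        {
            if (snapshot != _version)
            {
                throw new InvalidOperationException("Collection was modified during enumeration.");
            }
            yield return _items[i];
        }
    }
}

static class VersionedBagDemo
{
    private static void Main()
    {
        var bag = new VersionedBag<int>();
        bag.Add(1);
        bag.Add(2);

        foreach (int value in bag.Enumerate())
        {
            Console.WriteLine(value); // calling bag.Add inside this loop would throw
        }
    }
}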
using System; using System.Collections.Generic; using System.Collections.Specialized; using System.Globalization; using System.Linq; using EPiServer; using EPiServer.Configuration; using EPiServer.Core; using EPiServer.DataAbstraction; using EPiServer.Filters; using EPiServer.Logging; using EPiServer.ServiceLocation; using EPiServer.Web; namespace EPiServerAlloySite.Business.ContentProviders { /// <summary> /// Used to clone a part of the page tree /// </summary> /// <remarks>The current implementation only supports cloning of <see cref="PageData"/> content</remarks> /// <code> /// // Example of programmatically registering a cloned content provider /// /// var rootPageOfContentToClone = new PageReference(10); /// /// var pageWhereClonedContentShouldAppear = new PageReference(20); /// /// var provider = new ClonedContentProvider(rootPageOfContentToClone, pageWhereClonedContentShouldAppear); /// /// var providerManager = ServiceLocator.Current.GetInstance<IContentProviderManager>(); /// /// providerManager.ProviderMap.AddProvider(provider); /// </code> public class ClonedContentProvider : ContentProvider, IPageCriteriaQueryService { private static readonly ILogger Logger = LogManager.GetLogger(); private readonly NameValueCollection _parameters = new NameValueCollection(1); public ClonedContentProvider(PageReference cloneRoot, PageReference entryRoot) : this(cloneRoot, entryRoot, null) { } public ClonedContentProvider(PageReference cloneRoot, PageReference entryRoot, CategoryList categoryFilter) { if (cloneRoot.CompareToIgnoreWorkID(entryRoot)) { throw new NotSupportedException("Entry root and clone root cannot be set to the same content reference"); } if (ServiceLocator.Current.GetInstance<IContentLoader>().GetChildren<IContent>(entryRoot).Any()) { throw new NotSupportedException("Unable to create ClonedContentProvider, the EntryRoot property must point to leaf content (without children)"); } CloneRoot = cloneRoot; EntryRoot = entryRoot; Category = categoryFilter; // Set the entry point parameter Parameters.Add(ContentProviderElement.EntryPointString, EntryRoot.ID.ToString(CultureInfo.InvariantCulture)); } /// <summary> /// Clones a page to make it appear to come from where the content provider is attached /// </summary> private PageData ClonePage(PageData originalPage) { if (originalPage == null) { throw new ArgumentNullException("originalPage", "No page to clone specified"); } Logger.Debug("Cloning page {0}...", originalPage.PageLink); var clone = originalPage.CreateWritableClone(); // If original page was under the clone root, we make it appear to be under the entry root instead if (originalPage.ParentLink.CompareToIgnoreWorkID(CloneRoot)) { clone.ParentLink = EntryRoot; } // All pages but the entry root should appear to come from this content provider if (!clone.PageLink.CompareToIgnoreWorkID(EntryRoot)) { clone.ContentLink.ProviderName = ProviderKey; } // Unless the parent is the entry root, it should appear to come from this content provider if (!clone.ParentLink.CompareToIgnoreWorkID(EntryRoot)) { var parentLinkClone = clone.ParentLink.CreateWritableClone(); parentLinkClone.ProviderName = ProviderKey; clone.ParentLink = parentLinkClone; } // This is integral to map the cloned page to this content provider clone.LinkURL = ConstructContentUri(originalPage.PageTypeID, clone.ContentLink, clone.ContentGuid).ToString(); return clone; } /// <summary> /// Filters out content references to content that does not match current category filters, if any /// </summary> /// <param 
name="contentReferences"></param> /// <returns></returns> private IList<T> FilterByCategory<T>(IEnumerable<T> contentReferences) { if (Category == null || !Category.Any()) { return contentReferences.ToList(); } // Filter by category if a category filter has been set var filteredChildren = new List<T>(); foreach (var contentReference in contentReferences) { ICategorizable content = null; if (contentReference is ContentReference) { content = (contentReference as ContentReference).Get<IContent>() as ICategorizable; } else if (typeof(T) == typeof(GetChildrenReferenceResult)) { content = (contentReference as GetChildrenReferenceResult).ContentLink.Get<IContent>() as ICategorizable; } if (content != null) { var atLeastOneMatchingCategory = content.Category.Any(c => Category.Contains(c)); if (atLeastOneMatchingCategory) { filteredChildren.Add(contentReference); } } else // Non-categorizable content will also be included { filteredChildren.Add(contentReference); } } return filteredChildren; } protected override IContent LoadContent(ContentReference contentLink, ILanguageSelector languageSelector) { if (ContentReference.IsNullOrEmpty(contentLink) || contentLink.ID == 0) { throw new ArgumentNullException("contentLink"); } if (contentLink.WorkID > 0) { return ContentStore.LoadVersion(contentLink, -1); } var languageBranchRepository = ServiceLocator.Current.GetInstance<ILanguageBranchRepository>(); LanguageBranch langBr = null; if (languageSelector.Language != null) { langBr = languageBranchRepository.Load(languageSelector.Language); } if (contentLink.GetPublishedOrLatest) { return ContentStore.LoadVersion(contentLink, langBr != null ? langBr.ID : -1); } // Get published version of Content var originalContent = ContentStore.Load(contentLink, langBr != null ? langBr.ID : -1); var page = originalContent as PageData; if (page == null) { throw new NotSupportedException("Only cloning of pages is supported"); } return ClonePage(page); } protected override ContentResolveResult ResolveContent(ContentReference contentLink) { var contentData = ContentCoreDataLoader.Service.Load(contentLink.ID); // All pages but the entry root should appear to come from this content provider if (!contentLink.CompareToIgnoreWorkID(EntryRoot)) { contentData.ContentReference.ProviderName = ProviderKey; } var result = CreateContentResolveResult(contentData); if (!result.ContentLink.CompareToIgnoreWorkID(EntryRoot)) { result.ContentLink.ProviderName = ProviderKey; } return result; } protected override Uri ConstructContentUri(int contentTypeId, ContentReference contentLink, Guid contentGuid) { if (!contentLink.CompareToIgnoreWorkID(EntryRoot)) { contentLink.ProviderName = ProviderKey; } return base.ConstructContentUri(contentTypeId, contentLink, contentGuid); } protected override IList<GetChildrenReferenceResult> LoadChildrenReferencesAndTypes(ContentReference contentLink, string languageID, out bool languageSpecific) { // If retrieving children for the entry point, we retrieve pages from the clone root contentLink = contentLink.CompareToIgnoreWorkID(EntryRoot) ? 
CloneRoot : contentLink; FilterSortOrder sortOrder; var children = ContentStore.LoadChildrenReferencesAndTypes(contentLink.ID, languageID, out sortOrder); languageSpecific = sortOrder == FilterSortOrder.Alphabetical; foreach (var contentReference in children.Where(contentReference => !contentReference.ContentLink.CompareToIgnoreWorkID(EntryRoot))) { contentReference.ContentLink.ProviderName = ProviderKey; } return FilterByCategory <GetChildrenReferenceResult>(children); } protected override IEnumerable<IContent> LoadContents(IList<ContentReference> contentReferences, ILanguageSelector selector) { return contentReferences .Select(contentReference => ClonePage(ContentLoader.Get<PageData>(contentReference.ToReferenceWithoutVersion()))) .Cast<IContent>() .ToList(); } protected override void SetCacheSettings(IContent content, CacheSettings cacheSettings) { // Make the cache of this content provider depend on the original content cacheSettings.CacheKeys.Add(DataFactoryCache.PageCommonCacheKey(new ContentReference(content.ContentLink.ID))); } protected override void SetCacheSettings(ContentReference contentReference, IEnumerable<GetChildrenReferenceResult> children, CacheSettings cacheSettings) { // Make the cache of this content provider depend on the original content cacheSettings.CacheKeys.Add(DataFactoryCache.PageCommonCacheKey(new ContentReference(contentReference.ID))); foreach (var child in children) { cacheSettings.CacheKeys.Add(DataFactoryCache.PageCommonCacheKey(new ContentReference(child.ContentLink.ID))); } } public override IList<ContentReference> GetDescendentReferences(ContentReference contentLink) { // If retrieving children for the entry point, we retrieve pages from the clone root contentLink = contentLink.CompareToIgnoreWorkID(EntryRoot) ? 
CloneRoot : contentLink; var descendents = ContentStore.ListAll(contentLink); foreach (var contentReference in descendents.Where(contentReference => !contentReference.CompareToIgnoreWorkID(EntryRoot))) { contentReference.ProviderName = ProviderKey; } return FilterByCategory<ContentReference>(descendents); } public PageDataCollection FindAllPagesWithCriteria(PageReference pageLink, PropertyCriteriaCollection criterias, string languageBranch, ILanguageSelector selector) { // Any search beneath the entry root should in fact be performed under the clone root as that's where the original content resides if (pageLink.CompareToIgnoreWorkID(EntryRoot)) { pageLink = CloneRoot; } else if (!string.IsNullOrWhiteSpace(pageLink.ProviderName)) // Any search beneath a cloned page should in fact be performed under the original page, so we use a page link without any provider information { pageLink = new PageReference(pageLink.ID); } var pages = PageQueryService.FindAllPagesWithCriteria(pageLink, criterias, languageBranch, selector); // Return cloned search result set return new PageDataCollection(pages.Select(ClonePage)); } public PageDataCollection FindPagesWithCriteria(PageReference pageLink, PropertyCriteriaCollection criterias, string languageBranch, ILanguageSelector selector) { // Any search beneath the entry root should in fact be performed under the clone root as that's where the original content resides if (pageLink.CompareToIgnoreWorkID(EntryRoot)) { pageLink = CloneRoot; } else if (!string.IsNullOrWhiteSpace(pageLink.ProviderName)) // Any search beneath a cloned page should in fact be performed under the original page, so we use a page link without any provider information { pageLink = new PageReference(pageLink.ID); } var pages = PageQueryService.FindPagesWithCriteria(pageLink, criterias, languageBranch, selector); // Return cloned search result set return new PageDataCollection(pages.Select(ClonePage)); } /// <summary> /// Gets the content store used to get original content /// </summary> protected virtual ContentStore ContentStore { get { return ServiceLocator.Current.GetInstance<ContentStore>(); } } /// <summary> /// Gets the content loader used to get content /// </summary> protected virtual IContentLoader ContentLoader { get { return ServiceLocator.Current.GetInstance<IContentLoader>(); } } /// <summary> /// Gets the service used to query for pages using criterias /// </summary> protected virtual IPageCriteriaQueryService PageQueryService { get { return ServiceLocator.Current.GetInstance<IPageCriteriaQueryService>(); } } /// <summary> /// Content that should be cloned at the entry point /// </summary> public PageReference CloneRoot { get; protected set; } /// <summary> /// Gets the page where the cloned content will appear /// </summary> public PageReference EntryRoot { get; protected set; } /// <summary> /// Gets the category filters used for this content provider /// </summary> /// <remarks>If set, pages not matching at least one of these categories will be excluded from this content provider</remarks> public CategoryList Category { get; protected set; } /// <summary> /// Gets a unique key for this content provider instance /// </summary> public override string ProviderKey { get { return string.Format("ClonedContent-{0}-{1}", CloneRoot.ID, EntryRoot.ID); } } /// <summary> /// Gets capabilities indicating no content editing can be performed through this provider /// </summary> public override ContentProviderCapabilities ProviderCapabilities { get { return 
ContentProviderCapabilities.Search; } } /// <summary> /// Gets configuration parameters for this content provider instance /// </summary> public override NameValueCollection Parameters { get { return _parameters; } } } }
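// Illustrative sketch, not part of the provider above. The class comment shows how to register
// the provider programmatically; in a site this is normally done once at startup. A minimal
// initialization-module sketch (page IDs 10 and 20 are placeholders, and namespaces or required
// interface members may differ between EPiServer versions) might look like this:
using EPiServer.Core;
using EPiServer.Framework;
using EPiServer.Framework.Initialization;
using EPiServer.ServiceLocation;
using EPiServerAlloySite.Business.ContentProviders;

[InitializableModule]
[ModuleDependency(typeof(EPiServer.Web.InitializationModule))]
public class ClonedContentProviderInitialization : IInitializableModule
{
    public void Initialize(InitializationEngine context)
    {
        var cloneRoot = new PageReference(10); // placeholder: root of the subtree to clone
        var entryRoot = new PageReference(20); // placeholder: leaf page where clones should appear

        var provider = new ClonedContentProvider(cloneRoot, entryRoot);
        var providerManager = ServiceLocator.Current.GetInstance<IContentProviderManager>();
        providerManager.ProviderMap.AddProvider(provider);
    }

    public void Uninitialize(InitializationEngine context)
    {
    }
}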
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Immutable; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis.CodeFixes; using Microsoft.CodeAnalysis.CodeFixes.Suppression; using Microsoft.CodeAnalysis.CSharp; using Microsoft.CodeAnalysis.CSharp.CodeFixes.Suppression; using Microsoft.CodeAnalysis.CSharp.Diagnostics.SimplifyTypeNames; using Microsoft.CodeAnalysis.CSharp.Syntax; using Microsoft.CodeAnalysis.Diagnostics; using Microsoft.CodeAnalysis.Diagnostics.CSharp; using Microsoft.CodeAnalysis.Editor.UnitTests.Diagnostics; using Microsoft.CodeAnalysis.Editor.UnitTests.Workspaces; using Microsoft.CodeAnalysis.ErrorLogger; using Microsoft.CodeAnalysis.Host.Mef; using Microsoft.CodeAnalysis.Text; using Roslyn.Test.Utilities; using Roslyn.Utilities; using Xunit; namespace Microsoft.CodeAnalysis.Editor.CSharp.UnitTests.Diagnostics.Suppression { public abstract partial class CSharpSuppressionTests : AbstractSuppressionDiagnosticTest { protected override ParseOptions GetScriptOptions() { return Options.Script; } protected override Task<TestWorkspace> CreateWorkspaceFromFileAsync(string definition, ParseOptions parseOptions, CompilationOptions compilationOptions) { return CSharpWorkspaceFactory.CreateWorkspaceFromFileAsync(definition, (CSharpParseOptions)parseOptions, (CSharpCompilationOptions)compilationOptions); } protected override string GetLanguage() { return LanguageNames.CSharp; } #region "Pragma disable tests" public abstract partial class CSharpPragmaWarningDisableSuppressionTests : CSharpSuppressionTests { protected sealed override int CodeActionIndex { get { return 0; } } public class CompilerDiagnosticSuppressionTests : CSharpPragmaWarningDisableSuppressionTests { internal override Tuple<DiagnosticAnalyzer, ISuppressionFixProvider> CreateDiagnosticProviderAndFixer(Workspace workspace) { return Tuple.Create<DiagnosticAnalyzer, ISuppressionFixProvider>(null, new CSharpSuppressionCodeFixProvider()); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestPragmaWarningDirective() { await TestAsync( @" class Class { void Method() { [|int x = 0;|] } }", $@" class Class {{ void Method() {{ #pragma warning disable CS0219 // {CSharpResources.WRN_UnreferencedVarAssg_Title} int x = 0; #pragma warning restore CS0219 // {CSharpResources.WRN_UnreferencedVarAssg_Title} }} }}"); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestMultilineStatementPragmaWarningDirective() { await TestAsync( @" class Class { void Method() { [|int x = 0 + 1;|] } }", $@" class Class {{ void Method() {{ #pragma warning disable CS0219 // {CSharpResources.WRN_UnreferencedVarAssg_Title} int x = 0 #pragma warning restore CS0219 // {CSharpResources.WRN_UnreferencedVarAssg_Title} + 1; }} }}"); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestPragmaWarningDirectiveWithExistingTrivia() { await TestAsync( @" class Class { void Method() { // Start comment previous line /* Start comment same line */ [|int x = 0;|] // End comment same line /* End comment next line */ } }", $@" class Class {{ void Method() {{ // Start comment previous line #pragma warning disable CS0219 // {CSharpResources.WRN_UnreferencedVarAssg_Title} /* Start comment same line */ int x = 0; // End comment same line #pragma warning restore CS0219 
// {CSharpResources.WRN_UnreferencedVarAssg_Title} /* End comment next line */ }} }}"); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestMultipleInstancesOfPragmaWarningDirective() { await TestAsync( @" class Class { void Method() { [|int x = 0, y = 0;|] } }", $@" class Class {{ void Method() {{ #pragma warning disable CS0219 // {CSharpResources.WRN_UnreferencedVarAssg_Title} int x = 0, y = 0; #pragma warning restore CS0219 // {CSharpResources.WRN_UnreferencedVarAssg_Title} }} }}"); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] [WorkItem(3311, "https://github.com/dotnet/roslyn/issues/3311")] public async Task TestNoDuplicateSuppressionCodeFixes() { var source = @" class Class { void Method() { [|int x = 0, y = 0; string s;|] } }"; using (var workspace = await CreateWorkspaceFromFileAsync(source, parseOptions: null, compilationOptions: null)) { var diagnosticService = new TestDiagnosticAnalyzerService(LanguageNames.CSharp, new CSharpCompilerDiagnosticAnalyzer()); var incrementalAnalyzer = diagnosticService.CreateIncrementalAnalyzer(workspace); var suppressionProvider = CreateDiagnosticProviderAndFixer(workspace).Item2; var suppressionProviderFactory = new Lazy<ISuppressionFixProvider, CodeChangeProviderMetadata>(() => suppressionProvider, new CodeChangeProviderMetadata("SuppressionProvider", languages: new[] { LanguageNames.CSharp })); var fixService = new CodeFixService(diagnosticService, SpecializedCollections.EmptyEnumerable<Lazy<IErrorLoggerService>>(), SpecializedCollections.EmptyEnumerable<Lazy<CodeFixProvider, CodeChangeProviderMetadata>>(), SpecializedCollections.SingletonEnumerable(suppressionProviderFactory)); TextSpan span; var document = GetDocumentAndSelectSpan(workspace, out span); var diagnostics = await diagnosticService.GetDiagnosticsForSpanAsync(document, span); Assert.Equal(2, diagnostics.Where(d => d.Id == "CS0219").Count()); var allFixes = (await fixService.GetFixesAsync(document, span, includeSuppressionFixes: true, cancellationToken: CancellationToken.None)) .SelectMany(fixCollection => fixCollection.Fixes); var cs0219Fixes = allFixes.Where(fix => fix.PrimaryDiagnostic.Id == "CS0219"); // Ensure that both the fixes have identical equivalence key, and hence get de-duplicated in LB menu. 
Assert.Equal(2, cs0219Fixes.Count()); var cs0219EquivalenceKey = cs0219Fixes.First().Action.EquivalenceKey; Assert.NotNull(cs0219EquivalenceKey); Assert.Equal(cs0219EquivalenceKey, cs0219Fixes.Last().Action.EquivalenceKey); // Ensure that there *is* a fix for the other warning and that it has a *different* // equivalence key so that it *doesn't* get de-duplicated Assert.Equal(1, diagnostics.Where(d => d.Id == "CS0168").Count()); var cs0168Fixes = allFixes.Where(fix => fix.PrimaryDiagnostic.Id == "CS0168"); var cs0168EquivalenceKey = cs0168Fixes.Single().Action.EquivalenceKey; Assert.NotNull(cs0168EquivalenceKey); Assert.NotEqual(cs0219EquivalenceKey, cs0168EquivalenceKey); } } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestErrorAndWarningScenario() { await TestAsync( @" class Class { void Method() { return 0; [|int x = ""0"";|] } }", $@" class Class {{ void Method() {{ return 0; #pragma warning disable CS0162 // {CSharpResources.WRN_UnreachableCode_Title} int x = ""0""; #pragma warning restore CS0162 // {CSharpResources.WRN_UnreachableCode_Title} }} }}"); } [WorkItem(956453)] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestWholeFilePragmaWarningDirective() { await TestAsync( @"class Class { void Method() { [|int x = 0;|] } }", $@"#pragma warning disable CS0219 // {CSharpResources.WRN_UnreferencedVarAssg_Title} class Class {{ void Method() {{ int x = 0; }} }} #pragma warning restore CS0219 // {CSharpResources.WRN_UnreferencedVarAssg_Title}"); } [WorkItem(970129)] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestSuppressionAroundSingleToken() { await TestAsync( @" using System; [Obsolete] class Session { } class Program { static void Main() { [|Session|] } }", $@" using System; [Obsolete] class Session {{ }} class Program {{ static void Main() {{ #pragma warning disable CS0612 // {CSharpResources.WRN_DeprecatedSymbol_Title} Session #pragma warning restore CS0612 // {CSharpResources.WRN_DeprecatedSymbol_Title} }} }}"); } [WorkItem(1066576)] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestPragmaWarningDirectiveAroundTrivia1() { await TestAsync( @" class Class { void Method() { // Comment // Comment [|#pragma abcde|] } // Comment }", $@" class Class {{ void Method() {{ // Comment // Comment #pragma warning disable CS1633 // {CSharpResources.WRN_IllegalPragma_Title} #pragma abcde }} // Comment #pragma warning restore CS1633 // {CSharpResources.WRN_IllegalPragma_Title} }}"); } [WorkItem(1066576)] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestPragmaWarningDirectiveAroundTrivia2() { await TestAsync( @"[|#pragma abcde|]", $@"#pragma warning disable CS1633 // {CSharpResources.WRN_IllegalPragma_Title} #pragma abcde #pragma warning restore CS1633 // {CSharpResources.WRN_IllegalPragma_Title}"); } [WorkItem(1066576)] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestPragmaWarningDirectiveAroundTrivia3() { await TestAsync( @"[|#pragma abcde|] ", $@"#pragma warning disable CS1633 // {CSharpResources.WRN_IllegalPragma_Title} #pragma abcde #pragma warning restore CS1633 // {CSharpResources.WRN_IllegalPragma_Title}"); } [WorkItem(1066576)] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestPragmaWarningDirectiveAroundTrivia4() { await TestAsync( @" [|#pragma abc|] class C { } ", $@" #pragma warning disable 
CS1633 // {CSharpResources.WRN_IllegalPragma_Title} #pragma abc class C {{ }} #pragma warning restore CS1633 // {CSharpResources.WRN_IllegalPragma_Title} "); } [WorkItem(1066576)] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestPragmaWarningDirectiveAroundTrivia5() { await TestAsync( @"class C1 { } [|#pragma abc|] class C2 { } class C3 { }", $@"class C1 {{ }} #pragma warning disable CS1633 // {CSharpResources.WRN_IllegalPragma_Title} #pragma abc class C2 {{ }} #pragma warning restore CS1633 // {CSharpResources.WRN_IllegalPragma_Title} class C3 {{ }}"); } [WorkItem(1066576)] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestPragmaWarningDirectiveAroundTrivia6() { await TestAsync( @"class C1 { } class C2 { } /// <summary><see [|cref=""abc""|]/></summary> class C3 { } // comment // comment // comment", @"class C1 { } class C2 { } #pragma warning disable CS1574 /// <summary><see cref=""abc""/></summary> class C3 { } // comment #pragma warning enable CS1574 // comment // comment", CSharpParseOptions.Default.WithDocumentationMode(DocumentationMode.Diagnose)); } } public class UserHiddenDiagnosticSuppressionTests : CSharpPragmaWarningDisableSuppressionTests { internal override Tuple<DiagnosticAnalyzer, ISuppressionFixProvider> CreateDiagnosticProviderAndFixer(Workspace workspace) { return new Tuple<DiagnosticAnalyzer, ISuppressionFixProvider>( new CSharpSimplifyTypeNamesDiagnosticAnalyzer(), new CSharpSuppressionCodeFixProvider()); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestHiddenDiagnosticCannotBeSuppressed() { await TestMissingAsync( @" using System; class Class { int Method() { [|System.Int32 x = 0;|] return x; } }"); } } public partial class UserInfoDiagnosticSuppressionTests : CSharpPragmaWarningDisableSuppressionTests { private class UserDiagnosticAnalyzer : DiagnosticAnalyzer { public static readonly DiagnosticDescriptor Decsciptor = new DiagnosticDescriptor("InfoDiagnostic", "InfoDiagnostic Title", "InfoDiagnostic", "InfoDiagnostic", DiagnosticSeverity.Info, isEnabledByDefault: true); public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics { get { return ImmutableArray.Create(Decsciptor); } } public override void Initialize(AnalysisContext context) { context.RegisterSyntaxNodeAction(AnalyzeNode, SyntaxKind.ClassDeclaration); } public void AnalyzeNode(SyntaxNodeAnalysisContext context) { var classDecl = (ClassDeclarationSyntax)context.Node; context.ReportDiagnostic(Diagnostic.Create(Decsciptor, classDecl.Identifier.GetLocation())); } } internal override Tuple<DiagnosticAnalyzer, ISuppressionFixProvider> CreateDiagnosticProviderAndFixer(Workspace workspace) { return new Tuple<DiagnosticAnalyzer, ISuppressionFixProvider>( new UserDiagnosticAnalyzer(), new CSharpSuppressionCodeFixProvider()); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestInfoDiagnosticSuppressed() { await TestAsync( @" using System; [|class Class|] { int Method() { int x = 0; } }", @" using System; #pragma warning disable InfoDiagnostic // InfoDiagnostic Title class Class #pragma warning restore InfoDiagnostic // InfoDiagnostic Title { int Method() { int x = 0; } }"); } } public class UserErrorDiagnosticSuppressionTests : CSharpPragmaWarningDisableSuppressionTests { private class UserDiagnosticAnalyzer : DiagnosticAnalyzer { private DiagnosticDescriptor _descriptor = new DiagnosticDescriptor("ErrorDiagnostic", 
"ErrorDiagnostic", "ErrorDiagnostic", "ErrorDiagnostic", DiagnosticSeverity.Error, isEnabledByDefault: true); public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics { get { return ImmutableArray.Create(_descriptor); } } public override void Initialize(AnalysisContext context) { context.RegisterSyntaxNodeAction(AnalyzeNode, SyntaxKind.ClassDeclaration); } public void AnalyzeNode(SyntaxNodeAnalysisContext context) { var classDecl = (ClassDeclarationSyntax)context.Node; context.ReportDiagnostic(Diagnostic.Create(_descriptor, classDecl.Identifier.GetLocation())); } } internal override Tuple<DiagnosticAnalyzer, ISuppressionFixProvider> CreateDiagnosticProviderAndFixer(Workspace workspace) { return new Tuple<DiagnosticAnalyzer, ISuppressionFixProvider>( new UserDiagnosticAnalyzer(), new CSharpSuppressionCodeFixProvider()); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestErrorDiagnosticCanBeSuppressed() { await TestAsync( @" using System; [|class Class|] { int Method() { int x = 0; } }", @" using System; #pragma warning disable ErrorDiagnostic // ErrorDiagnostic class Class #pragma warning restore ErrorDiagnostic // ErrorDiagnostic { int Method() { int x = 0; } }"); } } public class DiagnosticWithBadIdSuppressionTests : CSharpPragmaWarningDisableSuppressionTests { // Analyzer driver generates a no-location analyzer exception diagnostic, which we don't intend to test here. protected override bool IncludeNoLocationDiagnostics => false; private class UserDiagnosticAnalyzer : DiagnosticAnalyzer { private DiagnosticDescriptor _descriptor = new DiagnosticDescriptor("@~DiagnosticWithBadId", "DiagnosticWithBadId", "DiagnosticWithBadId", "DiagnosticWithBadId", DiagnosticSeverity.Info, isEnabledByDefault: true); public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics { get { return ImmutableArray.Create(_descriptor); } } public override void Initialize(AnalysisContext context) { context.RegisterSyntaxNodeAction(AnalyzeNode, SyntaxKind.ClassDeclaration); } public void AnalyzeNode(SyntaxNodeAnalysisContext context) { var classDecl = (ClassDeclarationSyntax)context.Node; context.ReportDiagnostic(Diagnostic.Create(_descriptor, classDecl.Identifier.GetLocation())); } } internal override Tuple<DiagnosticAnalyzer, ISuppressionFixProvider> CreateDiagnosticProviderAndFixer(Workspace workspace) { return new Tuple<DiagnosticAnalyzer, ISuppressionFixProvider>( new UserDiagnosticAnalyzer(), new CSharpSuppressionCodeFixProvider()); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestDiagnosticWithBadIdSuppressed() { // Diagnostics with bad/invalid ID are not reported. 
await TestMissingAsync( @" using System; [|class Class|] { int Method() { int x = 0; } }"); } } } public partial class MultilineDiagnosticSuppressionTests : CSharpPragmaWarningDisableSuppressionTests { private class UserDiagnosticAnalyzer : DiagnosticAnalyzer { public static readonly DiagnosticDescriptor Decsciptor = new DiagnosticDescriptor("InfoDiagnostic", "InfoDiagnostic Title", "InfoDiagnostic", "InfoDiagnostic", DiagnosticSeverity.Info, isEnabledByDefault: true); public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics { get { return ImmutableArray.Create(Decsciptor); } } public override void Initialize(AnalysisContext context) { context.RegisterSyntaxNodeAction(AnalyzeNode, SyntaxKind.ClassDeclaration); } public void AnalyzeNode(SyntaxNodeAnalysisContext context) { var classDecl = (ClassDeclarationSyntax)context.Node; context.ReportDiagnostic(Diagnostic.Create(Decsciptor, classDecl.GetLocation())); } } internal override Tuple<DiagnosticAnalyzer, ISuppressionFixProvider> CreateDiagnosticProviderAndFixer(Workspace workspace) { return new Tuple<DiagnosticAnalyzer, ISuppressionFixProvider>( new UserDiagnosticAnalyzer(), new CSharpSuppressionCodeFixProvider()); } [WorkItem(2764, "https://github.com/dotnet/roslyn/issues/2764")] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestPragmaWarningDirectiveAroundMultilineDiagnostic() { await TestAsync( @" [|class Class { }|] ", $@" #pragma warning disable {UserDiagnosticAnalyzer.Decsciptor.Id} // {UserDiagnosticAnalyzer.Decsciptor.Title} class Class {{ }} #pragma warning restore {UserDiagnosticAnalyzer.Decsciptor.Id} // {UserDiagnosticAnalyzer.Decsciptor.Title} "); } } #endregion #region "SuppressMessageAttribute tests" public abstract partial class CSharpGlobalSuppressMessageSuppressionTests : CSharpSuppressionTests { protected sealed override int CodeActionIndex { get { return 1; } } public class CompilerDiagnosticSuppressionTests : CSharpGlobalSuppressMessageSuppressionTests { internal override Tuple<DiagnosticAnalyzer, ISuppressionFixProvider> CreateDiagnosticProviderAndFixer(Workspace workspace) { return Tuple.Create<DiagnosticAnalyzer, ISuppressionFixProvider>(null, new CSharpSuppressionCodeFixProvider()); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestCompilerDiagnosticsCannotBeSuppressed() { // Another test verifies we have a pragma warning action for this source, this verifies there are no other suppression actions. 
await TestActionCountAsync( @" class Class { void Method() { [|int x = 0;|] } }", 1); } } public class UserHiddenDiagnosticSuppressionTests : CSharpGlobalSuppressMessageSuppressionTests { internal override Tuple<DiagnosticAnalyzer, ISuppressionFixProvider> CreateDiagnosticProviderAndFixer(Workspace workspace) { return new Tuple<DiagnosticAnalyzer, ISuppressionFixProvider>( new CSharpSimplifyTypeNamesDiagnosticAnalyzer(), new CSharpSuppressionCodeFixProvider()); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestHiddenDiagnosticsCannotBeSuppressed() { await TestMissingAsync( @" using System; class Class { void Method() { [|System.Int32 x = 0;|] } }"); } } public partial class UserInfoDiagnosticSuppressionTests : CSharpGlobalSuppressMessageSuppressionTests { private class UserDiagnosticAnalyzer : DiagnosticAnalyzer { public static readonly DiagnosticDescriptor Descriptor = new DiagnosticDescriptor("InfoDiagnostic", "InfoDiagnostic", "InfoDiagnostic", "InfoDiagnostic", DiagnosticSeverity.Info, isEnabledByDefault: true); public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics { get { return ImmutableArray.Create(Descriptor); } } public override void Initialize(AnalysisContext context) { context.RegisterSyntaxNodeAction(AnalyzeNode, SyntaxKind.ClassDeclaration, SyntaxKind.EnumDeclaration, SyntaxKind.NamespaceDeclaration, SyntaxKind.MethodDeclaration, SyntaxKind.PropertyDeclaration, SyntaxKind.FieldDeclaration, SyntaxKind.EventDeclaration); } public void AnalyzeNode(SyntaxNodeAnalysisContext context) { switch (context.Node.Kind()) { case SyntaxKind.ClassDeclaration: var classDecl = (ClassDeclarationSyntax)context.Node; context.ReportDiagnostic(Diagnostic.Create(Descriptor, classDecl.Identifier.GetLocation())); break; case SyntaxKind.NamespaceDeclaration: var ns = (NamespaceDeclarationSyntax)context.Node; context.ReportDiagnostic(Diagnostic.Create(Descriptor, ns.Name.GetLocation())); break; case SyntaxKind.MethodDeclaration: var method = (MethodDeclarationSyntax)context.Node; context.ReportDiagnostic(Diagnostic.Create(Descriptor, method.Identifier.GetLocation())); break; case SyntaxKind.PropertyDeclaration: var property = (PropertyDeclarationSyntax)context.Node; context.ReportDiagnostic(Diagnostic.Create(Descriptor, property.Identifier.GetLocation())); break; case SyntaxKind.FieldDeclaration: var field = (FieldDeclarationSyntax)context.Node; context.ReportDiagnostic(Diagnostic.Create(Descriptor, field.Declaration.Variables.First().Identifier.GetLocation())); break; case SyntaxKind.EventDeclaration: var e = (EventDeclarationSyntax)context.Node; context.ReportDiagnostic(Diagnostic.Create(Descriptor, e.Identifier.GetLocation())); break; case SyntaxKind.EnumDeclaration: // Report diagnostic on each descendant comment trivia foreach (var trivia in context.Node.DescendantTrivia().Where(t => t.Kind() == SyntaxKind.SingleLineCommentTrivia || t.Kind() == SyntaxKind.MultiLineCommentTrivia)) { context.ReportDiagnostic(Diagnostic.Create(Descriptor, trivia.GetLocation())); } break; } } } internal override Tuple<DiagnosticAnalyzer, ISuppressionFixProvider> CreateDiagnosticProviderAndFixer(Workspace workspace) { return new Tuple<DiagnosticAnalyzer, ISuppressionFixProvider>( new UserDiagnosticAnalyzer(), new CSharpSuppressionCodeFixProvider()); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestSuppressionOnSimpleType() { await TestAsync( @" using System; [|class Class|] { int Method() { int x = 0; } }", $@" 
// This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""type"", Target = ""~T:Class"")] "); // Also verify that the added attribute does indeed suppress the diagnostic. await TestMissingAsync( @" using System; [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""type"", Target = ""~T:Class"")] [|class Class|] { int Method() { int x = 0; } }"); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestSuppressionOnNamespace() { await TestAsync( @" using System; [|namespace N|] { class Class { int Method() { int x = 0; } } }", $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""namespace"", Target = ""~N:N"")] ", index: 1); // Also verify that the added attribute does indeed suppress the diagnostic. await TestMissingAsync( @" using System; [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""namespace"", Target = ""~N:N"")] [|namespace N|] { class Class { int Method() { int x = 0; } } }"); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestSuppressionOnTypeInsideNamespace() { await TestAsync( @" using System; namespace N1 { namespace N2 { [|class Class|] { int Method() { int x = 0; } } } }", $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""type"", Target = ""~T:N1.N2.Class"")] "); // Also verify that the added attribute does indeed suppress the diagnostic. await TestMissingAsync( @" using System; [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""type"", Target = ""~T:N1.N2.Class"")] namespace N1 { namespace N2 { [|class Class|] { int Method() { int x = 0; } } } }"); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestSuppressionOnNestedType() { await TestAsync( @" using System; namespace N { class Generic<T> { [|class Class|] { int Method() { int x = 0; } } } }", $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. 
[assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""type"", Target = ""~T:N.Generic`1.Class"")] "); // Also verify that the added attribute does indeed suppress the diagnostic. await TestMissingAsync( @" using System; [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""type"", Target = ""~T:N.Generic`1.Class"")] namespace N { class Generic<T> { [|class Class|] { int Method() { int x = 0; } } } }"); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestSuppressionOnMethod() { await TestAsync( @" using System; namespace N { class Generic<T> { class Class { [|int Method() { int x = 0; }|] } } }", $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""member"", Target = ""~M:N.Generic`1.Class.Method~System.Int32"")] "); // Also verify that the added attribute does indeed suppress the diagnostic. await TestMissingAsync( @" using System; [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""member"", Target = ""~M:N.Generic`1.Class.Method~System.Int32"")] namespace N { class Generic<T> { class Class { [|int Method()|] { int x = 0; } } } }"); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestSuppressionOnOverloadedMethod() { await TestAsync( @" using System; namespace N { class Generic<T> { class Class { [|int Method(int y, ref char z) { int x = 0; }|] int Method() { int x = 0; } } } }", $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""member"", Target = ""~M:N.Generic`1.Class.Method(System.Int32,System.Char@)~System.Int32"")] "); // Also verify that the added attribute does indeed suppress the diagnostic. 
await TestMissingAsync( @" using System; [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""member"", Target = ""~M:N.Generic`1.Class.Method(System.Int32,System.Char@)~System.Int32"")] namespace N { class Generic<T> { class Class { [|int Method(int y, ref char z)|] { int x = 0; } int Method() { int x = 0; } } } }"); await TestAsync( @" using System; [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""member"", Target = ""~M:N.Generic`1.Class.Method(System.Int32,System.Char@)~System.Int32"")] namespace N { class Generic<T> { class Class { [|int Method(int y, ref char z) { int x = 0; } int Method() { int x = 0; }|] } } }", $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""member"", Target = ""~M:N.Generic`1.Class.Method~System.Int32"")] "); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestSuppressionOnGenericMethod() { await TestAsync( @" using System; namespace N { class Generic<T> { class Class { [|int Method<U>(U u) { int x = 0; }|] } } }", $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""member"", Target = ""~M:N.Generic`1.Class.Method``1(``0)~System.Int32"")] "); // Also verify that the added attribute does indeed suppress the diagnostic. await TestMissingAsync( @" using System; [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""member"", Target = ""~M:N.Generic`1.Class.Method``1(``0)~System.Int32"")] namespace N { class Generic<T> { class Class { [|int Method<U>(U u)|] { int x = 0; } } } }"); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestSuppressionOnProperty() { await TestAsync( @" using System; namespace N { class Generic { class Class { [|int Property|] { get { int x = 0; } } } } }", $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""member"", Target = ""~P:N.Generic.Class.Property"")] "); // Also verify that the added attribute does indeed suppress the diagnostic. 
await TestMissingAsync( @" using System; [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""member"", Target = ""~P:N.Generic.Class.Property"")] namespace N { class Generic { class Class { [|int Property|] { get { int x = 0; } } } } }"); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestSuppressionOnField() { await TestAsync( @" using System; class Class { [|int field = 0;|] }", $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""member"", Target = ""~F:Class.field"")] "); // Also verify that the added attribute does indeed suppress the diagnostic. await TestMissingAsync( @" using System; [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""member"", Target = ""~F:Class.field"")] class Class { [|int field = 0;|] }"); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] [WorkItem(6379, "https://github.com/dotnet/roslyn/issues/6379")] public async Task TestSuppressionOnTriviaBetweenFields() { await TestAsync( @" using System; // suppressions on field are not relevant. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""member"", Target = ""~F:E.Field1"")] [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""member"", Target = ""~F:E.Field2"")] enum E { [| Field1, // trailing trivia for comma token which doesn't belong to span of any of the fields Field2 |] }", $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""type"", Target = ""~T:E"")] "); // Also verify that the added attribute does indeed suppress the diagnostic. await TestMissingAsync( @" using System; [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""type"", Target = ""~T:E"")] enum E { [| Field1, // trailing trivia for comma token which doesn't belong to span of any of the fields Field2 |] }"); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestSuppressionOnField2() { await TestAsync( @" using System; class Class { int [|field = 0|], field2 = 1; }", $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. 
// Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""member"", Target = ""~F:Class.field"")] "); // Also verify that the added attribute does indeed suppress the diagnostic. await TestMissingAsync( @" using System; [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""member"", Target = ""~F:Class.field"")] class Class { int [|field|] = 0, field2 = 1; }"); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestSuppressionOnEvent() { await TestAsync( @" using System; public class SampleEventArgs { public SampleEventArgs(string s) { Text = s; } public String Text {get; private set;} // readonly } class Class { // Declare the delegate (if using non-generic pattern). public delegate void SampleEventHandler(object sender, SampleEventArgs e); // Declare the event. [|public event SampleEventHandler SampleEvent { add { } remove { } }|] }", $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""member"", Target = ""~E:Class.SampleEvent"")] "); // Also verify that the added attribute does indeed suppress the diagnostic. await TestMissingAsync( @" using System; [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""member"", Target = ""~E:Class.SampleEvent"")] public class SampleEventArgs { public SampleEventArgs(string s) { Text = s; } public String Text {get; private set;} // readonly } class Class { // Declare the delegate (if using non-generic pattern). public delegate void SampleEventHandler(object sender, SampleEventArgs e); // Declare the event. [|public event SampleEventHandler SampleEvent|] { add { } remove { } } }"); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestSuppressionWithExistingGlobalSuppressionsDocument() { var initialMarkup = @"<Workspace> <Project Language=""C#"" CommonReferences=""true"" AssemblyName=""Proj1""> <Document FilePath=""CurrentDocument.cs""><![CDATA[ using System; class Class { } [|class Class2|] { } ]]> </Document> <Document FilePath=""GlobalSuppressions.cs""><![CDATA[ // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""type"", Target = ""Class"")] ]]> </Document> </Project> </Workspace>"; var expectedText = $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. 
// Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""type"", Target = ""Class"")] [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""type"", Target = ""~T:Class2"")] "; await TestAsync(initialMarkup, expectedText); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestSuppressionWithExistingGlobalSuppressionsDocument2() { // Own custom file named GlobalSuppressions.cs var initialMarkup = @"<Workspace> <Project Language=""C#"" CommonReferences=""true"" AssemblyName=""Proj1""> <Document FilePath=""CurrentDocument.cs""><![CDATA[ using System; class Class { } [|class Class2|] { } ]]> </Document> <Document FilePath=""GlobalSuppressions.cs""><![CDATA[ // My own file named GlobalSuppressions.cs. using System; class Class { } ]]> </Document> </Project> </Workspace>"; var expectedText = $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""type"", Target = ""~T:Class2"")] "; await TestAsync(initialMarkup, expectedText); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] public async Task TestSuppressionWithExistingGlobalSuppressionsDocument3() { // Own custom file named GlobalSuppressions.cs + existing GlobalSuppressions2.cs with global suppressions var initialMarkup = @"<Workspace> <Project Language=""C#"" CommonReferences=""true"" AssemblyName=""Proj1""> <Document FilePath=""CurrentDocument.cs""><![CDATA[ using System; class Class { } [|class Class2|] { } ]]> </Document> <Document FilePath=""GlobalSuppressions.cs""><![CDATA[ // My own file named GlobalSuppressions.cs. using System; class Class { } ]]> </Document> <Document FilePath=""GlobalSuppressions2.cs""><![CDATA[ // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""type"", Target = ""Class"")] ]]> </Document> </Project> </Workspace>"; var expectedText = $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. 
[assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""<Pending>"", Scope = ""type"", Target = ""Class"")] [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""InfoDiagnostic"", ""InfoDiagnostic:InfoDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"", Scope = ""type"", Target = ""~T:Class2"")] "; await TestAsync(initialMarkup, expectedText); } } } #endregion #region NoLocation Diagnostics tests public partial class CSharpDiagnosticWithoutLocationSuppressionTests : CSharpSuppressionTests { private class UserDiagnosticAnalyzer : DiagnosticAnalyzer { public static readonly DiagnosticDescriptor Descriptor = new DiagnosticDescriptor("NoLocationDiagnostic", "NoLocationDiagnostic", "NoLocationDiagnostic", "NoLocationDiagnostic", DiagnosticSeverity.Info, isEnabledByDefault: true); public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics { get { return ImmutableArray.Create(Descriptor); } } public override void Initialize(AnalysisContext context) { context.RegisterSyntaxNodeAction(AnalyzeNode, SyntaxKind.ClassDeclaration); } public void AnalyzeNode(SyntaxNodeAnalysisContext context) { context.ReportDiagnostic(Diagnostic.Create(Descriptor, Location.None)); } } internal override Tuple<DiagnosticAnalyzer, ISuppressionFixProvider> CreateDiagnosticProviderAndFixer(Workspace workspace) { return new Tuple<DiagnosticAnalyzer, ISuppressionFixProvider>( new UserDiagnosticAnalyzer(), new CSharpSuppressionCodeFixProvider()); } protected override int CodeActionIndex { get { return 0; } } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsSuppression)] [WorkItem(1073825)] public async Task TestDiagnosticWithoutLocationCanBeSuppressed() { await TestAsync( @"[||] using System; class Class { int Method() { int x = 0; } }", $@" // This file is used by Code Analysis to maintain SuppressMessage // attributes that are applied to this project. // Project-level suppressions either have no target or are given // a specific target and scoped to a namespace, type, member, etc. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(""NoLocationDiagnostic"", ""NoLocationDiagnostic:NoLocationDiagnostic"", Justification = ""{FeaturesResources.SuppressionPendingJustification}"")] "); } } #endregion } }
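// --------------------------------------------------------------------------------------------
// Illustrative sketch only; not part of the test fixtures above. It shows, in one standalone
// file, the two suppression shapes those fixtures exercise: the assembly-level SuppressMessage
// form that the fixer writes to GlobalSuppressions.cs (its Target uses documentation-comment-ID
// syntax: ~T: type, ~M: method, ~P: property, ~F: field, ~E: event, ~N: namespace), and the
// in-source #pragma warning form. The diagnostic id "InfoDiagnostic" and the class names are
// placeholders borrowed from the tests, not real analyzer output.
// --------------------------------------------------------------------------------------------
[assembly: System.Diagnostics.CodeAnalysis.SuppressMessage(
    "InfoDiagnostic",                  // category
    "InfoDiagnostic:InfoDiagnostic",   // checkId : rule title
    Justification = "<Pending>",
    Scope = "type",
    Target = "~T:SuppressedGlobally")]

class SuppressedGlobally { }

#pragma warning disable InfoDiagnostic // InfoDiagnostic Title
class SuppressedInSource { }
#pragma warning restore InfoDiagnostic // InfoDiagnostic Title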
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /****************************************************************************** * This file is auto-generated from a template file by the GenerateTests.csx * * script in tests\src\JIT\HardwareIntrinsics\General\Shared. In order to make * * changes, please update the corresponding template and run according to the * * directions listed in the file. * ******************************************************************************/ using System; using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Runtime.Intrinsics; namespace JIT.HardwareIntrinsics.General { public static partial class Program { private static void GetAndWithElementSByte7() { var test = new VectorGetAndWithElement__GetAndWithElementSByte7(); // Validates basic functionality works test.RunBasicScenario(); // Validates calling via reflection works test.RunReflectionScenario(); // Validates that invalid indices throws ArgumentOutOfRangeException test.RunArgumentOutOfRangeScenario(); if (!test.Succeeded) { throw new Exception("One or more scenarios did not complete as expected."); } } } public sealed unsafe class VectorGetAndWithElement__GetAndWithElementSByte7 { private static readonly int LargestVectorSize = 16; private static readonly int ElementCount = Unsafe.SizeOf<Vector128<SByte>>() / sizeof(SByte); public bool Succeeded { get; set; } = true; public void RunBasicScenario(int imm = 7, bool expectedOutOfRangeException = false) { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario)); SByte[] values = new SByte[ElementCount]; for (int i = 0; i < ElementCount; i++) { values[i] = TestLibrary.Generator.GetSByte(); } Vector128<SByte> value = Vector128.Create(values[0], values[1], values[2], values[3], values[4], values[5], values[6], values[7], values[8], values[9], values[10], values[11], values[12], values[13], values[14], values[15]); bool succeeded = !expectedOutOfRangeException; try { SByte result = value.GetElement(imm); ValidateGetResult(result, values); } catch (ArgumentOutOfRangeException) { succeeded = expectedOutOfRangeException; } if (!succeeded) { TestLibrary.TestFramework.LogInformation($"Vector128<SByte.GetElement({imm}): {nameof(RunBasicScenario)} failed to throw ArgumentOutOfRangeException."); TestLibrary.TestFramework.LogInformation(string.Empty); Succeeded = false; } succeeded = !expectedOutOfRangeException; SByte insertedValue = TestLibrary.Generator.GetSByte(); try { Vector128<SByte> result2 = value.WithElement(imm, insertedValue); ValidateWithResult(result2, values, insertedValue); } catch (ArgumentOutOfRangeException) { succeeded = expectedOutOfRangeException; } if (!succeeded) { TestLibrary.TestFramework.LogInformation($"Vector128<SByte.WithElement({imm}): {nameof(RunBasicScenario)} failed to throw ArgumentOutOfRangeException."); TestLibrary.TestFramework.LogInformation(string.Empty); Succeeded = false; } } public void RunReflectionScenario(int imm = 7, bool expectedOutOfRangeException = false) { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario)); SByte[] values = new SByte[ElementCount]; for (int i = 0; i < ElementCount; i++) { values[i] = TestLibrary.Generator.GetSByte(); } Vector128<SByte> value = Vector128.Create(values[0], values[1], values[2], values[3], values[4], values[5], values[6], values[7], values[8], 
values[9], values[10], values[11], values[12], values[13], values[14], values[15]); bool succeeded = !expectedOutOfRangeException; try { object result = typeof(Vector128) .GetMethod(nameof(Vector128.GetElement)) .MakeGenericMethod(typeof(SByte)) .Invoke(null, new object[] { value, imm }); ValidateGetResult((SByte)(result), values); } catch (TargetInvocationException e) { succeeded = expectedOutOfRangeException && e.InnerException is ArgumentOutOfRangeException; } if (!succeeded) { TestLibrary.TestFramework.LogInformation($"Vector128<SByte.GetElement({imm}): {nameof(RunReflectionScenario)} failed to throw ArgumentOutOfRangeException."); TestLibrary.TestFramework.LogInformation(string.Empty); Succeeded = false; } succeeded = !expectedOutOfRangeException; SByte insertedValue = TestLibrary.Generator.GetSByte(); try { object result2 = typeof(Vector128) .GetMethod(nameof(Vector128.WithElement)) .MakeGenericMethod(typeof(SByte)) .Invoke(null, new object[] { value, imm, insertedValue }); ValidateWithResult((Vector128<SByte>)(result2), values, insertedValue); } catch (TargetInvocationException e) { succeeded = expectedOutOfRangeException && e.InnerException is ArgumentOutOfRangeException; } if (!succeeded) { TestLibrary.TestFramework.LogInformation($"Vector128<SByte.WithElement({imm}): {nameof(RunReflectionScenario)} failed to throw ArgumentOutOfRangeException."); TestLibrary.TestFramework.LogInformation(string.Empty); Succeeded = false; } } public void RunArgumentOutOfRangeScenario() { RunBasicScenario(7 - ElementCount, expectedOutOfRangeException: true); RunBasicScenario(7 + ElementCount, expectedOutOfRangeException: true); RunReflectionScenario(7 - ElementCount, expectedOutOfRangeException: true); RunReflectionScenario(7 + ElementCount, expectedOutOfRangeException: true); } private void ValidateGetResult(SByte result, SByte[] values, [CallerMemberName] string method = "") { if (result != values[7]) { Succeeded = false; TestLibrary.TestFramework.LogInformation($"Vector128<SByte.GetElement(7): {method} failed:"); TestLibrary.TestFramework.LogInformation($" value: ({string.Join(", ", values)})"); TestLibrary.TestFramework.LogInformation($" result: ({result})"); TestLibrary.TestFramework.LogInformation(string.Empty); } } private void ValidateWithResult(Vector128<SByte> result, SByte[] values, SByte insertedValue, [CallerMemberName] string method = "") { SByte[] resultElements = new SByte[ElementCount]; Unsafe.WriteUnaligned(ref Unsafe.As<SByte, byte>(ref resultElements[0]), result); ValidateWithResult(resultElements, values, insertedValue, method); } private void ValidateWithResult(SByte[] result, SByte[] values, SByte insertedValue, [CallerMemberName] string method = "") { bool succeeded = true; for (int i = 0; i < ElementCount; i++) { if ((i != 7) && (result[i] != values[i])) { succeeded = false; break; } } if (result[7] != insertedValue) { succeeded = false; } if (!succeeded) { TestLibrary.TestFramework.LogInformation($"Vector128<SByte.WithElement(7): {method} failed:"); TestLibrary.TestFramework.LogInformation($" value: ({string.Join(", ", values)})"); TestLibrary.TestFramework.LogInformation($" insert: insertedValue"); TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})"); TestLibrary.TestFramework.LogInformation(string.Empty); Succeeded = false; } } } }
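// --------------------------------------------------------------------------------------------
// Minimal usage sketch (assumed, not part of the auto-generated test above) of the two APIs the
// scenario exercises: Vector128.GetElement reads lane i of a Vector128<T>, and WithElement
// returns a copy with lane i replaced. Both throw ArgumentOutOfRangeException when the index is
// outside [0, Vector128<T>.Count). Class and method names here are placeholders.
// --------------------------------------------------------------------------------------------
using System;
using System.Runtime.Intrinsics;

static class VectorElementSketch
{
    static void Main()
    {
        Vector128<sbyte> v = Vector128.Create((sbyte)1);    // broadcast: all 16 lanes = 1
        sbyte lane7 = v.GetElement(7);                      // read lane 7
        Vector128<sbyte> w = v.WithElement(7, (sbyte)42);   // copy of v with lane 7 = 42
        Console.WriteLine($"{lane7} -> {w.GetElement(7)}"); // prints "1 -> 42"
    }
}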
using J2N.Text; using YAF.Lucene.Net.Analysis.TokenAttributes; using System; using System.Reflection; using Attribute = YAF.Lucene.Net.Util.Attribute; using AttributeSource = YAF.Lucene.Net.Util.AttributeSource; using BytesRef = YAF.Lucene.Net.Util.BytesRef; using IAttribute = YAF.Lucene.Net.Util.IAttribute; namespace YAF.Lucene.Net.Analysis { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using CharTermAttribute = YAF.Lucene.Net.Analysis.TokenAttributes.CharTermAttribute; using IAttributeReflector = YAF.Lucene.Net.Util.IAttributeReflector; /// <summary> /// A <see cref="Token"/> is an occurrence of a term from the text of a field. It consists of /// a term's text, the start and end offset of the term in the text of the field, /// and a type string. /// <para/> /// The start and end offsets permit applications to re-associate a token with /// its source text, e.g., to display highlighted query terms in a document /// browser, or to show matching text fragments in a KWIC (KeyWord In Context) /// display, etc. /// <para/> /// The type is a string, assigned by a lexical analyzer /// (a.k.a. tokenizer), naming the lexical or syntactic class that the token /// belongs to. For example an end of sentence marker token might be implemented /// with type "eos". The default token type is "word". /// <para/> /// A Token can optionally have metadata (a.k.a. payload) in the form of a variable /// length byte array. Use <see cref="Index.DocsAndPositionsEnum.GetPayload()"/> to retrieve the /// payloads from the index. /// /// <para/><para/> /// /// <para/><b>NOTE:</b> As of 2.9, Token implements all <see cref="IAttribute"/> interfaces /// that are part of core Lucene and can be found in the <see cref="TokenAttributes"/> namespace. /// Even though it is not necessary to use <see cref="Token"/> anymore, with the new <see cref="TokenStream"/> API it can /// be used as convenience class that implements all <see cref="IAttribute"/>s, which is especially useful /// to easily switch from the old to the new <see cref="TokenStream"/> API. /// /// <para/><para/> /// /// <para><see cref="Tokenizer"/>s and <see cref="TokenFilter"/>s should try to re-use a <see cref="Token"/> /// instance when possible for best performance, by /// implementing the <see cref="TokenStream.IncrementToken()"/> API. /// Failing that, to create a new <see cref="Token"/> you should first use /// one of the constructors that starts with null text. To load /// the token from a char[] use <see cref="ICharTermAttribute.CopyBuffer(char[], int, int)"/>. /// To load from a <see cref="string"/> use <see cref="ICharTermAttribute.SetEmpty()"/> followed by /// <see cref="ICharTermAttribute.Append(string)"/> or <see cref="ICharTermAttribute.Append(string, int, int)"/>. 
/// Alternatively you can get the <see cref="Token"/>'s termBuffer by calling either <see cref="ICharTermAttribute.Buffer"/>, /// if you know that your text is shorter than the capacity of the termBuffer /// or <see cref="ICharTermAttribute.ResizeBuffer(int)"/>, if there is any possibility /// that you may need to grow the buffer. Fill in the characters of your term into this /// buffer, with <see cref="string.ToCharArray(int, int)"/> if loading from a string, /// or with <see cref="System.Array.Copy(System.Array, int, System.Array, int, int)"/>, /// and finally call <see cref="ICharTermAttribute.SetLength(int)"/> to /// set the length of the term text. See <a target="_top" /// href="https://issues.apache.org/jira/browse/LUCENE-969">LUCENE-969</a> /// for details.</para> /// <para>Typical Token reuse patterns: /// <list type="bullet"> /// <item><description> Copying text from a string (type is reset to <see cref="TypeAttribute.DEFAULT_TYPE"/> if not specified): /// <code> /// return reusableToken.Reinit(string, startOffset, endOffset[, type]); /// </code> /// </description></item> /// <item><description> Copying some text from a string (type is reset to <see cref="TypeAttribute.DEFAULT_TYPE"/> if not specified): /// <code> /// return reusableToken.Reinit(string, 0, string.Length, startOffset, endOffset[, type]); /// </code> /// </description></item> /// <item><description> Copying text from char[] buffer (type is reset to <see cref="TypeAttribute.DEFAULT_TYPE"/> if not specified): /// <code> /// return reusableToken.Reinit(buffer, 0, buffer.Length, startOffset, endOffset[, type]); /// </code> /// </description></item> /// <item><description> Copying some text from a char[] buffer (type is reset to <see cref="TypeAttribute.DEFAULT_TYPE"/> if not specified): /// <code> /// return reusableToken.Reinit(buffer, start, end - start, startOffset, endOffset[, type]); /// </code> /// </description></item> /// <item><description> Copying from one one <see cref="Token"/> to another (type is reset to <see cref="TypeAttribute.DEFAULT_TYPE"/> if not specified): /// <code> /// return reusableToken.Reinit(source.Buffer, 0, source.Length, source.StartOffset, source.EndOffset[, source.Type]); /// </code> /// </description></item> /// </list> /// A few things to note: /// <list type="bullet"> /// <item><description><see cref="Clear()"/> initializes all of the fields to default values. this was changed in contrast to Lucene 2.4, but should affect no one.</description></item> /// <item><description>Because <see cref="TokenStream"/>s can be chained, one cannot assume that the <see cref="Token"/>'s current type is correct.</description></item> /// <item><description>The startOffset and endOffset represent the start and offset in the source text, so be careful in adjusting them.</description></item> /// <item><description>When caching a reusable token, clone it. When injecting a cached token into a stream that can be reset, clone it again.</description></item> /// </list> /// </para> /// <para> /// <b>Please note:</b> With Lucene 3.1, the <see cref="CharTermAttribute.ToString()"/> method had to be changed to match the /// <see cref="ICharSequence"/> interface introduced by the interface <see cref="ICharTermAttribute"/>. /// this method now only prints the term text, no additional information anymore. 
/// </para> /// </summary> public class Token : CharTermAttribute, ITypeAttribute, IPositionIncrementAttribute, IFlagsAttribute, IOffsetAttribute, IPayloadAttribute, IPositionLengthAttribute { private int startOffset, endOffset; private string type = TypeAttribute.DEFAULT_TYPE; private int flags; private BytesRef payload; private int positionIncrement = 1; private int positionLength = 1; /// <summary> /// Constructs a <see cref="Token"/> will null text. </summary> public Token() { string s = "fooobar"; s.ToCharArray(); } /// <summary> /// Constructs a <see cref="Token"/> with null text and start &amp; end /// offsets. </summary> /// <param name="start"> start offset in the source text </param> /// <param name="end"> end offset in the source text </param> public Token(int start, int end) { CheckOffsets(start, end); startOffset = start; endOffset = end; } /// <summary> /// Constructs a <see cref="Token"/> with null text and start &amp; end /// offsets plus the <see cref="Token"/> type. </summary> /// <param name="start"> start offset in the source text </param> /// <param name="end"> end offset in the source text </param> /// <param name="typ"> the lexical type of this <see cref="Token"/> </param> public Token(int start, int end, string typ) { CheckOffsets(start, end); startOffset = start; endOffset = end; type = typ; } /// <summary> /// Constructs a <see cref="Token"/> with null text and start &amp; end /// offsets plus flags. NOTE: flags is EXPERIMENTAL. </summary> /// <param name="start"> start offset in the source text </param> /// <param name="end"> end offset in the source text </param> /// <param name="flags"> The bits to set for this token </param> public Token(int start, int end, int flags) { CheckOffsets(start, end); startOffset = start; endOffset = end; this.flags = flags; } /// <summary> /// Constructs a <see cref="Token"/> with the given term text, and start /// &amp; end offsets. The type defaults to "word." /// <b>NOTE:</b> for better indexing speed you should /// instead use the char[] termBuffer methods to set the /// term text. </summary> /// <param name="text"> term text </param> /// <param name="start"> start offset in the source text </param> /// <param name="end"> end offset in the source text </param> public Token(string text, int start, int end) { CheckOffsets(start, end); Append(text); startOffset = start; endOffset = end; } /// <summary> /// Constructs a <see cref="Token"/> with the given text, start and end /// offsets, &amp; type. <b>NOTE:</b> for better indexing /// speed you should instead use the char[] termBuffer /// methods to set the term text. </summary> /// <param name="text"> term text </param> /// <param name="start"> start offset in the source text </param> /// <param name="end"> end offset in the source text </param> /// <param name="typ"> token type </param> public Token(string text, int start, int end, string typ) { CheckOffsets(start, end); Append(text); startOffset = start; endOffset = end; type = typ; } /// <summary> /// Constructs a <see cref="Token"/> with the given text, start and end /// offsets, &amp; type. <b>NOTE:</b> for better indexing /// speed you should instead use the char[] termBuffer /// methods to set the term text. 
</summary> /// <param name="text"> term text </param> /// <param name="start"> start offset in the source text </param> /// <param name="end"> end offset in the source text </param> /// <param name="flags"> token type bits </param> public Token(string text, int start, int end, int flags) { CheckOffsets(start, end); Append(text); startOffset = start; endOffset = end; this.flags = flags; } /// <summary> /// Constructs a <see cref="Token"/> with the given term buffer (offset /// &amp; length), start and end offsets /// </summary> /// <param name="startTermBuffer"> buffer containing term text </param> /// <param name="termBufferOffset"> the index in the buffer of the first character </param> /// <param name="termBufferLength"> number of valid characters in the buffer </param> /// <param name="start"> start offset in the source text </param> /// <param name="end"> end offset in the source text </param> public Token(char[] startTermBuffer, int termBufferOffset, int termBufferLength, int start, int end) { CheckOffsets(start, end); CopyBuffer(startTermBuffer, termBufferOffset, termBufferLength); startOffset = start; endOffset = end; } /// <summary> /// Gets or Sets the position increment (the distance from the prior term). The default value is one. /// </summary> /// <exception cref="System.ArgumentException"> if value is set to a negative value. </exception> /// <seealso cref="IPositionIncrementAttribute"/> public virtual int PositionIncrement { set { if (value < 0) { throw new System.ArgumentException("Increment must be zero or greater: " + value); } this.positionIncrement = value; } get { return positionIncrement; } } /// <summary> /// Gets or Sets the position length of this <see cref="Token"/> (how many positions this token /// spans). /// <para/> /// The default value is one. /// </summary> /// <exception cref="System.ArgumentException"> if value /// is set to zero or negative. </exception> /// <seealso cref="IPositionLengthAttribute"/> public virtual int PositionLength { set { this.positionLength = value; } get { return positionLength; } } /// <summary> /// Returns this <see cref="Token"/>'s starting offset, the position of the first character /// corresponding to this token in the source text. /// <para/> /// Note that the difference between <see cref="EndOffset"/> and <see cref="StartOffset"/> /// may not be equal to termText.Length, as the term text may have been altered by a /// stemmer or some other filter. /// </summary> /// <seealso cref="SetOffset(int, int)"/> /// <seealso cref="IOffsetAttribute"/> public int StartOffset { get { return startOffset; } } /// <summary> /// Returns this <see cref="Token"/>'s ending offset, one greater than the position of the /// last character corresponding to this token in the source text. The length /// of the token in the source text is (<code>EndOffset</code> - <see cref="StartOffset"/>). /// </summary> /// <seealso cref="SetOffset(int, int)"/> /// <seealso cref="IOffsetAttribute"/> public int EndOffset { get { return endOffset; } } /// <summary> /// Set the starting and ending offset. 
/// </summary> /// <exception cref="System.ArgumentException"> If <paramref name="startOffset"/> or <paramref name="endOffset"/> /// are negative, or if <paramref name="startOffset"/> is greater than /// <paramref name="endOffset"/> </exception> /// <seealso cref="StartOffset"/> /// <seealso cref="EndOffset"/> /// <seealso cref="IOffsetAttribute"/> public virtual void SetOffset(int startOffset, int endOffset) { CheckOffsets(startOffset, endOffset); this.startOffset = startOffset; this.endOffset = endOffset; } /// <summary>Gets or Sets this <see cref="Token"/>'s lexical type. Defaults to "word". </summary> public string Type { get { return type; } set { this.type = value; } } /// <summary> /// Get the bitset for any bits that have been set. /// <para/> /// This is completely distinct from <see cref="ITypeAttribute.Type" />, although they do share similar purposes. /// The flags can be used to encode information about the token for use by other <see cref="Lucene.Net.Analysis.TokenFilter" />s. /// </summary> /// <seealso cref="IFlagsAttribute"/> public virtual int Flags { get { return flags; } set { this.flags = value; } } /// <summary> /// Gets or Sets this <see cref="Token"/>'s payload. /// </summary> /// <seealso cref="IPayloadAttribute"/> public virtual BytesRef Payload { get { return this.payload; } set { this.payload = value; } } /// <summary> /// Resets the term text, payload, flags, and positionIncrement, /// startOffset, endOffset and token type to default. /// </summary> public override void Clear() { base.Clear(); payload = null; positionIncrement = 1; flags = 0; startOffset = endOffset = 0; type = TokenAttributes.TypeAttribute.DEFAULT_TYPE; } public override object Clone() { var t = (Token)base.Clone(); // Do a deep clone if (payload != null) { t.payload = (BytesRef)payload.Clone(); } return t; } /// <summary> /// Makes a clone, but replaces the term buffer &amp; /// start/end offset in the process. This is more /// efficient than doing a full clone (and then calling /// <see cref="ICharTermAttribute.CopyBuffer"/>) because it saves a wasted copy of the old /// termBuffer. /// </summary> public virtual Token Clone(char[] newTermBuffer, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset) { var t = new Token(newTermBuffer, newTermOffset, newTermLength, newStartOffset, newEndOffset) { positionIncrement = positionIncrement, flags = flags, type = type }; if (payload != null) { t.payload = (BytesRef)payload.Clone(); } return t; } public override bool Equals(object obj) { if (obj == this) { return true; } if (obj is Token) { var other = (Token)obj; return (startOffset == other.startOffset && endOffset == other.endOffset && flags == other.flags && positionIncrement == other.positionIncrement && (type == null ? other.type == null : type.Equals(other.type, StringComparison.Ordinal)) && (payload == null ? 
other.payload == null : payload.Equals(other.payload)) && base.Equals(obj) ); } else { return false; } } public override int GetHashCode() { int code = base.GetHashCode(); code = code * 31 + startOffset; code = code * 31 + endOffset; code = code * 31 + flags; code = code * 31 + positionIncrement; if (type != null) { code = code * 31 + type.GetHashCode(); } if (payload != null) { code = code * 31 + payload.GetHashCode(); } return code; } // like clear() but doesn't clear termBuffer/text private void ClearNoTermBuffer() { payload = null; positionIncrement = 1; flags = 0; startOffset = endOffset = 0; type = TokenAttributes.TypeAttribute.DEFAULT_TYPE; } /// <summary> /// Shorthand for calling <see cref="Clear"/>, /// <see cref="ICharTermAttribute.CopyBuffer(char[], int, int)"/>, /// <see cref="SetOffset"/>, /// <see cref="Type"/> (set) </summary> /// <returns> this <see cref="Token"/> instance </returns> public virtual Token Reinit(char[] newTermBuffer, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset, string newType) { CheckOffsets(newStartOffset, newEndOffset); ClearNoTermBuffer(); CopyBuffer(newTermBuffer, newTermOffset, newTermLength); payload = null; positionIncrement = 1; startOffset = newStartOffset; endOffset = newEndOffset; type = newType; return this; } /// <summary> /// Shorthand for calling <see cref="Clear"/>, /// <see cref="ICharTermAttribute.CopyBuffer(char[], int, int)"/>, /// <see cref="SetOffset"/>, /// <see cref="Type"/> (set) on <see cref="TypeAttribute.DEFAULT_TYPE"/> </summary> /// <returns> this <see cref="Token"/> instance </returns> public virtual Token Reinit(char[] newTermBuffer, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset) { CheckOffsets(newStartOffset, newEndOffset); ClearNoTermBuffer(); CopyBuffer(newTermBuffer, newTermOffset, newTermLength); startOffset = newStartOffset; endOffset = newEndOffset; type = TokenAttributes.TypeAttribute.DEFAULT_TYPE; return this; } /// <summary> /// Shorthand for calling <see cref="Clear"/>, /// <see cref="ICharTermAttribute.Append(string)"/>, /// <see cref="SetOffset"/>, /// <see cref="Type"/> (set) </summary> /// <returns> this <see cref="Token"/> instance </returns> public virtual Token Reinit(string newTerm, int newStartOffset, int newEndOffset, string newType) { CheckOffsets(newStartOffset, newEndOffset); Clear(); Append(newTerm); startOffset = newStartOffset; endOffset = newEndOffset; type = newType; return this; } /// <summary> /// Shorthand for calling <see cref="Clear"/>, /// <see cref="ICharTermAttribute.Append(string, int, int)"/>, /// <see cref="SetOffset"/>, /// <see cref="Type"/> (set) </summary> /// <returns> this <see cref="Token"/> instance </returns> public virtual Token Reinit(string newTerm, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset, string newType) { CheckOffsets(newStartOffset, newEndOffset); Clear(); Append(newTerm, newTermOffset, newTermOffset + newTermLength); startOffset = newStartOffset; endOffset = newEndOffset; type = newType; return this; } /// <summary> /// Shorthand for calling <see cref="Clear"/>, /// <see cref="ICharTermAttribute.Append(string)"/>, /// <see cref="SetOffset"/>, /// <see cref="Type"/> (set) on <see cref="TypeAttribute.DEFAULT_TYPE"/> </summary> /// <returns> this <see cref="Token"/> instance </returns> public virtual Token Reinit(string newTerm, int newStartOffset, int newEndOffset) { CheckOffsets(newStartOffset, newEndOffset); Clear(); Append(newTerm); startOffset = newStartOffset; endOffset = 
newEndOffset; type = TokenAttributes.TypeAttribute.DEFAULT_TYPE; return this; } /// <summary> /// Shorthand for calling <see cref="Clear"/>, /// <see cref="ICharTermAttribute.Append(string, int, int)"/>, /// <see cref="SetOffset"/>, /// <see cref="Type"/> (set) on <see cref="TypeAttribute.DEFAULT_TYPE"/> </summary> /// <returns> this <see cref="Token"/> instance </returns> public virtual Token Reinit(string newTerm, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset) { CheckOffsets(newStartOffset, newEndOffset); Clear(); Append(newTerm, newTermOffset, newTermOffset + newTermLength); startOffset = newStartOffset; endOffset = newEndOffset; type = TokenAttributes.TypeAttribute.DEFAULT_TYPE; return this; } /// <summary> /// Copy the prototype token's fields into this one. Note: Payloads are shared. </summary> /// <param name="prototype"> source <see cref="Token"/> to copy fields from </param> public virtual void Reinit(Token prototype) { CopyBuffer(prototype.Buffer, 0, prototype.Length); positionIncrement = prototype.positionIncrement; flags = prototype.flags; startOffset = prototype.startOffset; endOffset = prototype.endOffset; type = prototype.type; payload = prototype.payload; } /// <summary> /// Copy the prototype token's fields into this one, with a different term. Note: Payloads are shared. </summary> /// <param name="prototype"> existing <see cref="Token"/> </param> /// <param name="newTerm"> new term text </param> public virtual void Reinit(Token prototype, string newTerm) { SetEmpty().Append(newTerm); positionIncrement = prototype.positionIncrement; flags = prototype.flags; startOffset = prototype.startOffset; endOffset = prototype.endOffset; type = prototype.type; payload = prototype.payload; } /// <summary> /// Copy the prototype token's fields into this one, with a different term. Note: Payloads are shared. </summary> /// <param name="prototype"> existing <see cref="Token"/> </param> /// <param name="newTermBuffer"> buffer containing new term text </param> /// <param name="offset"> the index in the buffer of the first character </param> /// <param name="length"> number of valid characters in the buffer </param> public virtual void Reinit(Token prototype, char[] newTermBuffer, int offset, int length) { CopyBuffer(newTermBuffer, offset, length); positionIncrement = prototype.positionIncrement; flags = prototype.flags; startOffset = prototype.startOffset; endOffset = prototype.endOffset; type = prototype.type; payload = prototype.payload; } public override void CopyTo(IAttribute target) { var to = target as Token; if (to != null) { to.Reinit(this); // reinit shares the payload, so clone it: if (payload != null) { to.payload = (BytesRef)payload.Clone(); } } else { base.CopyTo(target); ((IOffsetAttribute)target).SetOffset(startOffset, endOffset); ((IPositionIncrementAttribute)target).PositionIncrement = positionIncrement; ((IPayloadAttribute)target).Payload = (payload == null) ? 
null : (BytesRef)payload.Clone(); ((IFlagsAttribute)target).Flags = flags; ((ITypeAttribute)target).Type = type; } } public override void ReflectWith(IAttributeReflector reflector) { base.ReflectWith(reflector); reflector.Reflect(typeof(IOffsetAttribute), "startOffset", startOffset); reflector.Reflect(typeof(IOffsetAttribute), "endOffset", endOffset); reflector.Reflect(typeof(IPositionIncrementAttribute), "positionIncrement", positionIncrement); reflector.Reflect(typeof(IPayloadAttribute), "payload", payload); reflector.Reflect(typeof(IFlagsAttribute), "flags", flags); reflector.Reflect(typeof(ITypeAttribute), "type", type); } private void CheckOffsets(int startOffset, int endOffset) { if (startOffset < 0 || endOffset < startOffset) { throw new System.ArgumentException("startOffset must be non-negative, and endOffset must be >= startOffset, " + "startOffset=" + startOffset + ",endOffset=" + endOffset); } } /// <summary> /// Convenience factory that returns <see cref="Token"/> as implementation for the basic /// attributes and return the default impl (with &quot;Impl&quot; appended) for all other /// attributes. /// @since 3.0 /// </summary> public static readonly AttributeSource.AttributeFactory TOKEN_ATTRIBUTE_FACTORY = new TokenAttributeFactory(AttributeSource.AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY); /// <summary> /// <b>Expert:</b> Creates a <see cref="TokenAttributeFactory"/> returning <see cref="Token"/> as instance for the basic attributes /// and for all other attributes calls the given delegate factory. /// @since 3.0 /// </summary> public sealed class TokenAttributeFactory : AttributeSource.AttributeFactory { internal readonly AttributeSource.AttributeFactory @delegate; /// <summary> /// <b>Expert</b>: Creates an <see cref="AttributeSource.AttributeFactory"/> returning <see cref="Token"/> as instance for the basic attributes /// and for all other attributes calls the given delegate factory. /// </summary> public TokenAttributeFactory(AttributeSource.AttributeFactory @delegate) { this.@delegate = @delegate; } public override Attribute CreateAttributeInstance<T>() { var attClass = typeof(T); return attClass.GetTypeInfo().IsAssignableFrom(typeof(Token).GetTypeInfo()) ? new Token() : @delegate.CreateAttributeInstance<T>(); } public override bool Equals(object other) { if (this == other) { return true; } var af = other as TokenAttributeFactory; if (af != null) { return [email protected](af.@delegate); } return false; } public override int GetHashCode() { return @delegate.GetHashCode() ^ 0x0a45aa31; } } } }
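// --------------------------------------------------------------------------------------------
// Illustrative sketch (assumed usage, not part of the Lucene.NET sources above) of the reuse
// pattern the Token documentation describes: Reinit clears the instance and repopulates the term
// text, offsets and (optionally) type in one call, so a single Token can be recycled across
// terms instead of allocating a new one per term. The term strings and offsets are arbitrary.
// --------------------------------------------------------------------------------------------
using YAF.Lucene.Net.Analysis;

static class TokenReuseSketch
{
    static void Demo()
    {
        var reusable = new Token();
        reusable.Reinit("quick", 0, 5);          // term "quick", offsets [0,5), default type "word"
        reusable.Reinit("brown", 6, 11, "word"); // same instance recycled for the next term
    }
}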
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.IO; using System.Xml; using System.Xml.Schema; using Xunit; using Xunit.Abstractions; namespace System.Xml.Tests { // ===================== GetExpectedParticles ===================== public class TCGetExpectedParticles : CXmlSchemaValidatorTestCase { private ITestOutputHelper _output; public TCGetExpectedParticles(ITestOutputHelper output): base(output) { _output = output; } [Theory] [InlineData("ctor")] [InlineData("init")] [InlineData("end")] public void CallAfter_Constructor_Initialize_EndValidation(String after) { XmlSchemaValidator val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); if (after == "init") { val.Initialize(); Assert.Equal(val.GetExpectedParticles().Length, 18); } else if (after == "end") { val.Initialize(); val.EndValidation(); CheckExpectedElements(val.GetExpectedParticles(), new XmlQualifiedName[] { }); } else CheckExpectedElements(val.GetExpectedParticles(), new XmlQualifiedName[] { }); return; } [Theory] [InlineData("elem")] [InlineData("attrib")] [InlineData("endof")] public void CallAfterValidate_Element_Attribute_EndOfAttributes_ForSequence(String after) { XmlSchemaValidator val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); XmlSchemaInfo info = new XmlSchemaInfo(); val.Initialize(); val.ValidateElement("SequenceElement", "", info); if (after == "attrib") val.ValidateAttribute("attr1", "", StringGetter("foo"), info); if (after == "endof") { val.ValidateAttribute("attr1", "", StringGetter("foo"), info); val.ValidateAttribute("attr2", "", StringGetter("foo"), info); val.ValidateEndOfAttributes(null); } CheckExpectedElements(val.GetExpectedParticles(), new XmlQualifiedName[] { new XmlQualifiedName("elem1") }); return; } [Theory] [InlineData("inside")] [InlineData("end")] public void CallForSequence_Between_After_ValidationAllSeqElements(String callOn) { XmlSchemaValidator val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); XmlSchemaInfo info = new XmlSchemaInfo(); XmlQualifiedName[] names; val.Initialize(); val.ValidateElement("SequenceElement", "", info); val.ValidateAttribute("attr1", "", StringGetter("foo"), info); val.ValidateAttribute("attr2", "", StringGetter("foo"), info); val.ValidateEndOfAttributes(null); val.ValidateElement("elem1", "", info); val.SkipToEndElement(info); if (callOn == "end") { val.ValidateElement("elem2", "", info); val.SkipToEndElement(info); names = new XmlQualifiedName[] { }; } else { names = new XmlQualifiedName[] { new XmlQualifiedName("elem2") }; } CheckExpectedElements(val.GetExpectedParticles(), names); return; } [Theory] [InlineData("elem")] [InlineData("attrib")] [InlineData("endof")] public void CallAfterValidate_Element_Attribute_EndOfAttributes_ForChoice(String after) { XmlSchemaValidator val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); XmlSchemaInfo info = new XmlSchemaInfo(); val.Initialize(); val.ValidateElement("ChoiceElement", "", info); if (after == "attrib") val.ValidateAttribute("attr1", "", StringGetter("foo"), info); if (after == "endof") { val.ValidateAttribute("attr1", "", StringGetter("foo"), info); val.ValidateAttribute("attr2", "", StringGetter("foo"), info); val.ValidateEndOfAttributes(null); } CheckExpectedElements(val.GetExpectedParticles(), new XmlQualifiedName[] { new XmlQualifiedName("elem1"), new XmlQualifiedName("elem2") }); return; } [Theory] [InlineData("elem1")] 
[InlineData("elem2")] public void CallForChoiceAfterValidating_1_2_ChoiceElement(String elemAfter) { XmlSchemaValidator val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); XmlSchemaInfo info = new XmlSchemaInfo(); string elem = elemAfter; val.Initialize(); val.ValidateElement("ChoiceElement", "", info); val.ValidateAttribute("attr1", "", StringGetter("foo"), info); val.ValidateAttribute("attr2", "", StringGetter("foo"), info); val.ValidateEndOfAttributes(null); val.ValidateElement(elem, "", info); val.SkipToEndElement(info); CheckExpectedElements(val.GetExpectedParticles(), new XmlQualifiedName[] { }); return; } [Theory] [InlineData("elem")] [InlineData("attrib")] [InlineData("endof")] public void CallAfterValidate_Element_Attribute_EndOfAttributes_ForAll(String after) { XmlSchemaValidator val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); XmlSchemaInfo info = new XmlSchemaInfo(); val.Initialize(); val.ValidateElement("AllElement", "", info); if (after == "attrib") val.ValidateAttribute("attr1", "", StringGetter("foo"), info); if (after == "endof") { val.ValidateAttribute("attr1", "", StringGetter("foo"), info); val.ValidateAttribute("attr2", "", StringGetter("foo"), info); val.ValidateEndOfAttributes(null); } CheckExpectedElements(val.GetExpectedParticles(), new XmlQualifiedName[] { new XmlQualifiedName("elem1"), new XmlQualifiedName("elem2") }); return; } [Theory] [InlineData("elem1")] [InlineData("elem2")] public void CallForAllAfterValidating_1_2_element(string elemAfter) { XmlSchemaValidator val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); XmlSchemaInfo info = new XmlSchemaInfo(); string elem = elemAfter; string notElem = (elem == "elem1" ? "elem2" : "elem1"); val.Initialize(); val.ValidateElement("AllElement", "", info); val.ValidateAttribute("attr1", "", StringGetter("foo"), info); val.ValidateAttribute("attr2", "", StringGetter("foo"), info); val.ValidateEndOfAttributes(null); val.ValidateElement(elem, "", info); val.SkipToEndElement(info); CheckExpectedElements(val.GetExpectedParticles(), new XmlQualifiedName[] { new XmlQualifiedName(notElem) }); return; } [Fact] public void CallForAllAfterValidatingBothElements() { XmlSchemaValidator val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); XmlSchemaInfo info = new XmlSchemaInfo(); val.Initialize(); val.ValidateElement("AllElement", "", info); val.ValidateAttribute("attr1", "", StringGetter("foo"), info); val.ValidateAttribute("attr2", "", StringGetter("foo"), info); val.ValidateEndOfAttributes(null); foreach (string elem in new string[] { "elem1", "elem2" }) { val.ValidateElement(elem, "", info); val.SkipToEndElement(info); } CheckExpectedElements(val.GetExpectedParticles(), new XmlQualifiedName[] { }); return; } [Fact] public void CallForElementWithReferenceToGlobalElement() { XmlSchemaValidator val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); XmlSchemaInfo info = new XmlSchemaInfo(); val.Initialize(); val.ValidateElement("ReferenceElement", "", info); val.ValidateEndOfAttributes(null); foreach (string elem in new string[] { "NestedElement", "foo", "bar" }) { CheckExpectedElements(val.GetExpectedParticles(), new XmlQualifiedName[] { new XmlQualifiedName(elem) }); val.ValidateElement(elem, "", info); val.ValidateEndOfAttributes(null); } foreach (string elem in new string[] { "bar", "foo", "NestedElement" }) { val.ValidateEndElement(info); CheckExpectedElements(val.GetExpectedParticles(), new XmlQualifiedName[] { }); } return; } [Fact] public void CallForElementWithZeroMinOccurs() { XmlSchemaValidator val = 
CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); XmlSchemaInfo info = new XmlSchemaInfo(); val.Initialize(); val.ValidateElement("MinOccurs0Element", "", info); val.ValidateEndOfAttributes(null); CheckExpectedElements(val.GetExpectedParticles(), new XmlQualifiedName[] { new XmlQualifiedName("foo"), new XmlQualifiedName("bar") }); return; } [Fact] public void CallForElementWithZeroMaxOccurs() { XmlSchemaValidator val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); XmlSchemaInfo info = new XmlSchemaInfo(); val.Initialize(); val.ValidateElement("MaxOccurs0Element", "", info); val.ValidateEndOfAttributes(null); CheckExpectedElements(val.GetExpectedParticles(), new XmlQualifiedName[] { new XmlQualifiedName("bar") }); return; } [Theory] [InlineData("before")] [InlineData("after")] public void CallForSequence_Before_After_ValidatingWildcard(String callOrder) { XmlSchemaValidator val; XmlSchemaInfo info = new XmlSchemaInfo(); XmlSchemaSet schemas = new XmlSchemaSet(); XmlSchemaParticle[] result; schemas.Add("", TestData + XSDFILE_GET_EXPECTED_PARTICLES); schemas.Add("uri:tempuri", TestData + XSDFILE_TARGET_NAMESPACE); val = CreateValidator(schemas); val.Initialize(); val.ValidateElement("SequenceWildcardElement", "", info); val.ValidateEndOfAttributes(null); if (callOrder == "before") { result = val.GetExpectedParticles(); Assert.Equal(result.Length, 1); Assert.True(result[0] is XmlSchemaAny); Assert.Equal((result[0] as XmlSchemaAny).Namespace, "uri:tempuri"); Assert.Equal((result[0] as XmlSchemaAny).ProcessContents, XmlSchemaContentProcessing.Strict); } else { val.ValidateElement("elem1", "uri:tempuri", info); val.SkipToEndElement(info); CheckExpectedElements(val.GetExpectedParticles(), new XmlQualifiedName[] { new XmlQualifiedName("foo") }); } return; } [Theory] [InlineData("before")] [InlineData("after")] public void CallForChoice_Before_After_ValidatingWildcard(String callOrder) { XmlSchemaValidator val; XmlSchemaInfo info = new XmlSchemaInfo(); XmlSchemaSet schemas = new XmlSchemaSet(); XmlSchemaParticle[] result; schemas.Add("", TestData + XSDFILE_GET_EXPECTED_PARTICLES); schemas.Add("uri:tempuri", TestData + XSDFILE_TARGET_NAMESPACE); val = CreateValidator(schemas); val.Initialize(); val.ValidateElement("ChoiceWildcardElement", "", info); val.ValidateEndOfAttributes(null); if (callOrder == "before") { result = val.GetExpectedParticles(); Assert.Equal(result.Length, 2); if (result[0] is XmlSchemaAny) { Assert.Equal((result[0] as XmlSchemaAny).Namespace, "uri:tempuri"); Assert.Equal((result[0] as XmlSchemaAny).ProcessContents, XmlSchemaContentProcessing.Strict); Assert.True(result[1] is XmlSchemaElement); Assert.Equal((result[1] as XmlSchemaElement).QualifiedName, new XmlQualifiedName("foo")); } else { Assert.True(result[1] is XmlSchemaAny); Assert.Equal((result[1] as XmlSchemaAny).Namespace, "uri:tempuri"); Assert.Equal((result[1] as XmlSchemaAny).ProcessContents, XmlSchemaContentProcessing.Strict); Assert.True(result[0] is XmlSchemaElement); Assert.Equal((result[0] as XmlSchemaElement).QualifiedName, new XmlQualifiedName("foo")); } } else { val.ValidateElement("elem1", "uri:tempuri", info); val.SkipToEndElement(info); CheckExpectedElements(val.GetExpectedParticles(), new XmlQualifiedName[] { }); } return; } [Theory] [InlineData("before")] [InlineData("after")] public void CallForSequenceWithChoiceGroup_Before_After_ValidatingGroupMembers(String callOrder) { XmlSchemaValidator val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); XmlSchemaInfo info = new XmlSchemaInfo(); 
XmlQualifiedName[] names; val.Initialize(); val.ValidateElement("SequenceGroupElement", "", info); val.ValidateEndOfAttributes(null); if (callOrder == "before") { names = new XmlQualifiedName[] { new XmlQualifiedName("g1"), new XmlQualifiedName("g2") }; } else { val.ValidateElement("g1", "", info); val.SkipToEndElement(info); names = new XmlQualifiedName[] { new XmlQualifiedName("foo") }; } CheckExpectedElements(val.GetExpectedParticles(), names); return; } [Theory] [InlineData("before")] [InlineData("after")] public void CallForChoiceWithSequenceGroup_Before_After_ValidatingGroupMembers(String callOrder) { XmlSchemaValidator val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); XmlSchemaInfo info = new XmlSchemaInfo(); XmlQualifiedName[] names; val.Initialize(); val.ValidateElement("ChoiceGroupElement", "", info); val.ValidateEndOfAttributes(null); if (callOrder == "before") { names = new XmlQualifiedName[] { new XmlQualifiedName("g1"), new XmlQualifiedName("foo") }; } else { val.ValidateElement("g1", "", info); val.SkipToEndElement(info); names = new XmlQualifiedName[] { new XmlQualifiedName("g2") }; } CheckExpectedElements(val.GetExpectedParticles(), names); return; } [Theory] [InlineData("before")] [InlineData("after")] public void CallForExtendedSequence_Before_After_ValidatingSeqOrAllBaseElements(String callOrder) { XmlSchemaValidator val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); XmlSchemaInfo info = new XmlSchemaInfo(); XmlQualifiedName[] names; val.Initialize(); val.ValidateElement("SequenceExtensionElement", "", info); val.ValidateEndOfAttributes(null); if (callOrder == "before") { names = new XmlQualifiedName[] { new XmlQualifiedName("elem1") }; } else { val.ValidateElement("elem1", "", info); val.ValidateEndElement(info); val.ValidateElement("elem2", "", info); val.ValidateEndElement(info); names = new XmlQualifiedName[] { new XmlQualifiedName("extended") }; } CheckExpectedElements(val.GetExpectedParticles(), names); return; } [Theory] [InlineData("before")] [InlineData("after")] public void CallForExtendedChoice_Before_After_ValidatingBaseChoiceElement(String callOrder) { XmlSchemaValidator val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); XmlSchemaInfo info = new XmlSchemaInfo(); XmlQualifiedName[] names; val.Initialize(); val.ValidateElement("ChoiceExtensionElement", "", info); val.ValidateEndOfAttributes(null); if (callOrder == "before") { names = new XmlQualifiedName[] { new XmlQualifiedName("elem1"), new XmlQualifiedName("elem2") }; } else { val.ValidateElement("elem1", "", info); val.ValidateEndElement(info); names = new XmlQualifiedName[] { new XmlQualifiedName("ext1"), new XmlQualifiedName("ext2") }; } CheckExpectedElements(val.GetExpectedParticles(), names); return; } [Theory] [InlineData("Sequence", "before")] [InlineData("Sequence", "after")] [InlineData("Choice", "before")] [InlineData("Choice", "after" )] [InlineData("All", "before")] [InlineData("All", "after")] public void CallForRestricted_Sequence_Choice_All__Before_After_ValidatingSeqElements(String restrType, String callOrder) { XmlSchemaValidator val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); XmlSchemaInfo info = new XmlSchemaInfo(); XmlQualifiedName[] names; val.Initialize(); val.ValidateElement(restrType + "RestrictionElement", "", info); val.ValidateEndOfAttributes(null); if (callOrder == "before") { names = new XmlQualifiedName[] { new XmlQualifiedName("elem1") }; } else { val.ValidateElement("elem1", "", info); val.ValidateEndElement(info); names = new XmlQualifiedName[] { }; } 
CheckExpectedElements(val.GetExpectedParticles(), names); return; } [Fact] public void CallForChoiceWithElementsFromDifferentNamespaces() { XmlSchemaValidator val; XmlSchemaInfo info = new XmlSchemaInfo(); XmlSchemaSet schemas = new XmlSchemaSet(); schemas.Add("", XmlReader.Create(new StringReader("<?xml version=\"1.0\"?>\n" + "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\"\n" + " xmlns:temp=\"uri:tempuri\">\n" + " <xs:import namespace=\"uri:tempuri\" />\n" + " <xs:element name=\"ImportElement\">\n" + " <xs:complexType>\n" + " <xs:choice>\n" + " <xs:element name=\"elem1\" />\n" + " <xs:element ref=\"temp:elem1\" />\n" + " <xs:element name=\"elem2\" />\n" + " </xs:choice>\n" + " </xs:complexType>\n" + " </xs:element>\n" + "</xs:schema>"))); schemas.Add("uri:tempuri", TestData + XSDFILE_TARGET_NAMESPACE); val = CreateValidator(schemas); val.Initialize(); val.ValidateElement("ImportElement", "", info); val.ValidateEndOfAttributes(null); CheckExpectedElements(val.GetExpectedParticles(), new XmlQualifiedName[] { new XmlQualifiedName("elem1"), new XmlQualifiedName("elem1", "uri:tempuri"), new XmlQualifiedName("elem2") }); return; } [Fact] public void CallForElementWithoutTypeDefined() { XmlSchemaValidator val; XmlSchemaInfo info = new XmlSchemaInfo(); XmlSchemaSet schemas = new XmlSchemaSet(); XmlSchemaParticle[] result; val = CreateValidator(XSDFILE_GET_EXPECTED_PARTICLES); val.Initialize(); val.ValidateElement("NoTypeElement", "", info); val.ValidateEndOfAttributes(null); result = val.GetExpectedParticles(); Assert.Equal(result.Length, 1); Assert.True(result[0] is XmlSchemaAny); Assert.Equal((result[0] as XmlSchemaAny).Namespace, null); Assert.Equal((result[0] as XmlSchemaAny).ProcessContents, XmlSchemaContentProcessing.Lax); return; } private void CheckExpectedElements(XmlSchemaParticle[] result, XmlQualifiedName[] names) { int cntFound; Assert.Equal(result.Length, names.Length); foreach (XmlSchemaParticle res in result) Assert.True(res is XmlSchemaElement); foreach (XmlQualifiedName n in names) { cntFound = 0; foreach (XmlSchemaParticle res in result) { if (n == (res as XmlSchemaElement).QualifiedName) cntFound++; } Assert.True(cntFound != 0); Assert.True(cntFound <= 1); } } } }
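// -----------------------------------------------------------------------------
// Illustrative sketch (not part of the test class above): the push-model
// validation loop that GetExpectedParticles is driven by, outside the test
// harness. The schema file name and element names are hypothetical; only the
// XmlSchemaValidator calls mirror what the tests above exercise.
// -----------------------------------------------------------------------------
using System;
using System.Xml;
using System.Xml.Schema;

internal static class ExpectedParticlesExample
{
    internal static void Demo()
    {
        var schemas = new XmlSchemaSet();
        schemas.Add("", "books.xsd"); // hypothetical schema file

        var nameTable = new NameTable();
        var validator = new XmlSchemaValidator(
            nameTable, schemas, new XmlNamespaceManager(nameTable), XmlSchemaValidationFlags.None);

        validator.Initialize();

        var info = new XmlSchemaInfo();
        validator.ValidateElement("books", "", info); // hypothetical root element
        validator.ValidateEndOfAttributes(null);

        // At this point GetExpectedParticles reports what may legally come next.
        foreach (XmlSchemaParticle particle in validator.GetExpectedParticles())
        {
            var element = particle as XmlSchemaElement;
            Console.WriteLine(element != null
                ? element.QualifiedName.ToString()
                : particle.GetType().Name); // e.g. XmlSchemaAny for wildcards
        }
    }
}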
#region Header /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * Copyright (c) 2007-2008 James Nies and NArrange contributors. * All rights reserved. * * This program and the accompanying materials are made available under * the terms of the Common Public License v1.0 which accompanies this * distribution. * * Redistribution and use in source and binary forms, with or * without modification, are permitted provided that the following * conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * *<author>James Nies</author> *~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/ #endregion Header namespace NArrange.Core.CodeElements { using System; using System.Collections.Generic; /// <summary> /// Comparer for ICodeElements. /// </summary> public class ElementComparer : IComparer<ICodeElement> { #region Fields /// <summary> /// Comparison element attribute. /// </summary> private ElementAttributeType _compareAttribute; /// <summary> /// Comparison delegate. /// </summary> private Comparison<ICodeElement> _comparison; /// <summary> /// Inner comparer. /// </summary> private IComparer<ICodeElement> _innerComparer; /// <summary> /// Sort direction. /// </summary> private SortDirection _sortDirection; #endregion Fields #region Constructors /// <summary> /// Creates a new ElementComparer. /// </summary> /// <param name="compareAttribute">The compare attribute.</param> /// <param name="sortDirection">The sort direction.</param> public ElementComparer(ElementAttributeType compareAttribute, SortDirection sortDirection) : this(compareAttribute, sortDirection, null) { } /// <summary> /// Create a new ElementComparer. /// </summary> /// <param name="compareAttribute">The compare attribute.</param> /// <param name="sortDirection">The sort direction.</param> /// <param name="innerComparer">The inner comparer.</param> public ElementComparer( ElementAttributeType compareAttribute, SortDirection sortDirection, IComparer<ICodeElement> innerComparer) { _compareAttribute = compareAttribute; _sortDirection = sortDirection; _innerComparer = innerComparer; _comparison = CreateComparison(compareAttribute); } #endregion Constructors #region Methods /// <summary> /// Compares two objects and returns a value indicating whether one is less than, equal to, or greater than the other. 
/// </summary> /// <param name="x">The first object to compare.</param> /// <param name="y">The second object to compare.</param> /// <returns> /// Less than zero: x is less than y. Zero: x equals y. Greater than zero: x is greater than y. /// </returns> public int Compare(ICodeElement x, ICodeElement y) { int compareValue = 0; if (_sortDirection != SortDirection.None) { compareValue = _comparison(x, y); // // Inner sort? // if (compareValue == 0) { if (_innerComparer != null) { compareValue = _innerComparer.Compare(x, y); } } else if (_sortDirection == SortDirection.Descending) { compareValue = -compareValue; } } return compareValue; } /// <summary> /// Creates a comparison delegate based on the configuration. /// </summary> /// <param name="compareAttribute">The compare attribute.</param> /// <returns> /// Comparison delegate for two code elements. /// </returns> public Comparison<ICodeElement> CreateComparison(ElementAttributeType compareAttribute) { Comparison<ICodeElement> comparison = delegate(ICodeElement x, ICodeElement y) { int compareValue = 0; if (x == null && y != null) { compareValue = -1; } else if (x != null && y == null) { compareValue = 1; } else { switch (compareAttribute) { case ElementAttributeType.Access: AttributedElement attributedX = x as AttributedElement; AttributedElement attributedY = y as AttributedElement; if (attributedX != null && attributedY != null) { compareValue = attributedX.Access.CompareTo(attributedY.Access); } break; case ElementAttributeType.Modifier: MemberElement memberX = x as MemberElement; MemberElement memberY = y as MemberElement; if (memberX != null && memberY != null) { compareValue = memberX.MemberModifiers.CompareTo(memberY.MemberModifiers); } break; case ElementAttributeType.ElementType: compareValue = x.ElementType.CompareTo(y.ElementType); break; default: if (compareAttribute == ElementAttributeType.Type && x is TypeElement && y is TypeElement) { compareValue = ((TypeElement)x).Type.CompareTo(((TypeElement)y).Type); } else if (compareAttribute == ElementAttributeType.Type && x is UsingElement && y is UsingElement) { compareValue = ((UsingElement)x).Type.CompareTo(((UsingElement)y).Type); } else { string attributeX = ElementUtilities.GetAttribute(compareAttribute, x); string attributeY = ElementUtilities.GetAttribute(compareAttribute, y); compareValue = StringComparer.OrdinalIgnoreCase.Compare(attributeX, attributeY); } break; } } return compareValue; }; return comparison; } #endregion Methods } }
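// -----------------------------------------------------------------------------
// Illustrative sketch (not part of the class above): chaining ElementComparer
// instances through the innerComparer argument to get a multi-key sort. The
// element list is hypothetical, and ElementAttributeType.Name,
// SortDirection.Ascending and the NArrange.Core.Configuration namespace are
// assumed to exist alongside the members referenced above.
// -----------------------------------------------------------------------------
using System.Collections.Generic;
using NArrange.Core.CodeElements;
using NArrange.Core.Configuration; // assumed location of ElementAttributeType/SortDirection

internal static class ElementComparerExample
{
    internal static void SortMembers(List<ICodeElement> elements)
    {
        // Sort by element type first, then by access, then alphabetically by name.
        IComparer<ICodeElement> byName =
            new ElementComparer(ElementAttributeType.Name, SortDirection.Ascending);
        IComparer<ICodeElement> byAccess =
            new ElementComparer(ElementAttributeType.Access, SortDirection.Ascending, byName);
        IComparer<ICodeElement> byElementType =
            new ElementComparer(ElementAttributeType.ElementType, SortDirection.Ascending, byAccess);

        elements.Sort(byElementType);
    }
}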
using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.ComponentModel; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Linq; using System.Net.Http; using System.Net.Http.Headers; using System.Web.Http; using System.Web.Http.Controllers; using System.Web.Http.Description; using openvpn.api.Areas.HelpPage.ModelDescriptions; using openvpn.api.Areas.HelpPage.Models; namespace openvpn.api.Areas.HelpPage { public static class HelpPageConfigurationExtensions { private const string ApiModelPrefix = "MS_HelpPageApiModel_"; /// <summary> /// Sets the documentation provider for help page. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="documentationProvider">The documentation provider.</param> public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider) { config.Services.Replace(typeof(IDocumentationProvider), documentationProvider); } /// <summary> /// Sets the objects that will be used by the formatters to produce sample requests/responses. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sampleObjects">The sample objects.</param> public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects) { config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects; } /// <summary> /// Sets the sample request directly for the specified media type and action. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample request.</param> /// <param name="mediaType">The media type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample); } /// <summary> /// Sets the sample request directly for the specified media type and action with parameters. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample request.</param> /// <param name="mediaType">The media type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample); } /// <summary> /// Sets the sample request directly for the specified media type of the action. 
/// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample response.</param> /// <param name="mediaType">The media type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample); } /// <summary> /// Sets the sample response directly for the specified media type of the action with specific parameters. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample response.</param> /// <param name="mediaType">The media type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample); } /// <summary> /// Sets the sample directly for all actions with the specified media type. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample.</param> /// <param name="mediaType">The media type.</param> public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample); } /// <summary> /// Sets the sample directly for all actions with the specified type and media type. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sample">The sample.</param> /// <param name="mediaType">The media type.</param> /// <param name="type">The parameter type or return type of an action.</param> public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type) { config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample); } /// <summary> /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. /// The help page will use this information to produce more accurate request samples. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="type">The type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName) { config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type); } /// <summary> /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. 
/// The help page will use this information to produce more accurate request samples. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="type">The type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames) { config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type); } /// <summary> /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. /// The help page will use this information to produce more accurate response samples. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="type">The type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName) { config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type); } /// <summary> /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. /// The help page will use this information to produce more accurate response samples. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="type">The type.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames) { config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type); } /// <summary> /// Gets the help page sample generator. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <returns>The help page sample generator.</returns> public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config) { return (HelpPageSampleGenerator)config.Properties.GetOrAdd( typeof(HelpPageSampleGenerator), k => new HelpPageSampleGenerator()); } /// <summary> /// Sets the help page sample generator. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="sampleGenerator">The help page sample generator.</param> public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator) { config.Properties.AddOrUpdate( typeof(HelpPageSampleGenerator), k => sampleGenerator, (k, o) => sampleGenerator); } /// <summary> /// Gets the model description generator. 
/// </summary> /// <param name="config">The configuration.</param> /// <returns>The <see cref="ModelDescriptionGenerator"/></returns> public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config) { return (ModelDescriptionGenerator)config.Properties.GetOrAdd( typeof(ModelDescriptionGenerator), k => InitializeModelDescriptionGenerator(config)); } /// <summary> /// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls. /// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param> /// <returns> /// An <see cref="HelpPageApiModel"/> /// </returns> public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId) { object model; string modelId = ApiModelPrefix + apiDescriptionId; if (!config.Properties.TryGetValue(modelId, out model)) { Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions; ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase)); if (apiDescription != null) { model = GenerateApiModel(apiDescription, config); config.Properties.TryAdd(modelId, model); } } return (HelpPageApiModel)model; } private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config) { HelpPageApiModel apiModel = new HelpPageApiModel() { ApiDescription = apiDescription, }; ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator(); HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator(); GenerateUriParameters(apiModel, modelGenerator); GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator); GenerateResourceDescription(apiModel, modelGenerator); GenerateSamples(apiModel, sampleGenerator); return apiModel; } private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator) { ApiDescription apiDescription = apiModel.ApiDescription; foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions) { if (apiParameter.Source == ApiParameterSource.FromUri) { HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor; Type parameterType = null; ModelDescription typeDescription = null; ComplexTypeModelDescription complexTypeDescription = null; if (parameterDescriptor != null) { parameterType = parameterDescriptor.ParameterType; typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType); complexTypeDescription = typeDescription as ComplexTypeModelDescription; } // Example: // [TypeConverter(typeof(PointConverter))] // public class Point // { // public Point(int x, int y) // { // X = x; // Y = y; // } // public int X { get; set; } // public int Y { get; set; } // } // Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection. // // public class Point // { // public int X { get; set; } // public int Y { get; set; } // } // Regular complex class Point will have properties X and Y added to UriParameters collection. 
if (complexTypeDescription != null && !IsBindableWithTypeConverter(parameterType)) { foreach (ParameterDescription uriParameter in complexTypeDescription.Properties) { apiModel.UriParameters.Add(uriParameter); } } else if (parameterDescriptor != null) { ParameterDescription uriParameter = AddParameterDescription(apiModel, apiParameter, typeDescription); if (!parameterDescriptor.IsOptional) { uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" }); } object defaultValue = parameterDescriptor.DefaultValue; if (defaultValue != null) { uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) }); } } else { Debug.Assert(parameterDescriptor == null); // If parameterDescriptor is null, this is an undeclared route parameter which only occurs // when source is FromUri. Ignored in request model and among resource parameters but listed // as a simple string here. ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string)); AddParameterDescription(apiModel, apiParameter, modelDescription); } } } } private static bool IsBindableWithTypeConverter(Type parameterType) { if (parameterType == null) { return false; } return TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string)); } private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel, ApiParameterDescription apiParameter, ModelDescription typeDescription) { ParameterDescription parameterDescription = new ParameterDescription { Name = apiParameter.Name, Documentation = apiParameter.Documentation, TypeDescription = typeDescription, }; apiModel.UriParameters.Add(parameterDescription); return parameterDescription; } private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator) { ApiDescription apiDescription = apiModel.ApiDescription; foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions) { if (apiParameter.Source == ApiParameterSource.FromBody) { Type parameterType = apiParameter.ParameterDescriptor.ParameterType; apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType); apiModel.RequestDocumentation = apiParameter.Documentation; } else if (apiParameter.ParameterDescriptor != null && apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)) { Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription); if (parameterType != null) { apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType); } } } } private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator) { ResponseDescription response = apiModel.ApiDescription.ResponseDescription; Type responseType = response.ResponseType ?? 
response.DeclaredType; if (responseType != null && responseType != typeof(void)) { apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType); } } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")] private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator) { try { foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription)) { apiModel.SampleRequests.Add(item.Key, item.Value); LogInvalidSampleAsError(apiModel, item.Value); } foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription)) { apiModel.SampleResponses.Add(item.Key, item.Value); LogInvalidSampleAsError(apiModel, item.Value); } } catch (Exception e) { apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture, "An exception has occurred while generating the sample. Exception message: {0}", HelpPageSampleGenerator.UnwrapException(e).Message)); } } private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType) { parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault( p => p.Source == ApiParameterSource.FromBody || (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))); if (parameterDescription == null) { resourceType = null; return false; } resourceType = parameterDescription.ParameterDescriptor.ParameterType; if (resourceType == typeof(HttpRequestMessage)) { HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator(); resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription); } if (resourceType == null) { parameterDescription = null; return false; } return true; } private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config) { ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config); Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions; foreach (ApiDescription api in apis) { ApiParameterDescription parameterDescription; Type parameterType; if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType)) { modelGenerator.GetOrCreateModelDescription(parameterType); } } return modelGenerator; } private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample) { InvalidSample invalidSample = sample as InvalidSample; if (invalidSample != null) { apiModel.ErrorMessages.Add(invalidSample.ErrorMessage); } } } }
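// -----------------------------------------------------------------------------
// Illustrative sketch (not part of the class above): how these extension methods
// are typically wired up at startup, e.g. from a HelpPageConfig.Register-style
// method. XmlDocumentationProvider, the controller/action names and the sample
// values are assumptions for illustration; only the extension methods defined
// above are relied upon.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Net.Http.Headers;
using System.Web.Http;

namespace openvpn.api.Areas.HelpPage
{
    public static class HelpPageSetupExample
    {
        public static void Register(HttpConfiguration config)
        {
            // Pull member summaries from the build-time XML documentation file
            // (XmlDocumentationProvider ships with the HelpPage area template).
            config.SetDocumentationProvider(
                new XmlDocumentationProvider(AppDomain.CurrentDomain.BaseDirectory + "App_Data/XmlDocument.xml"));

            // Seed simple types with fixed sample values used when generating samples.
            config.SetSampleObjects(new Dictionary<Type, object>
            {
                { typeof(string), "sample string" },
                { typeof(int), 42 }
            });

            // Hand-written sample response for one action and media type
            // (controller and action names here are hypothetical).
            config.SetSampleResponse(
                "{ \"status\": \"ok\" }",
                new MediaTypeHeaderValue("application/json"),
                "Session", "Get");
        }
    }
}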
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; using Xunit; namespace ComparedQueryable.Test.NativeQueryableTests { public class QueryFromExpressionTests { private class SimplePair : IEnumerable<int> { public int First { get; set; } public int Second { get; set; } public IEnumerator<int> GetEnumerator() { yield return First; yield return Second; } IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); } } private static IQueryProvider _prov = Enumerable.Empty<int>().AsNaturalQueryable().Provider; [Fact] public void ExpressionToQueryFromProvider() { Expression exp = Expression.Constant(Enumerable.Range(0, 2).AsNaturalQueryable()); IQueryable<int> q = _prov.CreateQuery<int>(exp); Assert.Equal(Enumerable.Range(0, 2), q); } [Fact] public void ExpressionToQueryByConstructor() { Expression exp = Expression.Constant(Enumerable.Range(0, 2).AsNaturalQueryable()); IQueryable<int> q = new EnumerableQuery<int>(exp); Assert.Equal(Enumerable.Range(0, 2), q); } [Fact] public void ConditionalEqualModuloConstantConstant() { Expression cond = Expression.Condition( Expression.Equal( Expression.Modulo(Expression.Constant(1), Expression.Constant(2)), Expression.Constant(0) ), Expression.Constant(Enumerable.Range(0, 2).AsNaturalQueryable()), Expression.Constant(Enumerable.Range(3, 2).AsNaturalQueryable()) ); IQueryable<int> q = _prov.CreateQuery<int>(cond); Assert.Equal(Enumerable.Range(3, 2), q); } private static IQueryable<char> AsciiControlCharacters { get { return Enumerable.Range(0, 128).AsNaturalQueryable() .Select(i => (char)i) .Where(c => char.IsControl(c)); } } [Fact] public void PropertyAccess() { Expression access = Expression.Property(null, typeof(QueryFromExpressionTests), "AsciiControlCharacters"); IQueryable<char> q = _prov.CreateQuery<char>(access); Assert.Equal(Enumerable.Range(0, 128).Select(i => (char)i).Where(c => char.IsControl(c)), q); } [Fact] public void ConditionalNotNotEqualAddPlusConstantNegateConstant() { Expression cond = Expression.Condition( Expression.Not( Expression.NotEqual( Expression.Add(Expression.UnaryPlus(Expression.Constant(1)), Expression.Negate(Expression.Constant(2))), Expression.Constant(-1) ) ), Expression.Constant(Enumerable.Range(0, 2).AsNaturalQueryable()), Expression.Constant(Enumerable.Range(3, 2).AsNaturalQueryable()) ); IQueryable<int> q = _prov.CreateQuery<int>(cond); Assert.Equal(Enumerable.Range(0, 2), q); } [Fact] public void ConditionalNotNotEqualAddCheckedPlusConstantNegateCheckedConstant() { Expression cond = Expression.Condition( Expression.Not( Expression.NotEqual( Expression.AddChecked(Expression.UnaryPlus(Expression.Constant(1)), Expression.NegateChecked(Expression.Constant(2))), Expression.Constant(-1) ) ), Expression.Constant(Enumerable.Range(0, 2).AsNaturalQueryable()), Expression.Constant(Enumerable.Range(3, 2).AsNaturalQueryable()) ); IQueryable<int> q = _prov.CreateQuery<int>(cond); Assert.Equal(Enumerable.Range(0, 2), q); } [Fact] public void ConditionalLogicalOperators() { Expression logic = Expression.OrElse( Expression.AndAlso( Expression.LessThanOrEqual(Expression.Constant(3), Expression.Constant(4)), Expression.LessThan(Expression.Constant(2), Expression.Constant(1)) ), Expression.Or( Expression.And( Expression.GreaterThan(Expression.Constant(2), 
Expression.Constant(1)), Expression.GreaterThanOrEqual( Expression.Constant(8), Expression.ExclusiveOr(Expression.Constant(3), Expression.Constant(5)) ) ), Expression.Constant(true) ) ); Expression cond = Expression.Condition( logic, Expression.Constant(Enumerable.Range(0, 2).AsNaturalQueryable()), Expression.Constant(Enumerable.Range(3, 2).AsNaturalQueryable()) ); IQueryable<int> q = _prov.CreateQuery<int>(cond); Assert.Equal(Enumerable.Range(0, 2), q); } [Fact] public void SubtractionAndCalls() { Expression rangeCall = Expression.Call( typeof(Enumerable), "Range", new Type[0], Expression.Subtract(Expression.Constant(6), Expression.Constant(2)), Expression.SubtractChecked(Expression.Constant(12), Expression.Constant(3)) ); Expression call = Expression.Call( typeof(Queryable), "AsQueryable", new[] { typeof(int) }, rangeCall ); IQueryable<int> q = _prov.CreateQuery<int>(call); Assert.Equal(Enumerable.Range(4, 9), q); } [Fact] public void MultiplicationAndCalls() { Expression rangeCall = Expression.Call( typeof(Enumerable), "Range", new Type[0], Expression.Multiply(Expression.Constant(4), Expression.Constant(5)), Expression.MultiplyChecked(Expression.Constant(3), Expression.Constant(2)) ); Expression call = Expression.Call( typeof(Queryable), "AsQueryable", new[] { typeof(int) }, rangeCall ); IQueryable<int> q = _prov.CreateQuery<int>(call); Assert.Equal(Enumerable.Range(20, 6), q); } [Fact] public void ConvertsNewArrayAndArrayLength() { Expression cond = Expression.Condition( Expression.Equal( Expression.AddChecked( Expression.Convert( Expression.ArrayLength(Expression.NewArrayInit(typeof(int), Enumerable.Range(0, 3).Select(i => Expression.Constant(i)))), typeof(long)), Expression.ConvertChecked( Expression.ArrayLength(Expression.NewArrayBounds(typeof(bool), Expression.Constant(2))), typeof(long) ) ), Expression.Constant(5L) ), Expression.Constant(Enumerable.Range(0, 2).AsNaturalQueryable()), Expression.Constant(Enumerable.Range(3, 2).AsNaturalQueryable()) ); IQueryable<int> q = _prov.CreateQuery<int>(cond); Assert.Equal(Enumerable.Range(0, 2), q); } [Fact] public void TypeAs() { Expression cond = Expression.Condition( Expression.Equal( Expression.Constant(null), Expression.TypeAs(Expression.Constant("", typeof(object)), typeof(string)) ), Expression.Constant(Enumerable.Range(0, 2).AsNaturalQueryable()), Expression.Constant(Enumerable.Range(3, 2).AsNaturalQueryable()) ); IQueryable<int> q = _prov.CreateQuery<int>(cond); Assert.Equal(Enumerable.Range(3, 2), q); cond = Expression.Condition( Expression.Equal( Expression.Constant(null), Expression.TypeAs(Expression.Constant("", typeof(object)), typeof(Uri)) ), Expression.Constant(Enumerable.Range(0, 2).AsNaturalQueryable()), Expression.Constant(Enumerable.Range(3, 2).AsNaturalQueryable()) ); q = _prov.CreateQuery<int>(cond); Assert.Equal(Enumerable.Range(0, 2), q); } [Fact] public void TypeIs() { Expression cond = Expression.Condition( Expression.TypeIs(Expression.Constant("", typeof(object)), typeof(string)), Expression.Constant(Enumerable.Range(0, 2).AsNaturalQueryable()), Expression.Constant(Enumerable.Range(3, 2).AsNaturalQueryable()) ); IQueryable<int> q = _prov.CreateQuery<int>(cond); Assert.Equal(Enumerable.Range(0, 2), q); cond = Expression.Condition( Expression.TypeIs(Expression.New(typeof(object)), typeof(string)), Expression.Constant(Enumerable.Range(0, 2).AsNaturalQueryable()), Expression.Constant(Enumerable.Range(3, 2).AsNaturalQueryable()) ); q = _prov.CreateQuery<int>(cond); Assert.Equal(Enumerable.Range(3, 2), q); } [Fact] 
public void MemberInit() { Expression init = Expression.MemberInit( Expression.New(typeof(SimplePair)), Expression.Bind(typeof(SimplePair).GetMember("First")[0], Expression.Constant(8)), Expression.Bind(typeof(SimplePair).GetMember("Second")[0], Expression.Constant(13)) ); Expression call = Expression.Call( typeof(Queryable), "AsQueryable", new[] { typeof(int) }, init ); IQueryable<int> q = _prov.CreateQuery<int>(call); Assert.Equal(new[] { 8, 13 }, q); } [Fact] public void InvokeAndMemberAccess() { Expression<Func<int, IQueryable<char>>> lambda = start => "acbdefghijklmnop".AsNaturalQueryable().Skip(start); Expression invoke = Expression.Invoke(lambda, Expression.Constant(2)); IQueryable<char> q = _prov.CreateQuery<char>(invoke); Assert.Equal("bdefghijklmnop".ToCharArray(), q.ToArray()); } [Fact] public void QueryWrappedAsConstant() { Expression cond = Expression.Condition( Expression.Equal( Expression.Modulo(Expression.Constant(1), Expression.Constant(2)), Expression.Constant(0) ), Expression.Constant(Enumerable.Range(0, 2).AsNaturalQueryable()), Expression.Constant(Enumerable.Range(3, 2).AsNaturalQueryable()) ); IQueryable<int> q = _prov.CreateQuery<int>(Expression.Constant(_prov.CreateQuery<int>(cond))); Assert.Equal(Enumerable.Range(3, 2), q); } private sealed class BogusExpression : Expression { public override ExpressionType NodeType { get { return (ExpressionType)(-1); } } public override Type Type { get { return typeof(IQueryable<bool>); } } } [Fact] public void UnknownExpressionType() { IQueryable<bool> q = _prov.CreateQuery<bool>(new BogusExpression()); Assert.Throws<ArgumentException>(() => q.GetEnumerator()); } private IQueryable<string> SimpleMethod() { return new[] { "a", "b", "c" }.AsNaturalQueryable(); } [Fact] public void SimpleMethodCall() { Expression call = Expression.Call(Expression.Constant(this), "SimpleMethod", new Type[0]); IQueryable<string> q = _prov.CreateQuery<string>(call); Assert.Equal(new[] { "a", "b", "c" }, q); } private static IEnumerable<char> IncrementCharacters(char start, char end) { for (; start != end; ++start) { yield return start; } } private IQueryable<char> ParameterMethod(char start, char end) { return IncrementCharacters(start, end).AsNaturalQueryable(); } [Fact] public void ParameterMethodCallViaLambda() { ParameterExpression start = Expression.Parameter(typeof(char)); ParameterExpression end = Expression.Parameter(typeof(char)); Expression call = Expression.Call(Expression.Constant(this), "ParameterMethod", new Type[0], start, end); Expression lambda = Expression.Lambda<Func<char, char, IQueryable<char>>>(call, start, end); Expression invoke = Expression.Invoke(lambda, Expression.Constant('b'), Expression.Constant('g')); Assert.Equal("bcdef".ToCharArray(), _prov.CreateQuery<char>(invoke)); } private static class TestLinqExtensions { public static IEnumerable<int> RunningTotals(IEnumerable<int> source) { if (source == null) throw new ArgumentNullException(nameof(source)); return RunningTotalsIterator(source); } public static IEnumerable<int> RunningTotalsIterator(IEnumerable<int> source) { using (var en = source.GetEnumerator()) { if (en.MoveNext()) { int current = en.Current; yield return current; while (en.MoveNext()) yield return current += en.Current; } } } public static IQueryable<int> RunningTotals(IQueryable<int> source) { // A real class would only overload for IQueryable separately if there // was a reason for doing so, but this suffices to test. 
return RunningTotals(source.AsEnumerable()).AsNaturalQueryable(); } public static IQueryable<int> RunningTotalsNoMatch(IQueryable<int> source) { return RunningTotals(source); } public static IQueryable<int> RunningTotals(IQueryable<int> source, int initialTally) { return RunningTotals(Enumerable.Repeat(initialTally, 1).AsNaturalQueryable().Concat(source)); } } private class TestLinqInstanceNoMatch { public IQueryable<int> RunningTotals(IQueryable<int> source) { return TestLinqExtensions.RunningTotals(source); } } [Fact] public void EnumerableQueryableAsInternalArgumentToMethod() { Expression call = Expression.Call( typeof(TestLinqExtensions) .GetMethods() .First(mi => mi.Name == "RunningTotals" && mi.GetParameters().Length == 1 && mi.GetParameters()[0].ParameterType == typeof(IQueryable<int>)), Expression.Constant(Enumerable.Range(1, 3).AsNaturalQueryable()) ); IQueryable<int> q = _prov.CreateQuery<int>(call); Assert.Equal(new[] { 1, 3, 6 }, q); } [Fact] public void EnumerableQueryableAsInternalArgumentToMethodNoMatch() { Expression call = Expression.Call( typeof(TestLinqExtensions) .GetMethods() .First(mi => mi.Name == "RunningTotalsNoMatch" && mi.GetParameters()[0].ParameterType == typeof(IQueryable<int>)), Expression.Constant(Enumerable.Range(1, 3).AsNaturalQueryable()) ); IQueryable<int> q = _prov.CreateQuery<int>(call); Assert.Throws<InvalidOperationException>(() => q.GetEnumerator()); } [Fact] public void EnumerableQueryableAsInternalArgumentToMethodNoArgumentMatch() { Expression call = Expression.Call( typeof(TestLinqExtensions) .GetMethods() .First(mi => mi.Name == "RunningTotals" && mi.GetParameters().Length == 2), Expression.Constant(Enumerable.Range(1, 3).AsNaturalQueryable()), Expression.Constant(3) ); IQueryable<int> q = _prov.CreateQuery<int>(call); Assert.Throws<InvalidOperationException>(() => q.GetEnumerator()); } [Fact] public void EnumerableQueryableAsInternalArgumentToInstanceMethodNoMatch() { Expression call = Expression.Call( Expression.Constant(new TestLinqInstanceNoMatch()), typeof(TestLinqInstanceNoMatch) .GetMethods() .First(mi => mi.Name == "RunningTotals"), Expression.Constant(Enumerable.Range(1, 3).AsNaturalQueryable()) ); IQueryable<int> q = _prov.CreateQuery<int>(call); Assert.Throws<InvalidOperationException>(() => q.GetEnumerator()); } } }
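// -----------------------------------------------------------------------------
// Illustrative sketch (not part of the tests above): hand-building a Queryable
// call expression and executing it through the provider, which is the pattern
// the tests exercise via AsNaturalQueryable. Plain Queryable/AsQueryable is used
// here so the snippet stands alone without the ComparedQueryable library.
// -----------------------------------------------------------------------------
using System;
using System.Linq;
using System.Linq.Expressions;

internal static class QueryFromExpressionExample
{
    internal static void Demo()
    {
        IQueryable<int> source = Enumerable.Range(0, 10).AsQueryable();

        // x => x % 2 == 0, built node by node.
        ParameterExpression x = Expression.Parameter(typeof(int), "x");
        Expression<Func<int, bool>> isEven = Expression.Lambda<Func<int, bool>>(
            Expression.Equal(
                Expression.Modulo(x, Expression.Constant(2)),
                Expression.Constant(0)),
            x);

        // Queryable.Where<int>(source, isEven) expressed as a method-call node.
        Expression whereCall = Expression.Call(
            typeof(Queryable), "Where", new[] { typeof(int) },
            source.Expression, Expression.Quote(isEven));

        IQueryable<int> evens = source.Provider.CreateQuery<int>(whereCall);
        Console.WriteLine(string.Join(",", evens)); // 0,2,4,6,8
    }
}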
using System; using System.IO; using System.Reflection; using System.Threading; using System.Threading.Tasks; using FluentAssertions; using Microsoft.Extensions.Hosting; using NUnit.Framework; using OpenQA.Selenium; using OpenQA.Selenium.Chrome; namespace JustSending.Test { [TestFixture] public class IntegrationTest : IDisposable { // ToDo: Use puppeteer-sharp private string _seleniumDriverPath; private IHost _host; private Uri _appHostName; public IntegrationTest() { var basePath = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location); _seleniumDriverPath = Path.Combine(Path.GetFullPath("../../../", basePath!), "Drivers"); } [OneTimeSetUp] public async Task Init() { await EnsureAppRunning().ConfigureAwait(false); } [OneTimeTearDown] public void Dispose() { _host.StopAsync().GetAwaiter().GetResult(); _host.Dispose(); } private Task EnsureAppRunning() { const int port = 5000; Environment.SetEnvironmentVariable("ASPNETCORE_URLS", $"http://*:{port}"); Directory.SetCurrentDirectory(Path.Join( "..", "..", "..", "..", "..", "src", "JustSending")); _appHostName = new Uri($"http://localhost:{port}"); _host = Program.BuildWebHost(new string[] { }) .Build(); _host.RunAsync(); return Task.CompletedTask; } private IWebDriver CreateDriver() { // Require an working installation if chrome in dev machine // which is installed during docker-build return CreateDriverChrome(); } private IWebDriver CreateDriverChrome() { var chromeOpt = new ChromeOptions(); chromeOpt.AddArguments("--headless"); chromeOpt.AddArguments("--disable-gpu"); chromeOpt.AddArguments("--no-sandbox"); if (Platform.CurrentPlatform.IsPlatformType(PlatformType.Unix) && !_seleniumDriverPath.EndsWith("Linux")) _seleniumDriverPath += "/Linux"; var driver = new ChromeDriver(_seleniumDriverPath, chromeOpt); return InitialiseDriver(driver); } private IWebDriver InitialiseDriver(IWebDriver driver) { driver.Manage().Timeouts().PageLoad = TimeSpan.FromSeconds(5); driver.Navigate().GoToUrl(_appHostName); return driver; } private void Pair(IWebDriver client1, IWebDriver client2) { var token = client1.FindElement(By.Id("token")).Text; client2.FindElement(By.Id("Token")).SendKeys(token); client2.FindElement(By.Id("connect")).Click(); WaitMs(2000); } private void Navigate(IWebDriver client1, IWebDriver client2) { client1.FindElement(By.Id("new-session")).Click(); client2.FindElement(By.Id("connect")).Click(); WaitMs(2000); } private void WaitMs(int milliseconds) { Thread.Sleep(milliseconds); } [Test] public void DeleteSession() { using (var client1 = CreateDriver()) using (var client2 = CreateDriver()) { Navigate(client1, client2); Pair(client1, client2); client2 .FindElement(By.Id("deleteBtn")) .Click(); WaitMs(1000); client2 .FindElement(By.ClassName("confirm")) .Click(); WaitMs(1000); var redirectsToHome = $"{_appHostName}?ref=app"; client2.Url.Should().Be(redirectsToHome); client1.Url.Should().Be(redirectsToHome); } } [Test] public void PairMessageExchange() { using (var client1 = CreateDriver()) using (var client2 = CreateDriver()) { Navigate(client1, client2); Pair(client1, client2); WaitMs(2000); SendAndVerifyMessage(); client1.FindElement(By.ClassName("navbar-brand")).Click(); WaitMs(500); client2.FindElement(By.ClassName("confirm")).Click(); WaitMs(500); client2.FindElement(By.ClassName("navbar-brand")).Click(); #region Local Functions void SendAndVerifyMessage() { var msgToSend = Guid.NewGuid().ToString("N"); client2.FindElement(By.Id("ComposerText")).SendKeys(msgToSend); client2.FindElement(By.ClassName("sendBtn")).Click(); 
WaitMs(1000); var textOnClient1 = client1.FindElement(By.CssSelector(".msg-c span.data")).Text; textOnClient1.Should().Be(msgToSend); } #endregion } } [Test] public void RedirectsToLiteSession() { using (var client1 = CreateDriver()) using (var client2 = CreateDriver()) { var id = Guid.NewGuid().ToString("N"); var id2 = Guid.NewGuid().ToString("N"); client1.Navigate().GoToUrl($"{_appHostName}app/lite/{id}/{id2}"); client2.FindElement(By.Id("connect")).Click(); Pair(client1, client2); client2.Url.Should().Be(client1.Url); client2.FindElement(By.Id("erase")).Click(); client2.FindElement(By.Id("erase")).Click(); } } } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.ServiceModel; using System.ServiceModel.Channels; using System.Threading.Tasks; using Infrastructure.Common; using Xunit; public static partial class XmlSerializerFormatTests { private static readonly string s_basicEndpointAddress = Endpoints.HttpBaseAddress_Basic; [WcfFact] [OuterLoop] public static void XmlSerializerFormat_RoundTrips_String() { BasicHttpBinding binding = new BasicHttpBinding(); EndpointAddress endpointAddress = new EndpointAddress(s_basicEndpointAddress); ChannelFactory<IWcfServiceXmlGenerated> factory = new ChannelFactory<IWcfServiceXmlGenerated>(binding, endpointAddress); IWcfServiceXmlGenerated serviceProxy = factory.CreateChannel(); var response = serviceProxy.EchoXmlSerializerFormat("message"); Assert.Equal("message", response); } [WcfFact] [OuterLoop] public static void XmlSerializerFormat_Using_SupportsFault_RoundTrips_String() { BasicHttpBinding binding = new BasicHttpBinding(); EndpointAddress endpointAddress = new EndpointAddress(s_basicEndpointAddress); ChannelFactory<IWcfServiceXmlGenerated> factory = new ChannelFactory<IWcfServiceXmlGenerated>(binding, endpointAddress); IWcfServiceXmlGenerated serviceProxy = factory.CreateChannel(); var response = serviceProxy.EchoXmlSerializerFormatSupportFaults("message", false); Assert.Equal("message", response); } [WcfFact] [OuterLoop] public static void XmlSerializerFormat_Using_SupportsFault_Throws_FaultException() { BasicHttpBinding binding = new BasicHttpBinding(); EndpointAddress endpointAddress = new EndpointAddress(s_basicEndpointAddress); ChannelFactory<IWcfServiceXmlGenerated> factory = new ChannelFactory<IWcfServiceXmlGenerated>(binding, endpointAddress); IWcfServiceXmlGenerated serviceProxy = factory.CreateChannel(); var errorMessage = "ErrorMessage"; try { var response = serviceProxy.EchoXmlSerializerFormatSupportFaults(errorMessage, true); } catch (FaultException e) { Assert.Equal(errorMessage, e.Message); return; } // we shouldn't reach here. 
Assert.True(false); } [WcfFact] [OuterLoop] public static void XmlSerializerFormat_RoundTrips_Using_Rpc() { BasicHttpBinding binding = new BasicHttpBinding(); EndpointAddress endpointAddress = new EndpointAddress(s_basicEndpointAddress); ChannelFactory<IWcfServiceXmlGenerated> factory = new ChannelFactory<IWcfServiceXmlGenerated>(binding, endpointAddress); IWcfServiceXmlGenerated serviceProxy = factory.CreateChannel(); var response = serviceProxy.EchoXmlSerializerFormatUsingRpc("message"); Assert.Equal("message", response); } [WcfFact] [OuterLoop] public static void XmlSerializerFormat_RoundTrips_String_AsyncTask() { BasicHttpBinding binding = new BasicHttpBinding(); EndpointAddress endpointAddress = new EndpointAddress(s_basicEndpointAddress); ChannelFactory<IWcfServiceXmlGenerated> factory = new ChannelFactory<IWcfServiceXmlGenerated>(binding, endpointAddress); IWcfServiceXmlGenerated serviceProxy = factory.CreateChannel(); Task<string> response = serviceProxy.EchoXmlSerializerFormatAsync("message"); response.Wait(); Assert.True(response != null); Assert.Equal("message", response.Result); } [WcfFact] [OuterLoop] public static void XmlSerializerFormat_RoundTrips_CompositeType() { BasicHttpBinding binding = new BasicHttpBinding(); EndpointAddress endpointAddress = new EndpointAddress(s_basicEndpointAddress); ChannelFactory<IWcfServiceXmlGenerated> factory = new ChannelFactory<IWcfServiceXmlGenerated>(binding, endpointAddress); IWcfServiceXmlGenerated serviceProxy = factory.CreateChannel(); var input = new XmlCompositeType(); input.StringValue = "message"; input.BoolValue = false; var response = serviceProxy.GetDataUsingXmlSerializer(input); Assert.True(response != null); Assert.Equal("message", response.StringValue); Assert.True(!input.BoolValue); } [WcfFact] [OuterLoop] public static void XmlSerializerFormat_MessageContract_LoginService() { // *** SETUP *** \\ BasicHttpBinding binding = new BasicHttpBinding(); EndpointAddress endpointAddress = new EndpointAddress(s_basicEndpointAddress); ChannelFactory<ILoginService> factory = new ChannelFactory<ILoginService>(binding, endpointAddress); ILoginService serviceProxy = factory.CreateChannel(); var request = new LoginRequest(); request.clientId = "1"; request.user = "2"; request.pwd = "3"; try { // *** EXECUTE *** \\ var response = serviceProxy.Login(request); // *** VALIDATE *** \\ Assert.True(response != null); Assert.Equal("123", response.@return); // *** CLEANUP *** \\ ((ICommunicationObject)serviceProxy).Close(); factory.Close(); } finally { // *** ENSURE CLEANUP *** \\ ScenarioTestHelpers.CloseCommunicationObjects((ICommunicationObject)serviceProxy, factory); } } [WcfFact] [OuterLoop] // The test is for the case where a paramerter type contains a field // never used.The test is to make sure the reflection info of the type // of the unused field would be kept by Net Native toolchain. 
public static void XmlSerializerFormat_ComplexType_With_FieldType_Never_Used() { // *** SETUP *** \\ BasicHttpBinding binding = new BasicHttpBinding(); EndpointAddress endpointAddress = new EndpointAddress(s_basicEndpointAddress); ChannelFactory<IWcfServiceXmlGenerated> factory = new ChannelFactory<IWcfServiceXmlGenerated>(binding, endpointAddress); IWcfServiceXmlGenerated serviceProxy = factory.CreateChannel(); var complex = new XmlVeryComplexType(); complex.Id = 1; try { // *** EXECUTE *** \\ var response = serviceProxy.EchoXmlVeryComplexType(complex); // *** VALIDATE *** \\ Assert.True(response != null); Assert.True(response.NonInstantiatedField == null); Assert.Equal(complex.Id, response.Id); // *** CLEANUP *** \\ ((ICommunicationObject)serviceProxy).Close(); factory.Close(); } finally { // *** ENSURE CLEANUP *** \\ ScenarioTestHelpers.CloseCommunicationObjects((ICommunicationObject)serviceProxy, factory); } } [WcfFact] [OuterLoop] public static void XmlSerializerFormat_SameNamespace_SameOperation() { // This test covers the scenario where two service contracts share // the same namespace and have the same method. // *** SETUP *** \\ BasicHttpBinding binding = new BasicHttpBinding(); EndpointAddress endpointAddress = new EndpointAddress(s_basicEndpointAddress); ChannelFactory<ISameNamespaceWithIWcfServiceXmlGenerated> factory = new ChannelFactory<ISameNamespaceWithIWcfServiceXmlGenerated>(binding, endpointAddress); ISameNamespaceWithIWcfServiceXmlGenerated serviceProxy = factory.CreateChannel(); try { // *** EXECUTE *** \\ string response = serviceProxy.EchoXmlSerializerFormat("message"); // *** VALIDATE *** \\ Assert.Equal("message", response); // *** CLEANUP *** \\ ((ICommunicationObject)serviceProxy).Close(); } finally { // *** ENSURE CLEANUP *** \\ ScenarioTestHelpers.CloseCommunicationObjects((ICommunicationObject)serviceProxy, factory); } } }
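// A minimal sketch (assumption, not one of the shared test assets) of the create/invoke/close
// pattern the tests above repeat: build a ChannelFactory, call through the proxy, then Close()
// on success or Abort() on failure. Only standard System.ServiceModel APIs are used; the
// helper name ProxyInvoker is hypothetical.
using System;
using System.ServiceModel;
using System.ServiceModel.Channels;

public static class ProxyInvoker
{
    public static TResult Invoke<TContract, TResult>(
        Binding binding, EndpointAddress address, Func<TContract, TResult> call)
    {
        var factory = new ChannelFactory<TContract>(binding, address);
        TContract proxy = factory.CreateChannel();
        try
        {
            TResult result = call(proxy);
            // Graceful close when the call succeeded.
            ((ICommunicationObject)proxy).Close();
            factory.Close();
            return result;
        }
        catch
        {
            // Abort instead of Close when the channel may be faulted.
            ((ICommunicationObject)proxy).Abort();
            factory.Abort();
            throw;
        }
    }
}

// Example usage, mirroring XmlSerializerFormat_RoundTrips_String above:
//   var echoed = ProxyInvoker.Invoke<IWcfServiceXmlGenerated, string>(
//       new BasicHttpBinding(), new EndpointAddress(s_basicEndpointAddress),
//       p => p.EchoXmlSerializerFormat("message"));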
// Copyright (c) The Avalonia Project. All rights reserved. // Licensed under the MIT license. See licence.md file in the project root for full license information. using System; using System.Reactive.Concurrency; using System.Threading; using Avalonia.Animation; using Avalonia.Controls; using Avalonia.Controls.Templates; using Avalonia.Input; using Avalonia.Input.Platform; using Avalonia.Input.Raw; using Avalonia.Platform; using Avalonia.Rendering; using Avalonia.Styling; using Avalonia.Threading; namespace Avalonia { /// <summary> /// Encapsulates a Avalonia application. /// </summary> /// <remarks> /// The <see cref="Application"/> class encapsulates Avalonia application-specific /// functionality, including: /// - A global set of <see cref="DataTemplates"/>. /// - A global set of <see cref="Styles"/>. /// - A <see cref="FocusManager"/>. /// - An <see cref="InputManager"/>. /// - Registers services needed by the rest of Avalonia in the <see cref="RegisterServices"/> /// method. /// - Tracks the lifetime of the application. /// </remarks> public class Application : IApplicationLifecycle, IGlobalDataTemplates, IGlobalStyles, IStyleRoot, IResourceNode { /// <summary> /// The application-global data templates. /// </summary> private DataTemplates _dataTemplates; private readonly Lazy<IClipboard> _clipboard = new Lazy<IClipboard>(() => (IClipboard)AvaloniaLocator.Current.GetService(typeof(IClipboard))); private readonly Styler _styler = new Styler(); private Styles _styles; private IResourceDictionary _resources; private CancellationTokenSource _mainLoopCancellationTokenSource; /// <summary> /// Initializes a new instance of the <see cref="Application"/> class. /// </summary> public Application() { Windows = new WindowCollection(this); OnExit += OnExiting; } /// <inheritdoc/> public event EventHandler<ResourcesChangedEventArgs> ResourcesChanged; /// <summary> /// Gets the current instance of the <see cref="Application"/> class. /// </summary> /// <value> /// The current instance of the <see cref="Application"/> class. /// </value> public static Application Current { get { return AvaloniaLocator.Current.GetService<Application>(); } } /// <summary> /// Gets or sets the application's global data templates. /// </summary> /// <value> /// The application's global data templates. /// </value> public DataTemplates DataTemplates => _dataTemplates ?? (_dataTemplates = new DataTemplates()); /// <summary> /// Gets the application's focus manager. /// </summary> /// <value> /// The application's focus manager. /// </value> public IFocusManager FocusManager { get; private set; } /// <summary> /// Gets the application's input manager. /// </summary> /// <value> /// The application's input manager. /// </value> public InputManager InputManager { get; private set; } /// <summary> /// Gets the application clipboard. /// </summary> public IClipboard Clipboard => _clipboard.Value; /// <summary> /// Gets the application's global resource dictionary. /// </summary> public IResourceDictionary Resources { get => _resources ?? (Resources = new ResourceDictionary()); set { Contract.Requires<ArgumentNullException>(value != null); var hadResources = false; if (_resources != null) { hadResources = _resources.Count > 0; _resources.ResourcesChanged -= ThisResourcesChanged; } _resources = value; _resources.ResourcesChanged += ThisResourcesChanged; if (hadResources || _resources.Count > 0) { ResourcesChanged?.Invoke(this, new ResourcesChangedEventArgs()); } } } /// <summary> /// Gets the application's global styles. 
/// </summary> /// <value> /// The application's global styles. /// </value> /// <remarks> /// Global styles apply to all windows in the application. /// </remarks> public Styles Styles => _styles ?? (_styles = new Styles()); /// <inheritdoc/> bool IDataTemplateHost.IsDataTemplatesInitialized => _dataTemplates != null; /// <summary> /// Gets the styling parent of the application, which is null. /// </summary> IStyleHost IStyleHost.StylingParent => null; /// <inheritdoc/> bool IStyleHost.IsStylesInitialized => _styles != null; /// <inheritdoc/> bool IResourceProvider.HasResources => _resources?.Count > 0; /// <inheritdoc/> IResourceNode IResourceNode.ResourceParent => null; /// <summary> /// Gets or sets the <see cref="ExitMode"/>. This property indicates whether the application exits explicitly or implicitly. /// If <see cref="ExitMode"/> is set to OnExplicitExit the application only closes if Exit is called. /// The default is OnLastWindowClose. /// </summary> /// <value> /// The shutdown mode. /// </value> public ExitMode ExitMode { get; set; } /// <summary> /// Gets or sets the main window of the application. /// </summary> /// <value> /// The main window. /// </value> public Window MainWindow { get; set; } /// <summary> /// Gets the open windows of the application. /// </summary> /// <value> /// The windows. /// </value> public WindowCollection Windows { get; } /// <summary> /// Gets or sets a value indicating whether this instance is exiting. /// </summary> /// <value> /// <c>true</c> if this instance is exiting; otherwise, <c>false</c>. /// </value> internal bool IsExiting { get; set; } /// <summary> /// Initializes the application by loading XAML etc. /// </summary> public virtual void Initialize() { } /// <summary> /// Runs the application's main loop until the <see cref="ICloseable"/> is closed. /// </summary> /// <param name="closable">The closable to track</param> public void Run(ICloseable closable) { if (_mainLoopCancellationTokenSource != null) { throw new Exception("Run should only be called once"); } closable.Closed += (s, e) => Exit(); _mainLoopCancellationTokenSource = new CancellationTokenSource(); Dispatcher.UIThread.MainLoop(_mainLoopCancellationTokenSource.Token); // Make sure we call OnExit in case an error happened and Exit() wasn't called explicitly if (!IsExiting) { OnExit?.Invoke(this, EventArgs.Empty); } } /// <summary> /// Runs the application's main loop until some condition occurs that is specified by ExitMode. /// </summary> /// <param name="mainWindow">The main window</param> public void Run(Window mainWindow) { if (_mainLoopCancellationTokenSource != null) { throw new Exception("Run should only be called once"); } _mainLoopCancellationTokenSource = new CancellationTokenSource(); if (MainWindow == null) { if (mainWindow == null) { throw new ArgumentNullException(nameof(mainWindow)); } if (!mainWindow.IsVisible) { mainWindow.Show(); } MainWindow = mainWindow; } Dispatcher.UIThread.MainLoop(_mainLoopCancellationTokenSource.Token); // Make sure we call OnExit in case an error happened and Exit() wasn't called explicitly if (!IsExiting) { OnExit?.Invoke(this, EventArgs.Empty); } } /// <summary> /// Runs the application's main loop until the <see cref="CancellationToken"/> is canceled.
/// </summary> /// <param name="token">The token to track</param> public void Run(CancellationToken token) { Dispatcher.UIThread.MainLoop(token); // Make sure we call OnExit in case an error happened and Exit() wasn't called explicitly if (!IsExiting) { OnExit?.Invoke(this, EventArgs.Empty); } } /// <summary> /// Exits the application /// </summary> public void Exit() { IsExiting = true; Windows.Clear(); OnExit?.Invoke(this, EventArgs.Empty); _mainLoopCancellationTokenSource?.Cancel(); } /// <inheritdoc/> bool IResourceProvider.TryGetResource(string key, out object value) { value = null; return (_resources?.TryGetResource(key, out value) ?? false) || Styles.TryGetResource(key, out value); } /// <summary> /// Sent when the application is exiting. /// </summary> public event EventHandler OnExit; /// <summary> /// Called when the application is exiting. /// </summary> /// <param name="sender"></param> /// <param name="e"></param> protected virtual void OnExiting(object sender, EventArgs e) { } /// <summary> /// Register's the services needed by Avalonia. /// </summary> public virtual void RegisterServices() { AvaloniaSynchronizationContext.InstallIfNeeded(); FocusManager = new FocusManager(); InputManager = new InputManager(); AvaloniaLocator.CurrentMutable .Bind<IAccessKeyHandler>().ToTransient<AccessKeyHandler>() .Bind<IGlobalDataTemplates>().ToConstant(this) .Bind<IGlobalStyles>().ToConstant(this) .Bind<IFocusManager>().ToConstant(FocusManager) .Bind<IInputManager>().ToConstant(InputManager) .Bind<IKeyboardNavigationHandler>().ToTransient<KeyboardNavigationHandler>() .Bind<IStyler>().ToConstant(_styler) .Bind<IApplicationLifecycle>().ToConstant(this) .Bind<IScheduler>().ToConstant(AvaloniaScheduler.Instance) .Bind<IDragDropDevice>().ToConstant(DragDropDevice.Instance) .Bind<IPlatformDragSource>().ToTransient<InProcessDragSource>(); var clock = new RenderLoopClock(); AvaloniaLocator.CurrentMutable .Bind<IGlobalClock>().ToConstant(clock) .GetService<IRenderLoop>()?.Add(clock); } private void ThisResourcesChanged(object sender, ResourcesChangedEventArgs e) { ResourcesChanged?.Invoke(this, e); } } }
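// An illustrative sketch (assumption) of how the members above fit together: a derived
// application registers services, subscribes to OnExit, and then blocks in Run(Window) until
// Exit() is called or the tracked window closes. Real applications also need the
// platform-specific AppBuilder bootstrapping, which lives outside this file and is omitted,
// so this snippet is not runnable on its own; ExitMode.OnLastWindowClose is the default
// mentioned in the ExitMode documentation above.
using Avalonia;
using Avalonia.Controls;

public class SampleApp : Application
{
    public override void Initialize()
    {
        // Global Styles/DataTemplates would normally be populated (e.g. from XAML) here.
    }
}

public static class SampleAppProgram
{
    public static void Main()
    {
        var app = new SampleApp();
        app.RegisterServices();                       // binds IFocusManager, IInputManager, etc. (see above)
        app.Initialize();
        app.ExitMode = ExitMode.OnLastWindowClose;
        app.OnExit += (sender, e) => System.Console.WriteLine("application exiting");

        var mainWindow = new Window { Title = "Sample" };
        app.Run(mainWindow);                          // shows the window if needed and runs the main loop
    }
}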
// Copyright (c) .NET Foundation. All rights reserved. // Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Buffers; using System.Diagnostics; using System.IO.Pipelines; using System.Net.Sockets; using System.Runtime.InteropServices; using System.Threading; using System.Threading.Tasks; using HtcSharp.HttpModule.Connections.Abstractions.Exceptions; using HtcSharp.HttpModule.Shared; using Microsoft.Extensions.Logging; namespace HtcSharp.HttpModule.Transport.Sockets.Internal { // SourceTools-Start // Remote-File C:\ASP\src\Servers\Kestrel\Transport.Sockets\src\Internal\SocketConnection.cs // Start-At-Remote-Line 17 // SourceTools-End internal sealed class SocketConnection : TransportConnection { private static readonly int MinAllocBufferSize = SlabMemoryPool.BlockSize / 2; private static readonly bool IsWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); private static readonly bool IsMacOS = RuntimeInformation.IsOSPlatform(OSPlatform.OSX); private readonly Socket _socket; private readonly ISocketsTrace _trace; private readonly SocketReceiver _receiver; private readonly SocketSender _sender; private readonly CancellationTokenSource _connectionClosedTokenSource = new CancellationTokenSource(); private readonly object _shutdownLock = new object(); private volatile bool _socketDisposed; private volatile Exception _shutdownReason; private Task _processingTask; private readonly TaskCompletionSource<object> _waitForConnectionClosedTcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously); private bool _connectionClosed; internal SocketConnection(Socket socket, MemoryPool<byte> memoryPool, PipeScheduler scheduler, ISocketsTrace trace, long? maxReadBufferSize = null, long? maxWriteBufferSize = null) { Debug.Assert(socket != null); Debug.Assert(memoryPool != null); Debug.Assert(trace != null); _socket = socket; MemoryPool = memoryPool; _trace = trace; LocalEndPoint = _socket.LocalEndPoint; RemoteEndPoint = _socket.RemoteEndPoint; ConnectionClosed = _connectionClosedTokenSource.Token; // On *nix platforms, Sockets already dispatches to the ThreadPool. // Yes, the IOQueues are still used for the PipeSchedulers. This is intentional. // https://github.com/aspnet/KestrelHttpServer/issues/2573 var awaiterScheduler = IsWindows ? 
scheduler : PipeScheduler.Inline; _receiver = new SocketReceiver(_socket, awaiterScheduler); _sender = new SocketSender(_socket, awaiterScheduler); maxReadBufferSize ??= 0; maxWriteBufferSize ??= 0; var inputOptions = new PipeOptions(MemoryPool, PipeScheduler.ThreadPool, scheduler, maxReadBufferSize.Value, maxReadBufferSize.Value / 2, useSynchronizationContext: false); var outputOptions = new PipeOptions(MemoryPool, scheduler, PipeScheduler.ThreadPool, maxWriteBufferSize.Value, maxWriteBufferSize.Value / 2, useSynchronizationContext: false); var pair = DuplexPipe.CreateConnectionPair(inputOptions, outputOptions); // Set the transport and connection id Transport = pair.Transport; Application = pair.Application; } public PipeWriter Input => Application.Output; public PipeReader Output => Application.Input; public override MemoryPool<byte> MemoryPool { get; } public void Start() { _processingTask = StartAsync(); } private async Task StartAsync() { try { // Spawn send and receive logic var receiveTask = DoReceive(); var sendTask = DoSend(); // Now wait for both to complete await receiveTask; await sendTask; _receiver.Dispose(); _sender.Dispose(); } catch (Exception ex) { _trace.LogError(0, ex, $"Unexpected exception in {nameof(SocketConnection)}.{nameof(StartAsync)}."); } } public override void Abort(ConnectionAbortedException abortReason) { // Try to gracefully close the socket to match libuv behavior. Shutdown(abortReason); // Cancel ProcessSends loop after calling shutdown to ensure the correct _shutdownReason gets set. Output.CancelPendingRead(); } // Only called after connection middleware is complete which means the ConnectionClosed token has fired. public override async ValueTask DisposeAsync() { Transport.Input.Complete(); Transport.Output.Complete(); if (_processingTask != null) { await _processingTask; } _connectionClosedTokenSource.Dispose(); } private async Task DoReceive() { Exception error = null; try { await ProcessReceives(); } catch (SocketException ex) when (IsConnectionResetError(ex.SocketErrorCode)) { // This could be ignored if _shutdownReason is already set. error = new ConnectionResetException(ex.Message, ex); // There's still a small chance that both DoReceive() and DoSend() can log the same connection reset. // Both logs will have the same ConnectionId. I don't think it's worthwhile to lock just to avoid this. if (!_socketDisposed) { _trace.ConnectionReset(ConnectionId); } } catch (Exception ex) when ((ex is SocketException socketEx && IsConnectionAbortError(socketEx.SocketErrorCode)) || ex is ObjectDisposedException) { // This exception should always be ignored because _shutdownReason should be set. error = ex; if (!_socketDisposed) { // This is unexpected if the socket hasn't been disposed yet. _trace.ConnectionError(ConnectionId, error); } } catch (Exception ex) { // This is unexpected. error = ex; _trace.ConnectionError(ConnectionId, error); } finally { // If Shutdown() has already bee called, assume that was the reason ProcessReceives() exited. Input.Complete(_shutdownReason ?? error); FireConnectionClosed(); await _waitForConnectionClosedTcs.Task; } } private async Task ProcessReceives() { // Resolve `input` PipeWriter via the IDuplexPipe interface prior to loop start for performance. var input = Input; while (true) { // Wait for data before allocating a buffer. 
await _receiver.WaitForDataAsync(); // Ensure we have some reasonable amount of buffer space var buffer = input.GetMemory(MinAllocBufferSize); var bytesReceived = await _receiver.ReceiveAsync(buffer); if (bytesReceived == 0) { // FIN _trace.ConnectionReadFin(ConnectionId); break; } input.Advance(bytesReceived); var flushTask = input.FlushAsync(); var paused = !flushTask.IsCompleted; if (paused) { _trace.ConnectionPause(ConnectionId); } var result = await flushTask; if (paused) { _trace.ConnectionResume(ConnectionId); } if (result.IsCompleted || result.IsCanceled) { // Pipe consumer is shut down, do we stop writing break; } } } private async Task DoSend() { Exception shutdownReason = null; Exception unexpectedError = null; try { await ProcessSends(); } catch (SocketException ex) when (IsConnectionResetError(ex.SocketErrorCode)) { shutdownReason = new ConnectionResetException(ex.Message, ex); _trace.ConnectionReset(ConnectionId); } catch (Exception ex) when ((ex is SocketException socketEx && IsConnectionAbortError(socketEx.SocketErrorCode)) || ex is ObjectDisposedException) { // This should always be ignored since Shutdown() must have already been called by Abort(). shutdownReason = ex; } catch (Exception ex) { shutdownReason = ex; unexpectedError = ex; _trace.ConnectionError(ConnectionId, unexpectedError); } finally { Shutdown(shutdownReason); // Complete the output after disposing the socket Output.Complete(unexpectedError); // Cancel any pending flushes so that the input loop is un-paused Input.CancelPendingFlush(); } } private async Task ProcessSends() { // Resolve `output` PipeReader via the IDuplexPipe interface prior to loop start for performance. var output = Output; while (true) { var result = await output.ReadAsync(); if (result.IsCanceled) { break; } var buffer = result.Buffer; var end = buffer.End; var isCompleted = result.IsCompleted; if (!buffer.IsEmpty) { await _sender.SendAsync(buffer); } output.AdvanceTo(end); if (isCompleted) { break; } } } private void FireConnectionClosed() { // Guard against scheduling this multiple times if (_connectionClosed) { return; } _connectionClosed = true; ThreadPool.UnsafeQueueUserWorkItem(state => { state.CancelConnectionClosedToken(); state._waitForConnectionClosedTcs.TrySetResult(null); }, this, preferLocal: false); } private void Shutdown(Exception shutdownReason) { lock (_shutdownLock) { if (_socketDisposed) { return; } // Make sure to close the connection only after the _aborted flag is set. // Without this, the RequestsCanBeAbortedMidRead test will sometimes fail when // a BadHttpRequestException is thrown instead of a TaskCanceledException. _socketDisposed = true; // shutdownReason should only be null if the output was completed gracefully, so no one should ever // ever observe the nondescript ConnectionAbortedException except for connection middleware attempting // to half close the connection which is currently unsupported. _shutdownReason = shutdownReason ?? new ConnectionAbortedException("The Socket transport's send loop completed gracefully."); _trace.ConnectionWriteFin(ConnectionId, _shutdownReason.Message); try { // Try to gracefully close the socket even for aborts to match libuv behavior. _socket.Shutdown(SocketShutdown.Both); } catch { // Ignore any errors from Socket.Shutdown() since we're tearing down the connection anyway. 
} _socket.Dispose(); } } private void CancelConnectionClosedToken() { try { _connectionClosedTokenSource.Cancel(); } catch (Exception ex) { _trace.LogError(0, ex, $"Unexpected exception in {nameof(SocketConnection)}.{nameof(CancelConnectionClosedToken)}."); } } private static bool IsConnectionResetError(SocketError errorCode) { // A connection reset can be reported as SocketError.ConnectionAborted on Windows. // ProtocolType can be removed once https://github.com/dotnet/corefx/issues/31927 is fixed. return errorCode == SocketError.ConnectionReset || errorCode == SocketError.Shutdown || (errorCode == SocketError.ConnectionAborted && IsWindows) || (errorCode == SocketError.ProtocolType && IsMacOS); } private static bool IsConnectionAbortError(SocketError errorCode) { // Calling Dispose after ReceiveAsync can cause an "InvalidArgument" error on *nix. return errorCode == SocketError.OperationAborted || errorCode == SocketError.Interrupted || (errorCode == SocketError.InvalidArgument && !IsWindows); } } }
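// A self-contained sketch (assumption, not Kestrel/HtcSharp code) of the System.IO.Pipelines
// pattern SocketConnection uses above: one loop copies bytes from the socket into a PipeWriter
// (compare ProcessReceives), a second loop consumes them from the paired PipeReader. Back
// pressure comes from FlushAsync, which only completes once the reader has caught up.
using System;
using System.IO.Pipelines;
using System.Net.Sockets;
using System.Threading.Tasks;

public static class PipeSocketSketch
{
    private const int MinAllocBufferSize = 512; // stands in for SlabMemoryPool.BlockSize / 2

    public static async Task ReceiveIntoPipeAsync(Socket socket, PipeWriter writer)
    {
        while (true)
        {
            Memory<byte> buffer = writer.GetMemory(MinAllocBufferSize);
            int bytesReceived = await socket.ReceiveAsync(buffer, SocketFlags.None);
            if (bytesReceived == 0)
                break; // FIN: the remote closed its send side

            writer.Advance(bytesReceived);
            FlushResult result = await writer.FlushAsync(); // pauses here when the reader is slow
            if (result.IsCompleted || result.IsCanceled)
                break;
        }

        writer.Complete();
    }

    public static async Task DrainPipeAsync(PipeReader reader)
    {
        while (true)
        {
            ReadResult result = await reader.ReadAsync();
            // ... process result.Buffer here ...
            reader.AdvanceTo(result.Buffer.End);
            if (result.IsCompleted || result.IsCanceled)
                break;
        }

        reader.Complete();
    }
}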
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. ////////////////////////////////////////////////////////// // L-1-2-1.cs - Beta1 Layout Test - RDawson // // Tests layout of classes using 2-deep inheritance in // the same assembly and module // // See ReadMe.txt in the same project as this source for // further details about these tests. // using System; class Test{ public static int Main(){ int mi_RetCode; C c = new C(); mi_RetCode = c.Test(); if(mi_RetCode == 100) Console.WriteLine("Pass"); else Console.WriteLine("FAIL"); return mi_RetCode; } } class A{ ////////////////////////////// // Instance Fields public int FldPubInst; private int FldPrivInst; protected int FldFamInst; //Translates to "family" internal int FldAsmInst; //Translates to "assembly" protected internal int FldFoaInst; //Translates to "famorassem" ////////////////////////////// // Static Fields public static int FldPubStat; private static int FldPrivStat; protected static int FldFamStat; //family internal static int FldAsmStat; //assembly protected internal int FldFoaStat; //famorassem ////////////////////////////// // Instance Methods public int MethPubInst(){ Console.WriteLine("A::MethPubInst()"); return 100; } private int MethPrivInst(){ Console.WriteLine("A::MethPrivInst()"); return 100; } protected int MethFamInst(){ Console.WriteLine("A::MethFamInst()"); return 100; } internal int MethAsmInst(){ Console.WriteLine("A::MethAsmInst()"); return 100; } protected internal int MethFoaInst(){ Console.WriteLine("A::MethFoaInst()"); return 100; } ////////////////////////////// // Static Methods public static int MethPubStat(){ Console.WriteLine("A::MethPubStat()"); return 100; } private static int MethPrivStat(){ Console.WriteLine("A::MethPrivStat()"); return 100; } protected static int MethFamStat(){ Console.WriteLine("A::MethFamStat()"); return 100; } internal static int MethAsmStat(){ Console.WriteLine("A::MethAsmStat()"); return 100; } protected internal static int MethFoaStat(){ Console.WriteLine("A::MethFoaStat()"); return 100; } ////////////////////////////// // Virtual Instance Methods public virtual int MethPubVirt(){ Console.WriteLine("A::MethPubVirt()"); return 100; } //@csharp - Note that C# won't compile an illegal private virtual function //So there is no negative testing MethPrivVirt() here. protected virtual int MethFamVirt(){ Console.WriteLine("A::MethFamVirt()"); return 100; } internal virtual int MethAsmVirt(){ Console.WriteLine("A::MethAsmVirt()"); return 100; } protected internal virtual int MethFoaVirt(){ Console.WriteLine("A::MethFoaVirt()"); return 100; } } class B : A{ //@todo - Class B is currently a simple placeholder to force N-Deep inheritance... //However, a non-trivial class B that might hide some members of A as a visiblity //test is a test that we need to think about and develop. That is not currently the //focus of this test (maybe in the near future), but for now we're happy forcing //a N-Deep inheritance. Such instances have, in the past, proven worthy of //investigation. 
public int placeholder; } class C : B{ public int Test(){ int mi_RetCode = 100; ///////////////////////////////// // Test instance field access FldPubInst = 100; if(FldPubInst != 100) mi_RetCode = 0; //@csharp - Note that C# will not compile an illegal access of FldPrivInst //So there is no negative test here, it should be covered elsewhere and //should throw a FielAccessException within the runtime. (IL sources is //the most logical, only?, choice) FldFamInst = 100; if(FldFamInst != 100) mi_RetCode = 0; FldAsmInst = 100; if(FldAsmInst != 100) mi_RetCode = 0; FldFoaInst = 100; if(FldFoaInst != 100) mi_RetCode = 0; ///////////////////////////////// // Test static field access FldPubStat = 100; if(FldPubStat != 100) mi_RetCode = 0; //@csharp - Again, note C# won't do private field access FldFamStat = 100; if(FldFamStat != 100) mi_RetCode = 0; FldAsmStat = 100; if(FldAsmStat != 100) mi_RetCode = 0; FldFoaStat = 100; if(FldFoaStat != 100) mi_RetCode = 0; ///////////////////////////////// // Test instance method access if(MethPubInst() != 100) mi_RetCode = 0; //@csharp - C# won't do private method access if(MethFamInst() != 100) mi_RetCode = 0; if(MethAsmInst() != 100) mi_RetCode = 0; if(MethFoaInst() != 100) mi_RetCode = 0; ///////////////////////////////// // Test static method access if(MethPubStat() != 100) mi_RetCode = 0; //@csharp - C# won't do private method access if(MethFamStat() != 100) mi_RetCode = 0; if(MethAsmStat() != 100) mi_RetCode = 0; if(MethFoaStat() != 100) mi_RetCode = 0; ///////////////////////////////// // Test virtual method access if(MethPubVirt() != 100) mi_RetCode = 0; //@csharp - C# won't do private method access if(MethFamVirt() != 100) mi_RetCode = 0; if(MethAsmVirt() != 100) mi_RetCode = 0; if(MethFoaVirt() != 100) mi_RetCode = 0; return mi_RetCode; } }
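// An illustrative sketch (hypothetical class, not part of the test above) of the negative cases
// the comments in class C refer to: C# refuses to compile access to A's private members from a
// derived type, which is why those paths have to be exercised from IL sources instead and are
// expected to raise FieldAccessException/MethodAccessException at runtime.
class NegativeCases : B
{
    int Demo()
    {
        // FldPrivInst = 100;      // CS0122: 'A.FldPrivInst' is inaccessible due to its protection level
        // return MethPrivInst();  // CS0122: 'A.MethPrivInst()' is inaccessible due to its protection level
        return FldPubInst;         // public, family and assembly members remain accessible, as tested above
    }
}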
//------------------------------------------------------------------------------ // <copyright file="XmlBinaryWriter.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> // <owner current="true" primary="true">[....]</owner> //------------------------------------------------------------------------------ using System; using System.Collections; using System.IO; using System.Text; using System.Diagnostics; using System.Globalization; namespace System.Xml { // This is mostly just a copy of code in SqlTypes.SqlDecimal internal struct BinXmlSqlDecimal { internal byte m_bLen; internal byte m_bPrec; internal byte m_bScale; internal byte m_bSign; internal uint m_data1; internal uint m_data2; internal uint m_data3; internal uint m_data4; public bool IsPositive { get { return (m_bSign == 0); } } private static readonly byte NUMERIC_MAX_PRECISION = 38; // Maximum precision of numeric private static readonly byte MaxPrecision = NUMERIC_MAX_PRECISION; // max SS precision private static readonly byte MaxScale = NUMERIC_MAX_PRECISION; // max SS scale private static readonly int x_cNumeMax = 4; private static readonly long x_lInt32Base = ((long)1) << 32; // 2**32 private static readonly ulong x_ulInt32Base = ((ulong)1) << 32; // 2**32 private static readonly ulong x_ulInt32BaseForMod = x_ulInt32Base - 1; // 2**32 - 1 (0xFFF...FF) internal static readonly ulong x_llMax = Int64.MaxValue; // Max of Int64 //private static readonly uint x_ulBase10 = 10; private static readonly double DUINT_BASE = (double)x_lInt32Base; // 2**32 private static readonly double DUINT_BASE2 = DUINT_BASE * DUINT_BASE; // 2**64 private static readonly double DUINT_BASE3 = DUINT_BASE2 * DUINT_BASE; // 2**96 //private static readonly double DMAX_NUME = 1.0e+38; // Max value of numeric //private static readonly uint DBL_DIG = 17; // Max decimal digits of double //private static readonly byte x_cNumeDivScaleMin = 6; // Minimum result scale of numeric division // Array of multipliers for lAdjust and Ceiling/Floor. private static readonly uint[] x_rgulShiftBase = new uint[9] { 10, 10 * 10, 10 * 10 * 10, 10 * 10 * 10 * 10, 10 * 10 * 10 * 10 * 10, 10 * 10 * 10 * 10 * 10 * 10, 10 * 10 * 10 * 10 * 10 * 10 * 10, 10 * 10 * 10 * 10 * 10 * 10 * 10 * 10, 10 * 10 * 10 * 10 * 10 * 10 * 10 * 10 * 10 }; public BinXmlSqlDecimal (byte[] data, int offset, bool trim) { byte b = data[offset]; switch (b) { case 7: m_bLen = 1; break; case 11: m_bLen = 2; break; case 15: m_bLen = 3; break; case 19: m_bLen = 4; break; default: throw new XmlException(Res.XmlBinary_InvalidSqlDecimal, (string[])null); } m_bPrec = data[offset+1]; m_bScale = data[offset+2]; m_bSign = 0 == data[offset+3] ? (byte)1 : (byte)0; m_data1 = UIntFromByteArray(data, offset+4); m_data2 = (m_bLen > 1) ? UIntFromByteArray(data, offset+8) : 0; m_data3 = (m_bLen > 2) ? UIntFromByteArray(data, offset+12) : 0; m_data4 = (m_bLen > 3) ? UIntFromByteArray(data, offset+16) : 0; if (m_bLen == 4 && m_data4 == 0) m_bLen = 3; if (m_bLen == 3 && m_data3 == 0) m_bLen = 2; if (m_bLen == 2 && m_data2 == 0) m_bLen = 1; AssertValid(); if (trim) { TrimTrailingZeros(); AssertValid(); } } public void Write(Stream strm) { strm.WriteByte((byte)(this.m_bLen * 4 + 3)); strm.WriteByte(this.m_bPrec); strm.WriteByte(this.m_bScale); strm.WriteByte(0 == this.m_bSign ? 
(byte)1 : (byte)0); WriteUI4(this.m_data1, strm); if (this.m_bLen > 1) { WriteUI4(this.m_data2, strm); if (this.m_bLen > 2) { WriteUI4(this.m_data3, strm); if (this.m_bLen > 3) { WriteUI4(this.m_data4, strm); } } } } private void WriteUI4(uint val, Stream strm) { strm.WriteByte((byte)(val & 0xFF)); strm.WriteByte((byte)((val >> 8) & 0xFF)); strm.WriteByte((byte)((val >> 16) & 0xFF)); strm.WriteByte((byte)((val >> 24) & 0xFF)); } private static uint UIntFromByteArray(byte[] data, int offset) { int val = (data[offset]) << 0; val |= (data[offset+1]) << 8; val |= (data[offset+2]) << 16; val |= (data[offset+3]) << 24; return unchecked((uint)val); } // check whether is zero private bool FZero() { return (m_data1 == 0) && (m_bLen <= 1); } // Store data back from rguiData[] to m_data* private void StoreFromWorkingArray(uint[] rguiData) { Debug.Assert(rguiData.Length == 4); m_data1 = rguiData[0]; m_data2 = rguiData[1]; m_data3 = rguiData[2]; m_data4 = rguiData[3]; } // Find the case where we overflowed 10**38, but not 2**128 private bool FGt10_38(uint[] rglData) { //Debug.Assert(rglData.Length == 4, "rglData.Length == 4", "Wrong array length: " + rglData.Length.ToString(CultureInfo.InvariantCulture)); return rglData[3] >= 0x4b3b4ca8L && ((rglData[3] > 0x4b3b4ca8L) || (rglData[2] > 0x5a86c47aL) || (rglData[2] == 0x5a86c47aL) && (rglData[1] >= 0x098a2240L)); } // Multi-precision one super-digit divide in place. // U = U / D, // R = U % D // Length of U can decrease private static void MpDiv1(uint[] rgulU, // InOut| U ref int ciulU, // InOut| # of digits in U uint iulD, // In | D out uint iulR // Out | R ) { Debug.Assert(rgulU.Length == x_cNumeMax); uint ulCarry = 0; ulong dwlAccum; ulong ulD = (ulong)iulD; int idU = ciulU; Debug.Assert(iulD != 0, "iulD != 0", "Divided by zero!"); Debug.Assert(iulD > 0, "iulD > 0", "Invalid data: less than zero"); Debug.Assert(ciulU > 0, "ciulU > 0", "No data in the array"); while (idU > 0) { idU--; dwlAccum = (((ulong)ulCarry) << 32) + (ulong)(rgulU[idU]); rgulU[idU] = (uint)(dwlAccum / ulD); ulCarry = (uint)(dwlAccum - (ulong)rgulU[idU] * ulD); // (ULONG) (dwlAccum % iulD) } iulR = ulCarry; MpNormalize(rgulU, ref ciulU); } // Normalize multi-precision number - remove leading zeroes private static void MpNormalize(uint[] rgulU, // In | Number ref int ciulU // InOut| # of digits ) { while (ciulU > 1 && rgulU[ciulU - 1] == 0) ciulU--; } // AdjustScale() // // Adjust number of digits to the right of the decimal point. // A positive adjustment increases the scale of the numeric value // while a negative adjustment decreases the scale. When decreasing // the scale for the numeric value, the remainder is checked and // rounded accordingly. // internal void AdjustScale(int digits, bool fRound) { uint ulRem; //Remainder when downshifting uint ulShiftBase; //What to multiply by to effect scale adjust bool fNeedRound = false; //Do we really need to round? 
byte bNewScale, bNewPrec; int lAdjust = digits; //If downshifting causes truncation of data if (lAdjust + m_bScale < 0) throw new XmlException(Res.SqlTypes_ArithTruncation, (string)null); //If uphifting causes scale overflow if (lAdjust + m_bScale > NUMERIC_MAX_PRECISION) throw new XmlException(Res.SqlTypes_ArithOverflow, (string)null); bNewScale = (byte)(lAdjust + m_bScale); bNewPrec = (byte)(Math.Min(NUMERIC_MAX_PRECISION, Math.Max(1, lAdjust + m_bPrec))); if (lAdjust > 0) { m_bScale = bNewScale; m_bPrec = bNewPrec; while (lAdjust > 0) { //if lAdjust>=9, downshift by 10^9 each time, otherwise by the full amount if (lAdjust >= 9) { ulShiftBase = x_rgulShiftBase[8]; lAdjust -= 9; } else { ulShiftBase = x_rgulShiftBase[lAdjust - 1]; lAdjust = 0; } MultByULong(ulShiftBase); } } else if (lAdjust < 0) { do { if (lAdjust <= -9) { ulShiftBase = x_rgulShiftBase[8]; lAdjust += 9; } else { ulShiftBase = x_rgulShiftBase[-lAdjust - 1]; lAdjust = 0; } ulRem = DivByULong(ulShiftBase); } while (lAdjust < 0); // Do we really need to round? fNeedRound = (ulRem >= ulShiftBase / 2); m_bScale = bNewScale; m_bPrec = bNewPrec; } AssertValid(); // After adjusting, if the result is 0 and remainder is less than 5, // set the sign to be positive and return. if (fNeedRound && fRound) { // If remainder is 5 or above, increment/decrement by 1. AddULong(1); } else if (FZero()) this.m_bSign = 0; } // AddULong() // // Add ulAdd to this numeric. The result will be returned in *this. // // Parameters: // this - IN Operand1 & OUT Result // ulAdd - IN operand2. // private void AddULong(uint ulAdd) { ulong dwlAccum = (ulong)ulAdd; int iData; // which UI4 in this we are on int iDataMax = (int)m_bLen; // # of UI4s in this uint[] rguiData = new uint[4] { m_data1, m_data2, m_data3, m_data4 }; // Add, starting at the LS UI4 until out of UI4s or no carry iData = 0; do { dwlAccum += (ulong)rguiData[iData]; rguiData[iData] = (uint)dwlAccum; // equivalent to mod x_dwlBaseUI4 dwlAccum >>= 32; // equivalent to dwlAccum /= x_dwlBaseUI4; if (0 == dwlAccum) { StoreFromWorkingArray(rguiData); return; } iData++; } while (iData < iDataMax); // There is carry at the end Debug.Assert(dwlAccum < x_ulInt32Base, "dwlAccum < x_lInt32Base", ""); // Either overflowed if (iData == x_cNumeMax) throw new XmlException(Res.SqlTypes_ArithOverflow, (string)null); // Or need to extend length by 1 UI4 rguiData[iData] = (uint)dwlAccum; m_bLen++; if (FGt10_38(rguiData)) throw new XmlException(Res.SqlTypes_ArithOverflow, (string)null); StoreFromWorkingArray(rguiData); } // multiply by a long integer private void MultByULong(uint uiMultiplier) { int iDataMax = m_bLen; // How many UI4s currently in *this ulong dwlAccum = 0; // accumulated sum ulong dwlNextAccum = 0; // accumulation past dwlAccum int iData; // which UI4 in *This we are on. 
uint[] rguiData = new uint[4] { m_data1, m_data2, m_data3, m_data4 }; for (iData = 0; iData < iDataMax; iData++) { Debug.Assert(dwlAccum < x_ulInt32Base); ulong ulTemp = (ulong)rguiData[iData]; dwlNextAccum = ulTemp * (ulong)uiMultiplier; dwlAccum += dwlNextAccum; if (dwlAccum < dwlNextAccum) // Overflow of int64 add dwlNextAccum = x_ulInt32Base; // how much to add to dwlAccum after div x_dwlBaseUI4 else dwlNextAccum = 0; rguiData[iData] = (uint)dwlAccum; // equivalent to mod x_dwlBaseUI4 dwlAccum = (dwlAccum >> 32) + dwlNextAccum; // equivalent to div x_dwlBaseUI4 } // If any carry, if (dwlAccum != 0) { // Either overflowed Debug.Assert(dwlAccum < x_ulInt32Base, "dwlAccum < x_dwlBaseUI4", "Integer overflow"); if (iDataMax == x_cNumeMax) throw new XmlException(Res.SqlTypes_ArithOverflow, (string)null); // Or extend length by one uint rguiData[iDataMax] = (uint)dwlAccum; m_bLen++; } if (FGt10_38(rguiData)) throw new XmlException(Res.SqlTypes_ArithOverflow, (string)null); StoreFromWorkingArray(rguiData); } // DivByULong() // // Divide numeric value by a ULONG. The result will be returned // in the dividend *this. // // Parameters: // this - IN Dividend & OUT Result // ulDivisor - IN Divisor // Returns: - OUT Remainder // internal uint DivByULong(uint iDivisor) { ulong dwlDivisor = (ulong)iDivisor; ulong dwlAccum = 0; //Accumulated sum uint ulQuotientCur = 0; // Value of the current UI4 of the quotient bool fAllZero = true; // All of the quotient (so far) has been 0 int iData; //Which UI4 currently on // Check for zero divisor. if (dwlDivisor == 0) throw new XmlException(Res.SqlTypes_DivideByZero, (string)null); // Copy into array, so that we can iterate through the data uint[] rguiData = new uint[4] { m_data1, m_data2, m_data3, m_data4 }; // Start from the MS UI4 of quotient, divide by divisor, placing result // in quotient and carrying the remainder. //DEVNOTE DWORDLONG sufficient accumulator since: // Accum < Divisor <= 2^32 - 1 at start each loop // initially,and mod end previous loop // Accum*2^32 < 2^64 - 2^32 // multiply both side by 2^32 (x_dwlBaseUI4) // Accum*2^32 + m_rgulData < 2^64 // rglData < 2^32 for (iData = m_bLen; iData > 0; iData--) { Debug.Assert(dwlAccum < dwlDivisor); dwlAccum = (dwlAccum << 32) + (ulong)(rguiData[iData - 1]); // dwlA*x_dwlBaseUI4 + rglData Debug.Assert((dwlAccum / dwlDivisor) < x_ulInt32Base); //Update dividend to the quotient. ulQuotientCur = (uint)(dwlAccum / dwlDivisor); rguiData[iData - 1] = ulQuotientCur; //Remainder to be carried to the next lower significant byte. dwlAccum = dwlAccum % dwlDivisor; // While current part of quotient still 0, reduce length fAllZero = fAllZero && (ulQuotientCur == 0); if (fAllZero) m_bLen--; } StoreFromWorkingArray(rguiData); // If result is 0, preserve sign but set length to 5 if (fAllZero) m_bLen = 1; AssertValid(); // return the remainder Debug.Assert(dwlAccum < x_ulInt32Base); return (uint)dwlAccum; } //Determine the number of uints needed for a numeric given a precision //Precision Length // 0 invalid // 1-9 1 // 10-19 2 // 20-28 3 // 29-38 4 // The array in Shiloh. Listed here for comparison. 
//private static readonly byte[] rgCLenFromPrec = new byte[] {5,5,5,5,5,5,5,5,5,9,9,9,9,9, // 9,9,9,9,9,13,13,13,13,13,13,13,13,13,17,17,17,17,17,17,17,17,17,17}; private static readonly byte[] rgCLenFromPrec = new byte[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4 }; private static byte CLenFromPrec(byte bPrec) { Debug.Assert(bPrec <= MaxPrecision && bPrec > 0, "bPrec <= MaxPrecision && bPrec > 0", "Invalid numeric precision"); return rgCLenFromPrec[bPrec - 1]; } private static char ChFromDigit(uint uiDigit) { Debug.Assert(uiDigit < 10); return (char)(uiDigit + '0'); } public Decimal ToDecimal() { if ((int)m_data4 != 0 || m_bScale > 28) throw new XmlException(Res.SqlTypes_ArithOverflow, (string)null); return new Decimal((int)m_data1, (int)m_data2, (int)m_data3, !IsPositive, m_bScale); } void TrimTrailingZeros() { uint[] rgulNumeric = new uint[4] { m_data1, m_data2, m_data3, m_data4}; int culLen = m_bLen; uint ulRem; //Remainder of a division by x_ulBase10, i.e.,least significant digit // special-case 0 if (culLen == 1 && rgulNumeric[0] == 0) { m_bScale = 0; return; } while (m_bScale > 0 && (culLen > 1 || rgulNumeric[0] != 0)) { MpDiv1 (rgulNumeric, ref culLen, 10, out ulRem); if ( ulRem == 0 ) { m_data1 = rgulNumeric[0]; m_data2 = rgulNumeric[1]; m_data3 = rgulNumeric[2]; m_data4 = rgulNumeric[3]; m_bScale--; } else { break; } } if (m_bLen == 4 && m_data4 == 0) m_bLen = 3; if (m_bLen == 3 && m_data3 == 0) m_bLen = 2; if (m_bLen == 2 && m_data2 == 0) m_bLen = 1; } public override String ToString() { AssertValid(); // Make local copy of data to avoid modifying input. uint[] rgulNumeric = new uint[4] { m_data1, m_data2, m_data3, m_data4}; int culLen = m_bLen; char[] pszTmp = new char[NUMERIC_MAX_PRECISION + 1]; //Local Character buffer to hold //the decimal digits, from the //lowest significant to highest significant int iDigits = 0;//Number of significant digits uint ulRem; //Remainder of a division by x_ulBase10, i.e.,least significant digit // Build the final numeric string by inserting the sign, reversing // the order and inserting the decimal number at the correct position //Retrieve each digit from the lowest significant digit while (culLen > 1 || rgulNumeric[0] != 0) { MpDiv1 (rgulNumeric, ref culLen, 10, out ulRem); //modulo x_ulBase10 is the lowest significant digit pszTmp[iDigits++] = ChFromDigit(ulRem); } // if scale of the number has not been // reached pad remaining number with zeros. while (iDigits <= m_bScale) { pszTmp[iDigits++] = ChFromDigit(0); } bool fPositive = IsPositive; // Increment the result length if negative (need to add '-') int uiResultLen = fPositive ? iDigits : iDigits + 1; // Increment the result length if scale > 0 (need to add '.') if (m_bScale > 0) uiResultLen++; char[] szResult = new char[uiResultLen]; int iCurChar = 0; if (!fPositive) szResult[iCurChar ++] = '-'; while (iDigits > 0) { if (iDigits-- == m_bScale) szResult[iCurChar ++] = '.'; szResult[iCurChar ++] = pszTmp[iDigits]; } AssertValid(); return new String(szResult); } // Is this RE numeric valid? 
[System.Diagnostics.Conditional("DEBUG")] private void AssertValid() { // Scale,Prec in range Debug.Assert(m_bScale <= NUMERIC_MAX_PRECISION, "m_bScale <= NUMERIC_MAX_PRECISION", "In AssertValid"); Debug.Assert(m_bScale <= m_bPrec, "m_bScale <= m_bPrec", "In AssertValid"); Debug.Assert(m_bScale >= 0, "m_bScale >= 0", "In AssertValid"); Debug.Assert(m_bPrec > 0, "m_bPrec > 0", "In AssertValid"); Debug.Assert(CLenFromPrec(m_bPrec) >= m_bLen, "CLenFromPrec(m_bPrec) >= m_bLen", "In AssertValid"); Debug.Assert(m_bLen <= x_cNumeMax, "m_bLen <= x_cNumeMax", "In AssertValid"); uint[] rglData = new uint[4] { m_data1, m_data2, m_data3, m_data4 }; // highest UI4 is non-0 unless value "zero" if (rglData[m_bLen - 1] == 0) { Debug.Assert(m_bLen == 1, "m_bLen == 1", "In AssertValid"); } // All UI4s from length to end are 0 for (int iulData = m_bLen; iulData < x_cNumeMax; iulData++) Debug.Assert(rglData[iulData] == 0, "rglData[iulData] == 0", "In AssertValid"); } } internal struct BinXmlSqlMoney { long data; public BinXmlSqlMoney(int v) { this.data = v; } public BinXmlSqlMoney(long v) { this.data = v; } public Decimal ToDecimal() { bool neg; ulong v; if (this.data < 0) { neg = true; v = (ulong)unchecked(-this.data); } else { neg = false; v = (ulong)this.data; } // SQL Server stores money8 as ticks of 1/10000. const byte MoneyScale = 4; return new Decimal(unchecked((int)v), unchecked((int)(v >> 32)), 0, neg, MoneyScale); } public override String ToString() { Decimal money = ToDecimal(); // Formatting of SqlMoney: At least two digits after decimal point return money.ToString("#0.00##", CultureInfo.InvariantCulture); } } internal abstract class BinXmlDateTime { const int MaxFractionDigits = 7; static internal int[] KatmaiTimeScaleMultiplicator = new int[8] { 10000000, 1000000, 100000, 10000, 1000, 100, 10, 1, }; static void Write2Dig( StringBuilder sb, int val ) { Debug.Assert(val >= 0 && val < 100); sb.Append((char)('0' + (val/10))); sb.Append((char)('0' + (val%10))); } static void Write4DigNeg(StringBuilder sb, int val) { Debug.Assert(val > -10000 && val < 10000); if (val < 0) { val = -val; sb.Append('-'); } Write2Dig(sb, val/100); Write2Dig(sb, val%100); } static void Write3Dec(StringBuilder sb, int val) { Debug.Assert(val >= 0 && val < 1000); int c3 = val % 10; val /= 10; int c2 = val % 10; val /= 10; int c1 = val; sb.Append('.'); sb.Append((char)('0'+c1)); sb.Append((char)('0'+c2)); sb.Append((char)('0'+c3)); } static void WriteDate(StringBuilder sb, int yr, int mnth, int day) { Write4DigNeg(sb, yr); sb.Append('-'); Write2Dig(sb, mnth); sb.Append('-'); Write2Dig(sb, day); } static void WriteTime(StringBuilder sb, int hr, int min, int sec, int ms) { Write2Dig(sb, hr); sb.Append(':'); Write2Dig(sb, min); sb.Append(':'); Write2Dig(sb, sec); if (ms != 0) { Write3Dec(sb, ms); } } static void WriteTimeFullPrecision(StringBuilder sb, int hr, int min, int sec, int fraction) { Write2Dig(sb, hr); sb.Append(':'); Write2Dig(sb, min); sb.Append(':'); Write2Dig(sb, sec); if (fraction != 0) { int fractionDigits = MaxFractionDigits; while (fraction % 10 == 0) { fractionDigits --; fraction /= 10; } char[] charArray = new char[fractionDigits]; while(fractionDigits > 0) { fractionDigits--; charArray[fractionDigits] = (char)(fraction % 10 + '0'); fraction /= 10; } sb.Append('.'); sb.Append(charArray); } } static void WriteTimeZone(StringBuilder sb, TimeSpan zone) { bool negTimeZone = true; if (zone.Ticks < 0) { negTimeZone = false; zone = zone.Negate(); } WriteTimeZone(sb, negTimeZone, zone.Hours, zone.Minutes); } static 
void WriteTimeZone(StringBuilder sb, bool negTimeZone, int hr, int min) { if (hr == 0 && min == 0) { sb.Append('Z'); } else { sb.Append(negTimeZone ? '+' : '-'); Write2Dig(sb, hr); sb.Append(':'); Write2Dig(sb, min); } } static void BreakDownXsdDateTime(long val, out int yr, out int mnth, out int day, out int hr, out int min, out int sec, out int ms) { if (val < 0) goto Error; long date = val / 4; // trim indicator bits ms = (int)(date % 1000); date /= 1000; sec = (int)(date % 60); date /= 60; min = (int)(date % 60); date /= 60; hr = (int)(date % 24); date /= 24; day = (int)(date % 31) + 1; date /= 31; mnth = (int)(date % 12) + 1; date /= 12; yr = (int)(date - 9999); if (yr < -9999 || yr > 9999) goto Error; return; Error: throw new XmlException(Res.SqlTypes_ArithOverflow, (string)null); } static void BreakDownXsdDate(long val, out int yr, out int mnth, out int day, out bool negTimeZone, out int hr, out int min) { if (val < 0) goto Error; val = val / 4; // trim indicator bits int totalMin = (int)(val % (29*60)) - 60*14; long totalDays = val / (29*60); if (negTimeZone = (totalMin < 0)) totalMin = -totalMin; min = totalMin % 60; hr = totalMin / 60; day = (int)(totalDays % 31) + 1; totalDays /= 31; mnth = (int)(totalDays % 12) + 1; yr = (int)(totalDays / 12) - 9999; if (yr < -9999 || yr > 9999) goto Error; return; Error: throw new XmlException(Res.SqlTypes_ArithOverflow, (string)null); } static void BreakDownXsdTime(long val, out int hr, out int min, out int sec, out int ms) { if (val < 0) goto Error; val = val / 4; // trim indicator bits ms = (int)(val % 1000); val /= 1000; sec = (int)(val % 60); val /= 60; min = (int)(val % 60); hr = (int)(val / 60); if (0 > hr || hr > 23) goto Error; return; Error: throw new XmlException(Res.SqlTypes_ArithOverflow, (string)null); } public static string XsdDateTimeToString(long val) { int yr; int mnth; int day; int hr; int min; int sec; int ms; BreakDownXsdDateTime(val, out yr, out mnth, out day, out hr, out min, out sec, out ms); StringBuilder sb = new StringBuilder(20); WriteDate(sb, yr, mnth, day); sb.Append('T'); WriteTime(sb, hr, min, sec, ms); sb.Append('Z'); return sb.ToString(); } public static DateTime XsdDateTimeToDateTime(long val) { int yr; int mnth; int day; int hr; int min; int sec; int ms; BreakDownXsdDateTime(val, out yr, out mnth, out day, out hr, out min, out sec, out ms); return new DateTime(yr, mnth, day, hr, min, sec, ms, DateTimeKind.Utc); } public static string XsdDateToString(long val) { int yr; int mnth; int day; int hr; int min; bool negTimeZ; BreakDownXsdDate(val, out yr, out mnth, out day, out negTimeZ, out hr, out min); StringBuilder sb = new StringBuilder(20); WriteDate(sb, yr, mnth, day); WriteTimeZone(sb, negTimeZ, hr, min); return sb.ToString(); } public static DateTime XsdDateToDateTime(long val) { int yr; int mnth; int day; int hr; int min; bool negTimeZ; BreakDownXsdDate(val, out yr, out mnth, out day, out negTimeZ, out hr, out min); DateTime d = new DateTime(yr, mnth, day, 0, 0, 0, DateTimeKind.Utc); // adjust for timezone int adj = (negTimeZ ? 
-1 : 1) * ( (hr * 60) + min ); return TimeZone.CurrentTimeZone.ToLocalTime( d.AddMinutes(adj) ); } public static string XsdTimeToString(long val) { int hr; int min; int sec; int ms; BreakDownXsdTime(val, out hr, out min, out sec, out ms); StringBuilder sb = new StringBuilder(16); WriteTime(sb, hr, min, sec, ms); sb.Append('Z'); return sb.ToString(); } public static DateTime XsdTimeToDateTime(long val) { int hr; int min; int sec; int ms; BreakDownXsdTime(val, out hr, out min, out sec, out ms); return new DateTime(1, 1, 1, hr, min, sec, ms, DateTimeKind.Utc); } public static string SqlDateTimeToString(int dateticks, uint timeticks) { DateTime dateTime = SqlDateTimeToDateTime(dateticks, timeticks); string format = (dateTime.Millisecond != 0) ? "yyyy/MM/dd\\THH:mm:ss.ffff" : "yyyy/MM/dd\\THH:mm:ss"; return dateTime.ToString(format, CultureInfo.InvariantCulture); } public static DateTime SqlDateTimeToDateTime(int dateticks, uint timeticks) { DateTime SQLBaseDate = new DateTime(1900, 1, 1); //long millisecond = (long)(((ulong)timeticks * 20 + (ulong)3) / (ulong)6); long millisecond = (long)(timeticks / SQLTicksPerMillisecond + 0.5); return SQLBaseDate.Add( new TimeSpan( dateticks * TimeSpan.TicksPerDay + millisecond * TimeSpan.TicksPerMillisecond ) ); } // Number of (100ns) ticks per time unit private static readonly double SQLTicksPerMillisecond = 0.3; public static readonly int SQLTicksPerSecond = 300; public static readonly int SQLTicksPerMinute = SQLTicksPerSecond * 60; public static readonly int SQLTicksPerHour = SQLTicksPerMinute * 60; private static readonly int SQLTicksPerDay = SQLTicksPerHour * 24; public static string SqlSmallDateTimeToString(short dateticks, ushort timeticks) { DateTime dateTime = SqlSmallDateTimeToDateTime(dateticks, timeticks); return dateTime.ToString("yyyy/MM/dd\\THH:mm:ss", CultureInfo.InvariantCulture); } public static DateTime SqlSmallDateTimeToDateTime(short dateticks, ushort timeticks) { return SqlDateTimeToDateTime( (int)dateticks, (uint)(timeticks * SQLTicksPerMinute) ); } // Conversions of the Katmai date & time types to DateTime public static DateTime XsdKatmaiDateToDateTime(byte[] data, int offset) { // Katmai SQL type "DATE" long dateTicks = GetKatmaiDateTicks(data, ref offset); DateTime dt = new DateTime(dateTicks); return dt; } public static DateTime XsdKatmaiDateTimeToDateTime(byte[] data, int offset) { // Katmai SQL type "DATETIME2" long timeTicks = GetKatmaiTimeTicks(data, ref offset); long dateTicks = GetKatmaiDateTicks(data, ref offset); DateTime dt = new DateTime(dateTicks + timeTicks); return dt; } public static DateTime XsdKatmaiTimeToDateTime(byte[] data, int offset) { // TIME without zone is stored as DATETIME2 return XsdKatmaiDateTimeToDateTime(data, offset); } public static DateTime XsdKatmaiDateOffsetToDateTime( byte[] data, int offset ) { // read the timezoned value into DateTimeOffset and then convert to local time return XsdKatmaiDateOffsetToDateTimeOffset(data, offset).LocalDateTime; } public static DateTime XsdKatmaiDateTimeOffsetToDateTime(byte[] data, int offset) { // read the timezoned value into DateTimeOffset and then convert to local time return XsdKatmaiDateTimeOffsetToDateTimeOffset(data, offset).LocalDateTime; } public static DateTime XsdKatmaiTimeOffsetToDateTime(byte[] data, int offset) { // read the timezoned value into DateTimeOffset and then convert to local time return XsdKatmaiTimeOffsetToDateTimeOffset(data, offset).LocalDateTime; } // Conversions of the Katmai date & time types to DateTimeOffset public static 
DateTimeOffset XsdKatmaiDateToDateTimeOffset( byte[] data, int offset ) { // read the value into DateTime and then convert it to DateTimeOffset, which adds local time zone return (DateTimeOffset)XsdKatmaiDateToDateTime(data, offset); } public static DateTimeOffset XsdKatmaiDateTimeToDateTimeOffset(byte[] data, int offset) { // read the value into DateTime and then convert it to DateTimeOffset, which adds local time zone return (DateTimeOffset)XsdKatmaiDateTimeToDateTime(data, offset); } public static DateTimeOffset XsdKatmaiTimeToDateTimeOffset(byte[] data, int offset) { // read the value into DateTime and then convert it to DateTimeOffset, which adds local time zone return (DateTimeOffset)XsdKatmaiTimeToDateTime(data, offset); } public static DateTimeOffset XsdKatmaiDateOffsetToDateTimeOffset(byte[] data, int offset) { // DATE with zone is stored as DATETIMEOFFSET return XsdKatmaiDateTimeOffsetToDateTimeOffset(data, offset); } public static DateTimeOffset XsdKatmaiDateTimeOffsetToDateTimeOffset(byte[] data, int offset) { // Katmai SQL type "DATETIMEOFFSET" long timeTicks = GetKatmaiTimeTicks(data, ref offset); long dateTicks = GetKatmaiDateTicks(data, ref offset); long zoneTicks = GetKatmaiTimeZoneTicks(data, offset); // The DATETIMEOFFSET values are serialized in UTC, but DateTimeOffset takes adjusted time -> we need to add zoneTicks DateTimeOffset dto = new DateTimeOffset(dateTicks + timeTicks + zoneTicks, new TimeSpan(zoneTicks)); return dto; } public static DateTimeOffset XsdKatmaiTimeOffsetToDateTimeOffset(byte[] data, int offset) { // TIME with zone is stored as DATETIMEOFFSET return XsdKatmaiDateTimeOffsetToDateTimeOffset(data, offset); } // Conversions of the Katmai date & time types to string public static string XsdKatmaiDateToString(byte[] data, int offset) { DateTime dt = XsdKatmaiDateToDateTime(data, offset); StringBuilder sb = new StringBuilder(10); WriteDate(sb, dt.Year, dt.Month, dt.Day); return sb.ToString(); } public static string XsdKatmaiDateTimeToString(byte[] data, int offset) { DateTime dt = XsdKatmaiDateTimeToDateTime(data, offset); StringBuilder sb = new StringBuilder(33); WriteDate(sb, dt.Year, dt.Month, dt.Day); sb.Append('T'); WriteTimeFullPrecision(sb, dt.Hour, dt.Minute, dt.Second, GetFractions(dt)); return sb.ToString(); } public static string XsdKatmaiTimeToString(byte[] data, int offset) { DateTime dt = XsdKatmaiTimeToDateTime(data, offset); StringBuilder sb = new StringBuilder(16); WriteTimeFullPrecision(sb, dt.Hour, dt.Minute, dt.Second, GetFractions(dt)); return sb.ToString(); } public static string XsdKatmaiDateOffsetToString(byte[] data, int offset) { DateTimeOffset dto = XsdKatmaiDateOffsetToDateTimeOffset(data, offset); StringBuilder sb = new StringBuilder(16); WriteDate(sb, dto.Year, dto.Month, dto.Day); WriteTimeZone(sb, dto.Offset); return sb.ToString(); } public static string XsdKatmaiDateTimeOffsetToString(byte[] data, int offset) { DateTimeOffset dto = XsdKatmaiDateTimeOffsetToDateTimeOffset(data, offset); StringBuilder sb = new StringBuilder(39); WriteDate(sb, dto.Year, dto.Month, dto.Day); sb.Append('T'); WriteTimeFullPrecision(sb, dto.Hour, dto.Minute, dto.Second, GetFractions(dto)); WriteTimeZone(sb, dto.Offset); return sb.ToString(); } public static string XsdKatmaiTimeOffsetToString(byte[] data, int offset) { DateTimeOffset dto = XsdKatmaiTimeOffsetToDateTimeOffset(data, offset); StringBuilder sb = new StringBuilder(22); WriteTimeFullPrecision(sb, dto.Hour, dto.Minute, dto.Second, GetFractions(dto)); WriteTimeZone(sb, dto.Offset); return 
sb.ToString(); } // Helper methods for the Katmai date & time types static long GetKatmaiDateTicks(byte[] data, ref int pos) { int p = pos; pos = p + 3; return (data[p] | data[p + 1] << 8 | data[p + 2] << 16) * TimeSpan.TicksPerDay; } static long GetKatmaiTimeTicks(byte[] data, ref int pos) { int p = pos; byte scale = data[p]; long timeTicks; p++; if (scale <= 2) { timeTicks = data[p] | (data[p + 1] << 8) | (data[p + 2] << 16); pos = p + 3; } else if (scale <= 4) { timeTicks = data[p] | (data[p + 1] << 8) | (data[p + 2] << 16); timeTicks |= ((long)data[p + 3] << 24); pos = p + 4; } else if (scale <= 7) { timeTicks = data[p] | (data[p + 1] << 8) | (data[p + 2] << 16); timeTicks |= ((long)data[p + 3] << 24) | ((long)data[p + 4] << 32); pos = p + 5; } else { throw new XmlException(Res.SqlTypes_ArithOverflow, (string)null); } return timeTicks * KatmaiTimeScaleMultiplicator[scale]; } static long GetKatmaiTimeZoneTicks(byte[] data, int pos) { return (short)(data[pos] | data[pos + 1] << 8) * TimeSpan.TicksPerMinute; } static int GetFractions(DateTime dt) { return (int)(dt.Ticks - new DateTime(dt.Year, dt.Month, dt.Day, dt.Hour, dt.Minute, dt.Second).Ticks); } static int GetFractions(DateTimeOffset dt) { return (int)(dt.Ticks - new DateTime(dt.Year, dt.Month, dt.Day, dt.Hour, dt.Minute, dt.Second).Ticks); } /* const long SqlDateTicks2Ticks = (long)10000 * 1000 * 60 * 60 * 24; const long SqlBaseDate = 693595; public static void DateTime2SqlDateTime(DateTime datetime, out int dateticks, out uint timeticks) { dateticks = (int)(datetime.Ticks / SqlDateTicks2Ticks) - 693595; double time = (double)(datetime.Ticks % SqlDateTicks2Ticks); time = time / 10000; // adjust to ms time = time * 0.3 + .5; // adjust to sqlticks (and round correctly) timeticks = (uint)time; } public static void DateTime2SqlSmallDateTime(DateTime datetime, out short dateticks, out ushort timeticks) { dateticks = (short)((int)(datetime.Ticks / SqlDateTicks2Ticks) - 693595); int time = (int)(datetime.Ticks % SqlDateTicks2Ticks); timeticks = (ushort)(time / (10000 * 1000 * 60)); // adjust to min } public static long DateTime2XsdTime(DateTime datetime) { // adjust to ms return (datetime.TimeOfDay.Ticks / 10000) * 4 + 0; } public static long DateTime2XsdDateTime(DateTime datetime) { long t = datetime.TimeOfDay.Ticks / 10000; t += (datetime.Day-1) * (long)1000*60*60*24; t += (datetime.Month-1) * (long)1000*60*60*24*31; int year = datetime.Year; if (year < -9999 || year > 9999) throw new XmlException(Res.SqlTypes_ArithOverflow, (string)null); t += (datetime.Year+9999) * (long)1000*60*60*24*31*12; return t*4 + 2; } public static long DateTime2XsdDate(DateTime datetime) { // compute local offset long tzOffset = -TimeZone.CurrentTimeZone.GetUtcOffset(datetime).Ticks / TimeSpan.TicksPerMinute; tzOffset += 14*60; // adjust datetime to UTC datetime = TimeZone.CurrentTimeZone.ToUniversalTime(datetime); Debug.Assert( tzOffset >= 0 ); int year = datetime.Year; if (year < -9999 || year > 9999) throw new XmlException(Res.SqlTypes_ArithOverflow, (string)null); long t = (datetime.Day - 1) + 31*(datetime.Month - 1) + 31*12*((long)(year+9999)); t *= (29*60); // adjust in timezone t += tzOffset; return t*4+1; } * */ } }
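// Editor's note (hedged illustration, not part of the original file): the helpers above decode the
// SQL Server 2008 ("Katmai") wire layout. A TIME value starts with a one-byte scale followed by
// 3, 4 or 5 little-endian payload bytes, a DATE is a 3-byte day count, and a DATETIMEOFFSET appends
// a 2-byte signed zone offset in minutes. The standalone sketch below mirrors the branch structure
// of GetKatmaiTimeTicks and GetKatmaiTimeZoneTicks; the class and member names are illustrative only.
using System;

internal static class KatmaiLayoutSketch
{
    // Payload bytes used for the time portion at a given scale (see GetKatmaiTimeTicks above).
    internal static int TimeBytesForScale(byte scale)
    {
        if (scale <= 2) return 3;
        if (scale <= 4) return 4;
        if (scale <= 7) return 5;
        throw new ArgumentOutOfRangeException("scale");
    }

    // Zone offset in minutes, stored as a little-endian signed 16-bit value
    // (see GetKatmaiTimeZoneTicks above).
    internal static short ZoneMinutes(byte[] data, int pos)
    {
        return (short)(data[pos] | data[pos + 1] << 8);
    }
}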
#region License // Copyright 2010 Buu Nguyen, Morten Mertner // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // The latest version of this file can be found at http://fasterflect.codeplex.com/ #endregion using System; using System.Linq; using System.Collections.Generic; namespace Fasterflect { /// <summary> /// Extension methods for inspecting types. /// </summary> public static class TypeExtensions { #region Implements /// <summary> /// Returns true if the supplied <paramref name="type"/> implements the given interface <typeparamref name="T"/>. /// </summary> /// <typeparam name="T">The type (interface) to check for.</typeparam> /// <param name="type">The type to check.</param> /// <returns>True if the given type implements the specified interface.</returns> /// <remarks>This method is for interfaces only. Use <seealso cref="Inherits"/> for class types and <seealso cref="InheritsOrImplements"/> /// to check both interfaces and classes.</remarks> public static bool Implements<T>( this Type type ) { return type.Implements( typeof(T) ); } /// <summary> /// Returns true of the supplied <paramref name="type"/> implements the given interface <paramref name="interfaceType"/>. If the given /// interface type is a generic type definition this method will use the generic type definition of any implemented interfaces /// to determine the result. /// </summary> /// <param name="interfaceType">The interface type to check for.</param> /// <param name="type">The type to check.</param> /// <returns>True if the given type implements the specified interface.</returns> /// <remarks>This method is for interfaces only. Use <seealso cref="Inherits"/> for classes and <seealso cref="InheritsOrImplements"/> /// to check both interfaces and classes.</remarks> public static bool Implements( this Type type, Type interfaceType ) { if( type == null || interfaceType == null || type == interfaceType ) return false; if( interfaceType.IsGenericTypeDefinition && type.GetInterfaces().Where( t => t.IsGenericType ).Select( t => t.GetGenericTypeDefinition() ).Any( gt => gt == interfaceType ) ) { return true; } return interfaceType.IsAssignableFrom( type ); } #endregion #region Inherits /// <summary> /// Returns true if the supplied <paramref name="type"/> inherits from the given class <typeparamref name="T"/>. /// </summary> /// <typeparam name="T">The type (class) to check for.</typeparam> /// <param name="type">The type to check.</param> /// <returns>True if the given type inherits from the specified class.</returns> /// <remarks>This method is for classes only. Use <seealso cref="Implements"/> for interface types and <seealso cref="InheritsOrImplements"/> /// to check both interfaces and classes.</remarks> public static bool Inherits<T>( this Type type ) { return type.Inherits( typeof(T) ); } /// <summary> /// Returns true if the supplied <paramref name="type"/> inherits from the given class <paramref name="baseType"/>. 
/// </summary> /// <param name="baseType">The type (class) to check for.</param> /// <param name="type">The type to check.</param> /// <returns>True if the given type inherits from the specified class.</returns> /// <remarks>This method is for classes only. Use <seealso cref="Implements"/> for interface types and <seealso cref="InheritsOrImplements"/> /// to check both interfaces and classes.</remarks> public static bool Inherits( this Type type, Type baseType ) { if( baseType == null || type == null || type == baseType ) return false; var rootType = typeof(object); if( baseType == rootType ) return true; while( type != null && type != rootType ) { var current = type.IsGenericType && baseType.IsGenericTypeDefinition ? type.GetGenericTypeDefinition() : type; if( baseType == current ) return true; type = type.BaseType; } return false; } #endregion #region InheritsOrImplements /// <summary> /// Returns true if the supplied <paramref name="type"/> inherits from or implements the type <typeparamref name="T"/>. /// </summary> /// <typeparam name="T">The base type to check for.</typeparam> /// <param name="type">The type to check.</param> /// <returns>True if the given type inherits from or implements the specified base type.</returns> public static bool InheritsOrImplements<T>( this Type type ) { return type.InheritsOrImplements( typeof(T) ); } /// <summary> /// Returns true of the supplied <paramref name="type"/> inherits from or implements the type <paramref name="baseType"/>. /// </summary> /// <param name="baseType">The base type to check for.</param> /// <param name="type">The type to check.</param> /// <returns>True if the given type inherits from or implements the specified base type.</returns> public static bool InheritsOrImplements( this Type type, Type baseType ) { if( type == null || baseType == null ) return false; return baseType.IsInterface ? type.Implements( baseType ) : type.Inherits( baseType ); } #endregion #region IsFrameworkType #region IsFrameworkType Helpers private static readonly List<byte[]> tokens = new List<byte[]> { new byte[] { 0xb7, 0x7a, 0x5c, 0x56, 0x19, 0x34, 0xe0, 0x89 }, new byte[] { 0x31, 0xbf, 0x38, 0x56, 0xad, 0x36, 0x4e, 0x35 }, new byte[] { 0xb0, 0x3f, 0x5f, 0x7f, 0x11, 0xd5, 0x0a, 0x3a } }; internal class ByteArrayEqualityComparer : EqualityComparer<byte[]> { public override bool Equals( byte[] x, byte[] y ) { return x != null && y != null && x.SequenceEqual( y ); } public override int GetHashCode( byte[] obj ) { return obj.GetHashCode(); } } #endregion /// <summary> /// Returns true if the supplied type is defined in an assembly signed by Microsoft. /// </summary> public static bool IsFrameworkType( this Type type ) { if( type == null ) { throw new ArgumentNullException( "type" ); } byte[] publicKeyToken = type.Assembly.GetName().GetPublicKeyToken(); return publicKeyToken != null && tokens.Contains( publicKeyToken, new ByteArrayEqualityComparer() ); } #endregion #region Name (with generic pretty-printing) /// <summary> /// Returns the C# name, including any generic parameters, of the supplied <paramref name="type"/>. 
/// </summary> /// <param name="type">The type to return the name for.</param> /// <returns>The type name formatted as you'd write it in C#.</returns> public static string Name( this Type type ) { if( type.IsArray ) { return string.Format( "{0}[]", type.GetElementType().Name() ); } if( type.ContainsGenericParameters || type.IsGenericType ) { if( type.BaseType == typeof(Nullable<>) || (type.BaseType == typeof(ValueType) && type.UnderlyingSystemType.Name.StartsWith( "Nullable" )) ) { return GetCSharpTypeName( type.GetGenericArguments().Single().Name ) + "?"; } int index = type.Name.IndexOf( "`" ); string genericTypeName = index > 0 ? type.Name.Substring( 0, index ) : type.Name; string genericArgs = string.Join( ",", type.GetGenericArguments().Select( t => t.Name() ).ToArray() ); return genericArgs.Length == 0 ? genericTypeName : genericTypeName + "<" + genericArgs + ">"; } return GetCSharpTypeName( type.Name ); } private static string GetCSharpTypeName( string typeName ) { switch( typeName ) { case "String": case "Object": case "Void": case "Byte": case "Double": case "Decimal": return typeName.ToLower(); case "Int16": return "short"; case "Int32": return "int"; case "Int64": return "long"; case "Single": return "float"; case "Boolean": return "bool"; default: return typeName; } } #endregion } }
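// Editor's note (hedged usage sketch): how the TypeExtensions methods above behave on a few common
// types. The expected results follow from the implementations of Implements, Inherits,
// InheritsOrImplements and Name shown above; the demo class itself is illustrative only.
using System;
using System.Collections.Generic;
using Fasterflect;

internal static class TypeExtensionsDemo
{
    internal static void Run()
    {
        // Implements checks interfaces, including open generic interface definitions.
        bool implementsOpenGeneric = typeof(List<int>).Implements(typeof(IEnumerable<>)); // true
        bool implementsDisposable = typeof(List<int>).Implements<IDisposable>();          // false

        // Inherits walks the base-class chain (matching open generic base types as well).
        bool inheritsArgumentException = typeof(ArgumentNullException).Inherits<ArgumentException>(); // true

        // InheritsOrImplements dispatches to Implements for interfaces and Inherits for classes.
        bool either = typeof(List<int>).InheritsOrImplements<IEnumerable<int>>(); // true

        // Name pretty-prints generic and nullable types the way you would write them in C#.
        string prettyName = typeof(Dictionary<string, int?>).Name(); // "Dictionary<string,int?>"

        Console.WriteLine("{0} {1} {2} {3} {4}", implementsOpenGeneric, implementsDisposable,
            inheritsArgumentException, either, prettyName);
    }
}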
/* * MethodResponse.cs - Implementation of the * "System.Runtime.Remoting.Messaging.MethodResponse" class. * * Copyright (C) 2003 Southern Storm Software, Pty Ltd. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ namespace System.Runtime.Remoting.Messaging { #if CONFIG_REMOTING using System.Collections; using System.Reflection; using System.Runtime.Serialization; [Serializable] [CLSCompliant(false)] public class MethodResponse : IMethodReturnMessage, ISerializable, IMethodMessage, IMessage, ISerializationRootObject, IMessageDictionary { // Internal state. protected IDictionary ExternalProperties; protected IDictionary InternalProperties; private Object[] outArgs; private String methodName; private String typeName; private String uri; private bool hasVarArgs; private bool isSoap; private LogicalCallContext context; private MethodBase method; private ParameterInfo[] parameters; private Type[] signature; private Exception exception; private Object returnValue; // Constructors. public MethodResponse(Header[] h1, IMethodCallMessage mcm) { isSoap = true; // This form is used for SOAP requests. if(mcm == null) { throw new ArgumentNullException("mcm"); } methodName = mcm.MethodName; typeName = mcm.TypeName; method = mcm.MethodBase; hasVarArgs = mcm.HasVarArgs; if(h1 != null) { foreach(Header header in h1) { ProcessHeader(header.Name, header.Value); } } } internal MethodResponse(IMethodReturnMessage mrm) { outArgs = mrm.OutArgs; methodName = mrm.MethodName; typeName = mrm.TypeName; uri = mrm.Uri; hasVarArgs = mrm.HasVarArgs; context = mrm.LogicalCallContext; method = mrm.MethodBase; exception = mrm.Exception; returnValue = mrm.ReturnValue; } // Implement the IMethodCallMessage interface. 
public virtual IDictionary Properties { get { if(InternalProperties == null) { InternalProperties = new Hashtable(); } if(ExternalProperties == null) { ExternalProperties = new MessageProperties (this, InternalProperties); } return ExternalProperties; } } public int ArgCount { get { return OutArgCount; } } public Object[] Args { get { return OutArgs; } } public bool HasVarArgs { get { return hasVarArgs; } } public LogicalCallContext LogicalCallContext { get { if(context == null) { context = new LogicalCallContext(); } return context; } } public MethodBase MethodBase { get { return method; } } public String MethodName { get { return methodName; } } public Object MethodSignature { get { if(signature == null) { FetchParameters(); if(parameters != null) { signature = new Type [parameters.Length]; int posn; for(posn = 0; posn < signature.Length; ++posn) { signature[posn] = parameters[posn].ParameterType; } } } return signature; } } public String TypeName { get { return typeName; } } public String Uri { get { return uri; } set { uri = value; } } public Object GetArg(int argNum) { return GetOutArg(argNum); } public String GetArgName(int index) { return GetOutArgName(index); } public Exception Exception { get { return exception; } } public int OutArgCount { get { if(outArgs != null) { return outArgs.Length; } else { return 0; } } } public Object[] OutArgs { get { return outArgs; } } public Object ReturnValue { get { return returnValue; } } public Object GetOutArg(int argNum) { return outArgs[argNum]; } public String GetOutArgName(int index) { FetchParameters(); if(parameters != null && outArgs != null) { int posn; for(posn = 0; posn < parameters.Length; ++posn) { if(parameters[posn].ParameterType.IsByRef) { if(index == 0) { return parameters[posn].Name; } --index; } } } throw new IndexOutOfRangeException(_("Arg_InvalidArrayIndex")); } // Implement the ISerializable interface. public virtual void GetObjectData(SerializationInfo info, StreamingContext context) { // Not needed. throw new NotSupportedException(); } // Handle incoming headers. public virtual Object HeaderHandler(Header[] h) { // Extract the method name from the headers, if present. if(h != null && h.Length != 0 && h[0].Name == "__methodName") { methodName = (String)(h[0].Value); if(h.Length != 1) { Header[] nh = new Header [h.Length - 1]; Array.Copy(h, 1, nh, 0, h.Length - 1); h = nh; } else { h = null; } } // Process the headers to set the message properties. if(h != null) { foreach(Header header in h) { ProcessHeader(header.Name, header.Value); } } return null; } // Process a header. private void ProcessHeader(String name, Object value) { Properties[name] = value; } // Fetch the parameter information from the method block. private void FetchParameters() { if(parameters == null && method != null) { parameters = method.GetParameters(); } } // Set the root object data for a SOAP method call. [TODO] private void RootSetSoapObjectData(SerializationInfo info) { // TODO } // Set the root object data for this method call. public void RootSetObjectData(SerializationInfo info, StreamingContext context) { if(info == null) { throw new ArgumentNullException("info"); } // Use a different algorithm for SOAP messages. if(isSoap) { RootSetSoapObjectData(info); return; } // De-serialize the supplied data.
SerializationInfoEnumerator se = info.GetEnumerator(); while(se.MoveNext()) { if(se.Name == "__return") { exception = null; } else if(se.Name == "__fault") { exception = (Exception)(se.Value); } else { ProcessHeader(se.Name, se.Value); } } } // Implement the IMessageDictionary interface. String[] IMessageDictionary.SpecialProperties { get { return SpecialProperties; } } Object IMessageDictionary.GetSpecialProperty(String name) { return GetSpecialProperty(name); } void IMessageDictionary.SetSpecialProperty(String name, Object value) { SetSpecialProperty(name, value); } internal virtual String[] SpecialProperties { get { if(Exception == null) { return new String[] { "__Uri", "__MethodName", "__MethodSignature", "__TypeName", "__Return", "__OutArgs", "__CallContext" }; } else { return new String[] { "__Uri", "__MethodName", "__MethodSignature", "__TypeName", "__CallContext" }; } } } internal virtual Object GetSpecialProperty(String name) { switch(name) { case "__Uri": return Uri; case "__MethodName": return MethodName; case "__MethodSignature": return MethodSignature; case "__TypeName": return TypeName; case "__Return": if(Exception != null) { return Exception; } else { return ReturnValue; } case "__OutArgs": return OutArgs; case "__CallContext": return LogicalCallContext; } return null; } internal virtual void SetSpecialProperty(String name, Object value) { switch(name) { case "__Uri": Uri = (String)value; break; case "__CallContext": context = (LogicalCallContext)value; break; } } }; // class MethodResponse #endif // CONFIG_REMOTING }; // namespace System.Runtime.Remoting.Messaging
using System; using System.Collections; using System.Collections.ObjectModel; using Newtonsoft.Json; using System.Collections.Generic; namespace TropoCSharp.Tropo { /// <summary> /// Ask is essentially a say that requires input; it requests information from the caller and waits for a response. /// </summary> public class Ask : TropoBase { [JsonProperty(PropertyName = "attempts")] public int? Attempts { get; set; } [JsonProperty(PropertyName = "allowSignals")] public Array allowSignals { get; set; } [JsonProperty(PropertyName = "bargein")] public bool? Bargein { get; set; } [JsonProperty(PropertyName = "interdigitTimeout")] public int? InterdigitTimeout { get; set; } [JsonProperty(PropertyName = "minConfidence")] public int? MinConfidence { get; set; } [JsonProperty(PropertyName = "name")] public string Name { get; set; } [JsonProperty(PropertyName = "recognizer")] public string Recognizer { get; set; } [JsonProperty(PropertyName = "required")] public bool? Required { get; set; } [JsonProperty(PropertyName = "choices")] public Choices Choices { get; set; } public Say Say { set { Says.Add(value); } } [JsonProperty(PropertyName = "say")] public ICollection<Say> Says { get; set; } [JsonProperty(PropertyName = "sensitivity")] public int? Sensitivity { get; set; } [JsonProperty(PropertyName = "speechCompleteTimeout")] public float? SpeechCompleteTimeout { get; set; } [JsonProperty(PropertyName = "speechIncompleteTimeout")] public float? SpeechIncompleteTimeout { get; set; } [JsonProperty(PropertyName = "timeout")] public float? Timeout { get; set; } [JsonProperty(PropertyName = "voice")] public string Voice { get; set; } [JsonProperty(PropertyName = "promptLogSecurity")] public string PromptLogSecurity { get; set; } [JsonProperty(PropertyName = "asrLogSecurity")] public string AsrLogSecurity { get; set; } [JsonProperty(PropertyName = "maskTemplate")] public string MaskTemplate { get; set; } public Ask() { Says = new Collection<Say>(); } public Ask(Choices choices, string name, Say say) { Says = new Collection<Say>(); Choices = choices; Name = name; Say = say; } } /// <summary> /// answer verb. /// </summary> public class Answer : TropoBase { [JsonProperty(PropertyName = "headers")] public IDictionary<String, String> Headers { get; set; } public Answer() { } } /// <summary> /// Initiates an outbound call or a text conversation. Note that this action is only valid when there is no active WebAPI call. /// </summary> public class Call : TropoBase { [JsonProperty(PropertyName = "to")] public IEnumerable<String> To { get; set; } [JsonProperty(PropertyName = "from")] public string From { get; set; } [JsonProperty(PropertyName = "network")] public string Network { get; set; } [JsonProperty(PropertyName = "channel")] public string Channel { get; set; } [JsonProperty(PropertyName = "answerOnMedia")] public bool? AnswerOnMedia { get; set; } [JsonProperty(PropertyName = "allowSignals")] public Array allowSignals { get; set; } [JsonProperty(PropertyName = "name")] public string Name { get; set; } [JsonProperty(PropertyName = "headers")] public IDictionary<String, String> Headers { get; set; } [JsonProperty(PropertyName = "recording")] public StartRecording Recording { get; set; } [JsonProperty(PropertyName = "required")] public bool? Required { get; set; } [JsonProperty(PropertyName = "timeout")] public float? 
Timeout { get; set; } [JsonProperty(PropertyName = "machineDetection")] public MachineDetection MachineDetection { get; set; } [JsonProperty(PropertyName = "voice")] public string Voice { get; set; } [JsonProperty(PropertyName = "callbackUrl")] public string CallbackUrl { get; set; } [JsonProperty(PropertyName = "promptLogSecurity")] public string PromptLogSecurity { get; set; } [JsonProperty(PropertyName = "label")] public string Label { get; set; } public Call() { } public Call(String to) { To = new List<String> {to}; } public Call(IEnumerable<String> to) { To = to; } } /// <summary> /// The grammar to use in recognizing and validating input. /// </summary> public class Choices : TropoBase { [JsonProperty(PropertyName = "value")] public string Value { get; set; } [JsonProperty(PropertyName = "mode")] public string Mode { get; set; } [JsonProperty(PropertyName = "terminator")] public string Terminator { get; set; } public Choices() { } public Choices(string @value) { Value = @value; } public Choices(string @value, string mode, string terminator) { Value = @value; Mode = mode; Terminator = terminator; } } /// <summary> /// The grammar to use in record and startRecording verb. /// </summary> public class RecordUrlTuple : TropoBase { [JsonProperty(PropertyName = "url")] public string Url { get; set; } [JsonProperty(PropertyName = "username")] public string Username { get; set; } [JsonProperty(PropertyName = "password")] public string Password { get; set; } [JsonProperty(PropertyName = "method")] public string Method { get; set; } public RecordUrlTuple() { } public RecordUrlTuple(string @url) { Url = @url; } public RecordUrlTuple(string @url, string username, string password, string method) { Url = @url; Username = username; Password = password; Method = method; } } /// <summary> /// The grammar for outbound call could use the ability to identify whether your call reached a live human or not. /// </summary> public class MachineDetection : TropoBase { [JsonProperty(PropertyName = "introduction")] public string Introduction { get; set; } [JsonProperty(PropertyName = "voice")] public string Voice { get; set; } public MachineDetection() { } public MachineDetection(string introduction) { Introduction = introduction; } public MachineDetection(string introduction, string voice) { Introduction = introduction; Voice = voice; } } /// <summary> /// This action allows multiple lines in separate sessions to be conferenced together so that the parties on each line can talk to each other simultaneously. /// </summary> public class Conference : TropoBase { [JsonProperty(PropertyName = "id")] public string Id { get; set; } [JsonProperty(PropertyName = "allowSignals")] public Array allowSignals { get; set; } [JsonProperty(PropertyName = "interdigitTimeout")] public int? InterdigitTimeout { get; set; } [JsonProperty(PropertyName = "mute")] public bool? Mute { get; set; } [JsonProperty(PropertyName = "name")] public string Name { get; set; } [JsonProperty(PropertyName = "playTones")] public bool? PlayTones { get; set; } [JsonProperty(PropertyName = "terminator")] public string Terminator { get; set; } [JsonProperty(PropertyName = "required")] public bool? 
Required { get; set; } [JsonProperty(PropertyName = "joinPrompt")] public JoinPrompt JoinPrompt { get; set; } [JsonProperty(PropertyName = "leavePrompt")] public LeavePrompt LeavePrompt { get; set; } [JsonProperty(PropertyName = "promptLogSecurity")] public string PromptLogSecurity { get; set; } public Conference() { } } /// <summary> /// Defines a prompt that plays to all participants of a conference when someone joins the conference. /// </summary> public class JoinPrompt : TropoBase { [JsonProperty(PropertyName = "value")] public string Value { get; set; } [JsonProperty(PropertyName = "voice")] public string Voice { get; set; } public JoinPrompt() { } public JoinPrompt(string value) { Value = value; } public JoinPrompt(string value, string voice) { Value = value; Voice = voice; } } /// <summary> /// Defines a prompt that plays to all participants of a conference when someone leaves the conference. /// </summary> public class LeavePrompt : TropoBase { [JsonProperty(PropertyName = "value")] public string Value { get; set; } [JsonProperty(PropertyName = "voice")] public string Voice { get; set; } public LeavePrompt() { } public LeavePrompt(string value) { Value = value; } public LeavePrompt(string value, string voice) { Value = value; Voice = voice; } } /// <summary> /// This action instructs Tropo to "hang-up" or disconnect the session associated with the current session. /// </summary> public class Hangup : TropoBase { public Hangup() { } } /// <summary> /// Creates a call, says something and then hangs up, all in one step. This is particularly useful for sending out a quick SMS or IM. /// </summary> public class Message : TropoBase { [JsonProperty(PropertyName = "say")] public Say Say { get; set; } [JsonProperty(PropertyName = "to")] public IEnumerable<String> To { get; set; } [JsonProperty(PropertyName = "from")] public string From { get; set; } [JsonProperty(PropertyName = "network")] public string Network { get; set; } [JsonProperty(PropertyName = "channel")] public string Channel { get; set; } [JsonProperty(PropertyName = "answerOnMedia")] public bool? AnswerOnMedia { get; set; } [JsonProperty(PropertyName = "name")] public string Name { get; set; } [JsonProperty(PropertyName = "required")] public bool? Required { get; set; } [JsonProperty(PropertyName = "timeout")] public float? Timeout { get; set; } [JsonProperty(PropertyName = "voice")] public string Voice { get; set; } [JsonProperty(PropertyName = "promptLogSecurity")] public string PromptLogSecurity { get; set; } public Message() { } } /// <summary> /// This action determines the event(s) to be handled. /// </summary> public class On : TropoBase { [JsonProperty(PropertyName = "event")] public string Event { get; set; } [JsonProperty(PropertyName = "next")] public string Next { get; set; } [JsonProperty(PropertyName = "say")] public Say Say { get; set; } [JsonProperty(PropertyName = "post")] public string Post { get; set; } public On() { } public On(string @event, string next, Say say) { Event = @event; Next = next; Say = say; } } /// <summary> /// Plays a prompt (audio file or text to speech) then optionally waits for a response from the caller and records it. /// </summary> public class Record : TropoBase { [JsonProperty(PropertyName = "attempts")] public int? Attempts { get; set; } [JsonProperty(PropertyName = "allowSignals")] public Array allowSignals { get; set; } [JsonProperty(PropertyName = "bargein")] public bool? Bargein { get; set; } [JsonProperty(PropertyName = "beep")] public bool? 
Beep { get; set; } [JsonProperty(PropertyName = "choices")] public Choices Choices { get; set; } [JsonProperty(PropertyName = "format")] public string Format { get; set; } [JsonProperty(PropertyName = "interdigitTimeout")] public int? InterdigitTimeout { get; set; } [JsonProperty(PropertyName = "maxSilence")] public float? MaxSilence { get; set; } [JsonProperty(PropertyName = "maxTime")] public float? MaxTime { get; set; } [JsonProperty(PropertyName = "method")] public string Method { get; set; } [JsonProperty(PropertyName = "name")] public string Name { get; set; } [JsonProperty(PropertyName = "required")] public bool? Required { get; set; } [JsonProperty(PropertyName = "say")] public Say Say { get; set; } [JsonProperty(PropertyName = "timeout")] public float? Timeout { get; set; } [JsonProperty(PropertyName = "password")] public string Password { get; set; } [JsonProperty(PropertyName = "transcription")] public Transcription Transcription { get; set; } [JsonProperty(PropertyName = "username")] public string Username { get; set; } [JsonProperty(PropertyName = "url")] public string Url { get; set; } [JsonProperty(PropertyName = "voice")] public string Voice { get; set; } [JsonProperty(PropertyName = "asyncUpload")] public bool? AsyncUpload { get; set; } [JsonProperty(PropertyName = "promptLogSecurity")] public string PromptLogSecurity { get; set; } [JsonProperty(PropertyName = "recordURL")] public IEnumerable<RecordUrlTuple> RecordingURL { get; set; } [JsonProperty(PropertyName = "sensitivity")] public float? Sensitivity { get; set; } public Record() { } } /// <summary> /// This is used to deflect the call to a third party SIP address. This action must be called before the call is answered. /// </summary> public class Redirect : TropoBase { [JsonProperty(PropertyName = "name")] public string Name { get; set; } [JsonProperty(PropertyName = "required")] public bool? Required { get; set; } [JsonProperty(PropertyName = "to")] public string To { get; set; } public Redirect() { } } /// <summary> /// Reject an incoming call. /// </summary> public class Reject : TropoBase { public Reject() { } } /// <summary> /// When the current session is a voice channel this key will either play a message or an audio file from a URL. /// In the case of an text channel it will send the text back to the user via instant messaging or SMS. /// </summary> public class Say : TropoBase { [JsonProperty(PropertyName = "value")] public string Value { get; set; } [JsonProperty(PropertyName = "allowSignals")] public Array allowSignals { get; set; } [JsonProperty(PropertyName = "media")] public Array Media { get; set; } [JsonProperty(PropertyName = "as")] public string As { get; set; } [JsonProperty(PropertyName = "name")] public string Name { get; set; } [JsonProperty(PropertyName = "required")] public bool? Required { get; set; } [JsonProperty(PropertyName = "voice")] public string Voice { get; set; } /// <summary> /// say in ask has event property /// </summary> [JsonProperty(PropertyName = "event")] public string Event { get; set; } [JsonProperty(PropertyName = "promptLogSecurity")] public string PromptLogSecurity { get; set; } public Say() { } public Say(string @value) { Value = @value; } public Say(string @value, string @event) { Value = @value; Event = @event; } } /// <summary> /// Allows Tropo applications to begin recording the current session. /// </summary> public class StartRecording : TropoBase { [JsonProperty(PropertyName = "asyncUpload")] public bool? 
AsyncUpload { get; set; } [JsonProperty(PropertyName = "format")] public string Format { get; set; } [JsonProperty(PropertyName = "method")] public string Method { get; set; } [JsonProperty(PropertyName = "url")] public object Url { get; set; } [JsonProperty(PropertyName = "username")] public string Username { get; set; } [JsonProperty(PropertyName = "password")] public string Password { get; set; } [JsonProperty(PropertyName = "transcriptionID")] public string TranscriptionID { get; set; } [JsonProperty(PropertyName = "transcriptionLanguage")] public string TranscriptionLanguage { get; set; } [JsonProperty(PropertyName = "transcriptionEmailFormat")] public string TranscriptionEmailFormat { get; set; } [JsonProperty(PropertyName = "transcriptionOutURI")] public string TranscriptionOutURI { get; set; } public StartRecording() { } public StartRecording(string format, string method, string url, string username, string password) { Format = format; Method = method; Url = url; Username = username; Password = password; } } /// <summary> /// This action stops the recording of the current call after startCallRecording has been called. /// </summary> public class StopRecording : TropoBase { public StopRecording() { } } /// <summary> /// Transcribes spoken text. /// </summary> public class Transcription : TropoBase { [JsonProperty(PropertyName = "id")] public string Id { get; set; } [JsonProperty(PropertyName = "url")] public string Url { get; set; } [JsonProperty(PropertyName = "emailFormat")] public string EmailFormat { get; set; } [JsonProperty(PropertyName = "language")] public string Language { get; set; } public Transcription() { } } /// <summary> /// This will transfer an already answered call to another destination / phone number. /// </summary> public class Transfer : TropoBase { [JsonProperty(PropertyName = "to")] public IEnumerable<String> To { get; set; } [JsonProperty(PropertyName = "from")] public string From { get; set; } [JsonProperty(PropertyName = "answerOnMedia")] public bool? AnswerOnMedia { get; set; } [JsonProperty(PropertyName = "allowSignals")] public Array allowSignals { get; set; } [JsonProperty(PropertyName = "machineDetection")] public MachineDetection MachineDetection { get; set; } [JsonProperty(PropertyName = "choices")] public Choices Choices { get; set; } [JsonProperty(PropertyName = "headers")] public IDictionary<String, String> Headers { get; set; } [JsonProperty(PropertyName = "interdigitTimeout")] public float? InterdigitTimeout { get; set; } [JsonProperty(PropertyName = "ringRepeat")] public int? RingRepeat { get; set; } [JsonProperty(PropertyName = "playTones")] public bool? PlayTones { get; set; } [JsonProperty(PropertyName = "name")] public string Name { get; set; } [JsonProperty(PropertyName = "on")] public On On { get; set; } [JsonProperty(PropertyName = "required")] public bool? Required { get; set; } [JsonProperty(PropertyName = "terminator")] public string Terminator { get; set; } [JsonProperty(PropertyName = "timeout")] public float? 
Timeout { get; set; } [JsonProperty(PropertyName = "voice")] public string Voice { get; set; } [JsonProperty(PropertyName = "callbackUrl")] public string CallbackUrl { get; set; } [JsonProperty(PropertyName = "promptLogSecurity")] public string PromptLogSecurity { get; set; } [JsonProperty(PropertyName = "label")] public string Label { get; set; } public Transfer() { } } /// <summary> /// This will make the thread sleep in milliseconds /// </summary> public class Wait : TropoBase { [JsonProperty(PropertyName = "milliseconds")] public int? Milliseconds { get; set; } [JsonProperty(PropertyName = "allowSignals")] public Array AllowSignals { get; set; } public Wait() { } } /// <summary> /// This will turn logging on/off /// </summary> public class GeneralLogSecurity : TropoBase { [JsonProperty(PropertyName = "generalLogSecurity")] public string State { get; set; } public GeneralLogSecurity() { } } /// <summary> /// Defines an endpoint for transfers and redirects. /// </summary> public class Endpoint { [JsonProperty(PropertyName = "to")] public string To { get; set; } [JsonProperty(PropertyName = "id")] public string Id { get; set; } [JsonProperty(PropertyName = "e164Id")] public string E164Id { get; set; } [JsonProperty(PropertyName = "channel")] public string Channel { get; set; } [JsonProperty(PropertyName = "name")] public string Name { get; set; } [JsonProperty(PropertyName = "network")] public string Network { get; set; } public Endpoint() { } public Endpoint(string id, string channel, string name, string network) { Id = id; Channel = channel; Name = name; Network = network; } public Endpoint(string id, string e164Id, string channel, string name, string network) { Id = id; E164Id = e164Id; Channel = channel; Name = name; Network = network; } public Endpoint(string to) { To = to; } } /// <summary> /// Defines an entity for MMS media. /// </summary> public class MMSMediaItem { [JsonProperty(PropertyName = "status")] public string Status { get; set; } [JsonProperty(PropertyName = "media")] public string Media { get; set; } [JsonProperty(PropertyName = "text")] public string Text { get; set; } [JsonProperty(PropertyName = "disposition")] public string Disposition { get; set; } public MMSMediaItem() { } public MMSMediaItem(string status, string media, string text, string disposition) { Status = status; Media = media; Text = text; Disposition = disposition; } } }
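// Editor's note (hedged usage sketch): composing an Ask from the verb classes above and serializing
// it with Json.NET. The JsonProperty attributes on the classes determine the emitted property names;
// the serializer settings used here are an assumption for illustration, not part of the library.
using Newtonsoft.Json;
using TropoCSharp.Tropo;

internal static class AskDemo
{
    internal static string BuildAskJson()
    {
        var ask = new Ask(new Choices("[4 DIGITS]", "dtmf", "#"), "pin",
                          new Say("Please enter your 4 digit PIN."))
        {
            Attempts = 3,
            Timeout = 10
        };

        // Omit unset (null) members so only the populated verb properties appear in the JSON.
        var settings = new JsonSerializerSettings { NullValueHandling = NullValueHandling.Ignore };
        return JsonConvert.SerializeObject(ask, settings);
    }
}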
using System; using System.Collections.Generic; using System.Linq; using System.Net; using System.Text; using System.Threading; using System.Threading.Tasks; using Microsoft.WindowsAzure.Storage; using Microsoft.WindowsAzure.Storage.Table; using Orleans.AzureUtils; using Orleans.Providers; using Orleans.Providers.Azure; using Orleans.Runtime; using Orleans.Runtime.Configuration; using Orleans.Serialization; namespace Orleans.Storage { /// <summary> /// Simple storage provider for writing grain state data to Azure table storage. /// </summary> /// <remarks> /// <para> /// Required configuration params: <c>DataConnectionString</c> /// </para> /// <para> /// Optional configuration params: /// <c>TableName</c> -- defaults to <c>OrleansGrainState</c> /// <c>DeleteStateOnClear</c> -- defaults to <c>false</c> /// </para> /// </remarks> /// <example> /// Example configuration for this storage provider in OrleansConfiguration.xml file: /// <code> /// &lt;OrleansConfiguration xmlns="urn:orleans"> /// &lt;Globals> /// &lt;StorageProviders> /// &lt;Provider Type="Orleans.Storage.AzureTableStorage" Name="AzureStore" /// DataConnectionString="UseDevelopmentStorage=true" /// DeleteStateOnClear="true" /// /> /// &lt;/StorageProviders> /// </code> /// </example> public class AzureTableStorage : IStorageProvider, IRestExceptionDecoder { internal const string DataConnectionStringPropertyName = "DataConnectionString"; internal const string TableNamePropertyName = "TableName"; internal const string DeleteOnClearPropertyName = "DeleteStateOnClear"; internal const string UseJsonFormatPropertyName = "UseJsonFormat"; internal const string TableNameDefaultValue = "OrleansGrainState"; private string dataConnectionString; private string tableName; private string serviceId; private GrainStateTableDataManager tableDataManager; private bool isDeleteStateOnClear; private static int counter; private readonly int id; // each property can hold 64KB of data and each entity can take 1MB in total, so 15 full properties take // 15 * 64 = 960 KB leaving room for the primary key, timestamp etc private const int MAX_DATA_CHUNK_SIZE = 64 * 1024; private const int MAX_STRING_PROPERTY_LENGTH = 32 * 1024; private const int MAX_DATA_CHUNKS_COUNT = 15; private const string BINARY_DATA_PROPERTY_NAME = "Data"; private const string STRING_DATA_PROPERTY_NAME = "StringData"; private bool useJsonFormat; private Newtonsoft.Json.JsonSerializerSettings jsonSettings; /// <summary> Name of this storage provider instance. </summary> /// <see cref="IProvider.Name"/> public string Name { get; private set; } /// <summary> Logger used by this storage provider instance. </summary> /// <see cref="IStorageProvider.Log"/> public Logger Log { get; private set; } /// <summary> Default constructor </summary> public AzureTableStorage() { tableName = TableNameDefaultValue; id = Interlocked.Increment(ref counter); } /// <summary> Initialization function for this storage provider. 
</summary> /// <see cref="IProvider.Init"/> public Task Init(string name, IProviderRuntime providerRuntime, IProviderConfiguration config) { Name = name; serviceId = providerRuntime.ServiceId.ToString(); if (!config.Properties.ContainsKey(DataConnectionStringPropertyName) || string.IsNullOrWhiteSpace(config.Properties[DataConnectionStringPropertyName])) throw new ArgumentException("DataConnectionString property not set"); dataConnectionString = config.Properties["DataConnectionString"]; if (config.Properties.ContainsKey(TableNamePropertyName)) tableName = config.Properties[TableNamePropertyName]; isDeleteStateOnClear = config.Properties.ContainsKey(DeleteOnClearPropertyName) && "true".Equals(config.Properties[DeleteOnClearPropertyName], StringComparison.OrdinalIgnoreCase); Log = providerRuntime.GetLogger("Storage.AzureTableStorage." + id); var initMsg = string.Format("Init: Name={0} ServiceId={1} Table={2} DeleteStateOnClear={3}", Name, serviceId, tableName, isDeleteStateOnClear); if (config.Properties.ContainsKey(UseJsonFormatPropertyName)) useJsonFormat = "true".Equals(config.Properties[UseJsonFormatPropertyName], StringComparison.OrdinalIgnoreCase); this.jsonSettings = OrleansJsonSerializer.UpdateSerializerSettings(OrleansJsonSerializer.GetDefaultSerializerSettings(), config); initMsg = String.Format("{0} UseJsonFormat={1}", initMsg, useJsonFormat); Log.Info((int)AzureProviderErrorCode.AzureTableProvider_InitProvider, initMsg); Log.Info((int)AzureProviderErrorCode.AzureTableProvider_ParamConnectionString, "AzureTableStorage Provider is using DataConnectionString: {0}", ConfigUtilities.PrintDataConnectionInfo(dataConnectionString)); tableDataManager = new GrainStateTableDataManager(tableName, dataConnectionString, Log); return tableDataManager.InitTableAsync(); } // Internal method to initialize for testing internal void InitLogger(Logger logger) { Log = logger; } /// <summary> Shutdown this storage provider. </summary> /// <see cref="IProvider.Close"/> public Task Close() { tableDataManager = null; return TaskDone.Done; } /// <summary> Read state data function for this storage provider. </summary> /// <see cref="IStorageProvider.ReadStateAsync"/> public async Task ReadStateAsync(string grainType, GrainReference grainReference, IGrainState grainState) { if (tableDataManager == null) throw new ArgumentException("GrainState-Table property not initialized"); string pk = GetKeyString(grainReference); if (Log.IsVerbose3) Log.Verbose3((int)AzureProviderErrorCode.AzureTableProvider_ReadingData, "Reading: GrainType={0} Pk={1} Grainid={2} from Table={3}", grainType, pk, grainReference, tableName); string partitionKey = pk; string rowKey = grainType; GrainStateRecord record = await tableDataManager.Read(partitionKey, rowKey).ConfigureAwait(false); if (record != null) { var entity = record.Entity; if (entity != null) { var loadedState = ConvertFromStorageFormat(entity); grainState.State = loadedState ?? Activator.CreateInstance(grainState.State.GetType()); grainState.ETag = record.ETag; } } // Else leave grainState in previous default condition } /// <summary> Write state data function for this storage provider. 
</summary> /// <see cref="IStorageProvider.WriteStateAsync"/> public async Task WriteStateAsync(string grainType, GrainReference grainReference, IGrainState grainState) { if (tableDataManager == null) throw new ArgumentException("GrainState-Table property not initialized"); string pk = GetKeyString(grainReference); if (Log.IsVerbose3) Log.Verbose3((int)AzureProviderErrorCode.AzureTableProvider_WritingData, "Writing: GrainType={0} Pk={1} Grainid={2} ETag={3} to Table={4}", grainType, pk, grainReference, grainState.ETag, tableName); var entity = new DynamicTableEntity(pk, grainType); ConvertToStorageFormat(grainState.State, entity); var record = new GrainStateRecord { Entity = entity, ETag = grainState.ETag }; try { await tableDataManager.Write(record); grainState.ETag = record.ETag; } catch (StorageException exc) { Log.Error((int)AzureProviderErrorCode.AzureTableProvider_WriteError, $"Error Writing: GrainType={grainType} Grainid={grainReference} ETag={grainState.ETag} to Table={tableName} Exception={exc.Message}", exc); if (exc.IsUpdateConditionNotSatisfiedError()) { throw new TableStorageUpdateConditionNotSatisfiedException(grainType, grainReference, tableName, "Unknown", grainState.ETag, exc); } throw; } catch (Exception exc) { Log.Error((int)AzureProviderErrorCode.AzureTableProvider_WriteError, $"Error Writing: GrainType={grainType} Grainid={grainReference} ETag={grainState.ETag} to Table={tableName} Exception={exc.Message}", exc); throw; } } /// <summary> Clear / Delete state data function for this storage provider. </summary> /// <remarks> /// If the <c>DeleteStateOnClear</c> is set to <c>true</c> then the table row /// for this grain will be deleted / removed, otherwise the table row will be /// cleared by overwriting with default / null values. /// </remarks> /// <see cref="IStorageProvider.ClearStateAsync"/> public async Task ClearStateAsync(string grainType, GrainReference grainReference, IGrainState grainState) { if (tableDataManager == null) throw new ArgumentException("GrainState-Table property not initialized"); string pk = GetKeyString(grainReference); if (Log.IsVerbose3) Log.Verbose3((int)AzureProviderErrorCode.AzureTableProvider_WritingData, "Clearing: GrainType={0} Pk={1} Grainid={2} ETag={3} DeleteStateOnClear={4} from Table={5}", grainType, pk, grainReference, grainState.ETag, isDeleteStateOnClear, tableName); var entity = new DynamicTableEntity(pk, grainType); var record = new GrainStateRecord { Entity = entity, ETag = grainState.ETag }; string operation = "Clearing"; try { if (isDeleteStateOnClear) { operation = "Deleting"; await tableDataManager.Delete(record).ConfigureAwait(false); } else { await tableDataManager.Write(record).ConfigureAwait(false); } grainState.ETag = record.ETag; // Update in-memory data to the new ETag } catch (Exception exc) { Log.Error((int)AzureProviderErrorCode.AzureTableProvider_DeleteError, string.Format("Error {0}: GrainType={1} Grainid={2} ETag={3} from Table={4} Exception={5}", operation, grainType, grainReference, grainState.ETag, tableName, exc.Message), exc); throw; } } /// <summary> /// Serialize to Azure storage format in either binary or JSON format. /// </summary> /// <param name="grainState">The grain state data to be serialized</param> /// <param name="entity">The Azure table entity the data should be stored in</param> /// <remarks> /// See: /// http://msdn.microsoft.com/en-us/library/system.web.script.serialization.javascriptserializer.aspx /// for more on the JSON serializer. 
/// </remarks> internal void ConvertToStorageFormat(object grainState, DynamicTableEntity entity) { int dataSize; IEnumerable<EntityProperty> properties; string basePropertyName; if (useJsonFormat) { // http://james.newtonking.com/json/help/index.html?topic=html/T_Newtonsoft_Json_JsonConvert.htm string data = Newtonsoft.Json.JsonConvert.SerializeObject(grainState, jsonSettings); if (Log.IsVerbose3) Log.Verbose3("Writing JSON data size = {0} for grain id = Partition={1} / Row={2}", data.Length, entity.PartitionKey, entity.RowKey); // each Unicode character takes 2 bytes dataSize = data.Length * 2; properties = SplitStringData(data).Select(t => new EntityProperty(t)); basePropertyName = STRING_DATA_PROPERTY_NAME; } else { // Convert to binary format byte[] data = SerializationManager.SerializeToByteArray(grainState); if (Log.IsVerbose3) Log.Verbose3("Writing binary data size = {0} for grain id = Partition={1} / Row={2}", data.Length, entity.PartitionKey, entity.RowKey); dataSize = data.Length; properties = SplitBinaryData(data).Select(t => new EntityProperty(t)); basePropertyName = BINARY_DATA_PROPERTY_NAME; } CheckMaxDataSize(dataSize, MAX_DATA_CHUNK_SIZE * MAX_DATA_CHUNKS_COUNT); foreach (var keyValuePair in properties.Zip(GetPropertyNames(basePropertyName), (property, name) => new KeyValuePair<string, EntityProperty>(name, property))) { entity.Properties.Add(keyValuePair); } } private void CheckMaxDataSize(int dataSize, int maxDataSize) { if (dataSize > maxDataSize) { var msg = string.Format("Data too large to write to Azure table. Size={0} MaxSize={1}", dataSize, maxDataSize); Log.Error(0, msg); throw new ArgumentOutOfRangeException("GrainState.Size", msg); } } private static IEnumerable<string> SplitStringData(string stringData) { var startIndex = 0; while (startIndex < stringData.Length) { var chunkSize = Math.Min(MAX_STRING_PROPERTY_LENGTH, stringData.Length - startIndex); yield return stringData.Substring(startIndex, chunkSize); startIndex += chunkSize; } } private static IEnumerable<byte[]> SplitBinaryData(byte[] binaryData) { var startIndex = 0; while (startIndex < binaryData.Length) { var chunkSize = Math.Min(MAX_DATA_CHUNK_SIZE, binaryData.Length - startIndex); var chunk = new byte[chunkSize]; Array.Copy(binaryData, startIndex, chunk, 0, chunkSize); yield return chunk; startIndex += chunkSize; } } private static IEnumerable<string> GetPropertyNames(string basePropertyName) { yield return basePropertyName; for (var i = 1; i < MAX_DATA_CHUNKS_COUNT; ++i) { yield return basePropertyName + i; } } private static IEnumerable<byte[]> ReadBinaryDataChunks(DynamicTableEntity entity) { foreach (var binaryDataPropertyName in GetPropertyNames(BINARY_DATA_PROPERTY_NAME)) { EntityProperty dataProperty; if (entity.Properties.TryGetValue(binaryDataPropertyName, out dataProperty)) { switch (dataProperty.PropertyType) { // if TablePayloadFormat.JsonNoMetadata is used case EdmType.String: var stringValue = dataProperty.StringValue; if (!string.IsNullOrEmpty(stringValue)) { yield return Convert.FromBase64String(stringValue); } break; // if any payload type providing metadata is used case EdmType.Binary: var binaryValue = dataProperty.BinaryValue; if (binaryValue != null && binaryValue.Length > 0) { yield return binaryValue; } break; } } } } private static byte[] ReadBinaryData(DynamicTableEntity entity) { var dataChunks = ReadBinaryDataChunks(entity).ToArray(); var dataSize = dataChunks.Select(d => d.Length).Sum(); var result = new byte[dataSize]; var startIndex = 0; foreach (var dataChunk in 
dataChunks) { Array.Copy(dataChunk, 0, result, startIndex, dataChunk.Length); startIndex += dataChunk.Length; } return result; } private static IEnumerable<string> ReadStringDataChunks(DynamicTableEntity entity) { foreach (var stringDataPropertyName in GetPropertyNames(STRING_DATA_PROPERTY_NAME)) { EntityProperty dataProperty; if (entity.Properties.TryGetValue(stringDataPropertyName, out dataProperty)) { var data = dataProperty.StringValue; if (!string.IsNullOrEmpty(data)) { yield return data; } } } } private static string ReadStringData(DynamicTableEntity entity) { return string.Join(string.Empty, ReadStringDataChunks(entity)); } /// <summary> /// Deserialize from Azure storage format /// </summary> /// <param name="entity">The Azure table entity the stored data</param> internal object ConvertFromStorageFormat(DynamicTableEntity entity) { var binaryData = ReadBinaryData(entity); var stringData = ReadStringData(entity); object dataValue = null; try { if (binaryData.Length > 0) { // Rehydrate dataValue = SerializationManager.DeserializeFromByteArray<object>(binaryData); } else if (!string.IsNullOrEmpty(stringData)) { dataValue = Newtonsoft.Json.JsonConvert.DeserializeObject<object>(stringData, jsonSettings); } // Else, no data found } catch (Exception exc) { var sb = new StringBuilder(); if (binaryData.Length > 0) { sb.AppendFormat("Unable to convert from storage format GrainStateEntity.Data={0}", binaryData); } else if (!string.IsNullOrEmpty(stringData)) { sb.AppendFormat("Unable to convert from storage format GrainStateEntity.StringData={0}", stringData); } if (dataValue != null) { sb.AppendFormat("Data Value={0} Type={1}", dataValue, dataValue.GetType()); } Log.Error(0, sb.ToString(), exc); throw new AggregateException(sb.ToString(), exc); } return dataValue; } private string GetKeyString(GrainReference grainReference) { var key = String.Format("{0}_{1}", serviceId, grainReference.ToKeyString()); return AzureStorageUtils.SanitizeTableProperty(key); } internal class GrainStateRecord { public string ETag { get; set; } public DynamicTableEntity Entity { get; set; } } private class GrainStateTableDataManager { public string TableName { get; private set; } private readonly AzureTableDataManager<DynamicTableEntity> tableManager; private readonly Logger logger; public GrainStateTableDataManager(string tableName, string storageConnectionString, Logger logger) { this.logger = logger; TableName = tableName; tableManager = new AzureTableDataManager<DynamicTableEntity>(tableName, storageConnectionString); } public Task InitTableAsync() { return tableManager.InitTableAsync(); } public async Task<GrainStateRecord> Read(string partitionKey, string rowKey) { if (logger.IsVerbose3) logger.Verbose3((int)AzureProviderErrorCode.AzureTableProvider_Storage_Reading, "Reading: PartitionKey={0} RowKey={1} from Table={2}", partitionKey, rowKey, TableName); try { Tuple<DynamicTableEntity, string> data = await tableManager.ReadSingleTableEntryAsync(partitionKey, rowKey).ConfigureAwait(false); if (data == null || data.Item1 == null) { if (logger.IsVerbose2) logger.Verbose2((int)AzureProviderErrorCode.AzureTableProvider_DataNotFound, "DataNotFound reading: PartitionKey={0} RowKey={1} from Table={2}", partitionKey, rowKey, TableName); return null; } DynamicTableEntity stateEntity = data.Item1; var record = new GrainStateRecord { Entity = stateEntity, ETag = data.Item2 }; if (logger.IsVerbose3) logger.Verbose3((int)AzureProviderErrorCode.AzureTableProvider_Storage_DataRead, "Read: PartitionKey={0} RowKey={1} from 
Table={2} with ETag={3}", stateEntity.PartitionKey, stateEntity.RowKey, TableName, record.ETag); return record; } catch (Exception exc) { if (AzureStorageUtils.TableStorageDataNotFound(exc)) { if (logger.IsVerbose2) logger.Verbose2((int)AzureProviderErrorCode.AzureTableProvider_DataNotFound, "DataNotFound reading (exception): PartitionKey={0} RowKey={1} from Table={2} Exception={3}", partitionKey, rowKey, TableName, LogFormatter.PrintException(exc)); return null; // No data } throw; } } public async Task Write(GrainStateRecord record) { var entity = record.Entity; if (logger.IsVerbose3) logger.Verbose3((int)AzureProviderErrorCode.AzureTableProvider_Storage_Writing, "Writing: PartitionKey={0} RowKey={1} to Table={2} with ETag={3}", entity.PartitionKey, entity.RowKey, TableName, record.ETag); string eTag = String.IsNullOrEmpty(record.ETag) ? await tableManager.CreateTableEntryAsync(entity).ConfigureAwait(false) : await tableManager.UpdateTableEntryAsync(entity, record.ETag).ConfigureAwait(false); record.ETag = eTag; } public async Task Delete(GrainStateRecord record) { var entity = record.Entity; if (logger.IsVerbose3) logger.Verbose3((int)AzureProviderErrorCode.AzureTableProvider_Storage_Writing, "Deleting: PartitionKey={0} RowKey={1} from Table={2} with ETag={3}", entity.PartitionKey, entity.RowKey, TableName, record.ETag); await tableManager.DeleteTableEntryAsync(entity, record.ETag).ConfigureAwait(false); record.ETag = null; } } /// <summary> Decodes Storage exceptions.</summary> public bool DecodeException(Exception e, out HttpStatusCode httpStatusCode, out string restStatus, bool getRESTErrors = false) { return AzureStorageUtils.EvaluateException(e, out httpStatusCode, out restStatus, getRESTErrors); } } }
// // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. using System; using System.Collections.Generic; using System.Globalization; using System.Linq; using System.Net; using System.Net.Http; using System.Net.Http.Headers; using System.Text; using System.Threading; using System.Threading.Tasks; using System.Xml.Linq; using Microsoft.WindowsAzure; using Microsoft.WindowsAzure.Common; using Microsoft.WindowsAzure.Common.Internals; using Microsoft.WindowsAzure.Management.Storage; using Microsoft.WindowsAzure.Management.Storage.Models; namespace Microsoft.WindowsAzure.Management.Storage { /// <summary> /// The Service Management API includes operations for managing the storage /// accounts beneath your subscription. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460790.aspx for /// more information) /// </summary> internal partial class StorageAccountOperations : IServiceOperations<StorageManagementClient>, Microsoft.WindowsAzure.Management.Storage.IStorageAccountOperations { /// <summary> /// Initializes a new instance of the StorageAccountOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> internal StorageAccountOperations(StorageManagementClient client) { this._client = client; } private StorageManagementClient _client; /// <summary> /// Gets a reference to the /// Microsoft.WindowsAzure.Management.Storage.StorageManagementClient. /// </summary> public StorageManagementClient Client { get { return this._client; } } /// <summary> /// The Begin Creating Storage Account operation creates a new storage /// account in Azure. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264518.aspx /// for more information) /// </summary> /// <param name='parameters'> /// Required. Parameters supplied to the Begin Creating Storage Account /// operation. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// A standard service response including an HTTP status code and /// request ID. 
/// </returns> public async System.Threading.Tasks.Task<OperationResponse> BeginCreatingAsync(StorageAccountCreateParameters parameters, CancellationToken cancellationToken) { // Validate if (parameters == null) { throw new ArgumentNullException("parameters"); } if (parameters.Description != null && parameters.Description.Length > 1024) { throw new ArgumentOutOfRangeException("parameters.Description"); } if (parameters.Label == null) { throw new ArgumentNullException("parameters.Label"); } if (parameters.Label.Length > 100) { throw new ArgumentOutOfRangeException("parameters.Label"); } if (parameters.Name == null) { throw new ArgumentNullException("parameters.Name"); } if (parameters.Name.Length < 3) { throw new ArgumentOutOfRangeException("parameters.Name"); } if (parameters.Name.Length > 24) { throw new ArgumentOutOfRangeException("parameters.Name"); } foreach (char nameChar in parameters.Name) { if (char.IsLower(nameChar) == false && char.IsDigit(nameChar) == false) { throw new ArgumentOutOfRangeException("parameters.Name"); } } // TODO: Validate parameters.Name is a valid DNS name. // Tracing bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = Tracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("parameters", parameters); Tracing.Enter(invocationId, this, "BeginCreatingAsync", tracingParameters); } // Construct URL string url = "/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/services/storageservices"; string baseUrl = this.Client.BaseUri.AbsoluteUri; // Trim '/' character from the end of baseUrl and beginning of url. if (baseUrl[baseUrl.Length - 1] == '/') { baseUrl = baseUrl.Substring(0, baseUrl.Length - 1); } if (url[0] == '/') { url = url.Substring(1); } url = baseUrl + "/" + url; url = url.Replace(" ", "%20"); // Create HTTP transport objects HttpRequestMessage httpRequest = null; try { httpRequest = new HttpRequestMessage(); httpRequest.Method = HttpMethod.Post; httpRequest.RequestUri = new Uri(url); // Set Headers httpRequest.Headers.Add("x-ms-version", "2014-10-01"); // Set Credentials cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); // Serialize Request string requestContent = null; XDocument requestDoc = new XDocument(); XElement createStorageServiceInputElement = new XElement(XName.Get("CreateStorageServiceInput", "http://schemas.microsoft.com/windowsazure")); requestDoc.Add(createStorageServiceInputElement); XElement serviceNameElement = new XElement(XName.Get("ServiceName", "http://schemas.microsoft.com/windowsazure")); serviceNameElement.Value = parameters.Name; createStorageServiceInputElement.Add(serviceNameElement); if (parameters.Description != null) { XElement descriptionElement = new XElement(XName.Get("Description", "http://schemas.microsoft.com/windowsazure")); descriptionElement.Value = parameters.Description; createStorageServiceInputElement.Add(descriptionElement); } else { XElement emptyElement = new XElement(XName.Get("Description", "http://schemas.microsoft.com/windowsazure")); XAttribute nilAttribute = new XAttribute(XName.Get("nil", "http://www.w3.org/2001/XMLSchema-instance"), ""); nilAttribute.Value = "true"; emptyElement.Add(nilAttribute); createStorageServiceInputElement.Add(emptyElement); } XElement labelElement = new 
XElement(XName.Get("Label", "http://schemas.microsoft.com/windowsazure")); labelElement.Value = TypeConversion.ToBase64String(parameters.Label); createStorageServiceInputElement.Add(labelElement); if (parameters.AffinityGroup != null) { XElement affinityGroupElement = new XElement(XName.Get("AffinityGroup", "http://schemas.microsoft.com/windowsazure")); affinityGroupElement.Value = parameters.AffinityGroup; createStorageServiceInputElement.Add(affinityGroupElement); } if (parameters.Location != null) { XElement locationElement = new XElement(XName.Get("Location", "http://schemas.microsoft.com/windowsazure")); locationElement.Value = parameters.Location; createStorageServiceInputElement.Add(locationElement); } if (parameters.ExtendedProperties != null) { if (parameters.ExtendedProperties is ILazyCollection == false || ((ILazyCollection)parameters.ExtendedProperties).IsInitialized) { XElement extendedPropertiesDictionaryElement = new XElement(XName.Get("ExtendedProperties", "http://schemas.microsoft.com/windowsazure")); foreach (KeyValuePair<string, string> pair in parameters.ExtendedProperties) { string extendedPropertiesKey = pair.Key; string extendedPropertiesValue = pair.Value; XElement extendedPropertiesElement = new XElement(XName.Get("ExtendedProperty", "http://schemas.microsoft.com/windowsazure")); extendedPropertiesDictionaryElement.Add(extendedPropertiesElement); XElement extendedPropertiesKeyElement = new XElement(XName.Get("Name", "http://schemas.microsoft.com/windowsazure")); extendedPropertiesKeyElement.Value = extendedPropertiesKey; extendedPropertiesElement.Add(extendedPropertiesKeyElement); XElement extendedPropertiesValueElement = new XElement(XName.Get("Value", "http://schemas.microsoft.com/windowsazure")); extendedPropertiesValueElement.Value = extendedPropertiesValue; extendedPropertiesElement.Add(extendedPropertiesValueElement); } createStorageServiceInputElement.Add(extendedPropertiesDictionaryElement); } } if (parameters.AccountType != null) { XElement accountTypeElement = new XElement(XName.Get("AccountType", "http://schemas.microsoft.com/windowsazure")); accountTypeElement.Value = parameters.AccountType; createStorageServiceInputElement.Add(accountTypeElement); } requestContent = requestDoc.ToString(); httpRequest.Content = new StringContent(requestContent, Encoding.UTF8); httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/xml"); // Send Request HttpResponseMessage httpResponse = null; try { if (shouldTrace) { Tracing.SendRequest(invocationId, httpRequest); } cancellationToken.ThrowIfCancellationRequested(); httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); if (shouldTrace) { Tracing.ReceiveResponse(invocationId, httpResponse); } HttpStatusCode statusCode = httpResponse.StatusCode; if (statusCode != HttpStatusCode.Accepted) { cancellationToken.ThrowIfCancellationRequested(); CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false)); if (shouldTrace) { Tracing.Error(invocationId, ex); } throw ex; } // Create Result OperationResponse result = null; result = new OperationResponse(); result.StatusCode = statusCode; if (httpResponse.Headers.Contains("x-ms-request-id")) { result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (shouldTrace) { Tracing.Exit(invocationId, result); } return result; } finally { if (httpResponse != null) { 
httpResponse.Dispose(); } } } finally { if (httpRequest != null) { httpRequest.Dispose(); } } } /// <summary> /// The Check Name Availability operation checks if a storage account /// name is available for use in Azure. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/jj154125.aspx /// for more information) /// </summary> /// <param name='accountName'> /// Required. The desired storage account name to check for /// availability. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// The response to a storage account check name availability request. /// </returns> public async System.Threading.Tasks.Task<Microsoft.WindowsAzure.Management.Storage.Models.CheckNameAvailabilityResponse> CheckNameAvailabilityAsync(string accountName, CancellationToken cancellationToken) { // Validate if (accountName == null) { throw new ArgumentNullException("accountName"); } // Tracing bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = Tracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("accountName", accountName); Tracing.Enter(invocationId, this, "CheckNameAvailabilityAsync", tracingParameters); } // Construct URL string url = "/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/services/storageservices/operations/isavailable/" + accountName.Trim(); string baseUrl = this.Client.BaseUri.AbsoluteUri; // Trim '/' character from the end of baseUrl and beginning of url. if (baseUrl[baseUrl.Length - 1] == '/') { baseUrl = baseUrl.Substring(0, baseUrl.Length - 1); } if (url[0] == '/') { url = url.Substring(1); } url = baseUrl + "/" + url; url = url.Replace(" ", "%20"); // Create HTTP transport objects HttpRequestMessage httpRequest = null; try { httpRequest = new HttpRequestMessage(); httpRequest.Method = HttpMethod.Get; httpRequest.RequestUri = new Uri(url); // Set Headers httpRequest.Headers.Add("x-ms-version", "2014-10-01"); // Set Credentials cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); // Send Request HttpResponseMessage httpResponse = null; try { if (shouldTrace) { Tracing.SendRequest(invocationId, httpRequest); } cancellationToken.ThrowIfCancellationRequested(); httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); if (shouldTrace) { Tracing.ReceiveResponse(invocationId, httpResponse); } HttpStatusCode statusCode = httpResponse.StatusCode; if (statusCode != HttpStatusCode.OK) { cancellationToken.ThrowIfCancellationRequested(); CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false)); if (shouldTrace) { Tracing.Error(invocationId, ex); } throw ex; } // Create Result CheckNameAvailabilityResponse result = null; // Deserialize Response cancellationToken.ThrowIfCancellationRequested(); string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); result = new CheckNameAvailabilityResponse(); XDocument responseDoc = XDocument.Parse(responseContent); XElement availabilityResponseElement = responseDoc.Element(XName.Get("AvailabilityResponse", "http://schemas.microsoft.com/windowsazure")); if (availabilityResponseElement != null) { XElement 
resultElement = availabilityResponseElement.Element(XName.Get("Result", "http://schemas.microsoft.com/windowsazure")); if (resultElement != null) { bool resultInstance = bool.Parse(resultElement.Value); result.IsAvailable = resultInstance; } XElement reasonElement = availabilityResponseElement.Element(XName.Get("Reason", "http://schemas.microsoft.com/windowsazure")); if (reasonElement != null) { bool isNil = false; XAttribute nilAttribute = reasonElement.Attribute(XName.Get("nil", "http://www.w3.org/2001/XMLSchema-instance")); if (nilAttribute != null) { isNil = nilAttribute.Value == "true"; } if (isNil == false) { string reasonInstance = reasonElement.Value; result.Reason = reasonInstance; } } } result.StatusCode = statusCode; if (httpResponse.Headers.Contains("x-ms-request-id")) { result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (shouldTrace) { Tracing.Exit(invocationId, result); } return result; } finally { if (httpResponse != null) { httpResponse.Dispose(); } } } finally { if (httpRequest != null) { httpRequest.Dispose(); } } } /// <summary> /// The Create Storage Account operation creates a new storage account /// in Azure. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264518.aspx /// for more information) /// </summary> /// <param name='parameters'> /// Required. Parameters supplied to the Create Storage Account /// operation. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// The response body contains the status of the specified asynchronous /// operation, indicating whether it has succeeded, is inprogress, or /// has failed. Note that this status is distinct from the HTTP status /// code returned for the Get Operation Status operation itself. If /// the asynchronous operation succeeded, the response body includes /// the HTTP status code for the successful request. If the /// asynchronous operation failed, the response body includes the HTTP /// status code for the failed request and error information regarding /// the failure. 
/// </returns> public async System.Threading.Tasks.Task<OperationStatusResponse> CreateAsync(StorageAccountCreateParameters parameters, CancellationToken cancellationToken) { StorageManagementClient client = this.Client; bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = Tracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("parameters", parameters); Tracing.Enter(invocationId, this, "CreateAsync", tracingParameters); } try { if (shouldTrace) { client = this.Client.WithHandler(new ClientRequestTrackingHandler(invocationId)); } cancellationToken.ThrowIfCancellationRequested(); OperationResponse response = await client.StorageAccounts.BeginCreatingAsync(parameters, cancellationToken).ConfigureAwait(false); cancellationToken.ThrowIfCancellationRequested(); OperationStatusResponse result = await client.GetOperationStatusAsync(response.RequestId, cancellationToken).ConfigureAwait(false); int delayInSeconds = 30; if (client.LongRunningOperationInitialTimeout >= 0) { delayInSeconds = client.LongRunningOperationInitialTimeout; } while ((result.Status != OperationStatus.InProgress) == false) { cancellationToken.ThrowIfCancellationRequested(); await TaskEx.Delay(delayInSeconds * 1000, cancellationToken).ConfigureAwait(false); cancellationToken.ThrowIfCancellationRequested(); result = await client.GetOperationStatusAsync(response.RequestId, cancellationToken).ConfigureAwait(false); delayInSeconds = 30; if (client.LongRunningOperationRetryTimeout >= 0) { delayInSeconds = client.LongRunningOperationRetryTimeout; } } if (shouldTrace) { Tracing.Exit(invocationId, result); } if (result.Status != OperationStatus.Succeeded) { if (result.Error != null) { CloudException ex = new CloudException(result.Error.Code + " : " + result.Error.Message); ex.ErrorCode = result.Error.Code; ex.ErrorMessage = result.Error.Message; if (shouldTrace) { Tracing.Error(invocationId, ex); } throw ex; } else { CloudException ex = new CloudException(""); if (shouldTrace) { Tracing.Error(invocationId, ex); } throw ex; } } return result; } finally { if (client != null && shouldTrace) { client.Dispose(); } } } /// <summary> /// The Delete Storage Account operation deletes the specified storage /// account from Azure. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264517.aspx /// for more information) /// </summary> /// <param name='accountName'> /// Required. The name of the storage account to be deleted. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// A standard service response including an HTTP status code and /// request ID. /// </returns> public async System.Threading.Tasks.Task<OperationResponse> DeleteAsync(string accountName, CancellationToken cancellationToken) { // Validate if (accountName == null) { throw new ArgumentNullException("accountName"); } // Tracing bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = Tracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("accountName", accountName); Tracing.Enter(invocationId, this, "DeleteAsync", tracingParameters); } // Construct URL string url = "/" + (this.Client.Credentials.SubscriptionId != null ? 
this.Client.Credentials.SubscriptionId.Trim() : "") + "/services/storageservices/" + accountName.Trim(); string baseUrl = this.Client.BaseUri.AbsoluteUri; // Trim '/' character from the end of baseUrl and beginning of url. if (baseUrl[baseUrl.Length - 1] == '/') { baseUrl = baseUrl.Substring(0, baseUrl.Length - 1); } if (url[0] == '/') { url = url.Substring(1); } url = baseUrl + "/" + url; url = url.Replace(" ", "%20"); // Create HTTP transport objects HttpRequestMessage httpRequest = null; try { httpRequest = new HttpRequestMessage(); httpRequest.Method = HttpMethod.Delete; httpRequest.RequestUri = new Uri(url); // Set Headers httpRequest.Headers.Add("x-ms-version", "2014-10-01"); // Set Credentials cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); // Send Request HttpResponseMessage httpResponse = null; try { if (shouldTrace) { Tracing.SendRequest(invocationId, httpRequest); } cancellationToken.ThrowIfCancellationRequested(); httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); if (shouldTrace) { Tracing.ReceiveResponse(invocationId, httpResponse); } HttpStatusCode statusCode = httpResponse.StatusCode; if (statusCode != HttpStatusCode.OK) { cancellationToken.ThrowIfCancellationRequested(); CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false)); if (shouldTrace) { Tracing.Error(invocationId, ex); } throw ex; } // Create Result OperationResponse result = null; result = new OperationResponse(); result.StatusCode = statusCode; if (httpResponse.Headers.Contains("x-ms-request-id")) { result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (shouldTrace) { Tracing.Exit(invocationId, result); } return result; } finally { if (httpResponse != null) { httpResponse.Dispose(); } } } finally { if (httpRequest != null) { httpRequest.Dispose(); } } } /// <summary> /// The Get Storage Account Properties operation returns system /// properties for the specified storage account. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460802.aspx /// for more information) /// </summary> /// <param name='accountName'> /// Required. Name of the storage account to get properties for. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// The Get Storage Account Properties operation response. /// </returns> public async System.Threading.Tasks.Task<Microsoft.WindowsAzure.Management.Storage.Models.StorageAccountGetResponse> GetAsync(string accountName, CancellationToken cancellationToken) { // Validate if (accountName == null) { throw new ArgumentNullException("accountName"); } // Tracing bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = Tracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("accountName", accountName); Tracing.Enter(invocationId, this, "GetAsync", tracingParameters); } // Construct URL string url = "/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/services/storageservices/" + accountName.Trim(); string baseUrl = this.Client.BaseUri.AbsoluteUri; // Trim '/' character from the end of baseUrl and beginning of url. 
if (baseUrl[baseUrl.Length - 1] == '/') { baseUrl = baseUrl.Substring(0, baseUrl.Length - 1); } if (url[0] == '/') { url = url.Substring(1); } url = baseUrl + "/" + url; url = url.Replace(" ", "%20"); // Create HTTP transport objects HttpRequestMessage httpRequest = null; try { httpRequest = new HttpRequestMessage(); httpRequest.Method = HttpMethod.Get; httpRequest.RequestUri = new Uri(url); // Set Headers httpRequest.Headers.Add("x-ms-version", "2014-10-01"); // Set Credentials cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); // Send Request HttpResponseMessage httpResponse = null; try { if (shouldTrace) { Tracing.SendRequest(invocationId, httpRequest); } cancellationToken.ThrowIfCancellationRequested(); httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); if (shouldTrace) { Tracing.ReceiveResponse(invocationId, httpResponse); } HttpStatusCode statusCode = httpResponse.StatusCode; if (statusCode != HttpStatusCode.OK) { cancellationToken.ThrowIfCancellationRequested(); CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false)); if (shouldTrace) { Tracing.Error(invocationId, ex); } throw ex; } // Create Result StorageAccountGetResponse result = null; // Deserialize Response cancellationToken.ThrowIfCancellationRequested(); string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); result = new StorageAccountGetResponse(); XDocument responseDoc = XDocument.Parse(responseContent); XElement storageServiceElement = responseDoc.Element(XName.Get("StorageService", "http://schemas.microsoft.com/windowsazure")); if (storageServiceElement != null) { StorageAccount storageServiceInstance = new StorageAccount(); result.StorageAccount = storageServiceInstance; XElement urlElement = storageServiceElement.Element(XName.Get("Url", "http://schemas.microsoft.com/windowsazure")); if (urlElement != null) { Uri urlInstance = TypeConversion.TryParseUri(urlElement.Value); storageServiceInstance.Uri = urlInstance; } XElement serviceNameElement = storageServiceElement.Element(XName.Get("ServiceName", "http://schemas.microsoft.com/windowsazure")); if (serviceNameElement != null) { string serviceNameInstance = serviceNameElement.Value; storageServiceInstance.Name = serviceNameInstance; } XElement storageServicePropertiesElement = storageServiceElement.Element(XName.Get("StorageServiceProperties", "http://schemas.microsoft.com/windowsazure")); if (storageServicePropertiesElement != null) { StorageAccountProperties storageServicePropertiesInstance = new StorageAccountProperties(); storageServiceInstance.Properties = storageServicePropertiesInstance; XElement descriptionElement = storageServicePropertiesElement.Element(XName.Get("Description", "http://schemas.microsoft.com/windowsazure")); if (descriptionElement != null) { bool isNil = false; XAttribute nilAttribute = descriptionElement.Attribute(XName.Get("nil", "http://www.w3.org/2001/XMLSchema-instance")); if (nilAttribute != null) { isNil = nilAttribute.Value == "true"; } if (isNil == false) { string descriptionInstance = descriptionElement.Value; storageServicePropertiesInstance.Description = descriptionInstance; } } XElement affinityGroupElement = storageServicePropertiesElement.Element(XName.Get("AffinityGroup", "http://schemas.microsoft.com/windowsazure")); if 
(affinityGroupElement != null) { string affinityGroupInstance = affinityGroupElement.Value; storageServicePropertiesInstance.AffinityGroup = affinityGroupInstance; } XElement locationElement = storageServicePropertiesElement.Element(XName.Get("Location", "http://schemas.microsoft.com/windowsazure")); if (locationElement != null) { string locationInstance = locationElement.Value; storageServicePropertiesInstance.Location = locationInstance; } XElement labelElement = storageServicePropertiesElement.Element(XName.Get("Label", "http://schemas.microsoft.com/windowsazure")); if (labelElement != null) { string labelInstance = TypeConversion.FromBase64String(labelElement.Value); storageServicePropertiesInstance.Label = labelInstance; } XElement statusElement = storageServicePropertiesElement.Element(XName.Get("Status", "http://schemas.microsoft.com/windowsazure")); if (statusElement != null) { StorageAccountStatus statusInstance = ((StorageAccountStatus)Enum.Parse(typeof(StorageAccountStatus), statusElement.Value, true)); storageServicePropertiesInstance.Status = statusInstance; } XElement endpointsSequenceElement = storageServicePropertiesElement.Element(XName.Get("Endpoints", "http://schemas.microsoft.com/windowsazure")); if (endpointsSequenceElement != null) { foreach (XElement endpointsElement in endpointsSequenceElement.Elements(XName.Get("Endpoint", "http://schemas.microsoft.com/windowsazure"))) { storageServicePropertiesInstance.Endpoints.Add(TypeConversion.TryParseUri(endpointsElement.Value)); } } XElement geoPrimaryRegionElement = storageServicePropertiesElement.Element(XName.Get("GeoPrimaryRegion", "http://schemas.microsoft.com/windowsazure")); if (geoPrimaryRegionElement != null) { string geoPrimaryRegionInstance = geoPrimaryRegionElement.Value; storageServicePropertiesInstance.GeoPrimaryRegion = geoPrimaryRegionInstance; } XElement statusOfPrimaryElement = storageServicePropertiesElement.Element(XName.Get("StatusOfPrimary", "http://schemas.microsoft.com/windowsazure")); if (statusOfPrimaryElement != null && string.IsNullOrEmpty(statusOfPrimaryElement.Value) == false) { GeoRegionStatus statusOfPrimaryInstance = ((GeoRegionStatus)Enum.Parse(typeof(GeoRegionStatus), statusOfPrimaryElement.Value, true)); storageServicePropertiesInstance.StatusOfGeoPrimaryRegion = statusOfPrimaryInstance; } XElement lastGeoFailoverTimeElement = storageServicePropertiesElement.Element(XName.Get("LastGeoFailoverTime", "http://schemas.microsoft.com/windowsazure")); if (lastGeoFailoverTimeElement != null && string.IsNullOrEmpty(lastGeoFailoverTimeElement.Value) == false) { DateTime lastGeoFailoverTimeInstance = DateTime.Parse(lastGeoFailoverTimeElement.Value, CultureInfo.InvariantCulture); storageServicePropertiesInstance.LastGeoFailoverTime = lastGeoFailoverTimeInstance; } XElement geoSecondaryRegionElement = storageServicePropertiesElement.Element(XName.Get("GeoSecondaryRegion", "http://schemas.microsoft.com/windowsazure")); if (geoSecondaryRegionElement != null) { string geoSecondaryRegionInstance = geoSecondaryRegionElement.Value; storageServicePropertiesInstance.GeoSecondaryRegion = geoSecondaryRegionInstance; } XElement statusOfSecondaryElement = storageServicePropertiesElement.Element(XName.Get("StatusOfSecondary", "http://schemas.microsoft.com/windowsazure")); if (statusOfSecondaryElement != null && string.IsNullOrEmpty(statusOfSecondaryElement.Value) == false) { GeoRegionStatus statusOfSecondaryInstance = ((GeoRegionStatus)Enum.Parse(typeof(GeoRegionStatus), statusOfSecondaryElement.Value, true)); 
storageServicePropertiesInstance.StatusOfGeoSecondaryRegion = statusOfSecondaryInstance; } XElement accountTypeElement = storageServicePropertiesElement.Element(XName.Get("AccountType", "http://schemas.microsoft.com/windowsazure")); if (accountTypeElement != null) { string accountTypeInstance = accountTypeElement.Value; storageServicePropertiesInstance.AccountType = accountTypeInstance; } } XElement extendedPropertiesSequenceElement = storageServiceElement.Element(XName.Get("ExtendedProperties", "http://schemas.microsoft.com/windowsazure")); if (extendedPropertiesSequenceElement != null) { foreach (XElement extendedPropertiesElement in extendedPropertiesSequenceElement.Elements(XName.Get("ExtendedProperty", "http://schemas.microsoft.com/windowsazure"))) { string extendedPropertiesKey = extendedPropertiesElement.Element(XName.Get("Name", "http://schemas.microsoft.com/windowsazure")).Value; string extendedPropertiesValue = extendedPropertiesElement.Element(XName.Get("Value", "http://schemas.microsoft.com/windowsazure")).Value; storageServiceInstance.ExtendedProperties.Add(extendedPropertiesKey, extendedPropertiesValue); } } } result.StatusCode = statusCode; if (httpResponse.Headers.Contains("x-ms-request-id")) { result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (shouldTrace) { Tracing.Exit(invocationId, result); } return result; } finally { if (httpResponse != null) { httpResponse.Dispose(); } } } finally { if (httpRequest != null) { httpRequest.Dispose(); } } } /// <summary> /// The Get Storage Keys operation returns the primary and secondary /// access keys for the specified storage account. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460785.aspx /// for more information) /// </summary> /// <param name='accountName'> /// Required. The name of the desired storage account. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// The primary and secondary access keys for a storage account. /// </returns> public async System.Threading.Tasks.Task<Microsoft.WindowsAzure.Management.Storage.Models.StorageAccountGetKeysResponse> GetKeysAsync(string accountName, CancellationToken cancellationToken) { // Validate if (accountName == null) { throw new ArgumentNullException("accountName"); } // Tracing bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = Tracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("accountName", accountName); Tracing.Enter(invocationId, this, "GetKeysAsync", tracingParameters); } // Construct URL string url = "/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/services/storageservices/" + accountName.Trim() + "/keys"; string baseUrl = this.Client.BaseUri.AbsoluteUri; // Trim '/' character from the end of baseUrl and beginning of url. 
if (baseUrl[baseUrl.Length - 1] == '/') { baseUrl = baseUrl.Substring(0, baseUrl.Length - 1); } if (url[0] == '/') { url = url.Substring(1); } url = baseUrl + "/" + url; url = url.Replace(" ", "%20"); // Create HTTP transport objects HttpRequestMessage httpRequest = null; try { httpRequest = new HttpRequestMessage(); httpRequest.Method = HttpMethod.Get; httpRequest.RequestUri = new Uri(url); // Set Headers httpRequest.Headers.Add("x-ms-version", "2014-10-01"); // Set Credentials cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); // Send Request HttpResponseMessage httpResponse = null; try { if (shouldTrace) { Tracing.SendRequest(invocationId, httpRequest); } cancellationToken.ThrowIfCancellationRequested(); httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); if (shouldTrace) { Tracing.ReceiveResponse(invocationId, httpResponse); } HttpStatusCode statusCode = httpResponse.StatusCode; if (statusCode != HttpStatusCode.OK) { cancellationToken.ThrowIfCancellationRequested(); CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false)); if (shouldTrace) { Tracing.Error(invocationId, ex); } throw ex; } // Create Result StorageAccountGetKeysResponse result = null; // Deserialize Response cancellationToken.ThrowIfCancellationRequested(); string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); result = new StorageAccountGetKeysResponse(); XDocument responseDoc = XDocument.Parse(responseContent); XElement storageServiceElement = responseDoc.Element(XName.Get("StorageService", "http://schemas.microsoft.com/windowsazure")); if (storageServiceElement != null) { XElement urlElement = storageServiceElement.Element(XName.Get("Url", "http://schemas.microsoft.com/windowsazure")); if (urlElement != null) { Uri urlInstance = TypeConversion.TryParseUri(urlElement.Value); result.Uri = urlInstance; } XElement storageServiceKeysElement = storageServiceElement.Element(XName.Get("StorageServiceKeys", "http://schemas.microsoft.com/windowsazure")); if (storageServiceKeysElement != null) { XElement primaryElement = storageServiceKeysElement.Element(XName.Get("Primary", "http://schemas.microsoft.com/windowsazure")); if (primaryElement != null) { string primaryInstance = primaryElement.Value; result.PrimaryKey = primaryInstance; } XElement secondaryElement = storageServiceKeysElement.Element(XName.Get("Secondary", "http://schemas.microsoft.com/windowsazure")); if (secondaryElement != null) { string secondaryInstance = secondaryElement.Value; result.SecondaryKey = secondaryInstance; } } } result.StatusCode = statusCode; if (httpResponse.Headers.Contains("x-ms-request-id")) { result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (shouldTrace) { Tracing.Exit(invocationId, result); } return result; } finally { if (httpResponse != null) { httpResponse.Dispose(); } } } finally { if (httpRequest != null) { httpRequest.Dispose(); } } } /// <summary> /// The List Storage Accounts operation lists the storage accounts /// available under the current subscription. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460787.aspx /// for more information) /// </summary> /// <param name='cancellationToken'> /// Cancellation token. 
/// </param> /// <returns> /// The List Storage Accounts operation response. /// </returns> public async System.Threading.Tasks.Task<Microsoft.WindowsAzure.Management.Storage.Models.StorageAccountListResponse> ListAsync(CancellationToken cancellationToken) { // Validate // Tracing bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = Tracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); Tracing.Enter(invocationId, this, "ListAsync", tracingParameters); } // Construct URL string url = "/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/services/storageservices"; string baseUrl = this.Client.BaseUri.AbsoluteUri; // Trim '/' character from the end of baseUrl and beginning of url. if (baseUrl[baseUrl.Length - 1] == '/') { baseUrl = baseUrl.Substring(0, baseUrl.Length - 1); } if (url[0] == '/') { url = url.Substring(1); } url = baseUrl + "/" + url; url = url.Replace(" ", "%20"); // Create HTTP transport objects HttpRequestMessage httpRequest = null; try { httpRequest = new HttpRequestMessage(); httpRequest.Method = HttpMethod.Get; httpRequest.RequestUri = new Uri(url); // Set Headers httpRequest.Headers.Add("x-ms-version", "2014-10-01"); // Set Credentials cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); // Send Request HttpResponseMessage httpResponse = null; try { if (shouldTrace) { Tracing.SendRequest(invocationId, httpRequest); } cancellationToken.ThrowIfCancellationRequested(); httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); if (shouldTrace) { Tracing.ReceiveResponse(invocationId, httpResponse); } HttpStatusCode statusCode = httpResponse.StatusCode; if (statusCode != HttpStatusCode.OK) { cancellationToken.ThrowIfCancellationRequested(); CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false)); if (shouldTrace) { Tracing.Error(invocationId, ex); } throw ex; } // Create Result StorageAccountListResponse result = null; // Deserialize Response cancellationToken.ThrowIfCancellationRequested(); string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); result = new StorageAccountListResponse(); XDocument responseDoc = XDocument.Parse(responseContent); XElement storageServicesSequenceElement = responseDoc.Element(XName.Get("StorageServices", "http://schemas.microsoft.com/windowsazure")); if (storageServicesSequenceElement != null) { foreach (XElement storageServicesElement in storageServicesSequenceElement.Elements(XName.Get("StorageService", "http://schemas.microsoft.com/windowsazure"))) { StorageAccount storageServiceInstance = new StorageAccount(); result.StorageAccounts.Add(storageServiceInstance); XElement urlElement = storageServicesElement.Element(XName.Get("Url", "http://schemas.microsoft.com/windowsazure")); if (urlElement != null) { Uri urlInstance = TypeConversion.TryParseUri(urlElement.Value); storageServiceInstance.Uri = urlInstance; } XElement serviceNameElement = storageServicesElement.Element(XName.Get("ServiceName", "http://schemas.microsoft.com/windowsazure")); if (serviceNameElement != null) { string serviceNameInstance = serviceNameElement.Value; storageServiceInstance.Name = 
serviceNameInstance; } XElement storageServicePropertiesElement = storageServicesElement.Element(XName.Get("StorageServiceProperties", "http://schemas.microsoft.com/windowsazure")); if (storageServicePropertiesElement != null) { StorageAccountProperties storageServicePropertiesInstance = new StorageAccountProperties(); storageServiceInstance.Properties = storageServicePropertiesInstance; XElement descriptionElement = storageServicePropertiesElement.Element(XName.Get("Description", "http://schemas.microsoft.com/windowsazure")); if (descriptionElement != null) { bool isNil = false; XAttribute nilAttribute = descriptionElement.Attribute(XName.Get("nil", "http://www.w3.org/2001/XMLSchema-instance")); if (nilAttribute != null) { isNil = nilAttribute.Value == "true"; } if (isNil == false) { string descriptionInstance = descriptionElement.Value; storageServicePropertiesInstance.Description = descriptionInstance; } } XElement affinityGroupElement = storageServicePropertiesElement.Element(XName.Get("AffinityGroup", "http://schemas.microsoft.com/windowsazure")); if (affinityGroupElement != null) { string affinityGroupInstance = affinityGroupElement.Value; storageServicePropertiesInstance.AffinityGroup = affinityGroupInstance; } XElement locationElement = storageServicePropertiesElement.Element(XName.Get("Location", "http://schemas.microsoft.com/windowsazure")); if (locationElement != null) { string locationInstance = locationElement.Value; storageServicePropertiesInstance.Location = locationInstance; } XElement labelElement = storageServicePropertiesElement.Element(XName.Get("Label", "http://schemas.microsoft.com/windowsazure")); if (labelElement != null) { string labelInstance = TypeConversion.FromBase64String(labelElement.Value); storageServicePropertiesInstance.Label = labelInstance; } XElement statusElement = storageServicePropertiesElement.Element(XName.Get("Status", "http://schemas.microsoft.com/windowsazure")); if (statusElement != null) { StorageAccountStatus statusInstance = ((StorageAccountStatus)Enum.Parse(typeof(StorageAccountStatus), statusElement.Value, true)); storageServicePropertiesInstance.Status = statusInstance; } XElement endpointsSequenceElement = storageServicePropertiesElement.Element(XName.Get("Endpoints", "http://schemas.microsoft.com/windowsazure")); if (endpointsSequenceElement != null) { foreach (XElement endpointsElement in endpointsSequenceElement.Elements(XName.Get("Endpoint", "http://schemas.microsoft.com/windowsazure"))) { storageServicePropertiesInstance.Endpoints.Add(TypeConversion.TryParseUri(endpointsElement.Value)); } } XElement geoPrimaryRegionElement = storageServicePropertiesElement.Element(XName.Get("GeoPrimaryRegion", "http://schemas.microsoft.com/windowsazure")); if (geoPrimaryRegionElement != null) { string geoPrimaryRegionInstance = geoPrimaryRegionElement.Value; storageServicePropertiesInstance.GeoPrimaryRegion = geoPrimaryRegionInstance; } XElement statusOfPrimaryElement = storageServicePropertiesElement.Element(XName.Get("StatusOfPrimary", "http://schemas.microsoft.com/windowsazure")); if (statusOfPrimaryElement != null && string.IsNullOrEmpty(statusOfPrimaryElement.Value) == false) { GeoRegionStatus statusOfPrimaryInstance = ((GeoRegionStatus)Enum.Parse(typeof(GeoRegionStatus), statusOfPrimaryElement.Value, true)); storageServicePropertiesInstance.StatusOfGeoPrimaryRegion = statusOfPrimaryInstance; } XElement lastGeoFailoverTimeElement = storageServicePropertiesElement.Element(XName.Get("LastGeoFailoverTime", 
"http://schemas.microsoft.com/windowsazure")); if (lastGeoFailoverTimeElement != null && string.IsNullOrEmpty(lastGeoFailoverTimeElement.Value) == false) { DateTime lastGeoFailoverTimeInstance = DateTime.Parse(lastGeoFailoverTimeElement.Value, CultureInfo.InvariantCulture); storageServicePropertiesInstance.LastGeoFailoverTime = lastGeoFailoverTimeInstance; } XElement geoSecondaryRegionElement = storageServicePropertiesElement.Element(XName.Get("GeoSecondaryRegion", "http://schemas.microsoft.com/windowsazure")); if (geoSecondaryRegionElement != null) { string geoSecondaryRegionInstance = geoSecondaryRegionElement.Value; storageServicePropertiesInstance.GeoSecondaryRegion = geoSecondaryRegionInstance; } XElement statusOfSecondaryElement = storageServicePropertiesElement.Element(XName.Get("StatusOfSecondary", "http://schemas.microsoft.com/windowsazure")); if (statusOfSecondaryElement != null && string.IsNullOrEmpty(statusOfSecondaryElement.Value) == false) { GeoRegionStatus statusOfSecondaryInstance = ((GeoRegionStatus)Enum.Parse(typeof(GeoRegionStatus), statusOfSecondaryElement.Value, true)); storageServicePropertiesInstance.StatusOfGeoSecondaryRegion = statusOfSecondaryInstance; } XElement accountTypeElement = storageServicePropertiesElement.Element(XName.Get("AccountType", "http://schemas.microsoft.com/windowsazure")); if (accountTypeElement != null) { string accountTypeInstance = accountTypeElement.Value; storageServicePropertiesInstance.AccountType = accountTypeInstance; } } XElement extendedPropertiesSequenceElement = storageServicesElement.Element(XName.Get("ExtendedProperties", "http://schemas.microsoft.com/windowsazure")); if (extendedPropertiesSequenceElement != null) { foreach (XElement extendedPropertiesElement in extendedPropertiesSequenceElement.Elements(XName.Get("ExtendedProperty", "http://schemas.microsoft.com/windowsazure"))) { string extendedPropertiesKey = extendedPropertiesElement.Element(XName.Get("Name", "http://schemas.microsoft.com/windowsazure")).Value; string extendedPropertiesValue = extendedPropertiesElement.Element(XName.Get("Value", "http://schemas.microsoft.com/windowsazure")).Value; storageServiceInstance.ExtendedProperties.Add(extendedPropertiesKey, extendedPropertiesValue); } } } } result.StatusCode = statusCode; if (httpResponse.Headers.Contains("x-ms-request-id")) { result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (shouldTrace) { Tracing.Exit(invocationId, result); } return result; } finally { if (httpResponse != null) { httpResponse.Dispose(); } } } finally { if (httpRequest != null) { httpRequest.Dispose(); } } } /// <summary> /// The Regenerate Keys operation regenerates the primary or secondary /// access key for the specified storage account. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460795.aspx /// for more information) /// </summary> /// <param name='parameters'> /// Required. Parameters supplied to the Regenerate Keys operation. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// The primary and secondary access keys for a storage account. 
/// </returns> public async System.Threading.Tasks.Task<Microsoft.WindowsAzure.Management.Storage.Models.StorageAccountRegenerateKeysResponse> RegenerateKeysAsync(StorageAccountRegenerateKeysParameters parameters, CancellationToken cancellationToken) { // Validate if (parameters == null) { throw new ArgumentNullException("parameters"); } if (parameters.Name == null) { throw new ArgumentNullException("parameters.Name"); } // Tracing bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = Tracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("parameters", parameters); Tracing.Enter(invocationId, this, "RegenerateKeysAsync", tracingParameters); } // Construct URL string url = "/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/services/storageservices/" + parameters.Name.Trim() + "/keys?action=regenerate"; string baseUrl = this.Client.BaseUri.AbsoluteUri; // Trim '/' character from the end of baseUrl and beginning of url. if (baseUrl[baseUrl.Length - 1] == '/') { baseUrl = baseUrl.Substring(0, baseUrl.Length - 1); } if (url[0] == '/') { url = url.Substring(1); } url = baseUrl + "/" + url; url = url.Replace(" ", "%20"); // Create HTTP transport objects HttpRequestMessage httpRequest = null; try { httpRequest = new HttpRequestMessage(); httpRequest.Method = HttpMethod.Post; httpRequest.RequestUri = new Uri(url); // Set Headers httpRequest.Headers.Add("x-ms-version", "2014-10-01"); // Set Credentials cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); // Serialize Request string requestContent = null; XDocument requestDoc = new XDocument(); XElement regenerateKeysElement = new XElement(XName.Get("RegenerateKeys", "http://schemas.microsoft.com/windowsazure")); requestDoc.Add(regenerateKeysElement); XElement keyTypeElement = new XElement(XName.Get("KeyType", "http://schemas.microsoft.com/windowsazure")); keyTypeElement.Value = parameters.KeyType.ToString(); regenerateKeysElement.Add(keyTypeElement); requestContent = requestDoc.ToString(); httpRequest.Content = new StringContent(requestContent, Encoding.UTF8); httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/xml"); // Send Request HttpResponseMessage httpResponse = null; try { if (shouldTrace) { Tracing.SendRequest(invocationId, httpRequest); } cancellationToken.ThrowIfCancellationRequested(); httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); if (shouldTrace) { Tracing.ReceiveResponse(invocationId, httpResponse); } HttpStatusCode statusCode = httpResponse.StatusCode; if (statusCode != HttpStatusCode.OK) { cancellationToken.ThrowIfCancellationRequested(); CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false)); if (shouldTrace) { Tracing.Error(invocationId, ex); } throw ex; } // Create Result StorageAccountRegenerateKeysResponse result = null; // Deserialize Response cancellationToken.ThrowIfCancellationRequested(); string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); result = new StorageAccountRegenerateKeysResponse(); XDocument responseDoc = XDocument.Parse(responseContent); XElement 
storageServiceElement = responseDoc.Element(XName.Get("StorageService", "http://schemas.microsoft.com/windowsazure")); if (storageServiceElement != null) { XElement urlElement = storageServiceElement.Element(XName.Get("Url", "http://schemas.microsoft.com/windowsazure")); if (urlElement != null) { Uri urlInstance = TypeConversion.TryParseUri(urlElement.Value); result.Uri = urlInstance; } XElement storageServiceKeysElement = storageServiceElement.Element(XName.Get("StorageServiceKeys", "http://schemas.microsoft.com/windowsazure")); if (storageServiceKeysElement != null) { XElement primaryElement = storageServiceKeysElement.Element(XName.Get("Primary", "http://schemas.microsoft.com/windowsazure")); if (primaryElement != null) { string primaryInstance = primaryElement.Value; result.PrimaryKey = primaryInstance; } XElement secondaryElement = storageServiceKeysElement.Element(XName.Get("Secondary", "http://schemas.microsoft.com/windowsazure")); if (secondaryElement != null) { string secondaryInstance = secondaryElement.Value; result.SecondaryKey = secondaryInstance; } } } result.StatusCode = statusCode; if (httpResponse.Headers.Contains("x-ms-request-id")) { result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (shouldTrace) { Tracing.Exit(invocationId, result); } return result; } finally { if (httpResponse != null) { httpResponse.Dispose(); } } } finally { if (httpRequest != null) { httpRequest.Dispose(); } } } /// <summary> /// The Update Storage Account operation updates the label and the /// description, and enables or disables the geo-replication status /// for a storage account in Azure. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264516.aspx /// for more information) /// </summary> /// <param name='accountName'> /// Required. Name of the storage account to update. /// </param> /// <param name='parameters'> /// Required. Parameters supplied to the Update Storage Account /// operation. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// A standard service response including an HTTP status code and /// request ID. /// </returns> public async System.Threading.Tasks.Task<OperationResponse> UpdateAsync(string accountName, StorageAccountUpdateParameters parameters, CancellationToken cancellationToken) { // Validate if (accountName == null) { throw new ArgumentNullException("accountName"); } if (accountName.Length < 3) { throw new ArgumentOutOfRangeException("accountName"); } if (accountName.Length > 24) { throw new ArgumentOutOfRangeException("accountName"); } foreach (char accountNameChar in accountName) { if (char.IsLower(accountNameChar) == false && char.IsDigit(accountNameChar) == false) { throw new ArgumentOutOfRangeException("accountName"); } } // TODO: Validate accountName is a valid DNS name. 
if (parameters == null) { throw new ArgumentNullException("parameters"); } if (parameters.Description != null && parameters.Description.Length > 1024) { throw new ArgumentOutOfRangeException("parameters.Description"); } // Tracing bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = Tracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("accountName", accountName); tracingParameters.Add("parameters", parameters); Tracing.Enter(invocationId, this, "UpdateAsync", tracingParameters); } // Construct URL string url = "/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/services/storageservices/" + accountName.Trim(); string baseUrl = this.Client.BaseUri.AbsoluteUri; // Trim '/' character from the end of baseUrl and beginning of url. if (baseUrl[baseUrl.Length - 1] == '/') { baseUrl = baseUrl.Substring(0, baseUrl.Length - 1); } if (url[0] == '/') { url = url.Substring(1); } url = baseUrl + "/" + url; url = url.Replace(" ", "%20"); // Create HTTP transport objects HttpRequestMessage httpRequest = null; try { httpRequest = new HttpRequestMessage(); httpRequest.Method = HttpMethod.Put; httpRequest.RequestUri = new Uri(url); // Set Headers httpRequest.Headers.Add("x-ms-version", "2014-10-01"); // Set Credentials cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); // Serialize Request string requestContent = null; XDocument requestDoc = new XDocument(); XElement updateStorageServiceInputElement = new XElement(XName.Get("UpdateStorageServiceInput", "http://schemas.microsoft.com/windowsazure")); requestDoc.Add(updateStorageServiceInputElement); if (parameters.Description != null) { XElement descriptionElement = new XElement(XName.Get("Description", "http://schemas.microsoft.com/windowsazure")); descriptionElement.Value = parameters.Description; updateStorageServiceInputElement.Add(descriptionElement); } else { XElement emptyElement = new XElement(XName.Get("Description", "http://schemas.microsoft.com/windowsazure")); XAttribute nilAttribute = new XAttribute(XName.Get("nil", "http://www.w3.org/2001/XMLSchema-instance"), ""); nilAttribute.Value = "true"; emptyElement.Add(nilAttribute); updateStorageServiceInputElement.Add(emptyElement); } if (parameters.Label != null) { XElement labelElement = new XElement(XName.Get("Label", "http://schemas.microsoft.com/windowsazure")); labelElement.Value = TypeConversion.ToBase64String(parameters.Label); updateStorageServiceInputElement.Add(labelElement); } if (parameters.ExtendedProperties != null) { if (parameters.ExtendedProperties is ILazyCollection == false || ((ILazyCollection)parameters.ExtendedProperties).IsInitialized) { XElement extendedPropertiesDictionaryElement = new XElement(XName.Get("ExtendedProperties", "http://schemas.microsoft.com/windowsazure")); foreach (KeyValuePair<string, string> pair in parameters.ExtendedProperties) { string extendedPropertiesKey = pair.Key; string extendedPropertiesValue = pair.Value; XElement extendedPropertiesElement = new XElement(XName.Get("ExtendedProperty", "http://schemas.microsoft.com/windowsazure")); extendedPropertiesDictionaryElement.Add(extendedPropertiesElement); XElement extendedPropertiesKeyElement = new XElement(XName.Get("Name", "http://schemas.microsoft.com/windowsazure")); 
extendedPropertiesKeyElement.Value = extendedPropertiesKey; extendedPropertiesElement.Add(extendedPropertiesKeyElement); XElement extendedPropertiesValueElement = new XElement(XName.Get("Value", "http://schemas.microsoft.com/windowsazure")); extendedPropertiesValueElement.Value = extendedPropertiesValue; extendedPropertiesElement.Add(extendedPropertiesValueElement); } updateStorageServiceInputElement.Add(extendedPropertiesDictionaryElement); } } if (parameters.AccountType != null) { XElement accountTypeElement = new XElement(XName.Get("AccountType", "http://schemas.microsoft.com/windowsazure")); accountTypeElement.Value = parameters.AccountType; updateStorageServiceInputElement.Add(accountTypeElement); } requestContent = requestDoc.ToString(); httpRequest.Content = new StringContent(requestContent, Encoding.UTF8); httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/xml"); // Send Request HttpResponseMessage httpResponse = null; try { if (shouldTrace) { Tracing.SendRequest(invocationId, httpRequest); } cancellationToken.ThrowIfCancellationRequested(); httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); if (shouldTrace) { Tracing.ReceiveResponse(invocationId, httpResponse); } HttpStatusCode statusCode = httpResponse.StatusCode; if (statusCode != HttpStatusCode.OK) { cancellationToken.ThrowIfCancellationRequested(); CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false)); if (shouldTrace) { Tracing.Error(invocationId, ex); } throw ex; } // Create Result OperationResponse result = null; result = new OperationResponse(); result.StatusCode = statusCode; if (httpResponse.Headers.Contains("x-ms-request-id")) { result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (shouldTrace) { Tracing.Exit(invocationId, result); } return result; } finally { if (httpResponse != null) { httpResponse.Dispose(); } } } finally { if (httpRequest != null) { httpRequest.Dispose(); } } } } }
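// ---------------------------------------------------------------------------------------------
// Illustrative usage sketch (added for clarity, not generated code): the typical call sequence
// for the operations above -- probe the name with CheckNameAvailabilityAsync, then call
// CreateAsync, which wraps BeginCreatingAsync and polls GetOperationStatusAsync until the
// long-running operation completes. Construction of the StorageManagementClient (credentials,
// base URI) is assumed to happen elsewhere; the account name, location and account type below
// are placeholder values.
// ---------------------------------------------------------------------------------------------
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.WindowsAzure;
using Microsoft.WindowsAzure.Management.Storage;
using Microsoft.WindowsAzure.Management.Storage.Models;

internal static class StorageAccountUsageSketch
{
    internal static async Task CreateIfAvailableAsync(StorageManagementClient client)
    {
        const string accountName = "examplestorage01"; // 3-24 lowercase letters/digits, as validated above

        CheckNameAvailabilityResponse availability =
            await client.StorageAccounts.CheckNameAvailabilityAsync(accountName, CancellationToken.None);
        if (availability.IsAvailable == false)
        {
            // Reason carries the service's explanation (name taken, invalid, ...).
            throw new InvalidOperationException(availability.Reason);
        }

        var parameters = new StorageAccountCreateParameters
        {
            Name = accountName,
            Label = accountName,          // BeginCreatingAsync base64-encodes the label on the wire
            Location = "West US",         // placeholder; an AffinityGroup could be supplied instead
            AccountType = "Standard_LRS"  // placeholder account type
        };

        // Throws a CloudException if the asynchronous operation reports failure.
        OperationStatusResponse createStatus =
            await client.StorageAccounts.CreateAsync(parameters, CancellationToken.None);
        Console.WriteLine("Create finished with status: " + createStatus.Status);
    }
}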
//----------------------------------------------------------------------------- // Copyright (c) Microsoft Corporation. All rights reserved. //----------------------------------------------------------------------------- namespace System.ServiceModel.Dispatcher { using System; using System.Collections.Generic; using System.Diagnostics; using System.Globalization; using System.Runtime; using System.Runtime.CompilerServices; using System.ServiceModel; using System.ServiceModel.Activation; using System.ServiceModel.Channels; using System.ServiceModel.Diagnostics; using System.ServiceModel.Diagnostics.Application; using System.Transactions; class TransactionBehavior { bool isConcurrent; IsolationLevel isolation = ServiceBehaviorAttribute.DefaultIsolationLevel; DispatchRuntime dispatch; TimeSpan timeout = TimeSpan.Zero; bool isTransactedReceiveChannelDispatcher = false; internal TransactionBehavior() { } internal TransactionBehavior(DispatchRuntime dispatch) { this.isConcurrent = (dispatch.ConcurrencyMode == ConcurrencyMode.Multiple || dispatch.ConcurrencyMode == ConcurrencyMode.Reentrant); this.dispatch = dispatch; this.isTransactedReceiveChannelDispatcher = dispatch.ChannelDispatcher.IsTransactedReceive; // Don't pull in System.Transactions.dll if we don't need it if (dispatch.ChannelDispatcher.TransactionIsolationLevelSet) { this.InitializeIsolationLevel(dispatch); } this.timeout = TransactionBehavior.NormalizeTimeout(dispatch.ChannelDispatcher.TransactionTimeout); } internal static Exception CreateFault(string reasonText, string codeString, bool isNetDispatcherFault) { string faultCodeNamespace, action; // 'Transactions' action should be used only when we expect to have a TransactionChannel in the channel stack // otherwise one should use the NetDispatch action. 
if (isNetDispatcherFault) { faultCodeNamespace = FaultCodeConstants.Namespaces.NetDispatch; action = FaultCodeConstants.Actions.NetDispatcher; } else { faultCodeNamespace = FaultCodeConstants.Namespaces.Transactions; action = FaultCodeConstants.Actions.Transactions; } FaultReason reason = new FaultReason(reasonText, CultureInfo.CurrentCulture); FaultCode code = FaultCode.CreateSenderFaultCode(codeString, faultCodeNamespace); return new FaultException(reason, code, action); } internal static TransactionBehavior CreateIfNeeded(DispatchRuntime dispatch) { if (TransactionBehavior.NeedsTransactionBehavior(dispatch)) { return new TransactionBehavior(dispatch); } else { return null; } } internal static TimeSpan NormalizeTimeout(TimeSpan timeout) { if (TimeSpan.Zero == timeout) { timeout = TransactionManager.DefaultTimeout; } else if (TimeSpan.Zero != TransactionManager.MaximumTimeout && timeout > TransactionManager.MaximumTimeout) { timeout = TransactionManager.MaximumTimeout; } return timeout; } internal static CommittableTransaction CreateTransaction(IsolationLevel isolation, TimeSpan timeout) { TransactionOptions options = new TransactionOptions(); options.IsolationLevel = isolation; options.Timeout = timeout; return new CommittableTransaction(options); } internal void SetCurrent(ref MessageRpc rpc) { if (!this.isConcurrent) { rpc.InstanceContext.Transaction.SetCurrent(ref rpc); } } internal void ResolveOutcome(ref MessageRpc rpc) { if ((rpc.InstanceContext != null) && (rpc.transaction != null)) { TransactionInstanceContextFacet context = rpc.InstanceContext.Transaction; if (context != null) { context.CheckIfTxCompletedAndUpdateAttached(ref rpc, this.isConcurrent); } rpc.Transaction.Complete(rpc.Error); } } Transaction GetInstanceContextTransaction(ref MessageRpc rpc) { return rpc.InstanceContext.Transaction.Attached; } [MethodImpl(MethodImplOptions.NoInlining)] void InitializeIsolationLevel(DispatchRuntime dispatch) { this.isolation = dispatch.ChannelDispatcher.TransactionIsolationLevel; } static bool NeedsTransactionBehavior(DispatchRuntime dispatch) { DispatchOperation unhandled = dispatch.UnhandledDispatchOperation; if ((unhandled != null) && (unhandled.TransactionRequired)) { return true; } if (dispatch.ChannelDispatcher.IsTransactedReceive) //check if we have transacted receive { return true; } for (int i = 0; i < dispatch.Operations.Count; i++) { DispatchOperation operation = dispatch.Operations[i]; if (operation.TransactionRequired) { return true; } } return false; } internal void ResolveTransaction(ref MessageRpc rpc) { if (rpc.Operation.HasDefaultUnhandledActionInvoker) { // we ignore unhandled operations return; } Transaction contextTransaction = null; //If we are inside a TransactedReceiveScope in workflow, then we need to look into the PPD and not the InstanceContext //to get the contextTransaction if (rpc.Operation.IsInsideTransactedReceiveScope) { // We may want to use an existing transaction for the instance. 
IInstanceTransaction instanceTransaction = rpc.Operation.Invoker as IInstanceTransaction; if (instanceTransaction != null) { contextTransaction = instanceTransaction.GetTransactionForInstance(rpc.OperationContext); } if (contextTransaction != null) { if (DiagnosticUtility.ShouldTraceInformation) { TraceUtility.TraceEvent(TraceEventType.Information, TraceCode.TxSourceTxScopeRequiredUsingExistingTransaction, SR.GetString(SR.TraceCodeTxSourceTxScopeRequiredUsingExistingTransaction, contextTransaction.TransactionInformation.LocalIdentifier, rpc.Operation.Name) ); } } } else { contextTransaction = this.GetInstanceContextTransaction(ref rpc); } Transaction transaction = null; try { transaction = TransactionMessageProperty.TryGetTransaction(rpc.Request); } catch (TransactionException e) { DiagnosticUtility.TraceHandledException(e, TraceEventType.Error); throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(TransactionBehavior.CreateFault(SR.GetString(SR.SFxTransactionUnmarshalFailed, e.Message), FaultCodeConstants.Codes.TransactionUnmarshalingFailed, false)); } if (rpc.Operation.TransactionRequired) { if (transaction != null) { if (this.isTransactedReceiveChannelDispatcher) { if (DiagnosticUtility.ShouldTraceInformation) { TraceUtility.TraceEvent(TraceEventType.Information, TraceCode.TxSourceTxScopeRequiredIsTransactedTransport, SR.GetString(SR.TraceCodeTxSourceTxScopeRequiredIsTransactedTransport, transaction.TransactionInformation.LocalIdentifier, rpc.Operation.Name) ); } } else { if (DiagnosticUtility.ShouldTraceInformation) { TraceUtility.TraceEvent(TraceEventType.Information, TraceCode.TxSourceTxScopeRequiredIsTransactionFlow, SR.GetString(SR.TraceCodeTxSourceTxScopeRequiredIsTransactionFlow, transaction.TransactionInformation.LocalIdentifier, rpc.Operation.Name) ); } if (PerformanceCounters.PerformanceCountersEnabled) { PerformanceCounters.TxFlowed(PerformanceCounters.GetEndpointDispatcher(), rpc.Operation.Name); } bool sameTransaction = false; if (rpc.Operation.IsInsideTransactedReceiveScope) { sameTransaction = transaction.Equals(contextTransaction); } else { sameTransaction = transaction == contextTransaction; } if (!sameTransaction) { try { transaction = transaction.DependentClone(DependentCloneOption.RollbackIfNotComplete); } catch (TransactionException e) { DiagnosticUtility.TraceHandledException(e, TraceEventType.Error); throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(TransactionBehavior.CreateFault(SR.GetString(SR.SFxTransactionAsyncAborted), FaultCodeConstants.Codes.TransactionAborted, true)); } } } } } else { // We got a transaction from the ChannelHandler. // Transport is transacted. // But operation doesn't require the transaction, so no one ever will commit it. // Because of that we have to commit it here. 
if (transaction != null && this.isTransactedReceiveChannelDispatcher) { try { if (null != rpc.TransactedBatchContext) { rpc.TransactedBatchContext.ForceCommit(); rpc.TransactedBatchContext = null; } else { TransactionInstanceContextFacet.Complete(transaction, null); } } finally { transaction.Dispose(); transaction = null; } } } InstanceContext context = rpc.InstanceContext; if (context.Transaction.ShouldReleaseInstance && !this.isConcurrent) { if (context.Behavior.ReleaseServiceInstanceOnTransactionComplete) { context.ReleaseServiceInstance(); if (DiagnosticUtility.ShouldTraceInformation) { TraceUtility.TraceEvent(TraceEventType.Information, TraceCode.TxReleaseServiceInstanceOnCompletion, SR.GetString(SR.TraceCodeTxReleaseServiceInstanceOnCompletion, contextTransaction.TransactionInformation.LocalIdentifier) ); } } context.Transaction.ShouldReleaseInstance = false; if (transaction == null || transaction == contextTransaction) { rpc.Transaction.Current = contextTransaction; throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(TransactionBehavior.CreateFault(SR.GetString(SR.SFxTransactionAsyncAborted), FaultCodeConstants.Codes.TransactionAborted, true)); } else { contextTransaction = null; } } if (rpc.Operation.TransactionRequired) { if (transaction == null) { if (contextTransaction != null) { transaction = contextTransaction; if (DiagnosticUtility.ShouldTraceInformation) { TraceUtility.TraceEvent(TraceEventType.Information, TraceCode.TxSourceTxScopeRequiredIsAttachedTransaction, SR.GetString(SR.TraceCodeTxSourceTxScopeRequiredIsAttachedTransaction, transaction.TransactionInformation.LocalIdentifier, rpc.Operation.Name) ); } } else { transaction = TransactionBehavior.CreateTransaction(this.isolation, this.timeout); if (DiagnosticUtility.ShouldTraceInformation) { TraceUtility.TraceEvent(TraceEventType.Information, TraceCode.TxSourceTxScopeRequiredIsCreateNewTransaction, SR.GetString(SR.TraceCodeTxSourceTxScopeRequiredIsCreateNewTransaction, transaction.TransactionInformation.LocalIdentifier, rpc.Operation.Name) ); } } } if ((this.isolation != IsolationLevel.Unspecified) && (transaction.IsolationLevel != this.isolation)) { throw TraceUtility.ThrowHelperError(TransactionBehavior.CreateFault (SR.GetString(SR.IsolationLevelMismatch2, transaction.IsolationLevel, this.isolation), FaultCodeConstants.Codes.TransactionIsolationLevelMismatch, false), rpc.Request); } rpc.Transaction.Current = transaction; rpc.InstanceContext.Transaction.AddReference(ref rpc, rpc.Transaction.Current, true); try { rpc.Transaction.Clone = transaction.Clone(); if (rpc.Operation.IsInsideTransactedReceiveScope) { //It is because we want to synchronize the dispatcher processing of messages with the commit //processing that is started by the completion of a TransactedReceiveScope. 
We need to make sure //that all the dispatcher processing is done and we can do that by creating a blocking dependent clone and only //completing it after all of the message processing is done for a given TransactionRpcFacet rpc.Transaction.CreateDependentClone(); } } catch (ObjectDisposedException e)//transaction may be async aborted { DiagnosticUtility.TraceHandledException(e, TraceEventType.Error); throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(TransactionBehavior.CreateFault(SR.GetString(SR.SFxTransactionAsyncAborted), FaultCodeConstants.Codes.TransactionAborted, true)); } rpc.InstanceContext.Transaction.AddReference(ref rpc, rpc.Transaction.Clone, false); rpc.OperationContext.TransactionFacet = rpc.Transaction; if (!rpc.Operation.TransactionAutoComplete) { rpc.Transaction.SetIncomplete(); } } } internal void InitializeCallContext(ref MessageRpc rpc) { if (rpc.Operation.TransactionRequired) { rpc.Transaction.ThreadEnter(ref rpc.Error); } } internal void ClearCallContext(ref MessageRpc rpc) { if (rpc.Operation.TransactionRequired) { rpc.Transaction.ThreadLeave(); } } } internal class TransactionRpcFacet { //internal members // Current is the original transaction that we created/flowed/whatever. This is // the "current" transaction used by the operation, and we keep it around so we // can commit it, complete it, etc. // // Clone is a clone of Current. We keep it around to pass into TransactionScope // so that System.Transactions.Transaction.Current is not CommittableTransaction // or anything dangerous like that. internal Transaction Current; internal Transaction Clone; internal DependentTransaction dependentClone; internal bool IsCompleted = true; internal MessageRpc rpc; TransactionScope scope; bool transactionSetComplete = false; // To track if user has called SetTransactionComplete() internal TransactionRpcFacet() { } internal TransactionRpcFacet(ref MessageRpc rpc) { this.rpc = rpc; } // Calling Complete will Commit or Abort the transaction based on, // error - If any user error is propagated to the service we abort the transaction unless SetTransactionComplete was successful. // transactionDoomed - If internal error occurred and this error may or may not be propagated // by the user to the service. Abort the Tx if transactionDoomed is set true. // // If the user violates the following rules, the transaction is doomed. // User cannot call TransactionSetComplete() when TransactionAutoComplete is true. // User cannot call TransactionSetComplete() multiple times. [MethodImpl(MethodImplOptions.NoInlining)] internal void Complete(Exception error) { if (!object.ReferenceEquals(this.Current, null)) { TransactedBatchContext batchContext = this.rpc.TransactedBatchContext; if (null != batchContext) { if (null == error) { batchContext.Complete(); } else { batchContext.ForceRollback(); } batchContext.InDispatch = false; } else { if (this.transactionSetComplete) { // Commit the transaction when TransactionSetComplete() is called and // even when an exception(non transactional) happens after this call. 
rpc.InstanceContext.Transaction.CompletePendingTransaction(this.Current, null); if (DiagnosticUtility.ShouldTraceInformation) { TraceUtility.TraceEvent(TraceEventType.Information, TraceCode.TxCompletionStatusCompletedForSetComplete, SR.GetString(SR.TraceCodeTxCompletionStatusCompletedForSetComplete, this.Current.TransactionInformation.LocalIdentifier, this.rpc.Operation.Name) ); } } else if (this.IsCompleted || (error != null)) { rpc.InstanceContext.Transaction.CompletePendingTransaction(this.Current, error); } } if (this.rpc.Operation.IsInsideTransactedReceiveScope) { //We are done with the message processing associated with this TransactionRpcFacet so a commit that may have //been started by a TransactedReceiveScope can move forward. this.CompleteDependentClone(); } this.Current = null; } } internal void SetIncomplete() { this.IsCompleted = false; } internal void Completed() { if (this.scope == null) { return; } // Prohibit user from calling SetTransactionComplete() when TransactionAutoComplete is set to true. // Transaction will be aborted. if (this.rpc.Operation.TransactionAutoComplete) { try { this.Current.Rollback(); } catch (ObjectDisposedException e) { //we don't want to mask the real error here DiagnosticUtility.TraceHandledException(e, TraceEventType.Error); } throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException( SR.GetString(SR.SFxTransactionInvalidSetTransactionComplete, rpc.Operation.Name, rpc.Host.Description.Name))); } // Prohibit user from calling SetTransactionComplete() multiple times. // Transaction will be aborted. else if (this.transactionSetComplete) { try { this.Current.Rollback(); } catch (ObjectDisposedException e) { //we don't want to mask the real error here DiagnosticUtility.TraceHandledException(e, TraceEventType.Error); } throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException( SR.GetString(SR.SFxMultiSetTransactionComplete, rpc.Operation.Name, rpc.Host.Description.Name))); } this.transactionSetComplete = true; this.IsCompleted = true; this.scope.Complete(); } [MethodImpl(MethodImplOptions.NoInlining)] internal void ThreadEnter(ref Exception error) { Transaction clone = this.Clone; if ((clone != null) && (error == null)) { if (TD.TransactionScopeCreateIsEnabled()) { if (clone != null && clone.TransactionInformation != null) { TD.TransactionScopeCreate(rpc.EventTraceActivity, clone.TransactionInformation.LocalIdentifier, clone.TransactionInformation.DistributedIdentifier); } } this.scope = this.rpc.InstanceContext.Transaction.CreateTransactionScope(clone); this.transactionSetComplete = false; } } [MethodImpl(MethodImplOptions.NoInlining)] internal void ThreadLeave() { if (this.scope != null) { if (!this.transactionSetComplete) { this.scope.Complete(); } try { this.scope.Dispose(); this.scope = null; } catch (TransactionException e) { DiagnosticUtility.TraceHandledException(e, TraceEventType.Error); throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(TransactionBehavior.CreateFault(SR.GetString(SR.SFxTransactionAsyncAborted), FaultCodeConstants.Codes.TransactionAborted, true)); } } } internal void CreateDependentClone() { if ((this.dependentClone == null) && (this.Clone != null)) { this.dependentClone = this.Clone.DependentClone(DependentCloneOption.BlockCommitUntilComplete); } } internal void CompleteDependentClone() { if (this.dependentClone != null) { this.dependentClone.Complete(); } } } internal sealed class TransactionInstanceContextFacet { internal Transaction waiting; // waiting to 
become Single because Single is on his way out. internal Transaction Attached; IResumeMessageRpc paused; // the IResumeMessageRpc for this.waiting. object mutex; Transaction current; // the one true transaction when Concurrency=false. InstanceContext instanceContext; Dictionary<Transaction, RemoveReferenceRM> pending; // When Concurrency=true, all the still pending guys. bool shouldReleaseInstance = false; internal TransactionInstanceContextFacet(InstanceContext instanceContext) { this.instanceContext = instanceContext; this.mutex = instanceContext.ThisLock; } // ........................................................................................................ // no need to lock the following property because it's used only if Concurrency = false internal bool ShouldReleaseInstance { get { return this.shouldReleaseInstance; } set { this.shouldReleaseInstance = value; } } // ........................................................................................................ [MethodImpl(MethodImplOptions.NoInlining)] internal void CheckIfTxCompletedAndUpdateAttached(ref MessageRpc rpc, bool isConcurrent) { if (rpc.Transaction.Current == null) { return; } lock (this.mutex) { if (!isConcurrent) { if (this.shouldReleaseInstance) { this.shouldReleaseInstance = false; if (rpc.Error == null) //we don't want to mask the initial error { rpc.Error = TransactionBehavior.CreateFault(SR.GetString(SR.SFxTransactionAsyncAborted), FaultCodeConstants.Codes.TransactionAborted, true); DiagnosticUtility.TraceHandledException(rpc.Error, TraceEventType.Error); if (DiagnosticUtility.ShouldTraceInformation) { TraceUtility.TraceEvent(TraceEventType.Information, TraceCode.TxCompletionStatusCompletedForAsyncAbort, SR.GetString(SR.TraceCodeTxCompletionStatusCompletedForAsyncAbort, rpc.Transaction.Current.TransactionInformation.LocalIdentifier, rpc.Operation.Name) ); } } } if (rpc.Transaction.IsCompleted || (rpc.Error != null)) { if (DiagnosticUtility.ShouldTraceInformation) { if (rpc.Error != null) { TraceUtility.TraceEvent(TraceEventType.Information, TraceCode.TxCompletionStatusCompletedForError, SR.GetString(SR.TraceCodeTxCompletionStatusCompletedForError, rpc.Transaction.Current.TransactionInformation.LocalIdentifier, rpc.Operation.Name) ); } else { TraceUtility.TraceEvent(TraceEventType.Information, TraceCode.TxCompletionStatusCompletedForAutocomplete, SR.GetString(SR.TraceCodeTxCompletionStatusCompletedForAutocomplete, rpc.Transaction.Current.TransactionInformation.LocalIdentifier, rpc.Operation.Name) ); } } this.Attached = null; if (!(waiting == null)) { // tx processing requires failfast when state is inconsistent DiagnosticUtility.FailFast("waiting should be null when resetting current"); } this.current = null; } else { this.Attached = rpc.Transaction.Current; if (DiagnosticUtility.ShouldTraceInformation) { TraceUtility.TraceEvent(TraceEventType.Information, TraceCode.TxCompletionStatusRemainsAttached, SR.GetString(SR.TraceCodeTxCompletionStatusRemainsAttached, rpc.Transaction.Current.TransactionInformation.LocalIdentifier, rpc.Operation.Name) ); } } } else if (!this.pending.ContainsKey(rpc.Transaction.Current)) { //transaction has been asynchronously aborted if (rpc.Error == null) //we don't want to mask the initial error { rpc.Error = TransactionBehavior.CreateFault(SR.GetString(SR.SFxTransactionAsyncAborted), FaultCodeConstants.Codes.TransactionAborted, true); DiagnosticUtility.TraceHandledException(rpc.Error, TraceEventType.Error); if (DiagnosticUtility.ShouldTraceInformation) { 
TraceUtility.TraceEvent(TraceEventType.Information, TraceCode.TxCompletionStatusCompletedForAsyncAbort, SR.GetString(SR.TraceCodeTxCompletionStatusCompletedForAsyncAbort, rpc.Transaction.Current.TransactionInformation.LocalIdentifier, rpc.Operation.Name) ); } } } } } // ........................................................................................................ internal void CompletePendingTransaction(Transaction transaction, Exception error) { lock (this.mutex) { if (this.pending.ContainsKey(transaction)) { Complete(transaction, error); } } } // ........................................................................................................ internal static void Complete(Transaction transaction, Exception error) { try { if (error == null) { CommittableTransaction commit = (transaction as CommittableTransaction); if (commit != null) { commit.Commit(); } else { DependentTransaction complete = (transaction as DependentTransaction); if (complete != null) { complete.Complete(); } } } else { transaction.Rollback(); } } catch (TransactionException e) { DiagnosticUtility.TraceHandledException(e, TraceEventType.Error); throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(TransactionBehavior.CreateFault(SR.GetString(SR.SFxTransactionAsyncAborted), FaultCodeConstants.Codes.TransactionAborted, true)); } } // ........................................................................................................ internal TransactionScope CreateTransactionScope(Transaction transaction) { lock (this.mutex) { if (this.pending.ContainsKey(transaction)) { try { return new TransactionScope(transaction); } catch (TransactionException e) { DiagnosticUtility.TraceHandledException(e, TraceEventType.Error); //we'll rethrow below } } } //the transaction was asynchronously aborted throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(TransactionBehavior.CreateFault(SR.GetString(SR.SFxTransactionAsyncAborted), FaultCodeConstants.Codes.TransactionAborted, true)); } // ........................................................................................................ internal void SetCurrent(ref MessageRpc rpc) { Transaction requestTransaction = rpc.Transaction.Current; if (!(requestTransaction != null)) { // tx processing requires failfast when state is inconsistent DiagnosticUtility.FailFast("we should never get here with a requestTransaction null"); } lock (this.mutex) { if (this.current == null) { this.current = requestTransaction; } else if (this.current != requestTransaction) { this.waiting = requestTransaction; this.paused = rpc.Pause(); } else { rpc.Transaction.Current = this.current; //rpc.Transaction.Current should get the dependent clone } } } // ........................................................................................................ 
internal void AddReference(ref MessageRpc rpc, Transaction tx, bool updateCallCount) { lock (this.mutex) { if (this.pending == null) { this.pending = new Dictionary<Transaction, RemoveReferenceRM>(); } if (tx != null) { if (this.pending == null) { this.pending = new Dictionary<Transaction, RemoveReferenceRM>(); } RemoveReferenceRM rm; if (!this.pending.TryGetValue(tx, out rm)) { RemoveReferenceRM rrm = new RemoveReferenceRM(this.instanceContext, tx, rpc.Operation.Name); rrm.CallCount = 1; this.pending.Add(tx, rrm); } else if (updateCallCount) { rm.CallCount += 1; } } } } internal void RemoveReference(Transaction tx) { lock (this.mutex) { if (tx.Equals(this.current)) { if (this.waiting != null) { this.current = waiting; this.waiting = null; if (instanceContext.Behavior.ReleaseServiceInstanceOnTransactionComplete) { instanceContext.ReleaseServiceInstance(); if (DiagnosticUtility.ShouldTraceInformation) { TraceUtility.TraceEvent(TraceEventType.Information, TraceCode.TxReleaseServiceInstanceOnCompletion, SR.GetString(SR.TraceCodeTxReleaseServiceInstanceOnCompletion, tx.TransactionInformation.LocalIdentifier) ); } } bool alreadyResumedNoLock; this.paused.Resume(out alreadyResumedNoLock); if (alreadyResumedNoLock) { Fx.Assert("TransactionBehavior resumed more than once for same call."); } } else { this.shouldReleaseInstance = true; this.current = null; } } if (this.pending != null) { if (this.pending.ContainsKey(tx)) { this.pending.Remove(tx); } } } } // ........................................................................................................ abstract class VolatileBase : ISinglePhaseNotification { protected InstanceContext InstanceContext; protected Transaction Transaction; protected VolatileBase(InstanceContext instanceContext, Transaction transaction) { this.InstanceContext = instanceContext; this.Transaction = transaction; this.Transaction.EnlistVolatile(this, EnlistmentOptions.None); } protected abstract void Completed(); public virtual void Commit(Enlistment enlistment) { this.Completed(); } public virtual void InDoubt(Enlistment enlistment) { this.Completed(); } public virtual void Rollback(Enlistment enlistment) { this.Completed(); } public virtual void SinglePhaseCommit(SinglePhaseEnlistment enlistment) { enlistment.Committed(); this.Completed(); } public void Prepare(PreparingEnlistment preparingEnlistment) { preparingEnlistment.Prepared(); } } sealed class RemoveReferenceRM : VolatileBase { string operation; long callCount = 0; EndpointDispatcher endpointDispatcher; internal RemoveReferenceRM(InstanceContext instanceContext, Transaction tx, string operation) : base(instanceContext, tx) { this.operation = operation; if (PerformanceCounters.PerformanceCountersEnabled) { this.endpointDispatcher = PerformanceCounters.GetEndpointDispatcher(); } AspNetEnvironment.Current.IncrementBusyCount(); if (AspNetEnvironment.Current.TraceIncrementBusyCountIsEnabled()) { AspNetEnvironment.Current.TraceIncrementBusyCount(this.GetType().FullName); } } internal long CallCount { get { return this.callCount; } set { this.callCount = value; } } protected override void Completed() { this.InstanceContext.Transaction.RemoveReference(this.Transaction); AspNetEnvironment.Current.DecrementBusyCount(); if (AspNetEnvironment.Current.TraceDecrementBusyCountIsEnabled()) { AspNetEnvironment.Current.TraceDecrementBusyCount(this.GetType().FullName); } } public override void SinglePhaseCommit(SinglePhaseEnlistment enlistment) { if (PerformanceCounters.PerformanceCountersEnabled) { 
PerformanceCounters.TxCommitted(this.endpointDispatcher, CallCount); } base.SinglePhaseCommit(enlistment); } public override void Commit(Enlistment enlistment) { if (PerformanceCounters.PerformanceCountersEnabled) { PerformanceCounters.TxCommitted(this.endpointDispatcher, CallCount); } base.Commit(enlistment); } public override void Rollback(Enlistment enlistment) { if (PerformanceCounters.PerformanceCountersEnabled) { PerformanceCounters.TxAborted(this.endpointDispatcher, CallCount); } if (DiagnosticUtility.ShouldTraceInformation) { TraceUtility.TraceEvent(TraceEventType.Information, TraceCode.TxAsyncAbort, SR.GetString(SR.TraceCodeTxAsyncAbort, this.Transaction.TransactionInformation.LocalIdentifier) ); } base.Rollback(enlistment); } public override void InDoubt(Enlistment enlistment) { if (PerformanceCounters.PerformanceCountersEnabled) { PerformanceCounters.TxInDoubt(this.endpointDispatcher, CallCount); } base.InDoubt(enlistment); } } } internal enum ExclusiveInstanceContextTransactionResult { Acquired, Wait, Fault }; }
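// --------------------------------------------------------------------------------------------
// Illustrative sketch only (not part of System.ServiceModel above). TransactionBehavior builds
// a CommittableTransaction from the configured isolation level and timeout, scopes user code to
// a Clone() of it via TransactionScope, and completes or aborts the original afterwards. The
// snippet below reproduces that shape with plain System.Transactions calls; "doWork" is a
// placeholder for the dispatched operation.
// --------------------------------------------------------------------------------------------
using System;
using System.Transactions;

internal static class DispatcherTransactionSketch
{
    public static void RunTransacted(Action doWork, IsolationLevel isolation, TimeSpan timeout)
    {
        // Equivalent to TransactionBehavior.CreateTransaction(isolation, timeout).
        TransactionOptions options = new TransactionOptions();
        options.IsolationLevel = isolation;
        options.Timeout = timeout;

        using (CommittableTransaction transaction = new CommittableTransaction(options))
        {
            // The dispatcher hands user code a clone so Transaction.Current never exposes the
            // committable instance directly (see TransactionRpcFacet.Clone above).
            using (TransactionScope scope = new TransactionScope(transaction.Clone()))
            {
                doWork();
                scope.Complete();
            }

            // Commit only after the scope completed without error; otherwise Dispose aborts it.
            transaction.Commit();
        }
    }
}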
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Diagnostics; using System.Text; using System.Xml.XPath; namespace MS.Internal.Xml.Cache { /// <summary> /// The 0th node in each page contains a non-null reference to an XPathNodePageInfo internal class that provides /// information about that node's page. The other fields in the 0th node are undefined and should never /// be used. /// </summary> sealed internal class XPathNodePageInfo { private int _pageNum; private int _nodeCount; private XPathNode[] _pagePrev; private XPathNode[] _pageNext; /// <summary> /// Constructor. /// </summary> public XPathNodePageInfo(XPathNode[] pagePrev, int pageNum) { _pagePrev = pagePrev; _pageNum = pageNum; _nodeCount = 1; // Every node page contains PageInfo at 0th position } /// <summary> /// Return the sequential page number of the page containing nodes that share this information atom. /// </summary> public int PageNumber { get { return _pageNum; } } /// <summary> /// Return the number of nodes allocated in this page. /// </summary> public int NodeCount { get { return _nodeCount; } set { _nodeCount = value; } } /// <summary> /// Return the previous node page in the document. /// </summary> public XPathNode[] PreviousPage { get { return _pagePrev; } } /// <summary> /// Return the next node page in the document. /// </summary> public XPathNode[] NextPage { get { return _pageNext; } set { _pageNext = value; } } } /// <summary> /// There is a great deal of redundancy in typical Xml documents. Even in documents with thousands or millions /// of nodes, there are a small number of common names and types. And since nodes are allocated in pages in /// document order, nodes on the same page with the same name and type are likely to have the same sibling and /// parent pages as well. /// Redundant information is shared by creating immutable, atomized objects. This is analogous to the /// string.Intern() operation. If a node's name, type, or parent/sibling pages are modified, then a new /// InfoAtom needs to be obtained, since other nodes may still be referencing the old InfoAtom. /// </summary> sealed internal class XPathNodeInfoAtom : IEquatable<XPathNodeInfoAtom> { private string _localName; private string _namespaceUri; private string _prefix; private string _baseUri; private XPathNode[] _pageParent; private XPathNode[] _pageSibling; private XPathNode[] _pageSimilar; private XPathDocument _doc; private int _lineNumBase; private int _linePosBase; private int _hashCode; private int _localNameHash; private XPathNodeInfoAtom _next; private XPathNodePageInfo _pageInfo; /// <summary> /// Construct information for the 0th node in each page. The only field which is defined is this.pageInfo, /// and it contains information about that page (pageNum, nextPage, etc.). /// </summary> public XPathNodeInfoAtom(XPathNodePageInfo pageInfo) { _pageInfo = pageInfo; } /// <summary> /// Construct a new shared information atom. This method should only be used by the XNodeInfoTable. 
/// </summary> public XPathNodeInfoAtom(string localName, string namespaceUri, string prefix, string baseUri, XPathNode[] pageParent, XPathNode[] pageSibling, XPathNode[] pageSimilar, XPathDocument doc, int lineNumBase, int linePosBase) { Init(localName, namespaceUri, prefix, baseUri, pageParent, pageSibling, pageSimilar, doc, lineNumBase, linePosBase); } /// <summary> /// Initialize an existing shared information atom. This method should only be used by the XNodeInfoTable. /// </summary> public void Init(string localName, string namespaceUri, string prefix, string baseUri, XPathNode[] pageParent, XPathNode[] pageSibling, XPathNode[] pageSimilar, XPathDocument doc, int lineNumBase, int linePosBase) { Debug.Assert(localName != null && namespaceUri != null && prefix != null && doc != null); _localName = localName; _namespaceUri = namespaceUri; _prefix = prefix; _baseUri = baseUri; _pageParent = pageParent; _pageSibling = pageSibling; _pageSimilar = pageSimilar; _doc = doc; _lineNumBase = lineNumBase; _linePosBase = linePosBase; _next = null; _pageInfo = null; _hashCode = 0; _localNameHash = 0; for (int i = 0; i < _localName.Length; i++) unchecked { _localNameHash += (_localNameHash << 7) ^ _localName[i]; } } /// <summary> /// Returns information about the node page. Only the 0th node on each page has this property defined. /// </summary> public XPathNodePageInfo PageInfo { get { return _pageInfo; } } /// <summary> /// Return the local name part of nodes that share this information atom. /// </summary> public string LocalName { get { return _localName; } } /// <summary> /// Return the namespace name part of nodes that share this information atom. /// </summary> public string NamespaceUri { get { return _namespaceUri; } } /// <summary> /// Return the prefix name part of nodes that share this information atom. /// </summary> public string Prefix { get { return _prefix; } } /// <summary> /// Return the base Uri of nodes that share this information atom. /// </summary> public string BaseUri { get { return _baseUri; } } /// <summary> /// Return the page containing the next sibling of nodes that share this information atom. /// </summary> public XPathNode[] SiblingPage { get { return _pageSibling; } } /// <summary> /// Return the page containing the next element having a name which has same hashcode as this element. /// </summary> public XPathNode[] SimilarElementPage { get { return _pageSimilar; } } /// <summary> /// Return the page containing the parent of nodes that share this information atom. /// </summary> public XPathNode[] ParentPage { get { return _pageParent; } } /// <summary> /// Return the page containing the owner document of nodes that share this information atom. /// </summary> public XPathDocument Document { get { return _doc; } } /// <summary> /// Return the line number to which a line number offset stored in the XPathNode is added. /// </summary> public int LineNumberBase { get { return _lineNumBase; } } /// <summary> /// Return the line position to which a line position offset stored in the XPathNode is added. /// </summary> public int LinePositionBase { get { return _linePosBase; } } /// <summary> /// Return cached hash code of the local name of nodes which share this information atom. 
/// </summary> public int LocalNameHashCode { get { return _localNameHash; } } /// <summary> /// Link together InfoAtoms that hash to the same hashtable bucket (should only be used by XPathNodeInfoTable) /// </summary> public XPathNodeInfoAtom Next { get { return _next; } set { _next = value; } } /// <summary> /// Return this information atom's hash code, previously computed for performance. /// </summary> public override int GetHashCode() { if (_hashCode == 0) { int hashCode; // Start with local name hashCode = _localNameHash; // Add page indexes unchecked { if (_pageSibling != null) hashCode += (hashCode << 7) ^ _pageSibling[0].PageInfo.PageNumber; if (_pageParent != null) hashCode += (hashCode << 7) ^ _pageParent[0].PageInfo.PageNumber; if (_pageSimilar != null) hashCode += (hashCode << 7) ^ _pageSimilar[0].PageInfo.PageNumber; } // Save hashcode. Don't save 0, so that it won't ever be recomputed. _hashCode = ((hashCode == 0) ? 1 : hashCode); } return _hashCode; } /// <summary> /// Return true if this InfoAtom has the same values as another InfoAtom. /// </summary> public override bool Equals(object other) { return Equals(other as XPathNodeInfoAtom); } public bool Equals(XPathNodeInfoAtom other) { Debug.Assert(other != null); Debug.Assert((object)_doc == (object)other._doc); Debug.Assert(_pageInfo == null); // Assume that name parts are atomized if (this.GetHashCode() == other.GetHashCode()) { if ((object)_localName == (object)other._localName && (object)_pageSibling == (object)other._pageSibling && (object)_namespaceUri == (object)other._namespaceUri && (object)_pageParent == (object)other._pageParent && (object)_pageSimilar == (object)other._pageSimilar && (object)_prefix == (object)other._prefix && (object)_baseUri == (object)other._baseUri && _lineNumBase == other._lineNumBase && _linePosBase == other._linePosBase) { return true; } } return false; } /// <summary> /// Return InfoAtom formatted as a string: /// hash=xxx, {http://my.com}foo:bar, parent=1, sibling=1, lineNum=0, linePos=0 /// </summary> public override string ToString() { StringBuilder bldr = new StringBuilder(); bldr.Append("hash="); bldr.Append(GetHashCode()); bldr.Append(", "); if (_localName.Length != 0) { bldr.Append('{'); bldr.Append(_namespaceUri); bldr.Append('}'); if (_prefix.Length != 0) { bldr.Append(_prefix); bldr.Append(':'); } bldr.Append(_localName); bldr.Append(", "); } if (_pageParent != null) { bldr.Append("parent="); bldr.Append(_pageParent[0].PageInfo.PageNumber); bldr.Append(", "); } if (_pageSibling != null) { bldr.Append("sibling="); bldr.Append(_pageSibling[0].PageInfo.PageNumber); bldr.Append(", "); } if (_pageSimilar != null) { bldr.Append("similar="); bldr.Append(_pageSimilar[0].PageInfo.PageNumber); bldr.Append(", "); } bldr.Append("lineNum="); bldr.Append(_lineNumBase); bldr.Append(", "); bldr.Append("linePos="); bldr.Append(_linePosBase); return bldr.ToString(); } } /// <summary> /// An atomization table for XPathNodeInfoAtom. /// </summary> sealed internal class XPathNodeInfoTable { private XPathNodeInfoAtom[] _hashTable; private int _sizeTable; private XPathNodeInfoAtom _infoCached; #if DEBUG private const int DefaultTableSize = 2; #else private const int DefaultTableSize = 32; #endif /// <summary> /// Constructor. /// </summary> public XPathNodeInfoTable() { _hashTable = new XPathNodeInfoAtom[DefaultTableSize]; _sizeTable = 0; } /// <summary> /// Create a new XNodeInfoAtom and ensure it is atomized in the table. 
/// </summary> public XPathNodeInfoAtom Create(string localName, string namespaceUri, string prefix, string baseUri, XPathNode[] pageParent, XPathNode[] pageSibling, XPathNode[] pageSimilar, XPathDocument doc, int lineNumBase, int linePosBase) { XPathNodeInfoAtom info; // If this.infoCached already exists, then reuse it; else create new InfoAtom if (_infoCached == null) { info = new XPathNodeInfoAtom(localName, namespaceUri, prefix, baseUri, pageParent, pageSibling, pageSimilar, doc, lineNumBase, linePosBase); } else { info = _infoCached; _infoCached = info.Next; info.Init(localName, namespaceUri, prefix, baseUri, pageParent, pageSibling, pageSimilar, doc, lineNumBase, linePosBase); } return Atomize(info); } /// <summary> /// Add a shared information item to the atomization table. If a matching item already exists, then that /// instance is returned. Otherwise, a new item is created. Thus, if itemX and itemY have both been added /// to the same InfoTable: /// 1. itemX.Equals(itemY) != true /// 2. (object) itemX != (object) itemY /// </summary> private XPathNodeInfoAtom Atomize(XPathNodeInfoAtom info) { XPathNodeInfoAtom infoNew, infoNext; // Search for existing XNodeInfoAtom in the table infoNew = _hashTable[info.GetHashCode() & (_hashTable.Length - 1)]; while (infoNew != null) { if (info.Equals(infoNew)) { // Found existing atom, so return that. Reuse "info". info.Next = _infoCached; _infoCached = info; return infoNew; } infoNew = infoNew.Next; } // Expand table and rehash if necessary if (_sizeTable >= _hashTable.Length) { XPathNodeInfoAtom[] oldTable = _hashTable; _hashTable = new XPathNodeInfoAtom[oldTable.Length * 2]; for (int i = 0; i < oldTable.Length; i++) { infoNew = oldTable[i]; while (infoNew != null) { infoNext = infoNew.Next; AddInfo(infoNew); infoNew = infoNext; } } } // Can't find an existing XNodeInfoAtom, so use the one that was passed in AddInfo(info); return info; } /// <summary> /// Add a previously constructed InfoAtom to the table. If a collision occurs, then insert "info" /// as the head of a linked list. /// </summary> private void AddInfo(XPathNodeInfoAtom info) { int idx = info.GetHashCode() & (_hashTable.Length - 1); info.Next = _hashTable[idx]; _hashTable[idx] = info; _sizeTable++; } /// <summary> /// Return InfoAtomTable formatted as a string. /// </summary> public override string ToString() { StringBuilder bldr = new StringBuilder(); XPathNodeInfoAtom infoAtom; for (int i = 0; i < _hashTable.Length; i++) { bldr.AppendFormat("{0,4}: ", i); infoAtom = _hashTable[i]; while (infoAtom != null) { if ((object)infoAtom != (object)_hashTable[i]) bldr.Append("\n "); bldr.Append(infoAtom); infoAtom = infoAtom.Next; } bldr.Append('\n'); } return bldr.ToString(); } } }
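// --------------------------------------------------------------------------------------------
// Illustrative sketch only (not part of MS.Internal.Xml.Cache above). XPathNodeInfoTable
// atomizes immutable info objects the same way string.Intern atomizes strings: look the value
// up, return the existing instance if an equal one is already stored, otherwise store and
// return the new one. This minimal generic version uses a Dictionary instead of the hand-rolled
// bucket chains, but the contract is the same: after atomization, equal values are
// reference-equal, so later comparisons can use object identity.
// --------------------------------------------------------------------------------------------
using System.Collections.Generic;

internal sealed class AtomizationTableSketch<T> where T : class
{
    private readonly Dictionary<T, T> _atoms = new Dictionary<T, T>();

    /// <summary>
    /// Returns the canonical instance for <paramref name="value"/>, relying on the type's
    /// Equals/GetHashCode overrides (as XPathNodeInfoAtom does above).
    /// </summary>
    public T Atomize(T value)
    {
        T existing;
        if (_atoms.TryGetValue(value, out existing))
            return existing;

        _atoms.Add(value, value);
        return value;
    }
}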
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using Xunit; using Microsoft.Win32; using System; using System.Text; using System.Threading; namespace Microsoft.Win32.RegistryTests { public class Registry_SetValue_str_str_obj_valueKind : IDisposable { private string _testKey = "CM3001_TEST"; private string _keyString = ""; private RegistryKey _rk1, _rk2; private string _subName = ""; private static int s_keyCount = 0; // Variables needed private String _strExpected = ""; private Random _rand = new Random(-55); private RegistryValueKind[] _ExpectedKinds = new RegistryValueKind[38]; private Object _obj = new Object(); private Object[] _objArr2; private Byte[] _byteArr; private Object[] _objArr = new Object[38]; public void TestInitialize() { var counter = Interlocked.Increment(ref s_keyCount); _testKey += counter.ToString(); _rk1 = Microsoft.Win32.Registry.CurrentUser; if (_rk1.OpenSubKey(_testKey) != null) _rk1.DeleteSubKeyTree(_testKey); if (_rk1.GetValue(_testKey) != null) _rk1.DeleteValue(_testKey); _rk2 = _rk1.CreateSubKey(_testKey); _keyString = _rk2.ToString(); } public void SetObjArr() { _byteArr = new Byte[_rand.Next(0, 100)]; _objArr2 = new Object[2] { _obj, _obj }; _rand.NextBytes(_byteArr); // Standard Random Numbers _objArr[0] = (Byte)(_rand.Next(Byte.MinValue, Byte.MaxValue)); _objArr[1] = (SByte)(_rand.Next(SByte.MinValue, SByte.MaxValue)); _objArr[2] = (Int16)(_rand.Next(Int16.MinValue, Int16.MaxValue)); _objArr[3] = (UInt16)(_rand.Next(UInt16.MinValue, UInt16.MaxValue)); _objArr[4] = (Char)(_rand.Next(UInt16.MinValue, UInt16.MaxValue)); _objArr[5] = (Int32)(_rand.Next(Int32.MinValue, Int32.MaxValue)); // Random Numbers that can fit into Int32 _objArr[6] = (UInt32)(_rand.NextDouble() * Int32.MaxValue); _objArr[7] = (Int64)(_rand.NextDouble() * Int32.MaxValue); _objArr[8] = (Int64)(_rand.NextDouble() * Int32.MinValue); _objArr[9] = (UInt64)(_rand.NextDouble() * Int32.MaxValue); _objArr[10] = (Decimal)(_rand.NextDouble() * Int32.MaxValue); _objArr[11] = (Decimal)(_rand.NextDouble() * Int32.MinValue); _objArr[12] = (Single)(_rand.NextDouble() * Int32.MaxValue); _objArr[13] = (Single)(_rand.NextDouble() * Int32.MinValue); _objArr[14] = (Double)(_rand.NextDouble() * Int32.MaxValue); _objArr[15] = (Double)(_rand.NextDouble() * Int32.MinValue); // Random Numbers that can't fit into Int32 but can fit into Int64 _objArr[16] = (UInt32)(_rand.NextDouble() * (UInt32.MaxValue - (UInt32)Int32.MaxValue) + (UInt32)Int32.MaxValue); _objArr[17] = (Int64)(_rand.NextDouble() * (Int64.MaxValue - (Int64)Int32.MaxValue) + (Int64)Int32.MaxValue); _objArr[18] = (Int64)(_rand.NextDouble() * (Int64.MinValue - (Int64)Int32.MinValue) + (Int64)Int32.MinValue); _objArr[19] = (UInt64)(_rand.NextDouble() * ((UInt64)Int64.MaxValue - (UInt64)Int32.MaxValue) + (UInt64)Int32.MaxValue); _objArr[20] = (Decimal)(_rand.NextDouble() * (Int64.MaxValue - (Int64)Int32.MaxValue) + (Int64)Int32.MaxValue); _objArr[21] = (Decimal)(_rand.NextDouble() * (Int64.MinValue - (Int64)Int32.MinValue) + (Int64)Int32.MinValue); _objArr[22] = (Single)(_rand.NextDouble() * (Int64.MaxValue - Int32.MaxValue) + Int32.MaxValue); _objArr[23] = (Single)(_rand.NextDouble() * (Int64.MinValue - Int32.MinValue) + Int32.MinValue); _objArr[24] = (Double)(_rand.NextDouble() * (Int64.MaxValue - Int32.MaxValue) + Int32.MaxValue); _objArr[25] = (Double)(_rand.NextDouble() * (Int64.MinValue - Int32.MinValue) + Int32.MinValue); // Random Numbers that 
can't fit into Int32 or Int64 _objArr[26] = (UInt64)(_rand.NextDouble() * (UInt64.MaxValue - (UInt64)Int64.MaxValue) + (UInt64)Int64.MaxValue); _objArr[27] = Decimal.MaxValue; _objArr[28] = Decimal.MinValue; _objArr[29] = Single.MaxValue; _objArr[30] = Single.MinValue; _objArr[31] = Double.MaxValue; _objArr[32] = Double.MinValue; // Various other types _objArr[33] = (String)"Hello World"; _objArr[34] = (String)"Hello %path5% World"; _objArr[35] = new String[] { "Hello World", "Hello %path% World" }; _objArr[36] = (Object)_obj; _objArr[37] = (Byte[])(_byteArr); } public Registry_SetValue_str_str_obj_valueKind() { TestInitialize(); SetObjArr(); } [Fact] public void Test01() { // [] Test RegistryValueKind.Unknown try { for (int i = 0; i < _ExpectedKinds.Length; _ExpectedKinds[i++] = RegistryValueKind.String) ; //special cases. _ExpectedKinds[5] = RegistryValueKind.DWord; _ExpectedKinds[35] = RegistryValueKind.MultiString; _ExpectedKinds[37] = RegistryValueKind.Binary; for (int i = 0; i < _objArr.Length; i++) { _subName = "Testing " + i; Registry.SetValue(_keyString, _subName, _objArr[i], RegistryValueKind.Unknown); if (_rk2.GetValue(_subName).ToString() != _objArr[i].ToString() || _rk2.GetValueKind(_subName) != _ExpectedKinds[i]) { Assert.False(true, "Error Type==" + _objArr[i].GetType() + " Expected==" + _objArr[i].ToString() + " kind==" + _ExpectedKinds[i].ToString() + ", got value==" + _rk2.GetValue(_subName).ToString() + " kind==" + _rk2.GetValueKind(_subName).ToString()); } } } catch (Exception e) { Assert.False(true, "Err Unexpected exception :: " + e.ToString()); } } [Fact] public void Test02() { // [] Test RegistryValueKind.String try { for (int i = 0; i < _objArr.Length; i++) { _subName = "Testing " + i; Registry.SetValue(_keyString, _subName, _objArr[i], RegistryValueKind.String); if (_rk2.GetValue(_subName).ToString() != _objArr[i].ToString() || _rk2.GetValueKind(_subName) != RegistryValueKind.String) { Assert.False(true, "Error Type==" + _objArr[i].GetType() + " Expected==" + _objArr[i].ToString() + " kind==" + RegistryValueKind.String + ", got value==" + _rk2.GetValue(_subName).ToString() + " kind==" + _rk2.GetValueKind(_subName).ToString()); } } } catch (Exception e) { Assert.False(true, "Err Unexpected exception :: " + e.ToString()); } } [Fact] public void Test03() { // [] Test RegistryValueKind.ExpandString try { for (int i = 0; i < _objArr.Length; i++) { _subName = "Testing " + i; Registry.SetValue(_keyString, _subName, _objArr[i], RegistryValueKind.ExpandString); if (_rk2.GetValue(_subName).ToString() != _objArr[i].ToString() || _rk2.GetValueKind(_subName) != RegistryValueKind.ExpandString) { Assert.False(true, "Error Type==" + _objArr[i].GetType() + " Expected==" + _objArr[i].ToString() + " kind==" + RegistryValueKind.ExpandString + ", got value==" + _rk2.GetValue(_subName).ToString() + " kind==" + _rk2.GetValueKind(_subName).ToString()); } } } catch (Exception e) { Assert.False(true, "Err Unexpected exception :: " + e.ToString()); } } [Fact] public void Test04() { // [] Test RegistryValueKind.MultiString try { for (int i = 0; i < _objArr.Length; i++) { try { _subName = "Testing " + i; Registry.SetValue(_keyString, _subName, _objArr[i], RegistryValueKind.MultiString); if (_rk2.GetValue(_subName).ToString() != _objArr[i].ToString() || _rk2.GetValueKind(_subName) != RegistryValueKind.MultiString) { Assert.False(true, "Error Type==" + _objArr[i].GetType() + " Expected==" + _objArr[i].ToString() + " kind==" + RegistryValueKind.MultiString + ", got value==" + 
_rk2.GetValue(_subName).ToString() + " kind==" + _rk2.GetValueKind(_subName).ToString()); } } catch (ArgumentException ioe) { if (_objArr[i].GetType() == (new string[0]).GetType()) { Assert.False(true, "Error Type==" + _objArr[i].GetType() + " Expected==" + _objArr[i].ToString() + " kind==" + RegistryValueKind.MultiString + ", got exception==" + ioe.ToString()); } } } } catch (Exception e) { Assert.False(true, "Err Unexpected exception :: " + e.ToString()); } } [Fact] public void Test05() { // [] Test RegistryValueKind.Binary try { for (int i = 0; i < _objArr.Length; i++) { try { _subName = "Testing " + i; Registry.SetValue(_keyString, _subName, _objArr[i], RegistryValueKind.Binary); if (_rk2.GetValue(_subName).ToString() != _objArr[i].ToString() || _rk2.GetValueKind(_subName) != RegistryValueKind.Binary) { Assert.False(true, "Error Type==" + _objArr[i].GetType() + " Expected==" + _objArr[i].ToString() + " kind==" + RegistryValueKind.Binary + ", got value==" + _rk2.GetValue(_subName).ToString() + " kind==" + _rk2.GetValueKind(_subName).ToString()); } } catch (ArgumentException ioe) { if (_objArr[i].GetType() == (new byte[0]).GetType()) { Assert.False(true, "Error Type==" + _objArr[i].GetType() + " Expected==" + _objArr[i].ToString() + " kind==" + RegistryValueKind.Binary + ", got exception==" + ioe.ToString()); } } } } catch (Exception e) { Assert.False(true, "Err Unexpected exception :: " + e.ToString()); } } [Fact] public void Test06() { // [] Test RegistryValueKind.DWord try { for (int i = 0; i < _objArr.Length; i++) { try { if (i <= 15) { _strExpected = (Convert.ToInt32(_objArr[i])).ToString(); } _subName = "Testing " + i; Registry.SetValue(_keyString, _subName, _objArr[i], RegistryValueKind.DWord); if (_rk2.GetValue(_subName).ToString() != _strExpected || _rk2.GetValueKind(_subName) != RegistryValueKind.DWord) { Assert.False(true, "Error Type==" + _objArr[i].GetType() + " Expected==" + _strExpected + " kind==" + RegistryValueKind.DWord + ", got value==" + _rk2.GetValue(_subName).ToString() + " kind==" + _rk2.GetValueKind(_subName).ToString()); } if (i > 15) { Assert.False(true, "Error Type==" + _objArr[i].GetType() + " Expected==Exception, got value==" + _rk2.GetValue(_subName).ToString() + " kind==" + _rk2.GetValueKind(_subName).ToString()); } } catch (ArgumentException ioe) { if (i <= 15) { Assert.False(true, "Error Type==" + _objArr[i].GetType() + " Expected==" + _strExpected + " kind==" + RegistryValueKind.DWord + ", got Exception==" + ioe.ToString()); } } catch (Exception e) { Assert.False(true, "Err i==" + i + " Unexpected exception :: " + e.ToString()); } } } catch (Exception e) { Assert.False(true, "Err Unexpected exception :: " + e.ToString()); } } [Fact] public void Test07() { // [] Test RegistryValueKind.QWord try { for (int i = 0; i < _objArr.Length; i++) { try { if (i <= 25) { _strExpected = (Convert.ToInt64(_objArr[i])).ToString(); } _subName = "Testing " + i; Registry.SetValue(_keyString, _subName, _objArr[i], RegistryValueKind.QWord); if (_rk2.GetValue(_subName).ToString() != _strExpected || _rk2.GetValueKind(_subName) != RegistryValueKind.QWord) { Assert.False(true, "Error Type==" + _objArr[i].GetType() + " Expected==" + _strExpected + " kind==" + RegistryValueKind.QWord + ", got value==" + _rk2.GetValue(_subName).ToString() + " kind==" + _rk2.GetValueKind(_subName).ToString()); } if (i > 25) { Assert.False(true, "Error Type==" + _objArr[i].GetType() + " Expected==Exception, got value==" + _rk2.GetValue(_subName).ToString() + " kind==" + 
_rk2.GetValueKind(_subName).ToString()); } } catch (ArgumentException ioe) { if (i <= 25) { Assert.False(true, "Error Type==" + _objArr[i].GetType() + " Expected==" + _strExpected + " kind==" + RegistryValueKind.QWord + ", got Exception==" + ioe.ToString()); } } catch (Exception e) { Assert.False(true, "Err i==" + i + " Unexpected exception :: " + e.ToString()); } } } catch (Exception e) { Assert.False(true, "Err Unexpected exception :: " + e.ToString()); } } [Fact] public void Test08() { // [] Registry Key does not exist try { // Create the key _subName = "FooBar"; Registry.SetValue(_keyString, _subName, _objArr[5], RegistryValueKind.DWord); if (_rk2.GetValue(_subName).ToString() != _objArr[5].ToString() || _rk2.GetValueKind(_subName) != RegistryValueKind.DWord) { Assert.False(true, "Error Type==" + _objArr[5].GetType() + " Expected==" + _objArr[5].ToString() + " kind==" + RegistryValueKind.DWord + ", got value==" + _rk2.GetValue(_subName).ToString() + " kind==" + _rk2.GetValueKind(_subName).ToString()); } } catch (Exception e) { Assert.False(true, "Err Unexpected exception :: " + e.ToString()); } } [Fact] public void Test09() { // [] Registry Key already exists try { // Create the key _subName = "FooBar"; Registry.SetValue(_keyString, _subName, _objArr[7], RegistryValueKind.QWord); if (_rk2.GetValue(_subName).ToString() != _objArr[7].ToString() || _rk2.GetValueKind(_subName) != RegistryValueKind.QWord) { Assert.False(true, "Error Type==" + _objArr[7].GetType() + " Expected==" + _objArr[7].ToString() + " kind==" + RegistryValueKind.QWord + ", got value==" + _rk2.GetValue(_subName).ToString() + " kind==" + _rk2.GetValueKind(_subName).ToString()); } } catch (Exception e) { Assert.False(true, "Err Unexpected exception :: " + e.ToString()); } } [Fact] public void Test10() { // [] Name is null try { // Create the key _subName = null; Registry.SetValue(_keyString, _subName, _objArr[5], RegistryValueKind.DWord); if (_rk2.GetValue(_subName).ToString() != _objArr[5].ToString() || _rk2.GetValueKind(_subName) != RegistryValueKind.DWord) { Assert.False(true, "Error Type==" + _objArr[5].GetType() + " Expected==" + _objArr[5].ToString() + " kind==" + RegistryValueKind.DWord + ", got value==" + _rk2.GetValue(_subName).ToString() + " kind==" + _rk2.GetValueKind(_subName).ToString()); } } catch (Exception e) { Assert.False(true, "Err Unexpected exception :: " + e.ToString()); } } [Fact] public void Test11() { // [] Name is "" try { // Create the key _subName = ""; Registry.SetValue(_keyString, _subName, _objArr[7], RegistryValueKind.QWord); if (_rk2.GetValue(_subName).ToString() != _objArr[7].ToString() || _rk2.GetValueKind(_subName) != RegistryValueKind.QWord) { Assert.False(true, "Error Type==" + _objArr[7].GetType() + " Expected==" + _objArr[7].ToString() + " kind==" + RegistryValueKind.QWord + ", got value==" + _rk2.GetValue(_subName).ToString() + " kind==" + _rk2.GetValueKind(_subName).ToString()); } } catch (Exception e) { Assert.False(true, "Err Unexpected exception :: " + e.ToString()); } } [Fact] public void Test12() { // [] Name is 255 characters try { // Create the key _subName = "12345678901111111110222222222033333333304444444440555555555066666666607777777770888888888099999999901234567890111111111022222222203333333330444444444055555555506666666660777777777088888888809999999990123456789011111111102222222220333333333044444444405555"; Registry.SetValue(_keyString, _subName, _objArr[7], RegistryValueKind.QWord); if (_rk2.GetValue(_subName).ToString() != _objArr[7].ToString() || 
_rk2.GetValueKind(_subName) != RegistryValueKind.QWord) { Assert.False(true, "Error Type==" + _objArr[7].GetType() + " Expected==" + _objArr[7].ToString() + " kind==" + RegistryValueKind.QWord + ", got value==" + _rk2.GetValue(_subName).ToString() + " kind==" + _rk2.GetValueKind(_subName).ToString()); } } catch (Exception e) { Assert.False(true, "Err Unexpected exception :: " + e.ToString()); } } [Fact] public void Test13() { // [] Name is longer then 255 characters // Create the key int MaxValueNameLength = 16383; // prior to V4, the limit is 255 StringBuilder sb = new StringBuilder(MaxValueNameLength + 1); for (int i = 0; i <= MaxValueNameLength; i++) sb.Append('a'); _subName = sb.ToString(); Action a = () => { Registry.SetValue(_keyString, _subName, _objArr[7], RegistryValueKind.QWord); }; Assert.Throws<ArgumentException>(() => { a(); }); } [Fact] public void Test14() { // [] Value is null // Create the key _subName = "FooBar"; Action a = () => { Registry.SetValue(_keyString, _subName, null, RegistryValueKind.QWord); }; Assert.Throws<ArgumentNullException>(() => { a(); }); } [Fact] public void Test15() { // [] ValueKind is equal to -2 which is not an acceptable value // Create the key _subName = "FooBar"; Action a = () => { Registry.SetValue(_keyString, _subName, _objArr[5], (RegistryValueKind)(-2)); }; Assert.Throws<ArgumentException>(() => { a(); }); } [Fact] public void Test16() { // [] value is a string[] with null values // Create the key _subName = "FooBar"; string[] strArr = new String[] { "one", "two", null, "three" }; Action a = () => { Registry.SetValue(_keyString, _subName, strArr, RegistryValueKind.MultiString); }; Assert.Throws<ArgumentException>(() => { a(); }); } [Fact] public void Test17() { // [] value is a object[] // Create the key _subName = "FooBar"; Action a = () => { Registry.SetValue(_keyString, _subName, _objArr2, RegistryValueKind.MultiString); }; Assert.Throws<ArgumentException>(() => { a(); }); } [Fact] public void Test18() { // [] Set RegistryKey to bad array type. // To improve code coverage object[] objTemp = new object[] { "my string", "your string", "Any once string" }; Action a = () => { Registry.SetValue(_keyString, _subName, objTemp, RegistryValueKind.Unknown); }; Assert.Throws<ArgumentException>(() => { a(); }); } public void Dispose() { var _rk = Microsoft.Win32.Registry.CurrentUser; if (_rk.OpenSubKey(_testKey) != null) _rk.DeleteSubKeyTree(_testKey); if (_rk.GetValue(_testKey) != null) _rk.DeleteValue(_testKey); } } }
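// --------------------------------------------------------------------------------------------
// Illustrative sketch only (not one of the xunit tests above). The tests exercise
// Registry.SetValue(keyName, valueName, value, RegistryValueKind); the fragment below shows the
// basic round trip their assertions rely on. The subkey name is a placeholder and, like the
// tests, this only works on Windows.
// --------------------------------------------------------------------------------------------
using System;
using Microsoft.Win32;

internal static class RegistrySetValueSketch
{
    public static void RoundTrip()
    {
        const string subKey = @"Software\RegistrySetValueSketch";
        string keyName = Registry.CurrentUser.Name + @"\" + subKey;

        // Store an Int32 explicitly as REG_DWORD, then read back both the value and its kind.
        Registry.SetValue(keyName, "Answer", 42, RegistryValueKind.DWord);

        using (RegistryKey key = Registry.CurrentUser.OpenSubKey(subKey))
        {
            object value = key.GetValue("Answer");               // 42
            RegistryValueKind kind = key.GetValueKind("Answer");  // RegistryValueKind.DWord
            Console.WriteLine("{0} ({1})", value, kind);
        }

        Registry.CurrentUser.DeleteSubKeyTree(subKey);
    }
}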
/* * Copyright (c) Citrix Systems, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1) Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2) Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials * provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections; using System.Collections.Generic; namespace XenAPI { /// <summary> /// /// First published in XenServer 4.1. /// </summary> public partial class Bond : XenObject<Bond> { public Bond() { } public Bond(string uuid, XenRef<PIF> master, List<XenRef<PIF>> slaves, Dictionary<string, string> other_config, XenRef<PIF> primary_slave, bond_mode mode, Dictionary<string, string> properties, long links_up) { this.uuid = uuid; this.master = master; this.slaves = slaves; this.other_config = other_config; this.primary_slave = primary_slave; this.mode = mode; this.properties = properties; this.links_up = links_up; } /// <summary> /// Creates a new Bond from a Proxy_Bond. /// </summary> /// <param name="proxy"></param> public Bond(Proxy_Bond proxy) { this.UpdateFromProxy(proxy); } public override void UpdateFrom(Bond update) { uuid = update.uuid; master = update.master; slaves = update.slaves; other_config = update.other_config; primary_slave = update.primary_slave; mode = update.mode; properties = update.properties; links_up = update.links_up; } internal void UpdateFromProxy(Proxy_Bond proxy) { uuid = proxy.uuid == null ? null : (string)proxy.uuid; master = proxy.master == null ? null : XenRef<PIF>.Create(proxy.master); slaves = proxy.slaves == null ? null : XenRef<PIF>.Create(proxy.slaves); other_config = proxy.other_config == null ? null : Maps.convert_from_proxy_string_string(proxy.other_config); primary_slave = proxy.primary_slave == null ? null : XenRef<PIF>.Create(proxy.primary_slave); mode = proxy.mode == null ? (bond_mode) 0 : (bond_mode)Helper.EnumParseDefault(typeof(bond_mode), (string)proxy.mode); properties = proxy.properties == null ? null : Maps.convert_from_proxy_string_string(proxy.properties); links_up = proxy.links_up == null ? 0 : long.Parse((string)proxy.links_up); } public Proxy_Bond ToProxy() { Proxy_Bond result_ = new Proxy_Bond(); result_.uuid = uuid ?? ""; result_.master = master ?? ""; result_.slaves = (slaves != null) ? Helper.RefListToStringArray(slaves) : new string[] {}; result_.other_config = Maps.convert_to_proxy_string_string(other_config); result_.primary_slave = primary_slave ?? 
""; result_.mode = bond_mode_helper.ToString(mode); result_.properties = Maps.convert_to_proxy_string_string(properties); result_.links_up = links_up.ToString(); return result_; } /// <summary> /// Creates a new Bond from a Hashtable. /// </summary> /// <param name="table"></param> public Bond(Hashtable table) { uuid = Marshalling.ParseString(table, "uuid"); master = Marshalling.ParseRef<PIF>(table, "master"); slaves = Marshalling.ParseSetRef<PIF>(table, "slaves"); other_config = Maps.convert_from_proxy_string_string(Marshalling.ParseHashTable(table, "other_config")); primary_slave = Marshalling.ParseRef<PIF>(table, "primary_slave"); mode = (bond_mode)Helper.EnumParseDefault(typeof(bond_mode), Marshalling.ParseString(table, "mode")); properties = Maps.convert_from_proxy_string_string(Marshalling.ParseHashTable(table, "properties")); links_up = Marshalling.ParseLong(table, "links_up"); } public bool DeepEquals(Bond other) { if (ReferenceEquals(null, other)) return false; if (ReferenceEquals(this, other)) return true; return Helper.AreEqual2(this._uuid, other._uuid) && Helper.AreEqual2(this._master, other._master) && Helper.AreEqual2(this._slaves, other._slaves) && Helper.AreEqual2(this._other_config, other._other_config) && Helper.AreEqual2(this._primary_slave, other._primary_slave) && Helper.AreEqual2(this._mode, other._mode) && Helper.AreEqual2(this._properties, other._properties) && Helper.AreEqual2(this._links_up, other._links_up); } public override string SaveChanges(Session session, string opaqueRef, Bond server) { if (opaqueRef == null) { System.Diagnostics.Debug.Assert(false, "Cannot create instances of this type on the server"); return ""; } else { if (!Helper.AreEqual2(_other_config, server._other_config)) { Bond.set_other_config(session, opaqueRef, _other_config); } return null; } } /// <summary> /// Get a record containing the current state of the given Bond. /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> public static Bond get_record(Session session, string _bond) { return new Bond((Proxy_Bond)session.proxy.bond_get_record(session.uuid, _bond ?? "").parse()); } /// <summary> /// Get a reference to the Bond instance with the specified UUID. /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_uuid">UUID of object to return</param> public static XenRef<Bond> get_by_uuid(Session session, string _uuid) { return XenRef<Bond>.Create(session.proxy.bond_get_by_uuid(session.uuid, _uuid ?? "").parse()); } /// <summary> /// Get the uuid field of the given Bond. /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> public static string get_uuid(Session session, string _bond) { return (string)session.proxy.bond_get_uuid(session.uuid, _bond ?? "").parse(); } /// <summary> /// Get the master field of the given Bond. /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> public static XenRef<PIF> get_master(Session session, string _bond) { return XenRef<PIF>.Create(session.proxy.bond_get_master(session.uuid, _bond ?? "").parse()); } /// <summary> /// Get the slaves field of the given Bond. /// First published in XenServer 4.1. 
/// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> public static List<XenRef<PIF>> get_slaves(Session session, string _bond) { return XenRef<PIF>.Create(session.proxy.bond_get_slaves(session.uuid, _bond ?? "").parse()); } /// <summary> /// Get the other_config field of the given Bond. /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> public static Dictionary<string, string> get_other_config(Session session, string _bond) { return Maps.convert_from_proxy_string_string(session.proxy.bond_get_other_config(session.uuid, _bond ?? "").parse()); } /// <summary> /// Get the primary_slave field of the given Bond. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> public static XenRef<PIF> get_primary_slave(Session session, string _bond) { return XenRef<PIF>.Create(session.proxy.bond_get_primary_slave(session.uuid, _bond ?? "").parse()); } /// <summary> /// Get the mode field of the given Bond. /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> public static bond_mode get_mode(Session session, string _bond) { return (bond_mode)Helper.EnumParseDefault(typeof(bond_mode), (string)session.proxy.bond_get_mode(session.uuid, _bond ?? "").parse()); } /// <summary> /// Get the properties field of the given Bond. /// First published in XenServer 6.1. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> public static Dictionary<string, string> get_properties(Session session, string _bond) { return Maps.convert_from_proxy_string_string(session.proxy.bond_get_properties(session.uuid, _bond ?? "").parse()); } /// <summary> /// Get the links_up field of the given Bond. /// First published in XenServer 6.1. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> public static long get_links_up(Session session, string _bond) { return long.Parse((string)session.proxy.bond_get_links_up(session.uuid, _bond ?? "").parse()); } /// <summary> /// Set the other_config field of the given Bond. /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> /// <param name="_other_config">New value to set</param> public static void set_other_config(Session session, string _bond, Dictionary<string, string> _other_config) { session.proxy.bond_set_other_config(session.uuid, _bond ?? "", Maps.convert_to_proxy_string_string(_other_config)).parse(); } /// <summary> /// Add the given key-value pair to the other_config field of the given Bond. /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> /// <param name="_key">Key to add</param> /// <param name="_value">Value to add</param> public static void add_to_other_config(Session session, string _bond, string _key, string _value) { session.proxy.bond_add_to_other_config(session.uuid, _bond ?? "", _key ?? "", _value ?? 
"").parse(); } /// <summary> /// Remove the given key and its corresponding value from the other_config field of the given Bond. If the key is not in that Map, then do nothing. /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> /// <param name="_key">Key to remove</param> public static void remove_from_other_config(Session session, string _bond, string _key) { session.proxy.bond_remove_from_other_config(session.uuid, _bond ?? "", _key ?? "").parse(); } /// <summary> /// Create an interface bond /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_network">Network to add the bonded PIF to</param> /// <param name="_members">PIFs to add to this bond</param> /// <param name="_mac">The MAC address to use on the bond itself. If this parameter is the empty string then the bond will inherit its MAC address from the primary slave.</param> public static XenRef<Bond> create(Session session, string _network, List<XenRef<PIF>> _members, string _mac) { return XenRef<Bond>.Create(session.proxy.bond_create(session.uuid, _network ?? "", (_members != null) ? Helper.RefListToStringArray(_members) : new string[] {}, _mac ?? "").parse()); } /// <summary> /// Create an interface bond /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_network">Network to add the bonded PIF to</param> /// <param name="_members">PIFs to add to this bond</param> /// <param name="_mac">The MAC address to use on the bond itself. If this parameter is the empty string then the bond will inherit its MAC address from the primary slave.</param> public static XenRef<Task> async_create(Session session, string _network, List<XenRef<PIF>> _members, string _mac) { return XenRef<Task>.Create(session.proxy.async_bond_create(session.uuid, _network ?? "", (_members != null) ? Helper.RefListToStringArray(_members) : new string[] {}, _mac ?? "").parse()); } /// <summary> /// Create an interface bond /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_network">Network to add the bonded PIF to</param> /// <param name="_members">PIFs to add to this bond</param> /// <param name="_mac">The MAC address to use on the bond itself. If this parameter is the empty string then the bond will inherit its MAC address from the primary slave.</param> /// <param name="_mode">Bonding mode to use for the new bond First published in XenServer 6.0.</param> public static XenRef<Bond> create(Session session, string _network, List<XenRef<PIF>> _members, string _mac, bond_mode _mode) { return XenRef<Bond>.Create(session.proxy.bond_create(session.uuid, _network ?? "", (_members != null) ? Helper.RefListToStringArray(_members) : new string[] {}, _mac ?? "", bond_mode_helper.ToString(_mode)).parse()); } /// <summary> /// Create an interface bond /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_network">Network to add the bonded PIF to</param> /// <param name="_members">PIFs to add to this bond</param> /// <param name="_mac">The MAC address to use on the bond itself. 
If this parameter is the empty string then the bond will inherit its MAC address from the primary slave.</param> /// <param name="_mode">Bonding mode to use for the new bond First published in XenServer 6.0.</param> public static XenRef<Task> async_create(Session session, string _network, List<XenRef<PIF>> _members, string _mac, bond_mode _mode) { return XenRef<Task>.Create(session.proxy.async_bond_create(session.uuid, _network ?? "", (_members != null) ? Helper.RefListToStringArray(_members) : new string[] {}, _mac ?? "", bond_mode_helper.ToString(_mode)).parse()); } /// <summary> /// Create an interface bond /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_network">Network to add the bonded PIF to</param> /// <param name="_members">PIFs to add to this bond</param> /// <param name="_mac">The MAC address to use on the bond itself. If this parameter is the empty string then the bond will inherit its MAC address from the primary slave.</param> /// <param name="_mode">Bonding mode to use for the new bond First published in XenServer 6.0.</param> /// <param name="_properties">Additional configuration parameters specific to the bond mode First published in XenServer 6.1.</param> public static XenRef<Bond> create(Session session, string _network, List<XenRef<PIF>> _members, string _mac, bond_mode _mode, Dictionary<string, string> _properties) { return XenRef<Bond>.Create(session.proxy.bond_create(session.uuid, _network ?? "", (_members != null) ? Helper.RefListToStringArray(_members) : new string[] {}, _mac ?? "", bond_mode_helper.ToString(_mode), Maps.convert_to_proxy_string_string(_properties)).parse()); } /// <summary> /// Create an interface bond /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_network">Network to add the bonded PIF to</param> /// <param name="_members">PIFs to add to this bond</param> /// <param name="_mac">The MAC address to use on the bond itself. If this parameter is the empty string then the bond will inherit its MAC address from the primary slave.</param> /// <param name="_mode">Bonding mode to use for the new bond First published in XenServer 6.0.</param> /// <param name="_properties">Additional configuration parameters specific to the bond mode First published in XenServer 6.1.</param> public static XenRef<Task> async_create(Session session, string _network, List<XenRef<PIF>> _members, string _mac, bond_mode _mode, Dictionary<string, string> _properties) { return XenRef<Task>.Create(session.proxy.async_bond_create(session.uuid, _network ?? "", (_members != null) ? Helper.RefListToStringArray(_members) : new string[] {}, _mac ?? "", bond_mode_helper.ToString(_mode), Maps.convert_to_proxy_string_string(_properties)).parse()); } /// <summary> /// Destroy an interface bond /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> public static void destroy(Session session, string _bond) { session.proxy.bond_destroy(session.uuid, _bond ?? "").parse(); } /// <summary> /// Destroy an interface bond /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> public static XenRef<Task> async_destroy(Session session, string _bond) { return XenRef<Task>.Create(session.proxy.async_bond_destroy(session.uuid, _bond ?? 
"").parse()); } /// <summary> /// Change the bond mode /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> /// <param name="_value">The new bond mode</param> public static void set_mode(Session session, string _bond, bond_mode _value) { session.proxy.bond_set_mode(session.uuid, _bond ?? "", bond_mode_helper.ToString(_value)).parse(); } /// <summary> /// Change the bond mode /// First published in XenServer 6.0. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> /// <param name="_value">The new bond mode</param> public static XenRef<Task> async_set_mode(Session session, string _bond, bond_mode _value) { return XenRef<Task>.Create(session.proxy.async_bond_set_mode(session.uuid, _bond ?? "", bond_mode_helper.ToString(_value)).parse()); } /// <summary> /// Set the value of a property of the bond /// First published in XenServer 6.1. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> /// <param name="_name">The property name</param> /// <param name="_value">The property value</param> public static void set_property(Session session, string _bond, string _name, string _value) { session.proxy.bond_set_property(session.uuid, _bond ?? "", _name ?? "", _value ?? "").parse(); } /// <summary> /// Set the value of a property of the bond /// First published in XenServer 6.1. /// </summary> /// <param name="session">The session</param> /// <param name="_bond">The opaque_ref of the given bond</param> /// <param name="_name">The property name</param> /// <param name="_value">The property value</param> public static XenRef<Task> async_set_property(Session session, string _bond, string _name, string _value) { return XenRef<Task>.Create(session.proxy.async_bond_set_property(session.uuid, _bond ?? "", _name ?? "", _value ?? "").parse()); } /// <summary> /// Return a list of all the Bonds known to the system. /// First published in XenServer 4.1. /// </summary> /// <param name="session">The session</param> public static List<XenRef<Bond>> get_all(Session session) { return XenRef<Bond>.Create(session.proxy.bond_get_all(session.uuid).parse()); } /// <summary> /// Get all the Bond Records at once, in a single XML RPC call /// First published in XenServer 4.1. 
/// </summary> /// <param name="session">The session</param> public static Dictionary<XenRef<Bond>, Bond> get_all_records(Session session) { return XenRef<Bond>.Create<Proxy_Bond>(session.proxy.bond_get_all_records(session.uuid).parse()); } /// <summary> /// Unique identifier/object reference /// </summary> public virtual string uuid { get { return _uuid; } set { if (!Helper.AreEqual(value, _uuid)) { _uuid = value; Changed = true; NotifyPropertyChanged("uuid"); } } } private string _uuid; /// <summary> /// The bonded interface /// </summary> public virtual XenRef<PIF> master { get { return _master; } set { if (!Helper.AreEqual(value, _master)) { _master = value; Changed = true; NotifyPropertyChanged("master"); } } } private XenRef<PIF> _master; /// <summary> /// The interfaces which are part of this bond /// </summary> public virtual List<XenRef<PIF>> slaves { get { return _slaves; } set { if (!Helper.AreEqual(value, _slaves)) { _slaves = value; Changed = true; NotifyPropertyChanged("slaves"); } } } private List<XenRef<PIF>> _slaves; /// <summary> /// additional configuration /// </summary> public virtual Dictionary<string, string> other_config { get { return _other_config; } set { if (!Helper.AreEqual(value, _other_config)) { _other_config = value; Changed = true; NotifyPropertyChanged("other_config"); } } } private Dictionary<string, string> _other_config; /// <summary> /// The PIF of which the IP configuration and MAC were copied to the bond, and which will receive all configuration/VLANs/VIFs on the bond if the bond is destroyed /// First published in XenServer 6.0. /// </summary> public virtual XenRef<PIF> primary_slave { get { return _primary_slave; } set { if (!Helper.AreEqual(value, _primary_slave)) { _primary_slave = value; Changed = true; NotifyPropertyChanged("primary_slave"); } } } private XenRef<PIF> _primary_slave; /// <summary> /// The algorithm used to distribute traffic among the bonded NICs /// First published in XenServer 6.0. /// </summary> public virtual bond_mode mode { get { return _mode; } set { if (!Helper.AreEqual(value, _mode)) { _mode = value; Changed = true; NotifyPropertyChanged("mode"); } } } private bond_mode _mode; /// <summary> /// Additional configuration properties specific to the bond mode. /// First published in XenServer 6.1. /// </summary> public virtual Dictionary<string, string> properties { get { return _properties; } set { if (!Helper.AreEqual(value, _properties)) { _properties = value; Changed = true; NotifyPropertyChanged("properties"); } } } private Dictionary<string, string> _properties; /// <summary> /// Number of links up in this bond /// First published in XenServer 6.1. /// </summary> public virtual long links_up { get { return _links_up; } set { if (!Helper.AreEqual(value, _links_up)) { _links_up = value; Changed = true; NotifyPropertyChanged("links_up"); } } } private long _links_up; } }
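// Illustrative sketch of driving the generated Bond bindings above. Obtaining a logged-in Session,
// the target Network reference and the member PIF references is deployment-specific and only assumed
// here; the bond_mode values used below are assumed members of the generated bond_mode enum.
using System.Collections.Generic;
using XenAPI;

internal static class BondUsageSketch
{
    internal static void CreateAndInspect(Session session, XenRef<Network> network, List<XenRef<PIF>> memberPifs)
    {
        // Create an active-backup bond over the member PIFs; an empty MAC string means the bond
        // inherits its MAC address from the primary slave.
        XenRef<Bond> bondRef = Bond.create(session, network.opaque_ref, memberPifs, "", bond_mode.active_backup);

        // Fetch the full record and look at a couple of fields.
        Bond bond = Bond.get_record(session, bondRef.opaque_ref);
        long linksUp = bond.links_up;
        XenRef<PIF> primary = bond.primary_slave;

        // Change the bonding mode, then tear the bond down.
        Bond.set_mode(session, bondRef.opaque_ref, bond_mode.lacp);
        Bond.destroy(session, bondRef.opaque_ref);
    }
}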
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections.Generic; using System.IO; using System.Reflection; using OpenSim.Framework; using OpenSim.Framework.Communications; using OpenSim.Region.CoreModules.Avatar.Inventory.Archiver; using OpenSim.Region.Framework; using OpenSim.Region.Framework.Interfaces; using OpenSim.Region.Framework.Scenes; using OpenSim.Services.Interfaces; using OpenSim.Server.Base; using OpenMetaverse; using log4net; using Mono.Addins; using Nini.Config; namespace OpenSim.Region.CoreModules.Framework.Library { [Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "LibraryModule")] public class LibraryModule : ISharedRegionModule { private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); private static bool m_HasRunOnce = false; private bool m_Enabled = false; // private string m_LibraryName = "OpenSim Library"; private Scene m_Scene; private ILibraryService m_Library; #region ISharedRegionModule public void Initialise(IConfigSource config) { m_Enabled = config.Configs["Modules"].GetBoolean("LibraryModule", m_Enabled); if (m_Enabled) { IConfig libConfig = config.Configs["LibraryService"]; if (libConfig != null) { string dllName = libConfig.GetString("LocalServiceModule", string.Empty); m_log.Debug("[LIBRARY MODULE]: Library service dll is " + dllName); if (dllName != string.Empty) { Object[] args = new Object[] { config }; m_Library = ServerUtils.LoadPlugin<ILibraryService>(dllName, args); } } } if (m_Library == null) { m_log.Warn("[LIBRARY MODULE]: No local library service. 
Module will be disabled."); m_Enabled = false; } } public bool IsSharedModule { get { return true; } } public string Name { get { return "Library Module"; } } public Type ReplaceableInterface { get { return null; } } public void AddRegion(Scene scene) { if (!m_Enabled) return; // Store only the first scene if (m_Scene == null) { m_Scene = scene; } scene.RegisterModuleInterface<ILibraryService>(m_Library); } public void RemoveRegion(Scene scene) { if (!m_Enabled) return; scene.UnregisterModuleInterface<ILibraryService>(m_Library); } public void RegionLoaded(Scene scene) { if (!m_Enabled) return; // This will never run more than once, even if the region is restarted if (!m_HasRunOnce) { LoadLibrariesFromArchives(); //DumpLibrary(); m_HasRunOnce = true; } } public void PostInitialise() { } public void Close() { m_Scene = null; } #endregion ISharedRegionModule #region LoadLibraries private string pathToLibraries = "Library"; protected void LoadLibrariesFromArchives() { InventoryFolderImpl lib = m_Library.LibraryRootFolder; if (lib == null) { m_log.Debug("[LIBRARY MODULE]: No library. Ignoring Library Module"); return; } RegionInfo regInfo = new RegionInfo(); Scene m_MockScene = new Scene(regInfo); LocalInventoryService invService = new LocalInventoryService(lib); m_MockScene.RegisterModuleInterface<IInventoryService>(invService); m_MockScene.RegisterModuleInterface<IAssetService>(m_Scene.AssetService); UserAccount uinfo = new UserAccount(lib.Owner); uinfo.FirstName = "OpenSim"; uinfo.LastName = "Library"; uinfo.ServiceURLs = new Dictionary<string, object>(); foreach (string iarFileName in Directory.GetFiles(pathToLibraries, "*.iar")) { string simpleName = Path.GetFileNameWithoutExtension(iarFileName); m_log.InfoFormat("[LIBRARY MODULE]: Loading library archive {0} ({1})...", iarFileName, simpleName); simpleName = GetInventoryPathFromName(simpleName); InventoryArchiveReadRequest archread = new InventoryArchiveReadRequest(m_MockScene, uinfo, simpleName, iarFileName, false); try { HashSet<InventoryNodeBase> nodes = archread.Execute(); if (nodes != null && nodes.Count == 0) { // didn't find the subfolder with the given name; place it on the top m_log.InfoFormat("[LIBRARY MODULE]: Didn't find {0} in library. 
Placing archive on the top level", simpleName); archread.Close(); archread = new InventoryArchiveReadRequest(m_MockScene, uinfo, "/", iarFileName, false); archread.Execute(); } foreach (InventoryNodeBase node in nodes) FixPerms(node); } catch (Exception e) { m_log.DebugFormat("[LIBRARY MODULE]: Exception when processing archive {0}: {1}", iarFileName, e.StackTrace); } finally { archread.Close(); } } } private void FixPerms(InventoryNodeBase node) { m_log.DebugFormat("[LIBRARY MODULE]: Fixing perms for {0} {1}", node.Name, node.ID); if (node is InventoryItemBase) { InventoryItemBase item = (InventoryItemBase)node; item.BasePermissions = (uint)PermissionMask.All; item.EveryOnePermissions = (uint)PermissionMask.All - (uint)PermissionMask.Modify; item.CurrentPermissions = (uint)PermissionMask.All; item.NextPermissions = (uint)PermissionMask.All; } } // private void DumpLibrary() // { // InventoryFolderImpl lib = m_Library.LibraryRootFolder; // // m_log.DebugFormat(" - folder {0}", lib.Name); // DumpFolder(lib); // } // // private void DumpLibrary() // { // InventoryFolderImpl lib = m_Scene.CommsManager.UserProfileCacheService.LibraryRoot; // // m_log.DebugFormat(" - folder {0}", lib.Name); // DumpFolder(lib); // } private void DumpFolder(InventoryFolderImpl folder) { foreach (InventoryItemBase item in folder.Items.Values) { m_log.DebugFormat(" --> item {0}", item.Name); } foreach (InventoryFolderImpl f in folder.RequestListOfFolderImpls()) { m_log.DebugFormat(" - folder {0}", f.Name); DumpFolder(f); } } private string GetInventoryPathFromName(string name) { string[] parts = name.Split(new char[] { ' ' }); if (parts.Length == 3) { name = string.Empty; // cut the last part for (int i = 0; i < parts.Length - 1; i++) name = name + ' ' + parts[i]; } return name; } #endregion LoadLibraries } }
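// Illustrative sketch of the configuration LibraryModule.Initialise() looks for: a boolean switch in
// the [Modules] section and a LocalServiceModule entry in [LibraryService]. The dll:class string below
// is a placeholder for whichever assembly provides ILibraryService in a given installation.
using Nini.Config;

internal static class LibraryModuleConfigSketch
{
    internal static IConfigSource BuildConfig()
    {
        IConfigSource source = new IniConfigSource();

        // Equivalent of: [Modules] LibraryModule = true
        IConfig modules = source.AddConfig("Modules");
        modules.Set("LibraryModule", true);

        // Equivalent of: [LibraryService] LocalServiceModule = "<assembly>:<class>"
        IConfig library = source.AddConfig("LibraryService");
        library.Set("LocalServiceModule", "OpenSim.Services.InventoryService.dll:LibraryService");

        return source;
    }
}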
////////////////////////////////////////////////////////////////////////////// // This source code and all associated files and resources are copyrighted by // the author(s). This source code and all associated files and resources may // be used as long as they are used according to the terms and conditions set // forth in The Code Project Open License (CPOL), which may be viewed at // http://www.blackbeltcoder.com/Legal/Licenses/CPOL. // // Copyright (c) 2010 Jonathan Wood // using System; namespace SoftCircuits { public class TextParser { private string _text; private int _pos; public string Text { get { return _text; } } public int Position { get { return _pos; } } public int Remaining { get { return _text.Length - _pos; } } public static char NullChar = (char)0; public TextParser() { Reset(null); } public TextParser(string text) { Reset(text); } /// <summary> /// Resets the current position to the start of the current document /// </summary> public void Reset() { _pos = 0; } /// <summary> /// Sets the current document and resets the current position to the start of it /// </summary> /// <param name="html"></param> public void Reset(string text) { _text = (text != null) ? text : String.Empty; _pos = 0; } /// <summary> /// Indicates if the current position is at the end of the current document /// </summary> public bool EndOfText { get { return (_pos >= _text.Length); } } /// <summary> /// Returns the character at the current position, or a null character if we're /// at the end of the document /// </summary> /// <returns>The character at the current position</returns> public char Peek() { return Peek(0); } /// <summary> /// Returns the character at the specified number of characters beyond the current /// position, or a null character if the specified position is at the end of the /// document /// </summary> /// <param name="ahead">The number of characters beyond the current position</param> /// <returns>The character at the specified position</returns> public char Peek(int ahead) { int pos = (_pos + ahead); if (pos < _text.Length) return _text[pos]; return NullChar; } /// <summary> /// Extracts a substring from the specified position to the end of the text /// </summary> /// <param name="start"></param> /// <returns></returns> public string Extract(int start) { return Extract(start, _text.Length); } /// <summary> /// Extracts a substring from the specified range of the current text /// </summary> /// <param name="start"></param> /// <param name="end"></param> /// <returns></returns> public string Extract(int start, int end) { return _text.Substring(start, end - start); } /// <summary> /// Moves the current position ahead one character /// </summary> public void MoveAhead() { MoveAhead(1); } /// <summary> /// Moves the current position ahead the specified number of characters /// </summary> /// <param name="ahead">The number of characters to move ahead</param> public void MoveAhead(int ahead) { _pos = Math.Min(_pos + ahead, _text.Length); } /// <summary> /// Moves to the next occurrence of the specified string (case sensitive) /// </summary> /// /// <param name="s">String to find</param> public void MoveTo(string s) { MoveTo (s, false); } /// <summary> /// Moves to the next occurrence of the specified string /// </summary> /// <param name="s">String to find</param> /// <param name="ignoreCase">Indicates if case-insensitive comparisons are used</param> public void MoveTo(string s, bool ignoreCase) { _pos = _text.IndexOf(s, _pos, ignoreCase ? 
StringComparison.OrdinalIgnoreCase : StringComparison.Ordinal); if (_pos < 0) _pos = _text.Length; } /// <summary> /// Moves to the next occurrence of the specified character /// </summary> /// <param name="c">Character to find</param> public void MoveTo(char c) { _pos = _text.IndexOf(c, _pos); if (_pos < 0) _pos = _text.Length; } /// <summary> /// Moves to the next occurrence of any one of the specified /// characters /// </summary> /// <param name="chars">Array of characters to find</param> public void MoveTo(char[] chars) { _pos = _text.IndexOfAny(chars, _pos); if (_pos < 0) _pos = _text.Length; } /// <summary> /// Moves to the next occurrence of any character that is not one /// of the specified characters /// </summary> /// <param name="chars">Array of characters to move past</param> public void MovePast(char[] chars) { while (IsInArray(Peek(), chars)) MoveAhead(); } /// <summary> /// Determines if the specified character exists in the specified /// character array. /// </summary> /// <param name="c">Character to find</param> /// <param name="chars">Character array to search</param> /// <returns></returns> protected bool IsInArray(char c, char[] chars) { foreach (char ch in chars) { if (c == ch) return true; } return false; } /// <summary> /// Moves the current position to the first character that is part of a newline /// </summary> public void MoveToEndOfLine() { char c = Peek(); while (c != '\r' && c != '\n' && !EndOfText) { MoveAhead(); c = Peek(); } } /// <summary> /// Moves the current position to the next character that is not whitespace /// </summary> public void MovePastWhitespace() { while (Char.IsWhiteSpace(Peek())) MoveAhead(); } } }
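// Illustrative sketch of using the TextParser above; the input string and tag names are arbitrary
// example data, not part of the class.
using SoftCircuits;

internal static class TextParserSketch
{
    internal static string ExtractTitle(string html)
    {
        TextParser parser = new TextParser(html);

        // Find the opening tag (case-insensitively), step past it and remember where the text starts.
        parser.MoveTo("<title>", true);
        parser.MoveAhead("<title>".Length);
        int start = parser.Position;

        // Find the closing tag and return everything in between (empty string if the tags are missing).
        parser.MoveTo("</title>", true);
        return parser.Extract(start, parser.Position);
    }
}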
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ namespace NVelocity.Runtime.Resource { using System; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Text; using Commons.Collections; using Loader; using Exception; using NVelocity.Util; using System.Security; /// <summary> Class to manage the text resource for the Velocity Runtime. /// /// </summary> /// <author> <a href="mailto:[email protected]">Will Glass-Husain</a> /// </author> /// <author> <a href="mailto:[email protected]">Jason van Zyl</a> /// </author> /// <author> <a href="mailto:[email protected]">Paulo Gaspar</a> /// </author> /// <author> <a href="mailto:[email protected]">Geir Magnusson Jr.</a> /// </author> /// <author> <a href="mailto:[email protected]">Henning P. Schmiedehausen</a> /// </author> /// <version> $Id: ResourceManagerImpl.java 692505 2008-09-05 18:21:51Z nbubna $ /// </version> public class ResourceManagerImpl : IResourceManager { /// <summary>A template resources. </summary> public const int RESOURCE_TEMPLATE = 1; /// <summary>A static content resource. </summary> public const int RESOURCE_CONTENT = 2; /// <summary>token used to identify the loader internally. </summary> private const string RESOURCE_LOADER_IDENTIFIER = "_RESOURCE_LOADER_IDENTIFIER_"; /// <summary>Object implementing ResourceCache to be our resource manager's Resource cache. </summary> protected internal IResourceCache globalCache = null; /// <summary>The List of templateLoaders that the Runtime will use to locate the InputStream source of a template. </summary> protected internal IList<ResourceLoader> resourceLoaders = new List<ResourceLoader>(); /// <summary> This is a list of the template input stream source initializers, basically properties for a particular template stream /// source. The order in this list reflects numbering of the properties i.e. /// /// <p>&lt;loader-id&gt;.resource.loader.&lt;property&gt; = &lt;value&gt;</p> /// </summary> private IList sourceInitializerList = new List<object>(); /// <summary> Has this Manager been initialized?</summary> private bool isInit = false; /// <summary>switch to turn off Log notice when a resource is found for the first time. </summary> private bool logWhenFound = true; /// <summary>The internal RuntimeServices object. </summary> protected internal IRuntimeServices rsvc = null; /// <summary>Logging. </summary> protected internal Log.Log log = null; private static readonly object syncOjb = new object(); /// <summary> Initialize the ResourceManager. /// /// </summary> /// <param name="rsvc"> The Runtime Services object which is associated with this Resource Manager. 
/// /// </param> /// <throws> Exception </throws> public virtual void Initialize(IRuntimeServices rsvc) { lock (syncOjb) { if (isInit) { log.Debug("Re-initialization of ResourceLoader attempted and ignored."); return; } ResourceLoader resourceLoader = null; this.rsvc = rsvc; log = rsvc.Log; log.Trace("Default ResourceManager initializing. (" + this.GetType() + ")"); AssembleResourceLoaderInitializers(); for (IEnumerator it = sourceInitializerList.GetEnumerator(); it.MoveNext(); ) { /** * Resource loader can be loaded either via class name or be passed * in as an instance. */ ExtendedProperties configuration = (ExtendedProperties)it.Current; string loaderClass = StringUtils.NullTrim(configuration.GetString("class")); ResourceLoader loaderInstance = configuration.ContainsKey("instance") ? (ResourceLoader)configuration["instance"] : null; if (loaderInstance != null) { resourceLoader = loaderInstance; } else if (loaderClass != null) { resourceLoader = ResourceLoaderFactory.GetLoader(rsvc, loaderClass); } else { string msg = "Unable to find '" + configuration.GetString(RESOURCE_LOADER_IDENTIFIER) + ".resource.loader.class' specification in configuration." + " This is a critical value. Please adjust configuration."; log.Error(msg); throw new System.Exception(msg); } resourceLoader.CommonInit(rsvc, configuration); resourceLoader.Init(configuration); resourceLoaders.Add(resourceLoader); } /* * now see if this is overridden by configuration */ logWhenFound = rsvc.GetBoolean(NVelocity.Runtime.RuntimeConstants.RESOURCE_MANAGER_LOGWHENFOUND, true); /* * now, is a global cache specified? */ string cacheClassName = rsvc.GetString(RuntimeConstants.RESOURCE_MANAGER_CACHE_CLASS); object cacheObject = null; if (!string.IsNullOrEmpty(cacheClassName)) { try { cacheObject = System.Activator.CreateInstance(Type.GetType(cacheClassName.Replace(';', ','))); } catch (System.Exception cnfe) { string msg = "The specified class for ResourceCache (" + cacheClassName + ") does not exist or is not accessible to the current classloader."; log.Error(msg, cnfe); throw cnfe; } if (!(cacheObject is IResourceCache)) { string msg = "The specified resource cache class (" + cacheClassName + ") must implement " + typeof(IResourceCache).FullName; log.Error(msg); throw new System.SystemException(msg); } } /* * if we didn't Get through that, just use the default. */ if (cacheObject == null) { cacheObject = new ResourceCacheImpl(); } globalCache = (IResourceCache)cacheObject; globalCache.Initialize(rsvc); log.Trace("Default ResourceManager initialization complete."); } } /// <summary> This will produce a List of Hashtables, each hashtable contains the intialization Info for a particular resource loader. This /// Hashtable will be passed in when initializing the the template loader. /// </summary> private void AssembleResourceLoaderInitializers() { List<object> resourceLoaderNames = rsvc.Configuration.GetVector(RuntimeConstants.RESOURCE_LOADER); StringUtils.TrimStrings(resourceLoaderNames); for (IEnumerator it = resourceLoaderNames.GetEnumerator(); it.MoveNext(); ) { /* * The loader id might look something like the following: * * file.resource.loader * * The loader id is the prefix used for all properties * pertaining to a particular loader. 
*/ string loaderName = (string)it.Current; StringBuilder loaderID = new StringBuilder(loaderName); loaderID.Append(".").Append(RuntimeConstants.RESOURCE_LOADER); ExtendedProperties loaderConfiguration = rsvc.Configuration.Subset(loaderID.ToString()); /* * we can't really count on ExtendedProperties to give us an empty set */ if (loaderConfiguration == null) { log.Debug("ResourceManager : No configuration information found " + "for resource loader named '" + loaderName + "' (id is " + loaderID + "). Skipping it..."); continue; } /* * Add the loader name token to the initializer if we need it * for reference later. We can't count on the user to fill * in the 'name' field */ loaderConfiguration.SetProperty(RESOURCE_LOADER_IDENTIFIER, loaderName); /* * Add resources to the list of resource loader * initializers. */ sourceInitializerList.Add(loaderConfiguration); } } /// <summary> Gets the named resource. Returned class type corresponds to specified type (i.e. <code>Template</code> to <code> /// RESOURCE_TEMPLATE</code>). /// /// This method is now unsynchronized which requires that ResourceCache /// implementations be thread safe (as the default is). /// /// </summary> /// <param name="resourceName"> The name of the resource to retrieve. /// </param> /// <param name="resourceType"> The type of resource (<code>RESOURCE_TEMPLATE</code>, <code>RESOURCE_CONTENT</code>, etc.). /// </param> /// <param name="encoding"> The character encoding to use. /// /// </param> /// <returns> Resource with the template parsed and ready. /// /// </returns> /// <throws> ResourceNotFoundException if template not found from any available source. </throws> /// <throws> ParseErrorException if template cannot be parsed due to syntax (or other) Error. </throws> /// <throws> Exception if a problem in parse </throws> public virtual Resource GetResource(string resourceName, int resourceType, string encoding) { /* * Check to see if the resource was placed in the cache. * If it was placed in the cache then we will use * the cached version of the resource. If not we * will load it. * * Note: the type is included in the key to differentiate ContentResource * (static content from #include) with a Template. */ string resourceKey = resourceType + resourceName; Resource resource = globalCache.Get(resourceKey); if (resource != null) { try { // avoids additional method call to refreshResource if (resource.RequiresChecking()) { /* * both loadResource() and refreshResource() now return * a new Resource instance when they are called * (Put in the cache when appropriate) in order to allow * several threads to parse the same template simultaneously. * It is redundant work and will cause more garbage collection but the * benefit is that it allows concurrent parsing and processing * without race conditions when multiple requests try to * refresh/load the same template at the same time. * * Another alternative is to limit template parsing/retrieval * so that only one thread can parse each template at a time * but that creates a scalability bottleneck. 
* * See VELOCITY-606, VELOCITY-595 and VELOCITY-24 */ resource = RefreshResource(resource, encoding); } } catch (ResourceNotFoundException) { /* * something exceptional happened to that resource * this could be on purpose, * so clear the cache and try again */ globalCache.Remove(resourceKey); return GetResource(resourceName, resourceType, encoding); } catch (ParseErrorException pee) { log.Error("ResourceManager.getResource() exception", pee); throw pee; } catch (System.SystemException re) { log.Error("ResourceManager.getResource() exception", re); throw re; } catch (System.Exception e) { log.Error("ResourceManager.getResource() exception", e); throw e; } } else { try { /* * it's not in the cache, so load it. */ resource = LoadResource(resourceName, resourceType, encoding); if (resource.ResourceLoader.CachingOn) { globalCache.Put(resourceKey, resource); } } catch (ResourceNotFoundException rnfe) { log.Error("ResourceManager : unable to find resource '" + resourceName + "' in any resource loader."); throw rnfe; } catch (ParseErrorException pee) { log.Error("ResourceManager.getResource() parse exception", pee); throw pee; } catch (System.SystemException re) { log.Error("ResourceManager.getResource() load exception", re); throw re; } catch (System.Exception e) { log.Error("ResourceManager.getResource() exception new", e); throw e; } } return resource; } /// <summary> Create a new Resource of the specified type. /// /// </summary> /// <param name="resourceName"> The name of the resource to retrieve. /// </param> /// <param name="resourceType"> The type of resource (<code>RESOURCE_TEMPLATE</code>, <code>RESOURCE_CONTENT</code>, etc.). /// </param> /// <returns> new instance of appropriate resource type /// </returns> /// <since> 1.6 /// </since> protected internal virtual Resource CreateResource(string resourceName, int resourceType) { return ResourceFactory.getResource(resourceName, resourceType); } /// <summary> Loads a resource from the current set of resource loaders. /// /// </summary> /// <param name="resourceName"> The name of the resource to retrieve. /// </param> /// <param name="resourceType"> The type of resource (<code>RESOURCE_TEMPLATE</code>, <code>RESOURCE_CONTENT</code>, etc.). /// </param> /// <param name="encoding"> The character encoding to use. /// /// </param> /// <returns> Resource with the template parsed and ready. /// /// </returns> /// <throws> ResourceNotFoundException if template not found from any available source. </throws> /// <throws> ParseErrorException if template cannot be parsed due to syntax (or other) Error. </throws> /// <throws> Exception if a problem in parse </throws> protected internal virtual Resource LoadResource(string resourceName, int resourceType, string encoding) { Resource resource = CreateResource(resourceName, resourceType); resource.RuntimeServices = rsvc; resource.Name = resourceName; resource.Encoding = encoding; /* * Now we have to try to find the appropriate * loader for this resource. We have to cycle through * the list of available resource loaders and see * which one gives us a stream that we can use to * make a resource with. 
*/ long howOldItWas = 0; for (IEnumerator<ResourceLoader> it = resourceLoaders.GetEnumerator(); it.MoveNext(); ) { ResourceLoader resourceLoader = it.Current; resource.ResourceLoader = resourceLoader; /* * catch the ResourceNotFound exception * as that is ok in our new multi-loader environment */ try { if (resource.Process()) { /* * FIXME (gmj) * moved in here - technically still * a problem - but the resource needs to be * processed before the loader can figure * it out due to to the new * multi-path support - will revisit and fix */ if (logWhenFound && log.DebugEnabled) { log.Debug("ResourceManager : found " + resourceName + " with loader " + resourceLoader.ClassName); } howOldItWas = resourceLoader.GetLastModified(resource); break; } } catch (ResourceNotFoundException ex) { log.Info(string.Format("Loading {0} with {1} caused by {2}", resource, resourceLoader, ex)); /* * that's ok - it's possible to fail in * multi-loader environment */ } catch (SecurityException ex) { log.Info(string.Format("Loading {0} with {1} caused by {2}", resource, resourceLoader, ex)); /* * that's ok - it's possible to fail in * multi-loader environment */ } } /* * Return null if we can't find a resource. */ if (resource.Data == null) { throw new ResourceNotFoundException("Unable to find resource '" + resourceName + "'"); } /* * some final cleanup */ resource.LastModified = howOldItWas; resource.ModificationCheckInterval = resource.ResourceLoader.ModificationCheckInterval; resource.Touch(); return resource; } /// <summary> Takes an existing resource, and 'refreshes' it. This generally means that the source of the resource is checked for changes /// according to some cache/check algorithm and if the resource changed, then the resource data is reloaded and re-parsed. /// /// </summary> /// <param name="resource"> resource to refresh /// </param> /// <param name="encoding"> character encoding of the resource to refresh. /// /// </param> /// <throws> ResourceNotFoundException if template not found from current source for this Resource </throws> /// <throws> ParseErrorException if template cannot be parsed due to syntax (or other) Error. </throws> /// <throws> Exception if a problem in parse </throws> protected internal virtual Resource RefreshResource(Resource resource, string encoding) { /* * The resource knows whether it needs to be checked * or not, and the resource's loader can check to * see if the source has been modified. If both * these conditions are true then we must reload * the input stream and parse it to make a new * AST for the resource. */ /* * touch() the resource to reset the counters */ resource.Touch(); if (resource.IsSourceModified) { /* * now check encoding Info. It's possible that the newly declared * encoding is different than the encoding already in the resource * this strikes me as bad... */ if (!string.Equals(resource.Encoding, encoding)) { log.Warn("Declared encoding for template '" + resource.Name + "' is different on reload. 
Old = '" + resource.Encoding + "' New = '" + encoding); resource.Encoding = encoding; } /* * read how old the resource is _before_ * processing (=>reading) it */ long howOldItWas = resource.ResourceLoader.GetLastModified(resource); string resourceKey = resource.Type + resource.Name; /* * we create a copy to avoid partially overwriting a * template which may be in use in another thread */ Resource newResource = ResourceFactory.getResource(resource.Name, resource.Type); newResource.RuntimeServices = rsvc; newResource.Name = resource.Name; newResource.Encoding = resource.Encoding; newResource.ResourceLoader = resource.ResourceLoader; newResource.ModificationCheckInterval = resource.ResourceLoader.ModificationCheckInterval; newResource.Process(); newResource.LastModified = howOldItWas; resource = newResource; globalCache.Put(resourceKey, newResource); } return resource; } /// <summary> Determines if a template exists, and returns name of the loader that provides it. This is a slightly less hokey way to /// support the Velocity.TemplateExists() utility method, which was broken when per-template encoding was introduced. We can /// revisit this. /// /// </summary> /// <param name="resourceName"> Name of template or content resource /// /// </param> /// <returns> class name of loader than can provide it /// </returns> public virtual string GetLoaderNameForResource(string resourceName) { /* * loop through our loaders... */ for (IEnumerator<ResourceLoader> it = resourceLoaders.GetEnumerator(); it.MoveNext(); ) { ResourceLoader resourceLoader = it.Current; if (resourceLoader.ResourceExists(resourceName)) { return resourceLoader.GetType().ToString(); } } return null; } } }
using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Orleans.Concurrency; using Orleans.Providers.Streams.Common; using Orleans.Runtime; namespace Orleans.Streams { internal class PersistentStreamPullingManager : SystemTarget, IPersistentStreamPullingManager, IStreamQueueBalanceListener { private static readonly TimeSpan QUEUES_PRINT_PERIOD = TimeSpan.FromMinutes(5); private readonly Dictionary<QueueId, PersistentStreamPullingAgent> queuesToAgentsMap; private readonly string streamProviderName; private readonly IStreamProviderRuntime providerRuntime; private readonly IStreamPubSub pubSub; private readonly PersistentStreamProviderConfig config; private readonly AsyncSerialExecutor nonReentrancyGuarantor; // for non-reentrant execution of queue change notifications. private readonly LoggerImpl logger; private int latestRingNotificationSequenceNumber; private int latestCommandNumber; private IQueueAdapter queueAdapter; private readonly IQueueAdapterCache queueAdapterCache; private readonly IStreamQueueBalancer queueBalancer; private readonly IQueueAdapterFactory adapterFactory; private PersistentStreamProviderState managerState; private readonly IDisposable queuePrintTimer; private int NumberRunningAgents { get { return queuesToAgentsMap.Count; } } internal PersistentStreamPullingManager( GrainId id, string strProviderName, IStreamProviderRuntime runtime, IStreamPubSub streamPubSub, IQueueAdapterFactory adapterFactory, IStreamQueueBalancer streamQueueBalancer, PersistentStreamProviderConfig config) : base(id, runtime.ExecutingSiloAddress) { if (string.IsNullOrWhiteSpace(strProviderName)) { throw new ArgumentNullException("strProviderName"); } if (runtime == null) { throw new ArgumentNullException("runtime", "IStreamProviderRuntime runtime reference should not be null"); } if (streamPubSub == null) { throw new ArgumentNullException("streamPubSub", "StreamPubSub reference should not be null"); } if (streamQueueBalancer == null) { throw new ArgumentNullException("streamQueueBalancer", "IStreamQueueBalancer streamQueueBalancer reference should not be null"); } queuesToAgentsMap = new Dictionary<QueueId, PersistentStreamPullingAgent>(); streamProviderName = strProviderName; providerRuntime = runtime; pubSub = streamPubSub; this.config = config; nonReentrancyGuarantor = new AsyncSerialExecutor(); latestRingNotificationSequenceNumber = 0; latestCommandNumber = 0; queueBalancer = streamQueueBalancer; this.adapterFactory = adapterFactory; queueAdapterCache = adapterFactory.GetQueueAdapterCache(); logger = LogManager.GetLogger(GetType().Name + "-" + streamProviderName, LoggerType.Provider); Log(ErrorCode.PersistentStreamPullingManager_01, "Created {0} for Stream Provider {1}.", GetType().Name, streamProviderName); IntValueStatistic.FindOrCreate(new StatisticName(StatisticNames.STREAMS_PERSISTENT_STREAM_NUM_PULLING_AGENTS, strProviderName), () => queuesToAgentsMap.Count); queuePrintTimer = base.RegisterTimer(AsyncTimerCallback, null, QUEUES_PRINT_PERIOD, QUEUES_PRINT_PERIOD); } public Task Initialize(Immutable<IQueueAdapter> qAdapter) { if (qAdapter.Value == null) throw new ArgumentNullException("qAdapter", "Init: queueAdapter should not be null"); Log(ErrorCode.PersistentStreamPullingManager_02, "Init."); // Remove cast once we cleanup queueAdapter = qAdapter.Value; var meAsQueueBalanceListener = this.AsReference<IStreamQueueBalanceListener>(); queueBalancer.SubscribeToQueueDistributionChangeEvents(meAsQueueBalanceListener); List<QueueId> myQueues = 
queueBalancer.GetMyQueues().ToList(); Log(ErrorCode.PersistentStreamPullingManager_03, String.Format("Initialize: I am now responsible for {0} queues: {1}.", myQueues.Count, PrintQueues(myQueues))); managerState = PersistentStreamProviderState.Initialized; return TaskDone.Done; } public async Task Stop() { await StopAgents(); if (queuePrintTimer != null) { queuePrintTimer.Dispose(); } } public async Task StartAgents() { managerState = PersistentStreamProviderState.AgentsStarted; List<QueueId> myQueues = queueBalancer.GetMyQueues().ToList(); Log(ErrorCode.PersistentStreamPullingManager_Starting, "Starting agents for {0} queues: {1}", myQueues.Count, PrintQueues(myQueues)); await AddNewQueues(myQueues, true); Log(ErrorCode.PersistentStreamPullingManager_Started, "Started agents."); } public async Task StopAgents() { managerState = PersistentStreamProviderState.AgentsStopped; List<QueueId> queuesToRemove = queuesToAgentsMap.Keys.ToList(); Log(ErrorCode.PersistentStreamPullingManager_Stopping, "Stopping agents for {0} queues: {1}", queuesToRemove.Count, PrintQueues(queuesToRemove)); await RemoveQueues(queuesToRemove); Log(ErrorCode.PersistentStreamPullingManager_Stopped, "Stopped agents."); } #region Management of queues /// <summary> /// Actions to take when the queue distribution changes due to a failure or a join. /// Since this pulling manager is a system target and queue distribution change notifications /// are delivered to it as grain method calls, notifications are not reentrant. To simplify /// notification handling we execute them serially, in a non-reentrant way. We also suppress /// and don't execute an older notification if a newer one was already delivered. /// </summary> public Task QueueDistributionChangeNotification() { latestRingNotificationSequenceNumber++; int notificationSeqNumber = latestRingNotificationSequenceNumber; Log(ErrorCode.PersistentStreamPullingManager_04, "Got QueueChangeNotification number {0} from the queue balancer. managerState = {1}", notificationSeqNumber, managerState); if (managerState == PersistentStreamProviderState.AgentsStopped) { return TaskDone.Done; // if agents not running, no need to rebalance the queues among them. } return nonReentrancyGuarantor.AddNext(() => { // skip execution of an older/previous notification since already got a newer range update notification. if (notificationSeqNumber < latestRingNotificationSequenceNumber) { Log(ErrorCode.PersistentStreamPullingManager_05, "Skipping execution of QueueChangeNotification number {0} from the queue allocator since already received a later notification " + "(already have notification number {1}).", notificationSeqNumber, latestRingNotificationSequenceNumber); return TaskDone.Done; } if (managerState == PersistentStreamProviderState.AgentsStopped) { return TaskDone.Done; // if agents not running, no need to rebalance the queues among them. } return QueueDistributionChangeNotification(notificationSeqNumber); }); } private async Task QueueDistributionChangeNotification(int notificationSeqNumber) { HashSet<QueueId> currentQueues = queueBalancer.GetMyQueues().ToSet(); Log(ErrorCode.PersistentStreamPullingManager_06, "Executing QueueChangeNotification number {0}. 
Queue balancer says I should now own {1} queues: {2}", notificationSeqNumber, currentQueues.Count, PrintQueues(currentQueues)); try { Task t1 = AddNewQueues(currentQueues, false); List<QueueId> queuesToRemove = queuesToAgentsMap.Keys.Where(queueId => !currentQueues.Contains(queueId)).ToList(); Task t2 = RemoveQueues(queuesToRemove); await Task.WhenAll(t1, t2); } finally { Log(ErrorCode.PersistentStreamPullingManager_16, "Done Executing QueueChangeNotification number {0}. I now own {1} queues: {2}", notificationSeqNumber, NumberRunningAgents, PrintQueues(queuesToAgentsMap.Keys)); } } /// <summary> /// Take responsibility for a set of new queues that were assigned to me via a new range. /// We first create one pulling agent for every new queue and store them in our internal data structure, then try to initialize the agents. /// ERROR HANDLING: /// The responsibility to handle initialization and shutdown failures is inside the Agents code. /// The manager will call Initialize once and log an error. It will not call initialize again and will assume initialization has succeeded. /// Same applies to shutdown. /// </summary> /// <param name="myQueues"></param> /// <param name="failOnInit"></param> /// <returns></returns> private async Task AddNewQueues(IEnumerable<QueueId> myQueues, bool failOnInit) { // Create agents for queues in range that we don't yet have. // First create them and store in local queuesToAgentsMap. // Only after that Initialize them all. var agents = new List<PersistentStreamPullingAgent>(); foreach (var queueId in myQueues.Where(queueId => !queuesToAgentsMap.ContainsKey(queueId))) { try { var agentId = GrainId.NewSystemTargetGrainIdByTypeCode(Constants.PULLING_AGENT_SYSTEM_TARGET_TYPE_CODE); var agent = new PersistentStreamPullingAgent(agentId, streamProviderName, providerRuntime, pubSub, queueId, config); providerRuntime.RegisterSystemTarget(agent); queuesToAgentsMap.Add(queueId, agent); agents.Add(agent); } catch (Exception exc) { logger.Error(ErrorCode.PersistentStreamPullingManager_07, "Exception while creating PersistentStreamPullingAgent.", exc); // What should we do? This error is not recoverable and considered a bug. But we don't want to bring the silo down. // If this is when silo is starting and agent is initializing, fail the silo startup. Otherwise, just swallow to limit impact on other receivers. if (failOnInit) throw; } } try { var initTasks = new List<Task>(); foreach (var agent in agents) { initTasks.Add(InitAgent(agent)); } await Task.WhenAll(initTasks); } catch { // Just ignore this exception and proceed as if Initialize has succeeded. // We already logged individual exceptions for individual calls to Initialize. No need to log again. } if (agents.Count > 0) { Log(ErrorCode.PersistentStreamPullingManager_08, "Added {0} new queues: {1}. Now own total of {2} queues: {3}", agents.Count, Utils.EnumerableToString(agents, agent => agent.QueueId.ToString()), NumberRunningAgents, PrintQueues(queuesToAgentsMap.Keys)); } } private async Task InitAgent(PersistentStreamPullingAgent agent) { // Init the agent only after it was registered locally. var agentGrainRef = agent.AsReference<IPersistentStreamPullingAgent>(); var queueAdapterCacheAsImmutable = queueAdapterCache != null ? queueAdapterCache.AsImmutable() : new Immutable<IQueueAdapterCache>(null); IStreamFailureHandler deliveryFailureHandler = await adapterFactory.GetDeliveryFailureHandler(agent.QueueId); // Need to call it as a grain reference. 
var task = OrleansTaskExtentions.SafeExecute(() => agentGrainRef.Initialize(queueAdapter.AsImmutable(), queueAdapterCacheAsImmutable, deliveryFailureHandler.AsImmutable())); await task.LogException(logger, ErrorCode.PersistentStreamPullingManager_09, String.Format("PersistentStreamPullingAgent {0} failed to Initialize.", agent.QueueId)); } private async Task RemoveQueues(List<QueueId> queuesToRemove) { if (queuesToRemove.Count == 0) { return; } // Stop the agents for queues that are not in my range anymore. var agents = new List<PersistentStreamPullingAgent>(queuesToRemove.Count); Log(ErrorCode.PersistentStreamPullingManager_10, "About to remove {0} agents from my responsibility: {1}", queuesToRemove.Count, Utils.EnumerableToString(queuesToRemove, q => q.ToString())); var removeTasks = new List<Task>(); foreach (var queueId in queuesToRemove) { PersistentStreamPullingAgent agent; if (!queuesToAgentsMap.TryGetValue(queueId, out agent)) continue; agents.Add(agent); queuesToAgentsMap.Remove(queueId); var agentGrainRef = agent.AsReference<IPersistentStreamPullingAgent>(); var task = OrleansTaskExtentions.SafeExecute(agentGrainRef.Shutdown); task = task.LogException(logger, ErrorCode.PersistentStreamPullingManager_11, String.Format("PersistentStreamPullingAgent {0} failed to Shutdown.", agent.QueueId)); removeTasks.Add(task); } try { await Task.WhenAll(removeTasks); } catch { // Just ignore this exception and proceed as if Shutdown has succeeded. // We already logged individual exceptions for individual calls to Shutdown. No need to log again. } foreach (var agent in agents) { try { providerRuntime.UnregisterSystemTarget(agent); } catch (Exception exc) { Log(ErrorCode.PersistentStreamPullingManager_12, "Exception while UnRegisterSystemTarget of PersistentStreamPullingAgent {0}. Ignoring. Exc.Message = {1}.", ((ISystemTargetBase)agent).GrainId, exc.Message); } } if (agents.Count > 0) { Log(ErrorCode.PersistentStreamPullingManager_10, "Removed {0} queues: {1}. Now own total of {2} queues: {3}", agents.Count, Utils.EnumerableToString(agents, agent => agent.QueueId.ToString()), NumberRunningAgents, PrintQueues(queuesToAgentsMap.Keys)); } } #endregion public async Task<object> ExecuteCommand(PersistentStreamProviderCommand command, object arg) { latestCommandNumber++; int commandSeqNumber = latestCommandNumber; try { Log(ErrorCode.PersistentStreamPullingManager_13, String.Format("Got command {0}{1}: commandSeqNumber = {2}, managerState = {3}.", command, arg != null ? " with arg " + arg : String.Empty, commandSeqNumber, managerState)); switch (command) { case PersistentStreamProviderCommand.StartAgents: case PersistentStreamProviderCommand.StopAgents: await QueueCommandForExecution(command, commandSeqNumber); return null; case PersistentStreamProviderCommand.GetAgentsState: return managerState; case PersistentStreamProviderCommand.GetNumberRunningAgents: return NumberRunningAgents; default: throw new OrleansException(String.Format("PullingAgentManager does not support command {0}.", command)); } } finally { Log(ErrorCode.PersistentStreamPullingManager_15, String.Format("Done executing command {0}: commandSeqNumber = {1}, managerState = {2}, num running agents = {3}.", command, commandSeqNumber, managerState, NumberRunningAgents)); } } // Start and Stop commands are composite commands that take multiple turns. // We don't want them to interleave with other concurrent Start/Stop commands, nor with QueueDistributionChangeNotification. 
// Therefore, we serialize them all via the same nonReentrancyGuarantor. private Task QueueCommandForExecution(PersistentStreamProviderCommand command, int commandSeqNumber) { return nonReentrancyGuarantor.AddNext(() => { // skip execution of an older/previous command since already got a newer command. if (commandSeqNumber < latestCommandNumber) { Log(ErrorCode.PersistentStreamPullingManager_15, "Skipping execution of command number {0} since already received a later command (already have command number {1}).", commandSeqNumber, latestCommandNumber); return TaskDone.Done; } switch (command) { case PersistentStreamProviderCommand.StartAgents: return StartAgents(); case PersistentStreamProviderCommand.StopAgents: return StopAgents(); default: throw new OrleansException(String.Format("PullingAgentManager got unsupported command {0}", command)); } }); } private static string PrintQueues(ICollection<QueueId> myQueues) { return Utils.EnumerableToString(myQueues, q => q.ToString()); } // Just print our queue assignment periodically, for easy monitoring. private Task AsyncTimerCallback(object state) { Log(ErrorCode.PersistentStreamPullingManager_PeriodicPrint, "I am responsible for a total of {0} queues on stream provider {1}: {2}.", NumberRunningAgents, streamProviderName, PrintQueues(queuesToAgentsMap.Keys)); return TaskDone.Done; } private void Log(ErrorCode logCode, string format, params object[] args) { logger.LogWithoutBulkingAndTruncating(Severity.Info, logCode, format, args); } } }
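// --- Illustrative sketch (not part of the Orleans source above) ---
// The manager serializes notifications/commands and drops stale ones by comparing
// sequence numbers. The minimal, self-contained sketch below shows that pattern.
// "SerialExecutor" is a hypothetical stand-in for nonReentrancyGuarantor; the real
// manager also relies on running single-threaded on a system-target turn, so the
// unsynchronized counter below mirrors that assumption.
using System;
using System.Threading.Tasks;

internal sealed class SerialExecutor
{
    private readonly object _lock = new object();
    private Task _tail = Task.CompletedTask;

    // Chain the next work item after all previously queued items (serial, non-reentrant).
    public Task AddNext(Func<Task> work)
    {
        lock (_lock)
        {
            _tail = _tail.ContinueWith(_ => work(), TaskScheduler.Default).Unwrap();
            return _tail;
        }
    }
}

internal sealed class NotificationHandlerSketch
{
    private readonly SerialExecutor _executor = new SerialExecutor();
    private int _latestSequenceNumber; // incremented on the single-threaded turn

    public Task OnDistributionChanged()
    {
        int mySequenceNumber = ++_latestSequenceNumber;
        return _executor.AddNext(() =>
        {
            // Skip a stale notification: a newer one has already been delivered.
            if (mySequenceNumber < _latestSequenceNumber)
                return Task.CompletedTask;

            return RebalanceAsync(mySequenceNumber);
        });
    }

    private Task RebalanceAsync(int sequenceNumber) => Task.CompletedTask; // placeholder work
}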
/* * Cellstore API * * <h3>CellStore API</h3> * * OpenAPI spec version: vX.X.X * Contact: [email protected] * Generated by: https://github.com/swagger-api/swagger-codegen.git * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Linq; using System.Text; using System.Collections; using RestSharp; using Newtonsoft.Json.Linq; using CellStore.Client; namespace CellStore.Api { /// <summary> /// Represents a collection of functions to interact with the API endpoints /// </summary> public interface IDefaultApi : IApiAccessor { #region Synchronous Operations /// <summary> /// Get the documentation of the CellStore API. /// </summary> /// <remarks> /// /// </remarks> /// <exception cref="CellStore.Client.ApiException">Thrown when fails to make API call</exception> /// <returns>Object</returns> Object GetDocs (); /// <summary> /// Get the documentation of the CellStore API. /// </summary> /// <remarks> /// /// </remarks> /// <exception cref="CellStore.Client.ApiException">Thrown when fails to make API call</exception> /// <returns>ApiResponse of Object</returns> ApiResponse<Object> GetDocsWithHttpInfo (); #endregion Synchronous Operations #region Asynchronous Operations /// <summary> /// Get the documentation of the CellStore API. /// </summary> /// <remarks> /// /// </remarks> /// <exception cref="CellStore.Client.ApiException">Thrown when fails to make API call</exception> /// <returns>Task of Object</returns> System.Threading.Tasks.Task<Object> GetDocsAsync (); /// <summary> /// Get the documentation of the CellStore API. /// </summary> /// <remarks> /// /// </remarks> /// <exception cref="CellStore.Client.ApiException">Thrown when fails to make API call</exception> /// <returns>Task of ApiResponse (Object)</returns> System.Threading.Tasks.Task<ApiResponse<Object>> GetDocsAsyncWithHttpInfo (); #endregion Asynchronous Operations } /// <summary> /// Represents a collection of functions to interact with the API endpoints /// </summary> public partial class DefaultApi : IDefaultApi { private CellStore.Client.ExceptionFactory _exceptionFactory = (name, request, response) => null; /// <summary> /// Initializes a new instance of the <see cref="DefaultApi"/> class. 
/// </summary> /// <returns></returns> public DefaultApi(String basePath) { this.Configuration = new Configuration(new ApiClient(basePath)); ExceptionFactory = CellStore.Client.Configuration.DefaultExceptionFactory; // ensure API client has configuration ready if (Configuration.ApiClient.Configuration == null) { this.Configuration.ApiClient.Configuration = this.Configuration; } } /// <summary> /// Initializes a new instance of the <see cref="DefaultApi"/> class /// using Configuration object /// </summary> /// <param name="configuration">An instance of Configuration</param> /// <returns></returns> public DefaultApi(Configuration configuration = null) { if (configuration == null) // use the default one in Configuration this.Configuration = Configuration.Default; else this.Configuration = configuration; ExceptionFactory = CellStore.Client.Configuration.DefaultExceptionFactory; // ensure API client has configuration ready if (Configuration.ApiClient.Configuration == null) { this.Configuration.ApiClient.Configuration = this.Configuration; } } /// <summary> /// Gets the base path of the API client. /// </summary> /// <value>The base path</value> public String GetBasePath() { return this.Configuration.ApiClient.RestClient.BaseUrl.ToString(); } /// <summary> /// Sets the base path of the API client. /// </summary> /// <value>The base path</value> [Obsolete("SetBasePath is deprecated, please do 'Configuration.ApiClient = new ApiClient(\"http://new-path\")' instead.")] public void SetBasePath(String basePath) { // do nothing } /// <summary> /// Gets or sets the configuration object /// </summary> /// <value>An instance of the Configuration</value> public Configuration Configuration {get; set;} /// <summary> /// Provides a factory method hook for the creation of exceptions. /// </summary> public CellStore.Client.ExceptionFactory ExceptionFactory { get { if (_exceptionFactory != null && _exceptionFactory.GetInvocationList().Length > 1) { throw new InvalidOperationException("Multicast delegate for ExceptionFactory is unsupported."); } return _exceptionFactory; } set { _exceptionFactory = value; } } /// <summary> /// Gets the default header. /// </summary> /// <returns>Dictionary of HTTP header</returns> [Obsolete("DefaultHeader is deprecated, please use Configuration.DefaultHeader instead.")] public Dictionary<String, String> DefaultHeader() { return this.Configuration.DefaultHeader; } /// <summary> /// Add default header. /// </summary> /// <param name="key">Header field name.</param> /// <param name="value">Header field value.</param> /// <returns></returns> [Obsolete("AddDefaultHeader is deprecated, please use Configuration.AddDefaultHeader instead.")] public void AddDefaultHeader(string key, string value) { this.Configuration.AddDefaultHeader(key, value); } /// <summary> /// Get the documentation of the CellStore API. /// </summary> /// <exception cref="CellStore.Client.ApiException">Thrown when fails to make API call</exception> /// <returns>Object</returns> public Object GetDocs () { ApiResponse<Object> localVarResponse = GetDocsWithHttpInfo(); return localVarResponse.Data; } /// <summary> /// Get the documentation of the CellStore API. 
/// </summary> /// <exception cref="CellStore.Client.ApiException">Thrown when fails to make API call</exception> /// <returns>ApiResponse of Object</returns> public ApiResponse< Object > GetDocsWithHttpInfo () { var localVarPath = "/api/docs"; var localVarPathParams = new Dictionary<String, String>(); /* 28msec */ var localVarQueryParams = new Dictionary<String, List<String>>(); /* 28msec */ var localVarHeaderParams = new Dictionary<String, String>(Configuration.DefaultHeader); var localVarFormParams = new Dictionary<String, String>(); var localVarFileParams = new Dictionary<String, FileParameter>(); Object localVarPostBody = null; // to determine the Content-Type header String[] localVarHttpContentTypes = new String[] { "application/json" }; String localVarHttpContentType = Configuration.ApiClient.SelectHeaderContentType(localVarHttpContentTypes); /* 28msec */ /* // to determine the Accept header String[] localVarHttpHeaderAccepts = new String[] { "application/json" }; String localVarHttpHeaderAccept = Configuration.ApiClient.SelectHeaderAccept(localVarHttpHeaderAccepts); if (localVarHttpHeaderAccept != null) localVarHeaderParams.Add("Accept", localVarHttpHeaderAccept); */ /* 28msec */ /* 28msec */ /* 28msec */ // build the HTTP request IRestRequest localVarRequest = (IRestRequest) Configuration.ApiClient.PrepareRequest(localVarPath, Method.GET, localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarFileParams, localVarPathParams, localVarHttpContentType); // execute the HTTP request IRestResponse localVarResponse = (IRestResponse) Configuration.ApiClient.CallApi(localVarRequest); int localVarStatusCode = (int) localVarResponse.StatusCode; if (ExceptionFactory != null) { Exception exception = ExceptionFactory("GetDocs", localVarRequest, localVarResponse); if (exception != null) throw exception; } String localResponseFormat = null; List<String> formatParameter = null; if (localVarQueryParams.ContainsKey("format")) { formatParameter = localVarQueryParams["format"]; if (formatParameter != null && formatParameter.Count == 1) localResponseFormat = formatParameter.ElementAt(0); } return new ApiResponse<Object>(localVarStatusCode, localVarResponse.Headers.ToDictionary(x => x.Name, x => x.Value.ToString()), (Object) Configuration.ApiClient.Deserialize(localVarResponse, typeof(Object), localResponseFormat)); } /// <summary> /// Get the documentation of the CellStore API. /// </summary> /// <exception cref="CellStore.Client.ApiException">Thrown when fails to make API call</exception> /// <returns>Task of Object</returns> public async System.Threading.Tasks.Task<Object> GetDocsAsync () { ApiResponse<Object> localVarResponse = await GetDocsAsyncWithHttpInfo(); return localVarResponse.Data; } /// <summary> /// Get the documentation of the CellStore API. 
/// </summary> /// <exception cref="CellStore.Client.ApiException">Thrown when fails to make API call</exception> /// <returns>Task of ApiResponse (Object)</returns> public async System.Threading.Tasks.Task<ApiResponse<Object>> GetDocsAsyncWithHttpInfo () { var localVarPath = "/api/docs"; var localVarPathParams = new Dictionary<String, String>(); /* 28msec */ var localVarQueryParams = new Dictionary<String, List<String>>(); /* 28msec */ var localVarHeaderParams = new Dictionary<String, String>(Configuration.DefaultHeader); var localVarFormParams = new Dictionary<String, String>(); var localVarFileParams = new Dictionary<String, FileParameter>(); Object localVarPostBody = null; // to determine the Content-Type header String[] localVarHttpContentTypes = new String[] { "application/json" }; String localVarHttpContentType = Configuration.ApiClient.SelectHeaderContentType(localVarHttpContentTypes); /* 28msec */ /* // to determine the Accept header String[] localVarHttpHeaderAccepts = new String[] { "application/json" }; String localVarHttpHeaderAccept = Configuration.ApiClient.SelectHeaderAccept(localVarHttpHeaderAccepts); if (localVarHttpHeaderAccept != null) localVarHeaderParams.Add("Accept", localVarHttpHeaderAccept); */ /* 28msec */ /* 28msec */ /* 28msec */ // build the HTTP request IRestRequest localVarRequest = (IRestRequest) Configuration.ApiClient.PrepareRequest(localVarPath, Method.GET, localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarFileParams, localVarPathParams, localVarHttpContentType); // execute the HTTP request IRestResponse localVarResponse = (IRestResponse) await Configuration.ApiClient.CallApiAsync(localVarRequest); int localVarStatusCode = (int) localVarResponse.StatusCode; if (ExceptionFactory != null) { Exception exception = ExceptionFactory("GetDocs", localVarRequest, localVarResponse); if (exception != null) throw exception; } String localResponseFormat = null; List<String> formatParameter = null; if (localVarQueryParams.ContainsKey("format")) { formatParameter = localVarQueryParams["format"]; if (formatParameter != null && formatParameter.Count == 1) localResponseFormat = formatParameter.ElementAt(0); } return new ApiResponse<Object>(localVarStatusCode, localVarResponse.Headers.ToDictionary(x => x.Name, x => x.Value.ToString()), (Object) Configuration.ApiClient.Deserialize(localVarResponse, typeof(Object), localResponseFormat)); } } }
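// --- Illustrative usage sketch (not part of the generated client above) ---
// Shows how the generated DefaultApi is typically consumed. The base path is a
// placeholder; substitute the address of an actual CellStore deployment.
using System;
using System.Threading.Tasks;
using CellStore.Api;

internal static class DefaultApiUsageSketch
{
    public static async Task RunAsync()
    {
        // Hypothetical endpoint; configuration defaults come from CellStore.Client.Configuration.
        var api = new DefaultApi("http://localhost:8080");

        // Synchronous call: returns the deserialized documentation object.
        Object docs = api.GetDocs();
        Console.WriteLine(docs);

        // Asynchronous call; the *WithHttpInfo variants additionally expose the
        // HTTP status code and response headers seen in the methods above.
        Object docsAsync = await api.GetDocsAsync();
        Console.WriteLine(docsAsync);
    }
}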
using System; using System.IO; using System.Linq; using System.Threading.Tasks; using System.Xml.Linq; using log4net; // ReSharper disable UnusedMethodReturnValue.Global namespace HSLProcessor { internal static class Importer { public enum ImportResult { Success, Failed } private static readonly ILog Log = LogManager.GetLogger(typeof(Importer)); /// <summary> /// Synchronous wrapper for importing CSV file /// </summary> /// <param name="file">CSV file to import</param> /// <returns>Result of the import</returns> public static ImportResult ImportCsv(FileInfo file) { return ImportCsvAsync(file).Result; } /// <summary> /// Import CSV file /// </summary> /// <param name="file">CSV file to import</param> /// <returns>Result of the import</returns> private static async Task<ImportResult> ImportCsvAsync(FileSystemInfo file) { try { Log.Info("Importing a CSV file " + file.FullName); var context = new HSLContext(); // Check to see the file exists if (!file.Exists) { Log.Error("No such file."); return ImportResult.Failed; } // Parse CSV var result = CsvParser.Load(new FileInfo(file.FullName)); // Begin import Console.Write("Importing"); foreach (var item in result) { Console.Write("."); var song = new Song {Title = item.Entry["title"]}; var artist = new Artist {Name = item.Entry["artist"]}; var source = new Source {Name = item.Entry["source"]}; var artistItem = Utils.GetOrAddArtist(artist, ref context).ArtistId; var sourceItem = Utils.GetOrAddSource(source, ref context).SourceId; song.ArtistId = artistItem; song.SourceId = sourceItem; // Add to the DB await context.Songs.AddAsync(song); } Console.WriteLine("Done"); Console.Write("Saving..."); // Save to DB await context.SaveChangesAsync(true); Console.WriteLine(" Done"); Log.Info("Import process completed."); return ImportResult.Success; } catch (Exception ex) { Log.Error("Failed importing CSV."); Log.Debug(ex.Message); Log.Debug(ex.StackTrace); if (ex.InnerException != null) Log.Debug(ex.InnerException.Message); return ImportResult.Failed; } } /// <summary> /// Synchronous wrapper for importing XML file /// </summary> /// <param name="file">XML file to import</param> /// <returns>Result of the import</returns> public static ImportResult ImportXml(FileInfo file) { return ImportXmlAsync(file).Result; } /// <summary> /// Import XML file /// </summary> /// <param name="file">XML file to import</param> /// <returns>Result of the import</returns> private static async Task<ImportResult> ImportXmlAsync(FileSystemInfo file) { try { Log.Info("Importing XML file " + file.Name); var context = new HSLContext(); // Load the file var xl = XElement.Load(file.FullName); Log.Info("Importing songs element..."); // Process each entry // ReSharper disable once PossibleNullReferenceException foreach (var item in xl.Element("songs")?.Elements("entry")) { var entry = new Song { TitleId = new Guid(item.Attribute("id")?.Value), Title = item.Element("title")?.Value, ArtistId = new Guid(item.Element("artist")?.Attribute("id")?.Value), SourceId = new Guid(item.Element("source")?.Attribute("id")?.Value) }; // Create and add artist entry var artist = new Artist(); if (item.Element("artist") != null) { artist.ArtistId = entry.ArtistId; artist.Name = item.Element("artist")?.Value; Utils.GetOrAddArtist(artist, ref context); } else { artist.Name = ""; Utils.GetOrAddArtist(artist, ref context); } // Create and add source entry var source = new Source(); if (item.Element("source") != null) { source.SourceId = entry.SourceId; source.Name = item.Element("source")?.Value; 
Utils.GetOrAddSource(source, ref context); } else { source.Name = ""; Utils.GetOrAddSource(source, ref context); } // Add to DB await context.AddAsync(entry); await context.SaveChangesAsync(); } Log.Info("Importing series element..."); // ReSharper disable once PossibleNullReferenceException foreach (var item in xl.Element("series")?.Elements("entry")) { var series = new Series { SeriesId = new Guid(item.Attribute("id")?.Value), Name = item.Element("name")?.Value }; Utils.GetOrAddSeries(series, ref context); // Add to DB await context.SaveChangesAsync(); } Log.Info("Importing sources element..."); // ReSharper disable once PossibleNullReferenceException foreach (var item in xl.Element("sources")?.Elements("entry")) { if (item.Element("series") == null) continue; var seriesId = new Guid(item.Element("series")?.Attribute("id")?.Value); var sourceId = new Guid(item.Attribute("id")?.Value); var sourceItem = context.Sources.Find(sourceId); if (sourceItem == null) continue; sourceItem.SeriesId = seriesId; context.Sources.Update(sourceItem); await context.SaveChangesAsync(); } // Save to DB context.LoadRelations(); await context.SaveChangesAsync(); Log.Info("Import completed."); return ImportResult.Success; } catch (Exception ex) { Log.Error("Failed importing XML."); Log.Debug(ex.Message); Log.Debug(ex.StackTrace); if (ex.InnerException != null) Log.Error(ex.InnerException.Message); return ImportResult.Failed; } } /// <summary> /// Synchronous wrapper for importing Series CSV file /// </summary> /// <param name="file">CSV file to import</param> /// <returns>Result of the import</returns> public static ImportResult ImportSourceSeriesCsv(FileInfo file) { return ImportSourceSeriesCsvAsync(file).Result; } /// <summary> /// Import reference CSV file /// </summary> /// <param name="file">CSV file to import</param> /// <returns>Result of the import</returns> private static async Task<ImportResult> ImportSourceSeriesCsvAsync(FileSystemInfo file) { try { Log.Info("Importing a CSV file " + file.FullName); var context = new HSLContext(); // Check to see the file exists if (!file.Exists) { Log.Error("No such file."); return ImportResult.Failed; } // Parse CSV var result = CsvParser.Load(new FileInfo(file.FullName), CsvParser.OperationMode.SeriesSource); // Begin import Console.Write("Importing"); var series = new Series(); foreach (var item in result) { Console.Write("."); var source = new Source {Name = item.Entry["source"]}; series.Name = item.Entry["series"]; var seriesItem = Utils.GetOrAddSeries(series, ref context).SeriesId; var sourceItem = Utils.GetOrAddSource(source, ref context).SourceId; var dbSourceList = context.Sources.Where(entry => entry.SourceId == sourceItem).ToList(); foreach (var sourceEntry in dbSourceList) sourceEntry.SeriesId = seriesItem; context.Sources.UpdateRange(dbSourceList); } Console.WriteLine("Done"); Console.Write("Saving..."); // Save to DB await context.SaveChangesAsync(true); Console.WriteLine(" Done"); Log.Info("Import process completed."); return ImportResult.Success; } catch (Exception ex) { Log.Error("Failed importing CSV."); Log.Debug(ex.Message); Log.Debug(ex.StackTrace); if (ex.InnerException != null) Log.Debug(ex.InnerException.Message); return ImportResult.Failed; } } } }
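// --- Illustrative driver sketch (not part of the Importer above) ---
// Shows how the synchronous wrappers are invoked. The file names are placeholders;
// point them at real export files produced for/by HSLProcessor.
using System;
using System.IO;

namespace HSLProcessor
{
    internal static class ImporterUsageSketch
    {
        public static void Run()
        {
            // Import a song list from CSV (hypothetical path).
            var csvResult = Importer.ImportCsv(new FileInfo("songs.csv"));
            Console.WriteLine("CSV import: {0}", csvResult);

            // Import a previously exported XML snapshot (hypothetical path).
            var xmlResult = Importer.ImportXml(new FileInfo("library.xml"));
            Console.WriteLine("XML import: {0}", xmlResult);

            // Attach series information to sources from a reference CSV (hypothetical path).
            var seriesResult = Importer.ImportSourceSeriesCsv(new FileInfo("series.csv"));
            Console.WriteLine("Series import: {0}", seriesResult);
        }
    }
}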
// // System.Web.Configuration.AuthConfig // // Authors: // Gonzalo Paniagua Javier ([email protected]) // // (C) 2002 Ximian, Inc (http://www.ximian.com) // // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using System.Collections; using System.Configuration; using System.Xml; namespace System.Web.Configuration { class AuthConfig { AuthenticationMode mode; string cookieName; string cookiePath; string loginUrl; FormsProtectionEnum protection; int timeout; FormsAuthPasswordFormat pwdFormat; Hashtable credentialUsers; bool has_parent; #if NET_1_1 bool requireSSL; bool slidingExpiration; #endif internal AuthConfig (object parent) { if (parent is AuthConfig) { has_parent = true; AuthConfig p = (AuthConfig) parent; mode = p.mode; cookieName = p.cookieName; cookiePath = p.cookiePath; loginUrl = p.loginUrl; protection = p.protection; timeout = p.timeout; pwdFormat = p.pwdFormat; #if NET_1_1 requireSSL = p.requireSSL; slidingExpiration = p.slidingExpiration; #endif credentialUsers = new Hashtable (p.CredentialUsers); } } internal void SetMode (string m) { if (m == null) { // we default to Forms authentication mode, MS defaults to Windows if (!has_parent) Mode = AuthenticationMode.Forms; return; } Mode = (AuthenticationMode) Enum.Parse (typeof (AuthenticationMode), m, true); } internal void SetProtection (string prot) { if (prot == null) { if (!has_parent) Protection = FormsProtectionEnum.All; return; } Protection = (FormsProtectionEnum) Enum.Parse (typeof (FormsProtectionEnum), prot, true); } internal void SetTimeout (string minutes) { if (minutes != null) { Timeout = Int32.Parse (minutes); return; } if (!has_parent) Timeout = 30; } internal void SetPasswordFormat (string pwdFormat) { if (pwdFormat == null) { if (!has_parent) PasswordFormat = FormsAuthPasswordFormat.Clear; return; } PasswordFormat = (FormsAuthPasswordFormat) Enum.Parse (typeof (FormsAuthPasswordFormat), pwdFormat, true); } internal AuthenticationMode Mode { get { return mode; } set { mode = value; } } internal string CookieName { get { if (cookieName == null) cookieName = ".ASPXAUTH"; return cookieName; } set { if (value == null) return; cookieName = value; } } internal string CookiePath { get { if (cookiePath == null) cookiePath = "/"; return cookiePath; } set { if (value == null) return; cookiePath = value; } } internal string LoginUrl { get { if (loginUrl == null) loginUrl = "login.aspx"; return loginUrl; } set { if (value == null) return; loginUrl = value; } } internal FormsProtectionEnum Protection { 
get { return protection; } set { protection = value; } } internal int Timeout { get { return timeout; } set { if (value <= 0) throw new ArgumentException ("Timeout must be > 0", "value"); timeout = value; } } internal FormsAuthPasswordFormat PasswordFormat { get { return pwdFormat; } set { pwdFormat = value; } } internal Hashtable CredentialUsers { get { if (credentialUsers == null) credentialUsers = new Hashtable (); return credentialUsers; } } #if NET_1_1 internal bool RequireSSL { get { return requireSSL; } set { requireSSL = value; } } internal bool SlidingExpiration { get { return slidingExpiration; } set { slidingExpiration = value; } } #endif } }
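// --- Illustrative sketch (not part of the Mono source above) ---
// Shows how the setters above resolve defaults when a configuration section omits
// attributes and there is no parent section. Assumes the sketch lives in the same
// assembly, since AuthConfig and its members are internal.
namespace System.Web.Configuration
{
    class AuthConfigDefaultsSketch
    {
        static AuthConfig BuildRootSection()
        {
            // No parent section: null attribute values fall back to the documented defaults.
            var config = new AuthConfig(null);
            config.SetMode(null);            // -> AuthenticationMode.Forms (Mono's default; MS defaults to Windows)
            config.SetProtection(null);      // -> FormsProtectionEnum.All
            config.SetTimeout(null);         // -> 30 minutes
            config.SetPasswordFormat(null);  // -> FormsAuthPasswordFormat.Clear
            return config;                   // CookieName/CookiePath/LoginUrl default lazily in their getters
        }
    }
}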
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.Diagnostics; using System.Reflection.Metadata.Ecma335; using System.Runtime.CompilerServices; namespace System.Reflection.Metadata { partial class MetadataReader { internal const string ClrPrefix = "<CLR>"; internal static readonly byte[] WinRTPrefix = new[] { (byte)'<', (byte)'W', (byte)'i', (byte)'n', (byte)'R', (byte)'T', (byte)'>' }; #region Projection Tables // Maps names of projected types to projection information for each type. // Both arrays are of the same length and sorted by the type name. private static string[] _projectedTypeNames; private static ProjectionInfo[] s_projectionInfos; private struct ProjectionInfo { public readonly string WinRTNamespace; public readonly StringHandle.VirtualIndex ClrNamespace; public readonly StringHandle.VirtualIndex ClrName; public readonly AssemblyReferenceHandle.VirtualIndex AssemblyRef; public readonly TypeDefTreatment Treatment; public readonly bool IsIDisposable; public ProjectionInfo( string winRtNamespace, StringHandle.VirtualIndex clrNamespace, StringHandle.VirtualIndex clrName, AssemblyReferenceHandle.VirtualIndex clrAssembly, TypeDefTreatment treatment = TypeDefTreatment.RedirectedToClrType, bool isIDisposable = false) { this.WinRTNamespace = winRtNamespace; this.ClrNamespace = clrNamespace; this.ClrName = clrName; this.AssemblyRef = clrAssembly; this.Treatment = treatment; this.IsIDisposable = isIDisposable; } } private TypeDefTreatment GetWellKnownTypeDefinitionTreatment(TypeDefinitionHandle typeDef) { InitializeProjectedTypes(); StringHandle name = TypeDefTable.GetName(typeDef); int index = StringStream.BinarySearchRaw(_projectedTypeNames, name); if (index < 0) { return TypeDefTreatment.None; } StringHandle namespaceName = TypeDefTable.GetNamespaceString(typeDef); if (StringStream.EqualsRaw(namespaceName, StringStream.GetVirtualValue(s_projectionInfos[index].ClrNamespace))) { return s_projectionInfos[index].Treatment; } // TODO: we can avoid this comparison if info.DotNetNamespace == info.WinRtNamespace if (StringStream.EqualsRaw(namespaceName, s_projectionInfos[index].WinRTNamespace)) { return s_projectionInfos[index].Treatment | TypeDefTreatment.MarkInternalFlag; } return TypeDefTreatment.None; } private int GetProjectionIndexForTypeReference(TypeReferenceHandle typeRef, out bool isIDisposable) { InitializeProjectedTypes(); int index = StringStream.BinarySearchRaw(_projectedTypeNames, TypeRefTable.GetName(typeRef)); if (index >= 0 && StringStream.EqualsRaw(TypeRefTable.GetNamespace(typeRef), s_projectionInfos[index].WinRTNamespace)) { isIDisposable = s_projectionInfos[index].IsIDisposable; return index; } isIDisposable = false; return -1; } internal static AssemblyReferenceHandle GetProjectedAssemblyRef(int projectionIndex) { Debug.Assert(s_projectionInfos != null && projectionIndex >= 0 && projectionIndex < s_projectionInfos.Length); return AssemblyReferenceHandle.FromVirtualIndex(s_projectionInfos[projectionIndex].AssemblyRef); } internal static StringHandle GetProjectedName(int projectionIndex) { Debug.Assert(s_projectionInfos != null && projectionIndex >= 0 && projectionIndex < s_projectionInfos.Length); return StringHandle.FromVirtualIndex(s_projectionInfos[projectionIndex].ClrName); } internal static StringHandle GetProjectedNamespace(int projectionIndex) { Debug.Assert(s_projectionInfos != null && projectionIndex >= 0 && projectionIndex < 
s_projectionInfos.Length); return StringHandle.FromVirtualIndex(s_projectionInfos[projectionIndex].ClrNamespace); } private static void InitializeProjectedTypes() { if (_projectedTypeNames == null || s_projectionInfos == null) { var systemRuntimeWindowsRuntime = AssemblyReferenceHandle.VirtualIndex.System_Runtime_WindowsRuntime; var systemRuntime = AssemblyReferenceHandle.VirtualIndex.System_Runtime; var systemObjectModel = AssemblyReferenceHandle.VirtualIndex.System_ObjectModel; var systemRuntimeWindowsUiXaml = AssemblyReferenceHandle.VirtualIndex.System_Runtime_WindowsRuntime_UI_Xaml; var systemRuntimeInterop = AssemblyReferenceHandle.VirtualIndex.System_Runtime_InteropServices_WindowsRuntime; var systemNumericsVectors = AssemblyReferenceHandle.VirtualIndex.System_Numerics_Vectors; // sorted by name var keys = new string[50]; var values = new ProjectionInfo[50]; int k = 0, v = 0; // WARNING: Keys must be sorted by name and must only contain ASCII characters. WinRTNamespace must also be ASCII only. keys[k++] = "AttributeTargets"; values[v++] = new ProjectionInfo("Windows.Foundation.Metadata", StringHandle.VirtualIndex.System, StringHandle.VirtualIndex.AttributeTargets, systemRuntime); keys[k++] = "AttributeUsageAttribute"; values[v++] = new ProjectionInfo("Windows.Foundation.Metadata", StringHandle.VirtualIndex.System, StringHandle.VirtualIndex.AttributeUsageAttribute, systemRuntime, treatment: TypeDefTreatment.RedirectedToClrAttribute); keys[k++] = "Color"; values[v++] = new ProjectionInfo("Windows.UI", StringHandle.VirtualIndex.Windows_UI, StringHandle.VirtualIndex.Color, systemRuntimeWindowsRuntime); keys[k++] = "CornerRadius"; values[v++] = new ProjectionInfo("Windows.UI.Xaml", StringHandle.VirtualIndex.Windows_UI_Xaml, StringHandle.VirtualIndex.CornerRadius, systemRuntimeWindowsUiXaml); keys[k++] = "DateTime"; values[v++] = new ProjectionInfo("Windows.Foundation", StringHandle.VirtualIndex.System, StringHandle.VirtualIndex.DateTimeOffset, systemRuntime); keys[k++] = "Duration"; values[v++] = new ProjectionInfo("Windows.UI.Xaml", StringHandle.VirtualIndex.Windows_UI_Xaml, StringHandle.VirtualIndex.Duration, systemRuntimeWindowsUiXaml); keys[k++] = "DurationType"; values[v++] = new ProjectionInfo("Windows.UI.Xaml", StringHandle.VirtualIndex.Windows_UI_Xaml, StringHandle.VirtualIndex.DurationType, systemRuntimeWindowsUiXaml); keys[k++] = "EventHandler`1"; values[v++] = new ProjectionInfo("Windows.Foundation", StringHandle.VirtualIndex.System, StringHandle.VirtualIndex.EventHandler1, systemRuntime); keys[k++] = "EventRegistrationToken"; values[v++] = new ProjectionInfo("Windows.Foundation", StringHandle.VirtualIndex.System_Runtime_InteropServices_WindowsRuntime, StringHandle.VirtualIndex.EventRegistrationToken, systemRuntimeInterop); keys[k++] = "GeneratorPosition"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Controls.Primitives", StringHandle.VirtualIndex.Windows_UI_Xaml_Controls_Primitives, StringHandle.VirtualIndex.GeneratorPosition, systemRuntimeWindowsUiXaml); keys[k++] = "GridLength"; values[v++] = new ProjectionInfo("Windows.UI.Xaml", StringHandle.VirtualIndex.Windows_UI_Xaml, StringHandle.VirtualIndex.GridLength, systemRuntimeWindowsUiXaml); keys[k++] = "GridUnitType"; values[v++] = new ProjectionInfo("Windows.UI.Xaml", StringHandle.VirtualIndex.Windows_UI_Xaml, StringHandle.VirtualIndex.GridUnitType, systemRuntimeWindowsUiXaml); keys[k++] = "HResult"; values[v++] = new ProjectionInfo("Windows.Foundation", StringHandle.VirtualIndex.System, 
StringHandle.VirtualIndex.Exception, systemRuntime); keys[k++] = "IBindableIterable"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Interop", StringHandle.VirtualIndex.System_Collections, StringHandle.VirtualIndex.IEnumerable, systemRuntime); keys[k++] = "IBindableVector"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Interop", StringHandle.VirtualIndex.System_Collections, StringHandle.VirtualIndex.IList, systemRuntime); keys[k++] = "IClosable"; values[v++] = new ProjectionInfo("Windows.Foundation", StringHandle.VirtualIndex.System, StringHandle.VirtualIndex.IDisposable, systemRuntime, isIDisposable: true); keys[k++] = "ICommand"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Input", StringHandle.VirtualIndex.System_Windows_Input, StringHandle.VirtualIndex.ICommand, systemObjectModel); keys[k++] = "IIterable`1"; values[v++] = new ProjectionInfo("Windows.Foundation.Collections", StringHandle.VirtualIndex.System_Collections_Generic, StringHandle.VirtualIndex.IEnumerable1, systemRuntime); keys[k++] = "IKeyValuePair`2"; values[v++] = new ProjectionInfo("Windows.Foundation.Collections", StringHandle.VirtualIndex.System_Collections_Generic, StringHandle.VirtualIndex.KeyValuePair2, systemRuntime); keys[k++] = "IMapView`2"; values[v++] = new ProjectionInfo("Windows.Foundation.Collections", StringHandle.VirtualIndex.System_Collections_Generic, StringHandle.VirtualIndex.IReadOnlyDictionary2, systemRuntime); keys[k++] = "IMap`2"; values[v++] = new ProjectionInfo("Windows.Foundation.Collections", StringHandle.VirtualIndex.System_Collections_Generic, StringHandle.VirtualIndex.IDictionary2, systemRuntime); keys[k++] = "INotifyCollectionChanged"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Interop", StringHandle.VirtualIndex.System_Collections_Specialized, StringHandle.VirtualIndex.INotifyCollectionChanged, systemObjectModel); keys[k++] = "INotifyPropertyChanged"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Data", StringHandle.VirtualIndex.System_ComponentModel, StringHandle.VirtualIndex.INotifyPropertyChanged, systemObjectModel); keys[k++] = "IReference`1"; values[v++] = new ProjectionInfo("Windows.Foundation", StringHandle.VirtualIndex.System, StringHandle.VirtualIndex.Nullable1, systemRuntime); keys[k++] = "IVectorView`1"; values[v++] = new ProjectionInfo("Windows.Foundation.Collections", StringHandle.VirtualIndex.System_Collections_Generic, StringHandle.VirtualIndex.IReadOnlyList1, systemRuntime); keys[k++] = "IVector`1"; values[v++] = new ProjectionInfo("Windows.Foundation.Collections", StringHandle.VirtualIndex.System_Collections_Generic, StringHandle.VirtualIndex.IList1, systemRuntime); keys[k++] = "KeyTime"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Media.Animation", StringHandle.VirtualIndex.Windows_UI_Xaml_Media_Animation, StringHandle.VirtualIndex.KeyTime, systemRuntimeWindowsUiXaml); keys[k++] = "Matrix"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Media", StringHandle.VirtualIndex.Windows_UI_Xaml_Media, StringHandle.VirtualIndex.Matrix, systemRuntimeWindowsUiXaml); keys[k++] = "Matrix3D"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Media.Media3D", StringHandle.VirtualIndex.Windows_UI_Xaml_Media_Media3D, StringHandle.VirtualIndex.Matrix3D, systemRuntimeWindowsUiXaml); keys[k++] = "Matrix3x2"; values[v++] = new ProjectionInfo("Windows.Foundation.Numerics", StringHandle.VirtualIndex.System_Numerics, StringHandle.VirtualIndex.Matrix3x2, systemNumericsVectors); keys[k++] = "Matrix4x4"; values[v++] = new ProjectionInfo("Windows.Foundation.Numerics", 
StringHandle.VirtualIndex.System_Numerics, StringHandle.VirtualIndex.Matrix4x4, systemNumericsVectors); keys[k++] = "NotifyCollectionChangedAction"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Interop", StringHandle.VirtualIndex.System_Collections_Specialized, StringHandle.VirtualIndex.NotifyCollectionChangedAction, systemObjectModel); keys[k++] = "NotifyCollectionChangedEventArgs"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Interop", StringHandle.VirtualIndex.System_Collections_Specialized, StringHandle.VirtualIndex.NotifyCollectionChangedEventArgs, systemObjectModel); keys[k++] = "NotifyCollectionChangedEventHandler"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Interop", StringHandle.VirtualIndex.System_Collections_Specialized, StringHandle.VirtualIndex.NotifyCollectionChangedEventHandler, systemObjectModel); keys[k++] = "Plane"; values[v++] = new ProjectionInfo("Windows.Foundation.Numerics", StringHandle.VirtualIndex.System_Numerics, StringHandle.VirtualIndex.Plane, systemNumericsVectors); keys[k++] = "Point"; values[v++] = new ProjectionInfo("Windows.Foundation", StringHandle.VirtualIndex.Windows_Foundation, StringHandle.VirtualIndex.Point, systemRuntimeWindowsRuntime); keys[k++] = "PropertyChangedEventArgs"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Data", StringHandle.VirtualIndex.System_ComponentModel, StringHandle.VirtualIndex.PropertyChangedEventArgs, systemObjectModel); keys[k++] = "PropertyChangedEventHandler"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Data", StringHandle.VirtualIndex.System_ComponentModel, StringHandle.VirtualIndex.PropertyChangedEventHandler, systemObjectModel); keys[k++] = "Quaternion"; values[v++] = new ProjectionInfo("Windows.Foundation.Numerics", StringHandle.VirtualIndex.System_Numerics, StringHandle.VirtualIndex.Quaternion, systemNumericsVectors); keys[k++] = "Rect"; values[v++] = new ProjectionInfo("Windows.Foundation", StringHandle.VirtualIndex.Windows_Foundation, StringHandle.VirtualIndex.Rect, systemRuntimeWindowsRuntime); keys[k++] = "RepeatBehavior"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Media.Animation", StringHandle.VirtualIndex.Windows_UI_Xaml_Media_Animation, StringHandle.VirtualIndex.RepeatBehavior, systemRuntimeWindowsUiXaml); keys[k++] = "RepeatBehaviorType"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Media.Animation", StringHandle.VirtualIndex.Windows_UI_Xaml_Media_Animation, StringHandle.VirtualIndex.RepeatBehaviorType, systemRuntimeWindowsUiXaml); keys[k++] = "Size"; values[v++] = new ProjectionInfo("Windows.Foundation", StringHandle.VirtualIndex.Windows_Foundation, StringHandle.VirtualIndex.Size, systemRuntimeWindowsRuntime); keys[k++] = "Thickness"; values[v++] = new ProjectionInfo("Windows.UI.Xaml", StringHandle.VirtualIndex.Windows_UI_Xaml, StringHandle.VirtualIndex.Thickness, systemRuntimeWindowsUiXaml); keys[k++] = "TimeSpan"; values[v++] = new ProjectionInfo("Windows.Foundation", StringHandle.VirtualIndex.System, StringHandle.VirtualIndex.TimeSpan, systemRuntime); keys[k++] = "TypeName"; values[v++] = new ProjectionInfo("Windows.UI.Xaml.Interop", StringHandle.VirtualIndex.System, StringHandle.VirtualIndex.Type, systemRuntime); keys[k++] = "Uri"; values[v++] = new ProjectionInfo("Windows.Foundation", StringHandle.VirtualIndex.System, StringHandle.VirtualIndex.Uri, systemRuntime); keys[k++] = "Vector2"; values[v++] = new ProjectionInfo("Windows.Foundation.Numerics", StringHandle.VirtualIndex.System_Numerics, StringHandle.VirtualIndex.Vector2, systemNumericsVectors); keys[k++] = 
"Vector3"; values[v++] = new ProjectionInfo("Windows.Foundation.Numerics", StringHandle.VirtualIndex.System_Numerics, StringHandle.VirtualIndex.Vector3, systemNumericsVectors); keys[k++] = "Vector4"; values[v++] = new ProjectionInfo("Windows.Foundation.Numerics", StringHandle.VirtualIndex.System_Numerics, StringHandle.VirtualIndex.Vector4, systemNumericsVectors); Debug.Assert(k == keys.Length && v == keys.Length && k == v); AssertSorted(keys); _projectedTypeNames = keys; s_projectionInfos = values; } } [Conditional("DEBUG")] private static void AssertSorted(string[] keys) { for (int i = 0; i < keys.Length - 1; i++) { Debug.Assert(String.CompareOrdinal(keys[i], keys[i + 1]) < 0); } } // test only internal static string[] GetProjectedTypeNames() { InitializeProjectedTypes(); return _projectedTypeNames; } #endregion private static uint TreatmentAndRowId(byte treatment, uint rowId) { return ((uint)treatment << TokenTypeIds.RowIdBitCount) | rowId; } #region TypeDef [MethodImplAttribute(MethodImplOptions.NoInlining)] internal uint CalculateTypeDefTreatmentAndRowId(TypeDefinitionHandle handle) { Debug.Assert(_metadataKind != MetadataKind.Ecma335); TypeDefTreatment treatment; TypeAttributes flags = TypeDefTable.GetFlags(handle); Handle extends = TypeDefTable.GetExtends(handle); if ((flags & TypeAttributes.WindowsRuntime) != 0) { if (_metadataKind == MetadataKind.WindowsMetadata) { treatment = GetWellKnownTypeDefinitionTreatment(handle); if (treatment != TypeDefTreatment.None) { return TreatmentAndRowId((byte)treatment, handle.RowId); } // Is this an attribute? if (extends.Kind == HandleKind.TypeReference && IsSystemAttribute((TypeReferenceHandle)extends)) { treatment = TypeDefTreatment.NormalAttribute; } else { treatment = TypeDefTreatment.NormalNonAttribute; } } else if (_metadataKind == MetadataKind.ManagedWindowsMetadata && NeedsWinRTPrefix(flags, extends)) { // WinMDExp emits two versions of RuntimeClasses and Enums: // // public class Foo {} // the WinRT reference class // internal class <CLR>Foo {} // the implementation class that we want WinRT consumers to ignore // // The adapter's job is to undo WinMDExp's transformations. I.e. turn the above into: // // internal class <WinRT>Foo {} // the WinRT reference class that we want CLR consumers to ignore // public class Foo {} // the implementation class // // We only add the <WinRT> prefix here since the WinRT view is the only view that is marked WindowsRuntime // De-mangling the CLR name is done below. // tomat: The CLR adapter implements a back-compat quirk: Enums exported with an older WinMDExp have only one version // not marked with tdSpecialName. These enums should *not* be mangled and flipped to private. // We don't implement this flag since the WinMDs producted by the older WinMDExp are not used in the wild. treatment = TypeDefTreatment.PrefixWinRTName; } else { treatment = TypeDefTreatment.None; } // Scan through Custom Attributes on type, looking for interesting bits. We only // need to do this for RuntimeClasses if ((treatment == TypeDefTreatment.PrefixWinRTName || treatment == TypeDefTreatment.NormalNonAttribute)) { if ((flags & TypeAttributes.Interface) == 0 && HasAttribute(handle, "Windows.UI.Xaml", "TreatAsAbstractComposableClassAttribute")) { treatment |= TypeDefTreatment.MarkAbstractFlag; } } } else if (_metadataKind == MetadataKind.ManagedWindowsMetadata && IsClrImplementationType(handle)) { // <CLR> implementation classes are not marked WindowsRuntime, but still need to be modified // by the adapter. 
treatment = TypeDefTreatment.UnmangleWinRTName; } else { treatment = TypeDefTreatment.None; } return TreatmentAndRowId((byte)treatment, handle.RowId); } private bool IsClrImplementationType(TypeDefinitionHandle typeDef) { var attrs = TypeDefTable.GetFlags(typeDef); if ((attrs & (TypeAttributes.VisibilityMask | TypeAttributes.SpecialName)) != TypeAttributes.SpecialName) { return false; } return StringStream.StartsWithRaw(TypeDefTable.GetName(typeDef), ClrPrefix); } #endregion #region TypeRef internal uint CalculateTypeRefTreatmentAndRowId(TypeReferenceHandle handle) { Debug.Assert(_metadataKind != MetadataKind.Ecma335); bool isIDisposable; int projectionIndex = GetProjectionIndexForTypeReference(handle, out isIDisposable); if (projectionIndex >= 0) { return TreatmentAndRowId((byte)TypeRefTreatment.UseProjectionInfo, (uint)projectionIndex); } else { return TreatmentAndRowId((byte)GetSpecialTypeRefTreatment(handle), handle.RowId); } } private TypeRefTreatment GetSpecialTypeRefTreatment(TypeReferenceHandle handle) { if (StringStream.EqualsRaw(TypeRefTable.GetNamespace(handle), "System")) { StringHandle name = TypeRefTable.GetName(handle); if (StringStream.EqualsRaw(name, "MulticastDelegate")) { return TypeRefTreatment.SystemDelegate; } if (StringStream.EqualsRaw(name, "Attribute")) { return TypeRefTreatment.SystemAttribute; } } return TypeRefTreatment.None; } private bool IsSystemAttribute(TypeReferenceHandle handle) { return StringStream.EqualsRaw(TypeRefTable.GetNamespace(handle), "System") && StringStream.EqualsRaw(TypeRefTable.GetName(handle), "Attribute"); } private bool IsSystemEnum(TypeReferenceHandle handle) { return StringStream.EqualsRaw(TypeRefTable.GetNamespace(handle), "System") && StringStream.EqualsRaw(TypeRefTable.GetName(handle), "Enum"); } private bool NeedsWinRTPrefix(TypeAttributes flags, Handle extends) { if ((flags & (TypeAttributes.VisibilityMask | TypeAttributes.Interface)) != TypeAttributes.Public) { return false; } if (extends.Kind != HandleKind.TypeReference) { return false; } // Check if the type is a delegate, struct, or attribute TypeReferenceHandle extendsRefHandle = (TypeReferenceHandle)extends; if (StringStream.EqualsRaw(TypeRefTable.GetNamespace(extendsRefHandle), "System")) { StringHandle nameHandle = TypeRefTable.GetName(extendsRefHandle); if (StringStream.EqualsRaw(nameHandle, "MulticastDelegate") || StringStream.EqualsRaw(nameHandle, "ValueType") || StringStream.EqualsRaw(nameHandle, "Attribute")) { return false; } } return true; } #endregion #region MethodDef private uint CalculateMethodDefTreatmentAndRowId(MethodDefinitionHandle methodDef) { MethodDefTreatment treatment = MethodDefTreatment.Implementation; TypeDefinitionHandle parentTypeDef = GetDeclaringType(methodDef); TypeAttributes parentFlags = TypeDefTable.GetFlags(parentTypeDef); if ((parentFlags & TypeAttributes.WindowsRuntime) != 0) { if (IsClrImplementationType(parentTypeDef)) { treatment = MethodDefTreatment.Implementation; } else if (parentFlags.IsNested()) { treatment = MethodDefTreatment.Implementation; } else if ((parentFlags & TypeAttributes.Interface) != 0) { treatment = MethodDefTreatment.InterfaceMethod; } else if (_metadataKind == MetadataKind.ManagedWindowsMetadata && (parentFlags & TypeAttributes.Public) == 0) { treatment = MethodDefTreatment.Implementation; } else { treatment = MethodDefTreatment.Other; var parentBaseType = TypeDefTable.GetExtends(parentTypeDef); if (parentBaseType.Kind == HandleKind.TypeReference) { switch 
(GetSpecialTypeRefTreatment((TypeReferenceHandle)parentBaseType)) { case TypeRefTreatment.SystemAttribute: treatment = MethodDefTreatment.AttributeMethod; break; case TypeRefTreatment.SystemDelegate: treatment = MethodDefTreatment.DelegateMethod | MethodDefTreatment.MarkPublicFlag; break; } } } } if (treatment == MethodDefTreatment.Other) { // we want to hide the method if it implements // only redirected interfaces // We also want to check if the methodImpl is IClosable.Close, // so we can change the name bool seenRedirectedInterfaces = false; bool seenNonRedirectedInterfaces = false; bool isIClosableClose = false; foreach (var methodImplHandle in new MethodImplementationHandleCollection(this, parentTypeDef)) { MethodImplementation methodImpl = GetMethodImplementation(methodImplHandle); if (methodImpl.MethodBody == methodDef) { Handle declaration = methodImpl.MethodDeclaration; // See if this MethodImpl implements a redirected interface // In WinMD, MethodImpl will always use MemberRef and TypeRefs to refer to redirected interfaces, // even if they are in the same module. if (declaration.Kind == HandleKind.MemberReference && ImplementsRedirectedInterface((MemberReferenceHandle)declaration, out isIClosableClose)) { seenRedirectedInterfaces = true; if (isIClosableClose) { // This method implements IClosable.Close // Let's rename to IDisposable later // Once we know this implements IClosable.Close, we are done // looking break; } } else { // Now we know this implements a non-redirected interface // But we need to keep looking, just in case we got a methodimpl that // implements the IClosable.Close method and needs to be renamed seenNonRedirectedInterfaces = true; } } } if (isIClosableClose) { treatment = MethodDefTreatment.DisposeMethod; } else if (seenRedirectedInterfaces && !seenNonRedirectedInterfaces) { // Only hide if all the interfaces implemented are redirected treatment = MethodDefTreatment.HiddenInterfaceImplementation; } } // If treatment is other, then this is a non-managed WinRT runtime class definition // Find out about various bits that we apply via attributes and name parsing if (treatment == MethodDefTreatment.Other) { treatment |= GetMethodTreatmentFromCustomAttributes(methodDef); } return TreatmentAndRowId((byte)treatment, methodDef.RowId); } private MethodDefTreatment GetMethodTreatmentFromCustomAttributes(MethodDefinitionHandle methodDef) { MethodDefTreatment treatment = 0; foreach (var caHandle in GetCustomAttributes(methodDef)) { StringHandle namespaceHandle, nameHandle; if (!GetAttributeTypeNameRaw(caHandle, out namespaceHandle, out nameHandle)) { continue; } Debug.Assert(!namespaceHandle.IsVirtual && !nameHandle.IsVirtual); if (StringStream.EqualsRaw(namespaceHandle, "Windows.UI.Xaml")) { if (StringStream.EqualsRaw(nameHandle, "TreatAsPublicMethodAttribute")) { treatment |= MethodDefTreatment.MarkPublicFlag; } if (StringStream.EqualsRaw(nameHandle, "TreatAsAbstractMethodAttribute")) { treatment |= MethodDefTreatment.MarkAbstractFlag; } } } return treatment; } #endregion #region FieldDef /// <summary> /// The backing field of a WinRT enumeration type is not public although the backing fields /// of managed enumerations are. To allow managed languages to directly access this field, /// it is made public by the metadata adapter. 
/// </summary> private uint CalculateFieldDefTreatmentAndRowId(FieldDefinitionHandle handle) { var flags = FieldTable.GetFlags(handle); FieldDefTreatment treatment = FieldDefTreatment.None; if ((flags & FieldAttributes.RTSpecialName) != 0 && StringStream.EqualsRaw(FieldTable.GetName(handle), "value__")) { TypeDefinitionHandle typeDef = GetDeclaringType(handle); Handle baseTypeHandle = TypeDefTable.GetExtends(typeDef); if (baseTypeHandle.Kind == HandleKind.TypeReference) { var typeRef = (TypeReferenceHandle)baseTypeHandle; if (StringStream.EqualsRaw(TypeRefTable.GetName(typeRef), "Enum") && StringStream.EqualsRaw(TypeRefTable.GetNamespace(typeRef), "System")) { treatment = FieldDefTreatment.EnumValue; } } } return TreatmentAndRowId((byte)treatment, handle.RowId); } #endregion #region MemberRef private uint CalculateMemberRefTreatmentAndRowId(MemberReferenceHandle handle) { MemberRefTreatment treatment; // We need to rename the MemberRef for IClosable.Close as well // so that the MethodImpl for the Dispose method can be correctly shown // as IDisposable.Dispose instead of IDisposable.Close bool isIDisposable; if (ImplementsRedirectedInterface(handle, out isIDisposable) && isIDisposable) { treatment = MemberRefTreatment.Dispose; } else { treatment = MemberRefTreatment.None; } return TreatmentAndRowId((byte)treatment, handle.RowId); } /// <summary> /// We want to know if a given method implements a redirected interface. /// For example, if we are given the method RemoveAt on a class "A" /// which implements the IVector interface (which is redirected /// to IList in .NET) then this method would return true. The most /// likely reason why we would want to know this is that we wish to hide /// (mark private) all methods which implement methods on a redirected /// interface. /// </summary> /// <param name="memberRef">The declaration token for the method</param> /// <param name="isIDisposable"> /// Returns true if the redirected interface is <see cref="IDisposable"/>. /// </param> /// <returns>True if the method implements a method on a redirected interface. 
/// False otherwise.</returns> private bool ImplementsRedirectedInterface(MemberReferenceHandle memberRef, out bool isIDisposable) { isIDisposable = false; Handle parent = MemberRefTable.GetClass(memberRef); TypeReferenceHandle typeRef; if (parent.Kind == HandleKind.TypeReference) { typeRef = (TypeReferenceHandle)parent; } else if (parent.Kind == HandleKind.TypeSpecification) { BlobHandle blob = TypeSpecTable.GetSignature((TypeSpecificationHandle)parent); BlobReader sig = BlobStream.GetBlobReader(blob); if (sig.Length < 2 || sig.ReadByte() != (byte)CorElementType.ELEMENT_TYPE_GENERICINST || sig.ReadByte() != (byte)CorElementType.ELEMENT_TYPE_CLASS) { return false; } Handle token = sig.ReadTypeHandle(); if (token.Kind != HandleKind.TypeReference) { return false; } typeRef = (TypeReferenceHandle)token; } else { return false; } return GetProjectionIndexForTypeReference(typeRef, out isIDisposable) >= 0; } #endregion #region AssemblyRef private uint FindMscorlibAssemblyRefNoProjection() { for (uint i = 1; i <= AssemblyRefTable.NumberOfNonVirtualRows; i++) { if (StringStream.EqualsRaw(AssemblyRefTable.GetName(i), "mscorlib")) { return i; } } throw new BadImageFormatException(MetadataResources.WinMDMissingMscorlibRef); } #endregion #region CustomAttribute internal CustomAttributeValueTreatment CalculateCustomAttributeValueTreatment(CustomAttributeHandle handle) { Debug.Assert(_metadataKind != MetadataKind.Ecma335); var parent = CustomAttributeTable.GetParent(handle); // Check for Windows.Foundation.Metadata.AttributeUsageAttribute. // WinMD rules: // - The attribute is only applicable on TypeDefs. // - Constructor must be a MemberRef with TypeRef. if (!IsWindowsAttributeUsageAttribute(parent, handle)) { return CustomAttributeValueTreatment.None; } var targetTypeDef = (TypeDefinitionHandle)parent; if (StringStream.EqualsRaw(TypeDefTable.GetNamespaceString(targetTypeDef), "Windows.Foundation.Metadata")) { if (StringStream.EqualsRaw(TypeDefTable.GetName(targetTypeDef), "VersionAttribute")) { return CustomAttributeValueTreatment.AttributeUsageVersionAttribute; } if (StringStream.EqualsRaw(TypeDefTable.GetName(targetTypeDef), "DeprecatedAttribute")) { return CustomAttributeValueTreatment.AttributeUsageDeprecatedAttribute; } } bool allowMultiple = HasAttribute(targetTypeDef, "Windows.Foundation.Metadata", "AllowMultipleAttribute"); return allowMultiple ? CustomAttributeValueTreatment.AttributeUsageAllowMultiple : CustomAttributeValueTreatment.AttributeUsageAllowSingle; } private bool IsWindowsAttributeUsageAttribute(Handle targetType, CustomAttributeHandle attributeHandle) { // Check for Windows.Foundation.Metadata.AttributeUsageAttribute. // WinMD rules: // - The attribute is only applicable on TypeDefs. // - Constructor must be a MemberRef with TypeRef. 
if (targetType.Kind != HandleKind.TypeDefinition) { return false; } var attributeCtor = CustomAttributeTable.GetConstructor(attributeHandle); if (attributeCtor.Kind != HandleKind.MemberReference) { return false; } var attributeType = MemberRefTable.GetClass((MemberReferenceHandle)attributeCtor); if (attributeType.Kind != HandleKind.TypeReference) { return false; } var attributeTypeRef = (TypeReferenceHandle)attributeType; return StringStream.EqualsRaw(TypeRefTable.GetName(attributeTypeRef), "AttributeUsageAttribute") && StringStream.EqualsRaw(TypeRefTable.GetNamespace(attributeTypeRef), "Windows.Foundation.Metadata"); } private bool HasAttribute(Handle token, string asciiNamespaceName, string asciiTypeName) { foreach (var caHandle in GetCustomAttributes(token)) { StringHandle namespaceName, typeName; if (GetAttributeTypeNameRaw(caHandle, out namespaceName, out typeName) && StringStream.EqualsRaw(typeName, asciiTypeName) && StringStream.EqualsRaw(namespaceName, asciiNamespaceName)) { return true; } } return false; } private bool GetAttributeTypeNameRaw(CustomAttributeHandle caHandle, out StringHandle namespaceName, out StringHandle typeName) { namespaceName = typeName = default(StringHandle); Handle typeDefOrRef = GetAttributeTypeRaw(caHandle); if (typeDefOrRef.IsNil) { return false; } if (typeDefOrRef.Kind == HandleKind.TypeReference) { TypeReferenceHandle typeRef = (TypeReferenceHandle)typeDefOrRef; var resolutionScope = TypeRefTable.GetResolutionScope(typeRef); if (!resolutionScope.IsNil && resolutionScope.Kind == HandleKind.TypeReference) { // we don't need to handle nested types return false; } // other resolution scopes don't affect full name typeName = TypeRefTable.GetName(typeRef); namespaceName = TypeRefTable.GetNamespace(typeRef); } else if (typeDefOrRef.Kind == HandleKind.TypeDefinition) { TypeDefinitionHandle typeDef = (TypeDefinitionHandle)typeDefOrRef; if (TypeDefTable.GetFlags(typeDef).IsNested()) { // we don't need to handle nested types return false; } typeName = TypeDefTable.GetName(typeDef); namespaceName = TypeDefTable.GetNamespaceString(typeDef); } else { // invalid metadata return false; } return true; } /// <summary> /// Returns the type definition or reference handle of the attribute type. /// </summary> /// <returns><see cref="TypeDefinitionHandle"/> or <see cref="TypeReferenceHandle"/> or nil token if the metadata is invalid and the type can't be determined.</returns> private Handle GetAttributeTypeRaw(CustomAttributeHandle handle) { var ctor = CustomAttributeTable.GetConstructor(handle); if (ctor.Kind == HandleKind.MethodDefinition) { return GetDeclaringType((MethodDefinitionHandle)ctor); } if (ctor.Kind == HandleKind.MemberReference) { // In general the parent can be MethodDef, ModuleRef, TypeDef, TypeRef, or TypeSpec. // For attributes only TypeDef and TypeRef are applicable. Handle typeDefOrRef = MemberRefTable.GetClass((MemberReferenceHandle)ctor); HandleKind handleType = typeDefOrRef.Kind; if (handleType == HandleKind.TypeReference || handleType == HandleKind.TypeDefinition) { return typeDefOrRef; } } return default(Handle); } #endregion } }
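// --- Illustrative sketch (not part of the metadata reader above) ---
// Demonstrates the bit packing used by TreatmentAndRowId: the treatment byte is
// stored in the bits above the row id so both fit in a single uint. RowIdBitCount
// is assumed to be 24 here, mirroring the internal TokenTypeIds.RowIdBitCount
// constant; only the packing idea is shown, not the adapter itself.
using System.Diagnostics;

internal static class TreatmentPackingSketch
{
    private const int RowIdBitCount = 24;                    // assumed value of TokenTypeIds.RowIdBitCount
    private const uint RowIdMask = (1u << RowIdBitCount) - 1;

    internal static uint Pack(byte treatment, uint rowId) =>
        ((uint)treatment << RowIdBitCount) | rowId;

    internal static void Unpack(uint packed, out byte treatment, out uint rowId)
    {
        treatment = (byte)(packed >> RowIdBitCount);
        rowId = packed & RowIdMask;
    }

    internal static void SelfCheck()
    {
        uint packed = Pack(0x03, 0x000123u);
        byte treatment;
        uint rowId;
        Unpack(packed, out treatment, out rowId);
        Debug.Assert(treatment == 0x03 && rowId == 0x000123u);
    }
}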
using System.Linq; using System.Threading.Tasks; using NJsonSchema; using Xunit; namespace NSwag.Core.Tests.Serialization { public class RequestBodySerializationTests { [Fact] public async Task When_request_body_is_added_then_serialized_correctly_in_Swagger() { // Arrange var document = CreateDocument(); // Act var json = document.ToJson(SchemaType.Swagger2); document = await OpenApiDocument.FromJsonAsync(json); // Assert var requestBody = document.Paths["/baz"][OpenApiOperationMethod.Get].RequestBody; Assert.Equal("foo", requestBody.Name); } [Fact] public async Task When_request_body_is_added_then_serialized_correctly_in_OpenApi() { // Arrange var document = CreateDocument(); // Act var json = document.ToJson(SchemaType.OpenApi3); document = await OpenApiDocument.FromJsonAsync(json); // Assert var requestBody = document.Paths["/baz"][OpenApiOperationMethod.Get].RequestBody; Assert.Equal("foo", requestBody.Name); } [Fact] public async Task When_body_parameter_is_changed_then_request_body_IsRequired_is_updated() { // Arrange var document = CreateDocument(); // Act var json = document.ToJson(SchemaType.OpenApi3); document = await OpenApiDocument.FromJsonAsync(json); var parameter = document.Paths["/baz"][OpenApiOperationMethod.Get].Parameters .Single(p => p.Kind == OpenApiParameterKind.Body); parameter.IsRequired = true; // Assert var requestBody = document.Paths["/baz"][OpenApiOperationMethod.Get].RequestBody; Assert.True(requestBody.IsRequired); Assert.True(parameter.IsRequired); } [Fact] public async Task When_body_parameter_is_changed_then_request_body_Name_is_updated() { // Arrange var document = CreateDocument(); // Act var json = document.ToJson(SchemaType.OpenApi3); document = await OpenApiDocument.FromJsonAsync(json); var parameter = document.Paths["/baz"][OpenApiOperationMethod.Get].Parameters .Single(p => p.Kind == OpenApiParameterKind.Body); parameter.Name = parameter.Name + "123"; // Assert var requestBody = document.Paths["/baz"][OpenApiOperationMethod.Get].RequestBody; Assert.Equal("foo123", requestBody.Name); } [Fact] public async Task When_body_parameter_is_changed_then_request_body_Schema_is_updated() { // Arrange var document = CreateDocument(); // Act var json = document.ToJson(SchemaType.OpenApi3); document = await OpenApiDocument.FromJsonAsync(json); var parameter = document.Paths["/baz"][OpenApiOperationMethod.Get].Parameters .Single(p => p.Kind == OpenApiParameterKind.Body); parameter.Schema = new JsonSchema { Title = "blub" }; // Assert var requestBody = document.Paths["/baz"][OpenApiOperationMethod.Get].RequestBody; Assert.Equal("blub", requestBody.Content["application/json"].Schema.Title); } [Fact] public async Task When_body_parameter_is_changed_then_request_body_Description_is_updated() { // Arrange var document = CreateDocument(); // Act var json = document.ToJson(SchemaType.OpenApi3); document = await OpenApiDocument.FromJsonAsync(json); var parameter = document.Paths["/baz"][OpenApiOperationMethod.Get].Parameters .Single(p => p.Kind == OpenApiParameterKind.Body); parameter.Description = parameter.Description + "123"; // Assert var requestBody = document.Paths["/baz"][OpenApiOperationMethod.Get].RequestBody; Assert.Equal("bar123", requestBody.Description); } [Fact] public async Task When_request_body_is_changed_then_body_parameter_Name_is_updated() { // Arrange var document = CreateDocument(); // Act var json = document.ToJson(SchemaType.OpenApi3); document = await OpenApiDocument.FromJsonAsync(json); var requestBody = 
document.Paths["/baz"][OpenApiOperationMethod.Get].RequestBody; requestBody.Name = requestBody.Name + "123"; // Assert var parameter = document.Paths["/baz"][OpenApiOperationMethod.Get].Parameters .Single(p => p.Kind == OpenApiParameterKind.Body); Assert.Equal("foo123", parameter.Name); } [Fact] public async Task When_request_body_is_changed_then_body_parameter_IsRequired_is_updated() { // Arrange var document = CreateDocument(); // Act var json = document.ToJson(SchemaType.OpenApi3); document = await OpenApiDocument.FromJsonAsync(json); var requestBody = document.Paths["/baz"][OpenApiOperationMethod.Get].RequestBody; requestBody.IsRequired = true; // Assert var parameter = document.Paths["/baz"][OpenApiOperationMethod.Get].Parameters .Single(p => p.Kind == OpenApiParameterKind.Body); Assert.True(parameter.IsRequired); Assert.True(requestBody.IsRequired); } [Fact] public async Task When_request_body_is_changed_then_body_parameter_Content_is_updated() { // Arrange var document = CreateDocument(); // Act var json = document.ToJson(SchemaType.OpenApi3); document = await OpenApiDocument.FromJsonAsync(json); var requestBody = document.Paths["/baz"][OpenApiOperationMethod.Get].RequestBody; requestBody.Content["application/json"] = new OpenApiMediaType { Schema = new JsonSchema { Title = "blub" } }; // Assert var parameter = document.Paths["/baz"][OpenApiOperationMethod.Get].Parameters .Single(p => p.Kind == OpenApiParameterKind.Body); Assert.Equal("blub", parameter.Schema.Title); } [Fact] public async Task When_request_body_is_changed_then_body_parameter_Description_is_updated() { // Arrange var document = CreateDocument(); // Act var json = document.ToJson(SchemaType.OpenApi3); document = await OpenApiDocument.FromJsonAsync(json); var requestBody = document.Paths["/baz"][OpenApiOperationMethod.Get].RequestBody; requestBody.Description = requestBody.Description + "123"; // Assert var parameter = document.Paths["/baz"][OpenApiOperationMethod.Get].Parameters .Single(p => p.Kind == OpenApiParameterKind.Body); Assert.Equal("bar123", parameter.Description); } private static OpenApiDocument CreateDocument() { var schema = new JsonSchema { Type = JsonObjectType.String }; var document = new OpenApiDocument { Paths = { { "/baz", new OpenApiPathItem { { OpenApiOperationMethod.Get, new OpenApiOperation { RequestBody = new OpenApiRequestBody { Name = "foo", Description = "bar", Content = { { "application/json", new OpenApiMediaType { Schema = new JsonSchema { Reference = schema } } } } } } } } } }, Definitions = { { "Abc", schema } } }; return document; } } }
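// --- Illustrative sketch (not part of the original test file) ---
// Restates the round-trip pattern used by the tests above: serialize to OpenAPI 3, parse the
// JSON back, then read the same body data either via Operation.RequestBody or via the body
// parameter, which NSwag keeps in sync. The "/baz" path mirrors CreateDocument(); the helper
// class and method names are assumptions for illustration only.
using System;
using System.Linq;
using System.Threading.Tasks;
using NJsonSchema;

namespace NSwag.Core.Tests.Serialization
{
    public static class RequestBodyRoundTripSketch
    {
        public static async Task RunAsync(OpenApiDocument document)
        {
            var json = document.ToJson(SchemaType.OpenApi3);
            var parsed = await OpenApiDocument.FromJsonAsync(json);

            var operation = parsed.Paths["/baz"][OpenApiOperationMethod.Get];
            var requestBody = operation.RequestBody;
            var bodyParameter = operation.Parameters.Single(p => p.Kind == OpenApiParameterKind.Body);

            // Name, Description and IsRequired are shared state between the two views.
            Console.WriteLine("{0} == {1}", requestBody.Name, bodyParameter.Name);
        }
    }
}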
using System; using System.IO; using CatLib._3rd.SharpCompress.Compressors.LZMA.LZ; using CatLib._3rd.SharpCompress.Compressors.LZMA.RangeCoder; namespace CatLib._3rd.SharpCompress.Compressors.LZMA { internal class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream { private class LenDecoder { private BitDecoder m_Choice = new BitDecoder(); private BitDecoder m_Choice2 = new BitDecoder(); private readonly BitTreeDecoder[] m_LowCoder = new BitTreeDecoder[Base.kNumPosStatesMax]; private readonly BitTreeDecoder[] m_MidCoder = new BitTreeDecoder[Base.kNumPosStatesMax]; private BitTreeDecoder m_HighCoder = new BitTreeDecoder(Base.kNumHighLenBits); private uint m_NumPosStates; public void Create(uint numPosStates) { for (uint posState = m_NumPosStates; posState < numPosStates; posState++) { m_LowCoder[posState] = new BitTreeDecoder(Base.kNumLowLenBits); m_MidCoder[posState] = new BitTreeDecoder(Base.kNumMidLenBits); } m_NumPosStates = numPosStates; } public void Init() { m_Choice.Init(); for (uint posState = 0; posState < m_NumPosStates; posState++) { m_LowCoder[posState].Init(); m_MidCoder[posState].Init(); } m_Choice2.Init(); m_HighCoder.Init(); } public uint Decode(RangeCoder.Decoder rangeDecoder, uint posState) { if (m_Choice.Decode(rangeDecoder) == 0) { return m_LowCoder[posState].Decode(rangeDecoder); } uint symbol = Base.kNumLowLenSymbols; if (m_Choice2.Decode(rangeDecoder) == 0) { symbol += m_MidCoder[posState].Decode(rangeDecoder); } else { symbol += Base.kNumMidLenSymbols; symbol += m_HighCoder.Decode(rangeDecoder); } return symbol; } } private class LiteralDecoder { private struct Decoder2 { private BitDecoder[] m_Decoders; public void Create() { m_Decoders = new BitDecoder[0x300]; } public void Init() { for (int i = 0; i < 0x300; i++) { m_Decoders[i].Init(); } } public byte DecodeNormal(RangeCoder.Decoder rangeDecoder) { uint symbol = 1; do { symbol = (symbol << 1) | m_Decoders[symbol].Decode(rangeDecoder); } while (symbol < 0x100); return (byte)symbol; } public byte DecodeWithMatchByte(RangeCoder.Decoder rangeDecoder, byte matchByte) { uint symbol = 1; do { uint matchBit = (uint)(matchByte >> 7) & 1; matchByte <<= 1; uint bit = m_Decoders[((1 + matchBit) << 8) + symbol].Decode(rangeDecoder); symbol = (symbol << 1) | bit; if (matchBit != bit) { while (symbol < 0x100) { symbol = (symbol << 1) | m_Decoders[symbol].Decode(rangeDecoder); } break; } } while (symbol < 0x100); return (byte)symbol; } } private Decoder2[] m_Coders; private int m_NumPrevBits; private int m_NumPosBits; private uint m_PosMask; public void Create(int numPosBits, int numPrevBits) { if (m_Coders != null && m_NumPrevBits == numPrevBits && m_NumPosBits == numPosBits) { return; } m_NumPosBits = numPosBits; m_PosMask = ((uint)1 << numPosBits) - 1; m_NumPrevBits = numPrevBits; uint numStates = (uint)1 << (m_NumPrevBits + m_NumPosBits); m_Coders = new Decoder2[numStates]; for (uint i = 0; i < numStates; i++) { m_Coders[i].Create(); } } public void Init() { uint numStates = (uint)1 << (m_NumPrevBits + m_NumPosBits); for (uint i = 0; i < numStates; i++) { m_Coders[i].Init(); } } private uint GetState(uint pos, byte prevByte) { return ((pos & m_PosMask) << m_NumPrevBits) + (uint)(prevByte >> (8 - m_NumPrevBits)); } public byte DecodeNormal(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte) { return m_Coders[GetState(pos, prevByte)].DecodeNormal(rangeDecoder); } public byte DecodeWithMatchByte(RangeCoder.Decoder rangeDecoder, uint pos, byte prevByte, byte matchByte) { return m_Coders[GetState(pos, 
prevByte)].DecodeWithMatchByte(rangeDecoder, matchByte); } } private OutWindow m_OutWindow; private readonly BitDecoder[] m_IsMatchDecoders = new BitDecoder[Base.kNumStates << Base.kNumPosStatesBitsMax]; private readonly BitDecoder[] m_IsRepDecoders = new BitDecoder[Base.kNumStates]; private readonly BitDecoder[] m_IsRepG0Decoders = new BitDecoder[Base.kNumStates]; private readonly BitDecoder[] m_IsRepG1Decoders = new BitDecoder[Base.kNumStates]; private readonly BitDecoder[] m_IsRepG2Decoders = new BitDecoder[Base.kNumStates]; private readonly BitDecoder[] m_IsRep0LongDecoders = new BitDecoder[Base.kNumStates << Base.kNumPosStatesBitsMax]; private readonly BitTreeDecoder[] m_PosSlotDecoder = new BitTreeDecoder[Base.kNumLenToPosStates]; private readonly BitDecoder[] m_PosDecoders = new BitDecoder[Base.kNumFullDistances - Base.kEndPosModelIndex]; private BitTreeDecoder m_PosAlignDecoder = new BitTreeDecoder(Base.kNumAlignBits); private readonly LenDecoder m_LenDecoder = new LenDecoder(); private readonly LenDecoder m_RepLenDecoder = new LenDecoder(); private readonly LiteralDecoder m_LiteralDecoder = new LiteralDecoder(); private int m_DictionarySize; private uint m_PosStateMask; private Base.State state = new Base.State(); private uint rep0, rep1, rep2, rep3; public Decoder() { m_DictionarySize = -1; for (int i = 0; i < Base.kNumLenToPosStates; i++) { m_PosSlotDecoder[i] = new BitTreeDecoder(Base.kNumPosSlotBits); } } private void CreateDictionary() { if (m_DictionarySize < 0) { throw new InvalidParamException(); } m_OutWindow = new OutWindow(); int blockSize = Math.Max(m_DictionarySize, (1 << 12)); m_OutWindow.Create(blockSize); } private void SetLiteralProperties(int lp, int lc) { if (lp > 8) { throw new InvalidParamException(); } if (lc > 8) { throw new InvalidParamException(); } m_LiteralDecoder.Create(lp, lc); } private void SetPosBitsProperties(int pb) { if (pb > Base.kNumPosStatesBitsMax) { throw new InvalidParamException(); } uint numPosStates = (uint)1 << pb; m_LenDecoder.Create(numPosStates); m_RepLenDecoder.Create(numPosStates); m_PosStateMask = numPosStates - 1; } private void Init() { uint i; for (i = 0; i < Base.kNumStates; i++) { for (uint j = 0; j <= m_PosStateMask; j++) { uint index = (i << Base.kNumPosStatesBitsMax) + j; m_IsMatchDecoders[index].Init(); m_IsRep0LongDecoders[index].Init(); } m_IsRepDecoders[i].Init(); m_IsRepG0Decoders[i].Init(); m_IsRepG1Decoders[i].Init(); m_IsRepG2Decoders[i].Init(); } m_LiteralDecoder.Init(); for (i = 0; i < Base.kNumLenToPosStates; i++) { m_PosSlotDecoder[i].Init(); } // m_PosSpecDecoder.Init(); for (i = 0; i < Base.kNumFullDistances - Base.kEndPosModelIndex; i++) { m_PosDecoders[i].Init(); } m_LenDecoder.Init(); m_RepLenDecoder.Init(); m_PosAlignDecoder.Init(); state.Init(); rep0 = 0; rep1 = 0; rep2 = 0; rep3 = 0; } public void Code(Stream inStream, Stream outStream, Int64 inSize, Int64 outSize, ICodeProgress progress) { if (m_OutWindow == null) { CreateDictionary(); } m_OutWindow.Init(outStream); if (outSize > 0) { m_OutWindow.SetLimit(outSize); } else { m_OutWindow.SetLimit(Int64.MaxValue - m_OutWindow.Total); } RangeCoder.Decoder rangeDecoder = new RangeCoder.Decoder(); rangeDecoder.Init(inStream); Code(m_DictionarySize, m_OutWindow, rangeDecoder); m_OutWindow.ReleaseStream(); rangeDecoder.ReleaseStream(); if (!rangeDecoder.IsFinished || (inSize > 0 && rangeDecoder.Total != inSize)) { throw new DataErrorException(); } if (m_OutWindow.HasPending) { throw new DataErrorException(); } m_OutWindow = null; } internal bool Code(int 
dictionarySize, OutWindow outWindow, RangeCoder.Decoder rangeDecoder) { int dictionarySizeCheck = Math.Max(dictionarySize, 1); outWindow.CopyPending(); while (outWindow.HasSpace) { uint posState = (uint)outWindow.Total & m_PosStateMask; if (m_IsMatchDecoders[(state.Index << Base.kNumPosStatesBitsMax) + posState].Decode(rangeDecoder) == 0) { byte b; byte prevByte = outWindow.GetByte(0); if (!state.IsCharState()) { b = m_LiteralDecoder.DecodeWithMatchByte(rangeDecoder, (uint)outWindow.Total, prevByte, outWindow.GetByte((int)rep0)); } else { b = m_LiteralDecoder.DecodeNormal(rangeDecoder, (uint)outWindow.Total, prevByte); } outWindow.PutByte(b); state.UpdateChar(); } else { uint len; if (m_IsRepDecoders[state.Index].Decode(rangeDecoder) == 1) { if (m_IsRepG0Decoders[state.Index].Decode(rangeDecoder) == 0) { if ( m_IsRep0LongDecoders[(state.Index << Base.kNumPosStatesBitsMax) + posState].Decode( rangeDecoder) == 0) { state.UpdateShortRep(); outWindow.PutByte(outWindow.GetByte((int)rep0)); continue; } } else { UInt32 distance; if (m_IsRepG1Decoders[state.Index].Decode(rangeDecoder) == 0) { distance = rep1; } else { if (m_IsRepG2Decoders[state.Index].Decode(rangeDecoder) == 0) { distance = rep2; } else { distance = rep3; rep3 = rep2; } rep2 = rep1; } rep1 = rep0; rep0 = distance; } len = m_RepLenDecoder.Decode(rangeDecoder, posState) + Base.kMatchMinLen; state.UpdateRep(); } else { rep3 = rep2; rep2 = rep1; rep1 = rep0; len = Base.kMatchMinLen + m_LenDecoder.Decode(rangeDecoder, posState); state.UpdateMatch(); uint posSlot = m_PosSlotDecoder[Base.GetLenToPosState(len)].Decode(rangeDecoder); if (posSlot >= Base.kStartPosModelIndex) { int numDirectBits = (int)((posSlot >> 1) - 1); rep0 = ((2 | (posSlot & 1)) << numDirectBits); if (posSlot < Base.kEndPosModelIndex) { rep0 += BitTreeDecoder.ReverseDecode(m_PosDecoders, rep0 - posSlot - 1, rangeDecoder, numDirectBits); } else { rep0 += (rangeDecoder.DecodeDirectBits( numDirectBits - Base.kNumAlignBits) << Base.kNumAlignBits); rep0 += m_PosAlignDecoder.ReverseDecode(rangeDecoder); } } else { rep0 = posSlot; } } if (rep0 >= outWindow.Total || rep0 >= dictionarySizeCheck) { if (rep0 == 0xFFFFFFFF) { return true; } throw new DataErrorException(); } outWindow.CopyBlock((int)rep0, (int)len); } } return false; } public void SetDecoderProperties(byte[] properties) { if (properties.Length < 1) { throw new InvalidParamException(); } int lc = properties[0] % 9; int remainder = properties[0] / 9; int lp = remainder % 5; int pb = remainder / 5; if (pb > Base.kNumPosStatesBitsMax) { throw new InvalidParamException(); } SetLiteralProperties(lp, lc); SetPosBitsProperties(pb); Init(); if (properties.Length >= 5) { m_DictionarySize = 0; for (int i = 0; i < 4; i++) { m_DictionarySize += properties[1 + i] << (i * 8); } } } public void Train(Stream stream) { if (m_OutWindow == null) { CreateDictionary(); } m_OutWindow.Train(stream); } /* public override bool CanRead { get { return true; }} public override bool CanWrite { get { return true; }} public override bool CanSeek { get { return true; }} public override long Length { get { return 0; }} public override long Position { get { return 0; } set { } } public override void Flush() { } public override int Read(byte[] buffer, int offset, int count) { return 0; } public override void Write(byte[] buffer, int offset, int count) { } public override long Seek(long offset, System.IO.SeekOrigin origin) { return 0; } public override void SetLength(long value) {} */ } }
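// --- Illustrative sketch (not part of the original decoder source) ---
// Minimal use of the Decoder above for a standalone .lzma stream, assuming the conventional
// 13-byte header: 5 property bytes (consumed by SetDecoderProperties) followed by an 8-byte
// little-endian uncompressed size. The Decoder type is internal, so a caller like this has to
// live in the same assembly; the header layout and helper name are assumptions.
using System;
using System.IO;

namespace CatLib._3rd.SharpCompress.Compressors.LZMA
{
    internal static class DecoderUsageSketch
    {
        internal static void Decompress(Stream input, Stream output)
        {
            var properties = new byte[5];
            if (input.Read(properties, 0, 5) != 5)
            {
                throw new EndOfStreamException("Truncated LZMA property header.");
            }

            var sizeBytes = new byte[8];
            if (input.Read(sizeBytes, 0, 8) != 8)
            {
                throw new EndOfStreamException("Truncated LZMA size field.");
            }

            long outSize = BitConverter.ToInt64(sizeBytes, 0);

            var decoder = new Decoder();
            decoder.SetDecoderProperties(properties);        // lc/lp/pb plus dictionary size
            decoder.Code(input, output, -1, outSize, null);  // inSize unknown, progress unused
        }
    }
}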
/** * Copyright 2010-present Facebook. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; using Android.App; using Android.Content; using Android.Graphics; using Android.Locations; using Android.OS; using Android.Runtime; using Android.Support.V4.App; using Android.Text; using Android.Views; using Android.Widget; using Xamarin.Facebook; using Xamarin.Facebook.Share.Widget; using Xamarin.Facebook.Login.Widget; using Xamarin.Facebook.AppEvents; using Xamarin.Facebook.Share.Model; using Xamarin.Facebook.Share; using Xamarin.Facebook.Login; [assembly:Permission (Name = Android.Manifest.Permission.Internet)] [assembly:Permission (Name = Android.Manifest.Permission.WriteExternalStorage)] [assembly:MetaData ("com.facebook.sdk.ApplicationId", Value ="@string/ApplicationName")] [assembly:MetaData ("com.facebook.sdk.ApplicationName", Value = "@string/ApplicationName")] namespace HelloFacebookSample { [Activity (Label = "@string/app_name", MainLauncher = true, WindowSoftInputMode = SoftInput.AdjustResize)] public class HelloFacebookSampleActivity : FragmentActivity { static readonly string [] PERMISSIONS = new [] { "publish_actions" }; static readonly Location SEATTLE_LOCATION = new Location ("") { Latitude = (47.6097), Longitude = (-122.3331) }; const String PENDING_ACTION_BUNDLE_KEY = "com.facebook.samples.hellofacebook:PendingAction"; Button postStatusUpdateButton; Button postPhotoButton; ProfilePictureView profilePictureView; TextView greeting; PendingAction pendingAction = PendingAction.NONE; bool canPresentShareDialog; bool canPresentShareDialogWithPhotos; ICallbackManager callbackManager; ProfileTracker profileTracker; ShareDialog shareDialog; FacebookCallback<SharerResult> shareCallback; enum PendingAction { NONE, POST_PHOTO, POST_STATUS_UPDATE } protected override void OnCreate (Bundle savedInstanceState) { base.OnCreate (savedInstanceState); FacebookSdk.SdkInitialize (this.ApplicationContext); callbackManager = CallbackManagerFactory.Create (); var loginCallback = new FacebookCallback<LoginResult> { HandleSuccess = loginResult => { HandlePendingAction (); UpdateUI (); }, HandleCancel = () => { if (pendingAction != PendingAction.NONE) { ShowAlert ( GetString (Resource.String.cancelled), GetString (Resource.String.permission_not_granted)); pendingAction = PendingAction.NONE; } UpdateUI (); }, HandleError = loginError => { if (pendingAction != PendingAction.NONE && loginError is FacebookAuthorizationException) { ShowAlert ( GetString (Resource.String.cancelled), GetString (Resource.String.permission_not_granted)); pendingAction = PendingAction.NONE; } UpdateUI (); } }; LoginManager.Instance.RegisterCallback (callbackManager, loginCallback); shareCallback = new FacebookCallback<SharerResult> { HandleSuccess = shareResult => { Console.WriteLine ("HelloFacebook: Success!"); if (shareResult.PostId != null) { var title = Parent.GetString (Resource.String.error); var id = shareResult.PostId; var alertMsg = Parent.GetString (Resource.String.successfully_posted_post, 
id); ShowAlert (title, alertMsg); } }, HandleCancel = () => { Console.WriteLine ("HelloFacebook: Canceled"); }, HandleError = shareError => { Console.WriteLine ("HelloFacebook: Error: {0}", shareError); var title = Parent.GetString (Resource.String.error); var alertMsg = shareError.Message; ShowAlert (title, alertMsg); } }; shareDialog = new ShareDialog (this); shareDialog.RegisterCallback (callbackManager, shareCallback); if (savedInstanceState != null) { var name = savedInstanceState.GetString (PENDING_ACTION_BUNDLE_KEY); pendingAction = (PendingAction)Enum.Parse (typeof(PendingAction), name); } SetContentView (Resource.Layout.main); profileTracker = new CustomProfileTracker { HandleCurrentProfileChanged = (oldProfile, currentProfile) => { UpdateUI (); HandlePendingAction (); } }; profilePictureView = FindViewById <ProfilePictureView> (Resource.Id.profilePicture); greeting = FindViewById<TextView> (Resource.Id.greeting); postStatusUpdateButton = FindViewById<Button> (Resource.Id.postStatusUpdateButton); postStatusUpdateButton.Click += (sender, e) => { PerformPublish (PendingAction.POST_STATUS_UPDATE, canPresentShareDialog); }; postPhotoButton = FindViewById<Button> (Resource.Id.postPhotoButton); postPhotoButton.Click += (sender, e) => { PerformPublish (PendingAction.POST_PHOTO, canPresentShareDialogWithPhotos); }; // Can we present the share dialog for regular links? canPresentShareDialog = ShareDialog.CanShow (Java.Lang.Class.FromType (typeof(ShareLinkContent))); // Can we present the share dialog for photos? canPresentShareDialogWithPhotos = ShareDialog.CanShow (Java.Lang.Class.FromType (typeof(SharePhotoContent))); } void ShowAlert (string title, string msg, string buttonText = null) { new AlertDialog.Builder (Parent) .SetTitle (title) .SetMessage (msg) .SetPositiveButton (buttonText, (s2, e2) => { }) .Show (); } protected override void OnResume () { base.OnResume (); AppEventsLogger.ActivateApp (this); UpdateUI (); } protected override void OnSaveInstanceState (Bundle outState) { base.OnSaveInstanceState (outState); outState.PutString (PENDING_ACTION_BUNDLE_KEY, pendingAction.ToString ()); } protected override void OnActivityResult (int requestCode, Result resultCode, Intent data) { base.OnActivityResult (requestCode, resultCode, data); callbackManager.OnActivityResult (requestCode, (int)resultCode, data); } protected override void OnPause () { base.OnPause (); AppEventsLogger.DeactivateApp (this); } protected override void OnDestroy () { base.OnDestroy (); profileTracker.StopTracking (); } private void UpdateUI () { var enableButtons = AccessToken.CurrentAccessToken != null; postStatusUpdateButton.Enabled = (enableButtons || canPresentShareDialog); postPhotoButton.Enabled = (enableButtons || canPresentShareDialogWithPhotos); var profile = Profile.CurrentProfile; if (enableButtons && profile != null) { profilePictureView.ProfileId = profile.Id; greeting.Text = GetString (Resource.String.hello_user, new Java.Lang.String (profile.FirstName)); } else { profilePictureView.ProfileId = null; greeting.Text = null; } } private void HandlePendingAction () { PendingAction previouslyPendingAction = pendingAction; // These actions may re-set pendingAction if they are still pending, but we assume they // will succeed. 
pendingAction = PendingAction.NONE; switch (previouslyPendingAction) { case PendingAction.POST_PHOTO: PostPhoto (); break; case PendingAction.POST_STATUS_UPDATE: PostStatusUpdate (); break; } } void PostStatusUpdate () { var profile = Profile.CurrentProfile; var linkContent = new ShareLinkContent.Builder () .SetContentTitle ("Hello Facebook") .SetContentDescription ("The 'Hello Facebook' sample showcases simple Facebook integration") .SetContentUrl (Android.Net.Uri.Parse ("http://developer.facebook.com/docs/android")) .JavaCast<ShareLinkContent.Builder> () .Build (); if (canPresentShareDialog) shareDialog.Show (linkContent); else if (profile != null && HasPublishPermission ()) ShareApi.Share (linkContent, shareCallback); else pendingAction = PendingAction.POST_STATUS_UPDATE; } private void PostPhoto () { var image = BitmapFactory.DecodeResource (this.Resources, Resource.Drawable.icon); var sharePhoto = new SharePhoto.Builder () .SetBitmap (image).Build ().JavaCast<SharePhoto> (); var photos = new List<SharePhoto> (); photos.Add (sharePhoto); var sharePhotoContent = new SharePhotoContent.Builder () .SetPhotos (photos).Build (); if (canPresentShareDialogWithPhotos) shareDialog.Show (sharePhotoContent); else if (HasPublishPermission ()) ShareApi.Share (sharePhotoContent, shareCallback); else pendingAction = PendingAction.POST_PHOTO; } bool HasPublishPermission () { var accessToken = AccessToken.CurrentAccessToken; return accessToken != null && accessToken.Permissions.Contains ("publish_actions"); } void PerformPublish (PendingAction action, bool allowNoToken) { var accessToken = AccessToken.CurrentAccessToken; if (accessToken != null) { pendingAction = action; if (HasPublishPermission ()) { HandlePendingAction (); return; } else { LoginManager.Instance.LogInWithPublishPermissions (this, PERMISSIONS); return; } } if (allowNoToken) { pendingAction = action; HandlePendingAction (); } } } class FacebookCallback<TResult> : Java.Lang.Object, IFacebookCallback where TResult : Java.Lang.Object { public Action HandleCancel { get; set; } public Action<FacebookException> HandleError { get; set; } public Action<TResult> HandleSuccess { get; set; } public void OnCancel () { var c = HandleCancel; if (c != null) c (); } public void OnError (FacebookException error) { var c = HandleError; if (c != null) c (error); } public void OnSuccess (Java.Lang.Object result) { var c = HandleSuccess; if (c != null) c (result.JavaCast<TResult> ()); } } class CustomProfileTracker : ProfileTracker { public delegate void CurrentProfileChangedDelegate (Profile oldProfile, Profile currentProfile); public CurrentProfileChangedDelegate HandleCurrentProfileChanged { get; set; } protected override void OnCurrentProfileChanged (Profile oldProfile, Profile currentProfile) { var p = HandleCurrentProfileChanged; if (p != null) p (oldProfile, currentProfile); } } }
namespace RadarORM.Migrations { using System; using System.Data.Entity.Migrations; public partial class FirstCommit : DbMigration { public override void Up() { CreateTable( "dbo.categories", c => new { CategoryId = c.Int(nullable: false, identity: true), Name = c.String(nullable: false, maxLength: 255), }) .PrimaryKey(t => t.CategoryId); CreateTable( "dbo.companies", c => new { CompanyId = c.Int(nullable: false, identity: true), Name = c.String(nullable: false, maxLength: 255), Avatar = c.String(maxLength: 255), LocationId = c.Long(nullable: false), Description = c.String(nullable: false), OpenHours = c.String(nullable: false), Extra = c.String(nullable: false), CompanyType = c.Int(nullable: false), ParentId = c.Int(), CreatedDate = c.DateTime(nullable: false), ModifiedDate = c.DateTime(), DeletedDate = c.DateTime(), }) .PrimaryKey(t => t.CompanyId) .ForeignKey("dbo.locations", t => t.LocationId) .ForeignKey("dbo.companies", t => t.ParentId) .Index(t => t.LocationId) .Index(t => t.ParentId); CreateTable( "dbo.employees", c => new { EmployeeId = c.Int(nullable: false, identity: true), CompanyId = c.Int(nullable: false), UserId = c.Int(nullable: false), RoleId = c.Int(nullable: false), CreatedDate = c.DateTime(nullable: false), ModifiedDate = c.DateTime(), DeletedDate = c.DateTime(), }) .PrimaryKey(t => t.EmployeeId) .ForeignKey("dbo.companies", t => t.CompanyId) .ForeignKey("dbo.roles", t => t.RoleId) .ForeignKey("dbo.users", t => t.UserId) .Index(t => t.CompanyId) .Index(t => t.RoleId) .Index(t => t.UserId); CreateTable( "dbo.posts", c => new { PostId = c.Long(nullable: false, identity: true), Body = c.String(nullable: false), Title = c.String(nullable: false, maxLength: 255), SubTitle = c.String(nullable: false, maxLength: 255), CreatedDate = c.DateTime(nullable: false), ModifiedDate = c.DateTime(), DeletedDate = c.DateTime(), CompanyId = c.Int(nullable: false), EmployeeId = c.Int(nullable: false), }) .PrimaryKey(t => t.PostId) .ForeignKey("dbo.companies", t => t.CompanyId) .ForeignKey("dbo.employees", t => t.EmployeeId) .Index(t => t.CompanyId) .Index(t => t.EmployeeId); CreateTable( "dbo.users", c => new { UserId = c.Int(nullable: false, identity: true), Username = c.String(nullable: false, maxLength: 255), Email = c.String(nullable: false, maxLength: 255), DateOfBirth = c.DateTime(nullable: false), Gender = c.String(nullable: false, maxLength: 255), Avatar = c.String(nullable: false, maxLength: 255), Password = c.String(nullable: false, maxLength: 255), Salt = c.String(nullable: false, maxLength: 255), Bio = c.String(), CreatedDate = c.DateTime(nullable: false), ModifiedDate = c.DateTime(), DeletedDate = c.DateTime(), LocationId = c.Long(nullable: false), RoleId = c.Int(nullable: false), }) .PrimaryKey(t => t.UserId) .ForeignKey("dbo.locations", t => t.LocationId) .ForeignKey("dbo.roles", t => t.RoleId) .Index(t => t.LocationId) .Index(t => t.RoleId); CreateTable( "dbo.comments", c => new { CommentId = c.Long(nullable: false, identity: true), Body = c.String(nullable: false), CreatedDate = c.DateTime(nullable: false), ModifiedDate = c.DateTime(), DeletedDate = c.DateTime(), UserId = c.Int(nullable: false), ParentId = c.Long(), }) .PrimaryKey(t => t.CommentId) .ForeignKey("dbo.comments", t => t.ParentId) .ForeignKey("dbo.users", t => t.UserId) .Index(t => t.ParentId) .Index(t => t.UserId); CreateTable( "dbo.locations", c => new { LocationId = c.Long(nullable: false, identity: true), Latitude = c.Decimal(nullable: false, precision: 13, scale: 9), Longitude = c.Decimal(nullable: false, 
precision: 13, scale: 9), Street = c.String(nullable: false, maxLength: 255), Number = c.String(nullable: false, maxLength: 255), Box = c.String(nullable: false, maxLength: 255), Zipcode = c.String(nullable: false, maxLength: 255), City = c.String(nullable: false, maxLength: 255), Country = c.String(nullable: false, maxLength: 255), }) .PrimaryKey(t => t.LocationId); CreateTable( "dbo.notifications", c => new { NotificationId = c.Long(nullable: false, identity: true), UserId = c.Int(nullable: false), Viewed = c.DateTime(), }) .PrimaryKey(t => t.NotificationId) .ForeignKey("dbo.users", t => t.UserId) .Index(t => t.UserId); CreateTable( "dbo.ratings", c => new { RatingId = c.Int(nullable: false, identity: true), Score = c.Int(nullable: false), UserId = c.Int(nullable: false), }) .PrimaryKey(t => t.RatingId) .ForeignKey("dbo.users", t => t.UserId) .Index(t => t.UserId); CreateTable( "dbo.messages", c => new { MessageId = c.Long(nullable: false, identity: true), Body = c.String(nullable: false), CreatedDate = c.DateTime(nullable: false), ModifiedDate = c.DateTime(), DeletedDate = c.DateTime(), SenderId = c.Int(nullable: false), RecieverId = c.Int(nullable: false), }) .PrimaryKey(t => t.MessageId) .ForeignKey("dbo.users", t => t.RecieverId) .ForeignKey("dbo.users", t => t.SenderId) .Index(t => t.RecieverId) .Index(t => t.SenderId); CreateTable( "dbo.roles", c => new { RoleId = c.Int(nullable: false, identity: true), Name = c.String(nullable: false, maxLength: 255), }) .PrimaryKey(t => t.RoleId); CreateTable( "dbo.companies_have_categories", c => new { CompanyId = c.Int(nullable: false), CategoryId = c.Int(nullable: false), }) .PrimaryKey(t => new { t.CompanyId, t.CategoryId }) .ForeignKey("dbo.companies", t => t.CompanyId, cascadeDelete: true) .ForeignKey("dbo.categories", t => t.CategoryId, cascadeDelete: true) .Index(t => t.CompanyId) .Index(t => t.CategoryId); CreateTable( "dbo.users_interestedin_categories", c => new { UserId = c.Int(nullable: false), CategoryId = c.Int(nullable: false), }) .PrimaryKey(t => new { t.UserId, t.CategoryId }) .ForeignKey("dbo.users", t => t.UserId, cascadeDelete: true) .ForeignKey("dbo.categories", t => t.CategoryId, cascadeDelete: true) .Index(t => t.UserId) .Index(t => t.CategoryId); CreateTable( "dbo.users_follow_companies", c => new { UserId = c.Int(nullable: false), CompanyId = c.Int(nullable: false), }) .PrimaryKey(t => new { t.UserId, t.CompanyId }) .ForeignKey("dbo.users", t => t.UserId, cascadeDelete: true) .ForeignKey("dbo.companies", t => t.CompanyId, cascadeDelete: true) .Index(t => t.UserId) .Index(t => t.CompanyId); CreateTable( "dbo.users_beento_locations", c => new { UserId = c.Int(nullable: false), LocationId = c.Long(nullable: false), }) .PrimaryKey(t => new { t.UserId, t.LocationId }) .ForeignKey("dbo.users", t => t.UserId, cascadeDelete: true) .ForeignKey("dbo.locations", t => t.LocationId, cascadeDelete: true) .Index(t => t.UserId) .Index(t => t.LocationId); CreateTable( "dbo.users_liked_posts", c => new { UserId = c.Int(nullable: false), PostId = c.Long(nullable: false), }) .PrimaryKey(t => new { t.UserId, t.PostId }) .ForeignKey("dbo.users", t => t.UserId, cascadeDelete: true) .ForeignKey("dbo.posts", t => t.PostId, cascadeDelete: true) .Index(t => t.UserId) .Index(t => t.PostId); CreateTable( "dbo.users_follow_users", c => new { FollowingId = c.Int(nullable: false), FolloweeId = c.Int(nullable: false), }) .PrimaryKey(t => new { t.FollowingId, t.FolloweeId }) .ForeignKey("dbo.users", t => t.FollowingId) .ForeignKey("dbo.users", t => 
t.FolloweeId) .Index(t => t.FollowingId) .Index(t => t.FolloweeId); CreateTable( "dbo.companies_have_ratings", c => new { CompanyId = c.Int(nullable: false), RatingId = c.Int(nullable: false), }) .PrimaryKey(t => new { t.CompanyId, t.RatingId }) .ForeignKey("dbo.companies", t => t.CompanyId, cascadeDelete: true) .ForeignKey("dbo.ratings", t => t.RatingId, cascadeDelete: true) .Index(t => t.CompanyId) .Index(t => t.RatingId); } public override void Down() { DropForeignKey("dbo.companies_have_ratings", "RatingId", "dbo.ratings"); DropForeignKey("dbo.companies_have_ratings", "CompanyId", "dbo.companies"); DropForeignKey("dbo.companies", "ParentId", "dbo.companies"); DropForeignKey("dbo.companies", "LocationId", "dbo.locations"); DropForeignKey("dbo.employees", "UserId", "dbo.users"); DropForeignKey("dbo.employees", "RoleId", "dbo.roles"); DropForeignKey("dbo.users_follow_users", "FolloweeId", "dbo.users"); DropForeignKey("dbo.users_follow_users", "FollowingId", "dbo.users"); DropForeignKey("dbo.users", "RoleId", "dbo.roles"); DropForeignKey("dbo.messages", "SenderId", "dbo.users"); DropForeignKey("dbo.messages", "RecieverId", "dbo.users"); DropForeignKey("dbo.ratings", "UserId", "dbo.users"); DropForeignKey("dbo.users_liked_posts", "PostId", "dbo.posts"); DropForeignKey("dbo.users_liked_posts", "UserId", "dbo.users"); DropForeignKey("dbo.notifications", "UserId", "dbo.users"); DropForeignKey("dbo.users_beento_locations", "LocationId", "dbo.locations"); DropForeignKey("dbo.users_beento_locations", "UserId", "dbo.users"); DropForeignKey("dbo.users", "LocationId", "dbo.locations"); DropForeignKey("dbo.users_follow_companies", "CompanyId", "dbo.companies"); DropForeignKey("dbo.users_follow_companies", "UserId", "dbo.users"); DropForeignKey("dbo.comments", "UserId", "dbo.users"); DropForeignKey("dbo.comments", "ParentId", "dbo.comments"); DropForeignKey("dbo.users_interestedin_categories", "CategoryId", "dbo.categories"); DropForeignKey("dbo.users_interestedin_categories", "UserId", "dbo.users"); DropForeignKey("dbo.posts", "EmployeeId", "dbo.employees"); DropForeignKey("dbo.posts", "CompanyId", "dbo.companies"); DropForeignKey("dbo.employees", "CompanyId", "dbo.companies"); DropForeignKey("dbo.companies_have_categories", "CategoryId", "dbo.categories"); DropForeignKey("dbo.companies_have_categories", "CompanyId", "dbo.companies"); DropIndex("dbo.companies_have_ratings", new[] { "RatingId" }); DropIndex("dbo.companies_have_ratings", new[] { "CompanyId" }); DropIndex("dbo.companies", new[] { "ParentId" }); DropIndex("dbo.companies", new[] { "LocationId" }); DropIndex("dbo.employees", new[] { "UserId" }); DropIndex("dbo.employees", new[] { "RoleId" }); DropIndex("dbo.users_follow_users", new[] { "FolloweeId" }); DropIndex("dbo.users_follow_users", new[] { "FollowingId" }); DropIndex("dbo.users", new[] { "RoleId" }); DropIndex("dbo.messages", new[] { "SenderId" }); DropIndex("dbo.messages", new[] { "RecieverId" }); DropIndex("dbo.ratings", new[] { "UserId" }); DropIndex("dbo.users_liked_posts", new[] { "PostId" }); DropIndex("dbo.users_liked_posts", new[] { "UserId" }); DropIndex("dbo.notifications", new[] { "UserId" }); DropIndex("dbo.users_beento_locations", new[] { "LocationId" }); DropIndex("dbo.users_beento_locations", new[] { "UserId" }); DropIndex("dbo.users", new[] { "LocationId" }); DropIndex("dbo.users_follow_companies", new[] { "CompanyId" }); DropIndex("dbo.users_follow_companies", new[] { "UserId" }); DropIndex("dbo.comments", new[] { "UserId" }); DropIndex("dbo.comments", new[] { 
"ParentId" }); DropIndex("dbo.users_interestedin_categories", new[] { "CategoryId" }); DropIndex("dbo.users_interestedin_categories", new[] { "UserId" }); DropIndex("dbo.posts", new[] { "EmployeeId" }); DropIndex("dbo.posts", new[] { "CompanyId" }); DropIndex("dbo.employees", new[] { "CompanyId" }); DropIndex("dbo.companies_have_categories", new[] { "CategoryId" }); DropIndex("dbo.companies_have_categories", new[] { "CompanyId" }); DropTable("dbo.companies_have_ratings"); DropTable("dbo.users_follow_users"); DropTable("dbo.users_liked_posts"); DropTable("dbo.users_beento_locations"); DropTable("dbo.users_follow_companies"); DropTable("dbo.users_interestedin_categories"); DropTable("dbo.companies_have_categories"); DropTable("dbo.roles"); DropTable("dbo.messages"); DropTable("dbo.ratings"); DropTable("dbo.notifications"); DropTable("dbo.locations"); DropTable("dbo.comments"); DropTable("dbo.users"); DropTable("dbo.posts"); DropTable("dbo.employees"); DropTable("dbo.companies"); DropTable("dbo.categories"); } } }
/* * REST API Documentation for the MOTI School Bus Application * * The School Bus application tracks that inspections are performed in a timely fashion. For each school bus the application tracks information about the bus (including data from ICBC, NSC, etc.), it's past and next inspection dates and results, contacts, and the inspector responsible for next inspecting the bus. * * OpenAPI spec version: v1 * * */ using System; using System.Linq; using System.Security.Claims; using AutoMapper; using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Mvc; using Microsoft.EntityFrameworkCore; using SchoolBusAPI.Models; using SchoolBusAPI.ViewModels; namespace SchoolBusAPI.Services { /// <summary> /// /// </summary> public interface ICurrentUserService { /// <summary> /// /// </summary> /// <remarks>Removes a specific user favourite</remarks> /// <param name="id">id of Favourite to delete</param> /// <response code="200">OK</response> IActionResult UsersCurrentFavouritesIdDeletePostAsync(int id); /// <summary> /// /// </summary> /// <remarks>Create new favourite for the current user</remarks> /// <param name="item"></param> /// <response code="201">UserFavourite created</response> IActionResult UsersCurrentFavouritesPostAsync(UserFavourite item); /// <summary> /// /// </summary> /// <remarks>Updates a favourite</remarks> /// <param name="item"></param> /// <response code="201">UserFavourite created</response> IActionResult UsersCurrentFavouritesPutAsync(UserFavourite item); /// <summary> /// /// </summary> /// <remarks>Returns a user&#39;s favourites of a given type. If type is empty, returns all.</remarks> /// <param name="type">type of favourite to return</param> /// <response code="200">OK</response> /// <response code="404">User not found</response> IActionResult UsersCurrentFavouritesTypeGetAsync(string type); /// <summary> /// /// </summary> /// <remarks>Get the currently logged in user</remarks> /// <response code="200">OK</response> IActionResult UsersCurrentGetAsync(); } /// <summary> /// /// </summary> public class CurrentUserService : ServiceBase, ICurrentUserService { private readonly DbAppContext _context; /// <summary> /// Create a service and set the database context /// </summary> public CurrentUserService(IHttpContextAccessor httpContextAccessor, DbAppContext context, IMapper mapper) : base(httpContextAccessor, context, mapper) { _context = context; } /// <summary> /// /// </summary> /// <remarks>Removes a specific user favourite</remarks> /// <param name="id">id of Favourite to delete</param> /// <response code="200">OK</response> public virtual IActionResult UsersCurrentFavouritesIdDeletePostAsync(int id) { // get the current user id int? user_id = GetCurrentUserId(); if (user_id != null) { bool exists = _context.UserFavourites.Where(x => x.User.Id == user_id) .Any(a => a.Id == id); if (exists) { var item = _context.UserFavourites.First(a => a.Id == id); _context.UserFavourites.Remove(item); // Save the changes _context.SaveChanges(); return new ObjectResult(item); } else { // record not found return new StatusCodeResult(404); } } else { return new StatusCodeResult(403); } } /// <summary> /// /// </summary> /// <remarks>Create new favourite for the current user</remarks> /// <param name="item"></param> /// <response code="201">UserFavourite created</response> public virtual IActionResult UsersCurrentFavouritesPostAsync(UserFavourite item) { item.User = null; // get the current user id int? 
id = GetCurrentUserId(); if (id != null) { bool user_exists = _context.Users.Any(a => a.Id == id); if (user_exists) { User user = _context.Users.First(a => a.Id == id); item.User = user; } } bool exists = _context.UserFavourites.Any(a => a.Id == item.Id); if (exists) { _context.UserFavourites.Update(item); // Save the changes _context.SaveChanges(); return new ObjectResult(item); } else { // record not found. add the record. _context.UserFavourites.Add(item); // Save the changes _context.SaveChanges(); return new ObjectResult(item); } } /// <summary> /// /// </summary> /// <remarks>Updates a favourite</remarks> /// <param name="item"></param> /// <response code="201">UserFavourite created</response> public virtual IActionResult UsersCurrentFavouritesPutAsync(UserFavourite item) { item.User = null; // get the current user id int? id = GetCurrentUserId(); if (id != null) { bool user_exists = _context.Users.Any(a => a.Id == id); if (user_exists) { User user = _context.Users.First(a => a.Id == id); item.User = user; } } bool exists = _context.UserFavourites.Any(a => a.Id == item.Id); if (exists) { _context.UserFavourites.Update(item); // Save the changes _context.SaveChanges(); return new ObjectResult(item); } else { // record not found return new StatusCodeResult(404); } } /// <summary> /// /// </summary> /// <remarks>Returns a user&#39;s favourites of a given type. If type is empty, returns all.</remarks> /// <param name="type">type of favourite to return</param> /// <response code="200">OK</response> /// <response code="404">User not found</response> public virtual IActionResult UsersCurrentFavouritesTypeGetAsync(string type) { // get the current user id int? id = GetCurrentUserId(); if (id != null) { var data = _context.UserFavourites .Where(x => x.User.Id == id) .Select(x => x); if (type != null) { data = data.Where(x => x.Type == type); } return new ObjectResult(data.ToList()); } else { // no user context. return new StatusCodeResult(403); } } /// <summary> /// /// </summary> /// <remarks>Get the currently logged in user</remarks> /// <response code="200">OK</response> public virtual IActionResult UsersCurrentGetAsync() { // get the current user id int? 
id = GetCurrentUserId(); if (id != null) { User currentUser = _context.Users .AsNoTracking() .Include(x => x.District) .Include(x => x.UserRoles) .ThenInclude(y => y.Role) .ThenInclude(z => z.RolePermissions) .ThenInclude(z => z.Permission) .First(x => x.Id == id); var result = Mapper.Map<CurrentUserViewModel>(currentUser); // get the name for the current logged in user result.GivenName = User.FindFirst(ClaimTypes.GivenName).Value; result.Surname = User.FindFirst(ClaimTypes.Surname).Value; DateTime today = DateTime.UtcNow.Date; DateTime dateTo = today.AddDays(31).AddSeconds(-1); int overdue = _context.SchoolBuss.AsNoTracking() .Count(x => x.Inspector.Id == id && x.NextInspectionDate < today && x.Status.ToLower() == "active"); int within30days = _context.SchoolBuss.AsNoTracking() .Count(x => x.Inspector.Id == id && x.NextInspectionDate >= today && x.NextInspectionDate <= dateTo && x.Status.ToLower() == "active"); int scheduledInspections = _context.SchoolBuss.AsNoTracking() .Count(x => x.Inspector.Id == id && x.NextInspectionDate >= today && x.Status.ToLower() == "active"); int reInspections = _context.SchoolBuss.AsNoTracking() .Count(x => x.Inspector.Id == id && x.NextInspectionTypeCode.ToLower() == "re-inspection" && x.Status.ToLower() == "active"); int ccwNotifications = _context.SchoolBuss.AsNoTracking() .Where(x => x.Inspector.Id == id) .SelectMany(x => x.CCWNotifications) .Count(x => !x.HasBeenViewed); result.OverdueInspections = overdue; result.DueWithin30DaysInspections = within30days; result.ScheduledInspections = scheduledInspections; result.ReInspections = reInspections; result.CCWNotifications = ccwNotifications; result.Permissions = currentUser .UserRoles .Where(ur => (ur.EffectiveDate == DateTime.MinValue || ur.EffectiveDate <= DateTime.Now) && (!ur.ExpiryDate.HasValue || ur.ExpiryDate == DateTime.MinValue || ur.ExpiryDate > DateTime.Now)) .Select(ur => ur.Role) .Where(r => !r.ExpiryDate.HasValue || r.ExpiryDate == DateTime.MinValue || r.ExpiryDate > DateTime.Now) //active roles .SelectMany(r => r.RolePermissions.Select(rp => rp.Permission)) .Where(p => !p.ExpiryDate.HasValue || p.ExpiryDate == DateTime.MinValue || p.ExpiryDate > DateTime.Now) //active permissions .ToLookup(p => p.Code) .Select(p => p.First()) .Select(p => p.Code) .ToList(); result.IsSystemAdmin = currentUser.UserRoles.Any(x => x.Role.Name == Roles.SystemAdmininstrator && (x.EffectiveDate == DateTime.MinValue || x.EffectiveDate <= DateTime.Now) && (!x.ExpiryDate.HasValue || x.ExpiryDate == DateTime.MinValue || x.ExpiryDate > DateTime.Now)); result.IsInspector = currentUser.UserRoles.Any(x => x.Role.Name == Roles.Inspector && (x.EffectiveDate == DateTime.MinValue || x.EffectiveDate <= DateTime.Now) && (!x.ExpiryDate.HasValue || x.ExpiryDate == DateTime.MinValue || x.ExpiryDate > DateTime.Now)); result.IsManager = currentUser.UserRoles.Any(x => x.Role.Name == Roles.Manager && (x.EffectiveDate == DateTime.MinValue || x.EffectiveDate <= DateTime.Now) && (!x.ExpiryDate.HasValue || x.ExpiryDate == DateTime.MinValue || x.ExpiryDate > DateTime.Now)); return new ObjectResult(result); } else { return new StatusCodeResult(404); // no current user ID } } } }
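// --- Illustrative sketch (not part of the original service file) ---
// How a service like CurrentUserService is typically wired into ASP.NET Core dependency
// injection so controllers can take ICurrentUserService as a constructor dependency. The
// extension-method and class names are assumptions; DbAppContext and AutoMapper's IMapper
// are assumed to be registered elsewhere in Startup.
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.DependencyInjection;
using SchoolBusAPI.Services;

namespace SchoolBusAPI
{
    public static class CurrentUserServiceRegistrationSketch
    {
        public static IServiceCollection AddCurrentUserService(this IServiceCollection services)
        {
            // ServiceBase needs IHttpContextAccessor to resolve the logged-in user's claims.
            services.AddSingleton<IHttpContextAccessor, HttpContextAccessor>();
            services.AddScoped<ICurrentUserService, CurrentUserService>();
            return services;
        }
    }
}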
using System; using System.Collections; using System.Collections.Generic; using System.Linq; namespace UnityWeld.Binding { public class ObservableList<T> : IList<T>, IList, INotifyCollectionChanged, ITypedList { /// <summary> /// Inner (non-observable) list. /// </summary> private readonly List<T> innerList = new List<T>(); /// <summary> /// Event raised when the collection has been changed. /// </summary> public event EventHandler<NotifyCollectionChangedEventArgs> CollectionChanged; /// <summary> /// Default constructor /// </summary> public ObservableList() { } /// <summary> /// Create from existing items. /// </summary> public ObservableList(IEnumerable<T> items) { innerList.AddRange(items); } public int IndexOf(T item) { return innerList.IndexOf(item); } public void Insert(int index, T item) { innerList.Insert(index, item); if (CollectionChanged != null) { CollectionChanged(this, NotifyCollectionChangedEventArgs.ItemAdded(item, index)); } } public void RemoveAt(int index) { var item = innerList[index]; innerList.RemoveAt(index); if (CollectionChanged != null) { CollectionChanged(this, NotifyCollectionChangedEventArgs.ItemRemoved(item, index)); } } public T this[int index] { get { return innerList[index]; } set { innerList[index] = value; } } public void Add(T item) { var newIndex = innerList.Count; innerList.Add(item); if (CollectionChanged != null) { CollectionChanged(this, NotifyCollectionChangedEventArgs.ItemAdded(item, newIndex)); } } public void Clear() { var oldItems = innerList.Cast<object>().ToArray(); innerList.Clear(); if (CollectionChanged != null) { CollectionChanged(this, NotifyCollectionChangedEventArgs.Reset(oldItems)); } } public bool Contains(T item) { return innerList.Contains(item); } public void CopyTo(T[] array, int arrayIndex) { innerList.CopyTo(array, arrayIndex); } public int Count { get { return innerList.Count; } } /// <summary> /// Specifies the type of items in the list.
/// </summary> public Type ItemType { get { return typeof(T); } } public bool IsReadOnly { get { return false; } } public bool IsFixedSize { get { return true; } } public object SyncRoot { get { return this; } } public bool IsSynchronized { get { return false; } } object IList.this[int index] { get { return innerList[index]; } set { innerList[index] = (T)value; } } public bool Remove(T item) { var index = innerList.IndexOf(item); var result = innerList.Remove(item); if (result && CollectionChanged != null) { CollectionChanged(this, NotifyCollectionChangedEventArgs.ItemRemoved(item, index)); } return result; } public IEnumerator<T> GetEnumerator() { return innerList.GetEnumerator(); } IEnumerator IEnumerable.GetEnumerator() { return innerList.GetEnumerator(); } public int Add(object item) { var newIndex = innerList.Count; innerList.Add((T)item); if (CollectionChanged != null) { CollectionChanged(this, NotifyCollectionChangedEventArgs.ItemAdded(item, newIndex)); } return innerList.Count - 1; } public bool Contains(object item) { return innerList.Contains((T)item); } public int IndexOf(object item) { return innerList.IndexOf((T)item); } public void Insert(int index, object item) { innerList.Insert(index, (T)item); if (CollectionChanged != null) { CollectionChanged(this, NotifyCollectionChangedEventArgs.ItemAdded(item, index)); } } public void Remove(object item) { var index = innerList.IndexOf((T)item); var result = innerList.Remove((T)item); if (result && CollectionChanged != null) { CollectionChanged(this, NotifyCollectionChangedEventArgs.ItemRemoved(item, index)); } } public void CopyTo(Array array, int index) { innerList.CopyTo((T[])array, index); } } public static class LinqExts { /// <summary> /// Convert an IEnumerable into an observable list. /// </summary> public static ObservableList<T> ToObservableList<T>(this IEnumerable<T> source) { return new ObservableList<T>(source); } /// <summary> /// Convert a variable length argument list of items to an ObservableList. /// </summary> public static ObservableList<T> ObservableListFromItems<T>(params T[] items) { return new ObservableList<T>(items); } } }
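// --- Illustrative sketch (not part of the original file) ---
// Listening for change notifications on an ObservableList<T>, using only members declared
// above (CollectionChanged, Add, Remove and the ToObservableList extension). The item values
// and handler body are illustrative.
using System;
using System.Linq;

namespace UnityWeld.Binding
{
    public static class ObservableListUsageSketch
    {
        public static void Demo()
        {
            var names = Enumerable.Range(1, 3)
                .Select(i => "item" + i)
                .ToObservableList();

            // Raised for inserts, adds, removes and Clear (Clear reports a Reset).
            names.CollectionChanged += (sender, args) => Console.WriteLine("Collection changed");

            names.Add("item4");     // fires an ItemAdded notification
            names.Remove("item1");  // fires an ItemRemoved notification
        }
    }
}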
using System; using System.Threading.Tasks; using EasyNetQ.Consumer; using EasyNetQ.FluentConfiguration; namespace EasyNetQ { /// <summary> /// Provides a simple Publish/Subscribe and Request/Response API for a message bus. /// </summary> public interface IBus : IDisposable { /// <summary> /// Publishes a message. /// </summary> /// <typeparam name="T">The message type</typeparam> /// <param name="message">The message to publish</param> void Publish<T>(T message) where T : class; /// <summary> /// Publishes a message with a topic /// </summary> /// <typeparam name="T">The message type</typeparam> /// <param name="message">The message to publish</param> /// <param name="topic">The topic string</param> void Publish<T>(T message, string topic) where T : class; /// <summary> /// Publishes a message. /// When used with publisher confirms the task completes when the publish is confirmed. /// Task will throw an exception if the confirm is NACK'd or times out. /// </summary> /// <typeparam name="T">The message type</typeparam> /// <param name="message">The message to publish</param> /// <returns></returns> Task PublishAsync<T>(T message) where T : class; /// <summary> /// Publishes a message with a topic. /// When used with publisher confirms the task completes when the publish is confirmed. /// Task will throw an exception if the confirm is NACK'd or times out. /// </summary> /// <typeparam name="T">The message type</typeparam> /// <param name="message">The message to publish</param> /// <param name="topic">The topic string</param> /// <returns></returns> Task PublishAsync<T>(T message, string topic) where T : class; /// <summary> /// Subscribes to a stream of messages that match a .NET type. /// </summary> /// <typeparam name="T">The type to subscribe to</typeparam> /// <param name="subscriptionId"> /// A unique identifier for the subscription. Two subscriptions with the same subscriptionId /// and type will get messages delivered in turn. This is useful if you want multiple subscribers /// to load balance a subscription in a round-robin fashion. /// </param> /// <param name="onMessage"> /// The action to run when a message arrives. When onMessage completes the message /// recipt is Ack'd. All onMessage delegates are processed on a single thread so you should /// avoid long running blocking IO operations. Consider using SubscribeAsync /// </param> IDisposable Subscribe<T>(string subscriptionId, Action<T> onMessage) where T : class; /// <summary> /// Subscribes to a stream of messages that match a .NET type. /// </summary> /// <typeparam name="T">The type to subscribe to</typeparam> /// <param name="subscriptionId"> /// A unique identifier for the subscription. Two subscriptions with the same subscriptionId /// and type will get messages delivered in turn. This is useful if you want multiple subscribers /// to load balance a subscription in a round-robin fashion. /// </param> /// <param name="onMessage"> /// The action to run when a message arrives. When onMessage completes the message /// recipt is Ack'd. All onMessage delegates are processed on a single thread so you should /// avoid long running blocking IO operations. Consider using SubscribeAsync /// </param> /// <param name="configure"> /// Fluent configuration e.g. x => x.WithTopic("uk.london") /// </param> IDisposable Subscribe<T>(string subscriptionId, Action<T> onMessage, Action<ISubscriptionConfiguration> configure) where T : class; /// <summary> /// Subscribes to a stream of messages that match a .NET type. 
/// Allows the subscriber to complete asynchronously. /// </summary> /// <typeparam name="T">The type to subscribe to</typeparam> /// <param name="subscriptionId"> /// A unique identifier for the subscription. Two subscriptions with the same subscriptionId /// and type will get messages delivered in turn. This is useful if you want multiple subscribers /// to load balance a subscription in a round-robin fashion. /// </param> /// <param name="onMessage"> /// The action to run when a message arrives. onMessage can immediately return a Task and /// then continue processing asynchronously. When the Task completes the message will be /// Ack'd. /// </param> IDisposable SubscribeAsync<T>(string subscriptionId, Func<T, Task> onMessage) where T : class; /// <summary> /// Subscribes to a stream of messages that match a .NET type. /// </summary> /// <typeparam name="T">The type to subscribe to</typeparam> /// <param name="subscriptionId"> /// A unique identifier for the subscription. Two subscriptions with the same subscriptionId /// and type will get messages delivered in turn. This is useful if you want multiple subscribers /// to load balance a subscription in a round-robin fashion. /// </param> /// <param name="onMessage"> /// The action to run when a message arrives. onMessage can immediately return a Task and /// then continue processing asynchronously. When the Task completes the message will be /// Ack'd. /// </param> /// <param name="configure"> /// Fluent configuration e.g. x => x.WithTopic("uk.london").WithArgument("x-message-ttl", "60") /// </param> IDisposable SubscribeAsync<T>(string subscriptionId, Func<T, Task> onMessage, Action<ISubscriptionConfiguration> configure) where T : class; /// <summary> /// Makes an RPC style request /// </summary> /// <typeparam name="TRequest">The request type.</typeparam> /// <typeparam name="TResponse">The response type.</typeparam> /// <param name="request">The request message.</param> /// <returns>The response</returns> TResponse Request<TRequest, TResponse>(TRequest request) where TRequest : class where TResponse : class; /// <summary> /// Makes an RPC style request. /// </summary> /// <typeparam name="TRequest">The request type.</typeparam> /// <typeparam name="TResponse">The response type.</typeparam> /// <param name="request">The request message.</param> /// <returns>A task that completes when the response returns</returns> Task<TResponse> RequestAsync<TRequest, TResponse>(TRequest request) where TRequest : class where TResponse : class; /// <summary> /// Responds to an RPC request. /// </summary> /// <typeparam name="TRequest">The request type.</typeparam> /// <typeparam name="TResponse">The response type.</typeparam> /// <param name="responder"> /// A function to run when the request is received. It should return the response. /// </param> IDisposable Respond<TRequest, TResponse>(Func<TRequest, TResponse> responder) where TRequest : class where TResponse : class; /// <summary> /// Responds to an RPC request asynchronously. /// </summary> /// <typeparam name="TRequest">The request type.</typeparam> /// <typeparam name="TResponse">The response type</typeparam> /// <param name="responder"> /// A function to run when the request is received. 
/// </param> IDisposable RespondAsync<TRequest, TResponse>(Func<TRequest, Task<TResponse>> responder) where TRequest : class where TResponse : class; /// <summary> /// Send a message directly to a queue /// </summary> /// <typeparam name="T">The type of message to send</typeparam> /// <param name="queue">The queue to send to</param> /// <param name="message">The message</param> void Send<T>(string queue, T message) where T : class; /// <summary> /// Receive messages from a queue. /// Multiple calls to Receive for the same queue, but with different message types /// will add multiple message handlers to the same consumer. /// </summary> /// <typeparam name="T">The type of message to receive</typeparam> /// <param name="queue">The queue to receive from</param> /// <param name="onMessage">The message handler</param> IDisposable Receive<T>(string queue, Action<T> onMessage) where T : class; /// <summary> /// Receive messages from a queue. /// Multiple calls to Receive for the same queue, but with different message types /// will add multiple message handlers to the same consumer. /// </summary> /// <typeparam name="T">The type of message to receive</typeparam> /// <param name="queue">The queue to receive from</param> /// <param name="onMessage">The asynchronous message handler</param> IDisposable Receive<T>(string queue, Func<T, Task> onMessage) where T : class; /// <summary> /// Receive messages from the specified queue and dispatch them to the given handlers. /// </summary> /// <param name="queue">The queue to take messages from</param> /// <param name="addHandlers">A function to add handlers</param> /// <returns>Consumer cancellation. Call Dispose to stop consuming</returns> IDisposable Receive(string queue, Action<IReceiveRegistration> addHandlers); /// <summary> /// Fires once the bus has connected to a RabbitMQ server. /// </summary> event Action Connected; /// <summary> /// Fires when the bus disconnects from a RabbitMQ server. /// </summary> event Action Disconnected; /// <summary> /// True if the bus is connected, False if it is not. /// </summary> bool IsConnected { get; } /// <summary> /// Return the advanced EasyNetQ API. /// </summary> IAdvancedBus Advanced { get; } } }
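// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the interface above): a minimal
// publish/subscribe and request/response round-trip against IBus. It assumes
// the usual EasyNetQ RabbitHutch.CreateBus(...) factory and a local RabbitMQ
// broker; TextMessage, MyRequest and MyResponse are hypothetical message types.
// ---------------------------------------------------------------------------
using System;
using System.Threading.Tasks;
using EasyNetQ;

public class TextMessage { public string Text { get; set; } }
public class MyRequest { public int Id { get; set; } }
public class MyResponse { public string Result { get; set; } }

public static class BusUsageSketch
{
    public static async Task Run()
    {
        using (IBus bus = RabbitHutch.CreateBus("host=localhost"))
        {
            // Round-robin subscription: all consumers sharing "worker" split the messages.
            bus.Subscribe<TextMessage>("worker", m => Console.WriteLine("Got: " + m.Text));

            // Topic-based subscription via the fluent configuration overload.
            bus.Subscribe<TextMessage>("uk", m => Console.WriteLine("UK: " + m.Text),
                x => x.WithTopic("uk.london"));

            // Publish with and without a topic; PublishAsync completes on publisher confirm.
            bus.Publish(new TextMessage { Text = "hello" });
            await bus.PublishAsync(new TextMessage { Text = "hello london" }, "uk.london");

            // RPC: register a responder, then make a request and await the response.
            bus.Respond<MyRequest, MyResponse>(r => new MyResponse { Result = "id=" + r.Id });
            MyResponse response = await bus.RequestAsync<MyRequest, MyResponse>(new MyRequest { Id = 42 });
            Console.WriteLine(response.Result);
        }
    }
}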
/* * Copyright (c) Contributors, http://aurora-sim.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Aurora-Sim Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Reflection; namespace Aurora.Framework { public static class AuroraModuleLoader { private static bool ALLOW_CACHE = true; private static List<string> dllBlackList; private static readonly List<string> firstLoad = new List<string>(); private static readonly Dictionary<string, List<Type>> LoadedDlls = new Dictionary<string, List<Type>>(); private static readonly Dictionary<string, Assembly> LoadedAssemblys = new Dictionary<string, Assembly>(); #region Module Loaders /// <summary> /// Find all T modules in the current directory /// </summary> /// <typeparam name = "T"></typeparam> /// <returns></returns> public static List<T> PickupModules<T>() { return LoadModules<T>(Util.BasePathCombine("")); } /// <summary> /// Gets all modules found in the given directory. /// Identifier is the name of the interface. 
/// </summary> /// <typeparam name = "T"></typeparam> /// <param name = "moduleDir"></param> /// <param name = "identifier"></param> /// <returns></returns> public static List<T> LoadModules<T>(string moduleDir) { if (moduleDir == "") moduleDir = Util.BasePathCombine(""); List<T> modules = new List<T>(); lock (firstLoad) { if (!firstLoad.Contains(moduleDir)) { DirectoryInfo dir = new DirectoryInfo(moduleDir); #region blacklist if (dllBlackList == null || dllBlackList.Count == 0) { dllBlackList = new List<string> { Path.Combine(dir.FullName, "AsyncCtpLibrary.dll"), Path.Combine(dir.FullName, "NHibernate.ByteCode.Castle.dll"), Path.Combine(dir.FullName, "Antlr3.Runtime.dll"), Path.Combine(dir.FullName, "AprSharp.dll"), Path.Combine(dir.FullName, "Axiom.MathLib.dll"), Path.Combine(dir.FullName, "BclExtras35.dll"), Path.Combine(dir.FullName, "BulletSim.dll"), Path.Combine(dir.FullName, "BulletSim-x86_64.dll"), Path.Combine(dir.FullName, "BulletDotNET.dll"), Path.Combine(dir.FullName, "C5.dll"), Path.Combine(dir.FullName, "Castle.Core.dll"), Path.Combine(dir.FullName, "Castle.DynamicProxy.dll"), Path.Combine(dir.FullName, "Castle.DynamicProxy2.dll"), Path.Combine(dir.FullName, "Community.CsharpSqlite.dll"), Path.Combine(dir.FullName, "Community.CsharpSqlite.Sqlite.dll"), Path.Combine(dir.FullName, "CookComputing.XmlRpcV2.dll"), Path.Combine(dir.FullName, "CSJ2K.dll"), Path.Combine(dir.FullName, "DotNetOpenId.dll"), Path.Combine(dir.FullName, "DotNetOpenMail.dll"), Path.Combine(dir.FullName, "DotSets.dll"), Path.Combine(dir.FullName, "Fadd.dll"), Path.Combine(dir.FullName, "Fadd.Globalization.Yaml.dll"), Path.Combine(dir.FullName, "FluentNHibernate.dll"), Path.Combine(dir.FullName, "Glacier2.dll"), Path.Combine(dir.FullName, "GlynnTucker.Cache.dll"), Path.Combine(dir.FullName, "Google.ProtocolBuffers.dll"), Path.Combine(dir.FullName, "GoogleTranslateAPI.dll"), Path.Combine(dir.FullName, "HttpServer.dll"), Path.Combine(dir.FullName, "HttpServer_OpenSim.dll"), Path.Combine(dir.FullName, "Ice.dll"), Path.Combine(dir.FullName, "Iesi.Collections.dll"), Path.Combine(dir.FullName, "intl3_svn.dll"), Path.Combine(dir.FullName, "Kds.Serialization.dll"), Path.Combine(dir.FullName, "libapr.dll"), Path.Combine(dir.FullName, "libapriconv.dll"), Path.Combine(dir.FullName, "libaprutil.dll"), Path.Combine(dir.FullName, "libbulletnet.dll"), Path.Combine(dir.FullName, "libdb44d.dll"), Path.Combine(dir.FullName, "libdb_dotNET43.dll"), Path.Combine(dir.FullName, "libeay32.dll"), Path.Combine(dir.FullName, "log4net.dll"), Path.Combine(dir.FullName, "Modified.XnaDevRu.BulletX.dll"), Path.Combine(dir.FullName, "Mono.Addins.CecilReflector.dll"), Path.Combine(dir.FullName, "Mono.Addins.dll"), Path.Combine(dir.FullName, "Mono.Addins.Setup.dll"), Path.Combine(dir.FullName, "Mono.Data.Sqlite.dll"), Path.Combine(dir.FullName, "Mono.Data.SqliteClient.dll"), Path.Combine(dir.FullName, "Mono.GetOptions.dll"), Path.Combine(dir.FullName, "Mono.PEToolkit.dll"), Path.Combine(dir.FullName, "Mono.Security.dll"), Path.Combine(dir.FullName, "MonoXnaCompactMaths.dll"), Path.Combine(dir.FullName, "MXP.dll"), Path.Combine(dir.FullName, "MySql.Data.dll"), Path.Combine(dir.FullName, "NDesk.Options.dll"), Path.Combine(dir.FullName, "Newtonsoft.Json.dll"), Path.Combine(dir.FullName, "Newtonsoft.Json.Net20.dll"), Path.Combine(dir.FullName, "NHibernate.ByteCode.Castle.dll"), Path.Combine(dir.FullName, "NHibernate.dll"), Path.Combine(dir.FullName, "HttpServer_OpenSim.dll"), Path.Combine(dir.FullName, "Nini.dll"), Path.Combine(dir.FullName, 
"Npgsql.dll"), Path.Combine(dir.FullName, "nunit.framework.dll"), Path.Combine(dir.FullName, "ode.dll"), Path.Combine(dir.FullName, "odex86.dll"), Path.Combine(dir.FullName, "odex64.dll"), Path.Combine(dir.FullName, "odeNoSSE.dll"), Path.Combine(dir.FullName, "odeSSE1.dll"), Path.Combine(dir.FullName, "ode10.dll"), Path.Combine(dir.FullName, "ode11.dll"), Path.Combine(dir.FullName, "Ode.NET.dll"), Path.Combine(dir.FullName, "Ode.NET.Single.dll"), Path.Combine(dir.FullName, "Ode.NET.Double.dll"), Path.Combine(dir.FullName, "openjpeg-dotnet-x86_64.dll"), Path.Combine(dir.FullName, "openjpeg-dotnet.dll"), Path.Combine(dir.FullName, "openjpeg.dll"), Path.Combine(dir.FullName, "OpenMetaverse.dll"), Path.Combine(dir.FullName, "OpenMetaverse.GUI.dll"), Path.Combine(dir.FullName, "OpenMetaverse.Rendering.Simple.dll"), Path.Combine(dir.FullName, "OpenMetaverse.Rendering.Meshmerizer.dll"), Path.Combine(dir.FullName, "OpenMetaverse.Http.dll"), Path.Combine(dir.FullName, "OpenMetaverse.StructuredData.dll"), Path.Combine(dir.FullName, "OpenMetaverse.Utilities.dll"), Path.Combine(dir.FullName, "OpenMetaverseTypes.dll"), Path.Combine(dir.FullName, "OpenMetaverse.Tests.dll"), Path.Combine(dir.FullName, "PhysX-wrapper.dll"), Path.Combine(dir.FullName, "PhysX_Wrapper_Dotnet.dll"), Path.Combine(dir.FullName, "PrimMesher.dll"), Path.Combine(dir.FullName, "protobuf-net.dll"), Path.Combine(dir.FullName, "PumaCode.SvnDotNet.dll"), Path.Combine(dir.FullName, "RAIL.dll"), Path.Combine(dir.FullName, "SmartThreadPool.dll"), Path.Combine(dir.FullName, "sqlite3.dll"), Path.Combine(dir.FullName, "ssleay32.dll"), Path.Combine(dir.FullName, "SubversionSharp.dll"), Path.Combine(dir.FullName, "svn_client-1.dll"), Path.Combine(dir.FullName, "System.Data.SQLite.dll"), Path.Combine(dir.FullName, "System.Data.SQLitex64.dll"), Path.Combine(dir.FullName, "System.Data.SQLitex86.dll"), Path.Combine(dir.FullName, "Tools.dll"), Path.Combine(dir.FullName, "xunit.dll"), Path.Combine(dir.FullName, "XMLRPC.dll"), Path.Combine(dir.FullName, "Warp3D.dll"), Path.Combine(dir.FullName, "zlib.net.dll") }; } #endregion if (ALLOW_CACHE) LoadedDlls.Add(moduleDir, new List<Type>()); foreach (FileInfo fileInfo in dir.GetFiles("*.dll")) modules.AddRange(LoadModulesFromDLL<T>(moduleDir, fileInfo.FullName)); LoadedAssemblys.Clear(); if (ALLOW_CACHE) firstLoad.Add(moduleDir); } else { try { List<Type> loadedDllModules; LoadedDlls.TryGetValue(moduleDir, out loadedDllModules); foreach (Type pluginType in loadedDllModules) { try { if (pluginType.IsPublic) { if (!pluginType.IsAbstract) { if (pluginType.GetInterface(typeof(T).Name) != null) { modules.Add((T)Activator.CreateInstance(pluginType)); } } } } catch (Exception) { } } } catch (Exception) { } } } return modules; } public static void ClearCache() { LoadedDlls.Clear(); firstLoad.Clear(); } /// <summary> /// Load all T modules from dllname /// </summary> /// <typeparam name = "T"></typeparam> /// <param name = "dllName"></param> /// <returns></returns> private static List<T> LoadModulesFromDLL<T>(string moduleDir, string dllName) { List<T> modules = new List<T>(); if (dllBlackList.Contains(dllName)) return modules; Assembly pluginAssembly; if (!LoadedAssemblys.TryGetValue(dllName, out pluginAssembly)) { try { pluginAssembly = Assembly.Load(AssemblyName.GetAssemblyName(dllName)); LoadedAssemblys.Add(dllName, pluginAssembly); } catch (BadImageFormatException) { } catch { } } if (pluginAssembly != null) { try { List<Type> loadedTypes = new List<Type>(); foreach (Type pluginType in 
pluginAssembly.GetTypes().Where((p) => p.IsPublic && !p.IsAbstract)) { try { if (ALLOW_CACHE) { if (!firstLoad.Contains(moduleDir)) { //Only add on the first load if (!loadedTypes.Contains(pluginType)) loadedTypes.Add(pluginType); } } if (pluginType.GetInterface(typeof(T).Name, true) != null) { modules.Add((T)Activator.CreateInstance(pluginType)); } } catch (Exception ex) { MainConsole.Instance.Warn("[MODULELOADER]: Error loading module " + pluginType.Name + " in file " + dllName + " : " + ex); } } if (ALLOW_CACHE) LoadedDlls[moduleDir].AddRange(loadedTypes); } catch (Exception) { } } return modules; } #endregion /// <summary> /// Load all plugins from the given .dll file with the interface 'type' /// </summary> /// <typeparam name = "T"></typeparam> /// <param name = "dllName"></param> /// <param name = "type"></param> /// <returns></returns> public static T LoadPlugin<T>(string dllName) { string type = typeof (T).ToString(); try { Assembly pluginAssembly = Assembly.Load(AssemblyName.GetAssemblyName(dllName)); foreach (Type pluginType in pluginAssembly.GetTypes().Where(pluginType => pluginType.IsPublic)) { try { Type typeInterface = pluginType.GetInterface(type, true); if (typeInterface != null) { return (T) Activator.CreateInstance(pluginType); } } catch (Exception) { } } } catch (ReflectionTypeLoadException e) { foreach (Exception e2 in e.LoaderExceptions) { MainConsole.Instance.Error(e2.ToString()); } throw e; } return default(T); } /// <summary> /// Load all plugins from the given .dll file with the interface 'type' /// </summary> /// <typeparam name = "T"></typeparam> /// <param name = "dllName"></param> /// <param name = "type"></param> /// <returns></returns> public static List<T> LoadPlugins<T>(string dllName) { List<T> plugins = new List<T>(); string type = typeof (T).ToString(); try { Assembly pluginAssembly = Assembly.Load(AssemblyName.GetAssemblyName(dllName)); foreach (Type pluginType in pluginAssembly.GetTypes().Where(pluginType => pluginType.IsPublic)) { try { Type typeInterface = pluginType.GetInterface(type, true); if (typeInterface != null) { plugins.Add((T) Activator.CreateInstance(pluginType)); } } catch (Exception) { } } } catch (ReflectionTypeLoadException e) { foreach (Exception e2 in e.LoaderExceptions) { MainConsole.Instance.Error(e2.ToString()); } throw e; } return plugins; } /// <summary> /// Load a plugin from a dll with the given class or interface /// </summary> /// <param name = "dllName"></param> /// <param name = "args">The arguments which control which constructor is invoked on the plugin</param> /// <returns></returns> public static T LoadPlugin<T>(string dllName, Object[] args) where T : class { string[] parts = dllName.Split(new[] {':'}); dllName = parts[0]; string className = String.Empty; if (parts.Length > 1) className = parts[1]; return LoadPlugin<T>(dllName, className, args); } /// <summary> /// Load a plugin from a dll with the given class or interface /// </summary> /// <param name = "dllName"></param> /// <param name = "className"></param> /// <param name = "args">The arguments which control which constructor is invoked on the plugin</param> /// <returns></returns> public static T LoadPlugin<T>(string dllName, string className, Object[] args) where T : class { string interfaceName = typeof (T).ToString(); try { Assembly pluginAssembly = Assembly.Load(AssemblyName.GetAssemblyName(dllName)); foreach (Type pluginType in pluginAssembly.GetTypes().Where((p) => p.IsPublic && !(className != String.Empty && p.ToString() != p.Namespace + "." 
+ className))) { Type typeInterface = pluginType.GetInterface(interfaceName, true); if (typeInterface != null) { T plug = null; try { plug = (T)Activator.CreateInstance(pluginType, args); } catch (Exception e) { if (!(e is MissingMethodException)) MainConsole.Instance.ErrorFormat("Error loading plugin from {0}, exception {1}", dllName, e.InnerException); return null; } return plug; } } return null; } catch (Exception e) { MainConsole.Instance.Error(string.Format("Error loading plugin from {0}", dllName), e); return null; } } } }
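// ---------------------------------------------------------------------------
// Usage sketch (illustrative only): loading modules and plugins through
// AuroraModuleLoader. IMyService and the dll/class names are hypothetical;
// PickupModules scans the base directory resolved via Util.BasePathCombine("").
// ---------------------------------------------------------------------------
using System.Collections.Generic;
using Aurora.Framework;

public interface IMyService { void Start(); }

public static class ModuleLoaderUsageSketch
{
    public static void Run()
    {
        // Scan every *.dll in the base directory (minus the blacklist) for public,
        // non-abstract implementations of IMyService; results are cached per directory.
        List<IMyService> services = AuroraModuleLoader.PickupModules<IMyService>();
        foreach (IMyService service in services)
            service.Start();

        // Load a single implementation from a specific assembly.
        IMyService single = AuroraModuleLoader.LoadPlugin<IMyService>("MyModule.dll");

        // "dll:Class" syntax picks one concrete class and passes constructor arguments.
        IMyService configured = AuroraModuleLoader.LoadPlugin<IMyService>(
            "MyModule.dll:MyService", new object[] { "config-value" });
    }
}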
/* **************************************************************************** * * Copyright (c) Microsoft Corporation. * * This source code is subject to terms and conditions of the Apache License, Version 2.0. A * copy of the license can be found in the License.html file at the root of this distribution. If * you cannot locate the Apache License, Version 2.0, please send an email to * [email protected]. By using this source code in any fashion, you are agreeing to be bound * by the terms of the Apache License, Version 2.0. * * You must not remove this notice, or any other, from this software. * * * ***************************************************************************/ using System.Diagnostics; namespace System.Management.Automation.Interpreter { internal abstract class MulInstruction : Instruction { private static Instruction s_int16, s_int32, s_int64, s_UInt16, s_UInt32, s_UInt64, s_single, s_double; public override int ConsumedStack { get { return 2; } } public override int ProducedStack { get { return 1; } } private MulInstruction() { } internal sealed class MulInt32 : MulInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = ScriptingRuntimeHelpers.Int32ToObject(unchecked((Int32)l * (Int32)r)); frame.StackIndex--; return +1; } } internal sealed class MulInt16 : MulInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = (Int16)unchecked((Int16)l * (Int16)r); frame.StackIndex--; return +1; } } internal sealed class MulInt64 : MulInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = (Int64)unchecked((Int64)l * (Int64)r); frame.StackIndex--; return +1; } } internal sealed class MulUInt16 : MulInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = (UInt16)unchecked((UInt16)l * (UInt16)r); frame.StackIndex--; return +1; } } internal sealed class MulUInt32 : MulInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = (UInt32)unchecked((UInt32)l * (UInt32)r); frame.StackIndex--; return +1; } } internal sealed class MulUInt64 : MulInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = (UInt64)unchecked((UInt64)l * (UInt64)r); frame.StackIndex--; return +1; } } internal sealed class MulSingle : MulInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = (Single)((Single)l * (Single)r); frame.StackIndex--; return +1; } } internal sealed class MulDouble : MulInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = (double)l * (double)r; frame.StackIndex--; return +1; } } public static Instruction Create(Type type) { Debug.Assert(!type.IsEnum);
switch (type.GetTypeCode()) { case TypeCode.Int16: return s_int16 ??= new MulInt16(); case TypeCode.Int32: return s_int32 ??= new MulInt32(); case TypeCode.Int64: return s_int64 ??= new MulInt64(); case TypeCode.UInt16: return s_UInt16 ??= new MulUInt16(); case TypeCode.UInt32: return s_UInt32 ??= new MulUInt32(); case TypeCode.UInt64: return s_UInt64 ??= new MulUInt64(); case TypeCode.Single: return s_single ??= new MulSingle(); case TypeCode.Double: return s_double ??= new MulDouble(); default: throw Assert.Unreachable; } } public override string ToString() { return "Mul()"; } } internal abstract class MulOvfInstruction : Instruction { private static Instruction s_int16, s_int32, s_int64, s_UInt16, s_UInt32, s_UInt64, s_single, s_double; public override int ConsumedStack { get { return 2; } } public override int ProducedStack { get { return 1; } } private MulOvfInstruction() { } internal sealed class MulOvfInt32 : MulOvfInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = ScriptingRuntimeHelpers.Int32ToObject(checked((Int32)l * (Int32)r)); frame.StackIndex--; return +1; } } internal sealed class MulOvfInt16 : MulOvfInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = (Int16)checked((Int16)l * (Int16)r); frame.StackIndex--; return +1; } } internal sealed class MulOvfInt64 : MulOvfInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = (Int64)checked((Int64)l * (Int64)r); frame.StackIndex--; return +1; } } internal sealed class MulOvfUInt16 : MulOvfInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = (UInt16)checked((UInt16)l * (UInt16)r); frame.StackIndex--; return +1; } } internal sealed class MulOvfUInt32 : MulOvfInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = (UInt32)checked((UInt32)l * (UInt32)r); frame.StackIndex--; return +1; } } internal sealed class MulOvfUInt64 : MulOvfInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = (UInt64)checked((UInt64)l * (UInt64)r); frame.StackIndex--; return +1; } } internal sealed class MulOvfSingle : MulOvfInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = (Single)((Single)l * (Single)r); frame.StackIndex--; return +1; } } internal sealed class MulOvfDouble : MulOvfInstruction { public override int Run(InterpretedFrame frame) { object l = frame.Data[frame.StackIndex - 2]; object r = frame.Data[frame.StackIndex - 1]; frame.Data[frame.StackIndex - 2] = (double)l * (double)r; frame.StackIndex--; return +1; } } public static Instruction Create(Type type) { Debug.Assert(!type.IsEnum); switch (type.GetTypeCode()) { case TypeCode.Int16: return s_int16 ??= new MulOvfInt16(); case TypeCode.Int32: return s_int32 ??= new
MulOvfInt32(); case TypeCode.Int64: return s_int64 ??= new MulOvfInt64(); case TypeCode.UInt16: return s_UInt16 ??= new MulOvfUInt16(); case TypeCode.UInt32: return s_UInt32 ??= new MulOvfUInt32(); case TypeCode.UInt64: return s_UInt64 ??= new MulOvfUInt64(); case TypeCode.Single: return s_single ??= new MulOvfSingle(); case TypeCode.Double: return s_double ??= new MulOvfDouble(); default: throw Assert.Unreachable; } } public override string ToString() { return "MulOvf()"; } } }
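// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the interpreter above): the same pattern the
// Mul/MulOvf factories use - one lazily created, type-specialised instruction
// per TypeCode, with operands taken from an explicit evaluation stack.
// MiniFrame and MiniMulInstruction are simplified stand-ins for
// InterpretedFrame and Instruction, assumed here for a self-contained example.
// ---------------------------------------------------------------------------
using System;

internal sealed class MiniFrame
{
    public object[] Data = new object[16];
    public int StackIndex;
    public void Push(object value) { Data[StackIndex++] = value; }
    public object Pop() { return Data[--StackIndex]; }
}

internal abstract class MiniMulInstruction
{
    private static MiniMulInstruction s_int32, s_double;

    public abstract void Run(MiniFrame frame);

    // Same dispatch/caching idea as MulInstruction.Create: allocate each
    // specialised instruction once and reuse the singleton for every call site.
    public static MiniMulInstruction Create(Type type)
    {
        switch (Type.GetTypeCode(type))
        {
            case TypeCode.Int32: return s_int32 ??= new MulInt32();
            case TypeCode.Double: return s_double ??= new MulDouble();
            default: throw new NotSupportedException(type.ToString());
        }
    }

    private sealed class MulInt32 : MiniMulInstruction
    {
        public override void Run(MiniFrame frame)
        {
            // Consume two stack slots, produce one (ConsumedStack = 2, ProducedStack = 1).
            object r = frame.Pop();
            object l = frame.Pop();
            frame.Push(unchecked((int)l * (int)r));
        }
    }

    private sealed class MulDouble : MiniMulInstruction
    {
        public override void Run(MiniFrame frame)
        {
            object r = frame.Pop();
            object l = frame.Pop();
            frame.Push((double)l * (double)r);
        }
    }
}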
//--------------------------------------------------------------------------- // // <copyright file="SpecularMaterial.cs" company="Microsoft"> // Copyright (C) Microsoft Corporation. All rights reserved. // </copyright> // // This file was generated, please do not edit it directly. // // Please see http://wiki/default.aspx/Microsoft.Projects.Avalon/MilCodeGen.html for more information. // //--------------------------------------------------------------------------- using MS.Internal; using MS.Internal.Collections; using MS.Internal.PresentationCore; using MS.Utility; using System; using System.Collections; using System.Collections.Generic; using System.ComponentModel; using System.ComponentModel.Design.Serialization; using System.Diagnostics; using System.Globalization; using System.Reflection; using System.Runtime.InteropServices; using System.Text; using System.Windows.Markup; using System.Windows.Media.Media3D.Converters; using System.Windows.Media; using System.Windows.Media.Animation; using System.Windows.Media.Composition; using System.Security; using System.Security.Permissions; using SR=MS.Internal.PresentationCore.SR; using SRID=MS.Internal.PresentationCore.SRID; using System.Windows.Media.Imaging; // These types are aliased to match the unamanaged names used in interop using BOOL = System.UInt32; using WORD = System.UInt16; using Float = System.Single; namespace System.Windows.Media.Media3D { sealed partial class SpecularMaterial : Material { //------------------------------------------------------ // // Public Methods // //------------------------------------------------------ #region Public Methods /// <summary> /// Shadows inherited Clone() with a strongly typed /// version for convenience. /// </summary> public new SpecularMaterial Clone() { return (SpecularMaterial)base.Clone(); } /// <summary> /// Shadows inherited CloneCurrentValue() with a strongly typed /// version for convenience. /// </summary> public new SpecularMaterial CloneCurrentValue() { return (SpecularMaterial)base.CloneCurrentValue(); } #endregion Public Methods //------------------------------------------------------ // // Public Properties // //------------------------------------------------------ private static void ColorPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { SpecularMaterial target = ((SpecularMaterial) d); target.PropertyChanged(ColorProperty); } private static void BrushPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { // The first change to the default value of a mutable collection property (e.g. GeometryGroup.Children) // will promote the property value from a default value to a local value. This is technically a sub-property // change because the collection was changed and not a new collection set (GeometryGroup.Children. // Add versus GeometryGroup.Children = myNewChildrenCollection). However, we never marshalled // the default value to the compositor. If the property changes from a default value, the new local value // needs to be marshalled to the compositor. We detect this scenario with the second condition // e.OldValueSource != e.NewValueSource. Specifically in this scenario the OldValueSource will be // Default and the NewValueSource will be Local. 
if (e.IsASubPropertyChange && (e.OldValueSource == e.NewValueSource)) { return; } SpecularMaterial target = ((SpecularMaterial) d); Brush oldV = (Brush) e.OldValue; Brush newV = (Brush) e.NewValue; System.Windows.Threading.Dispatcher dispatcher = target.Dispatcher; if (dispatcher != null) { DUCE.IResource targetResource = (DUCE.IResource)target; using (CompositionEngineLock.Acquire()) { int channelCount = targetResource.GetChannelCount(); for (int channelIndex = 0; channelIndex < channelCount; channelIndex++) { DUCE.Channel channel = targetResource.GetChannel(channelIndex); Debug.Assert(!channel.IsOutOfBandChannel); Debug.Assert(!targetResource.GetHandle(channel).IsNull); target.ReleaseResource(oldV,channel); target.AddRefResource(newV,channel); } } } target.PropertyChanged(BrushProperty); } private static void SpecularPowerPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { SpecularMaterial target = ((SpecularMaterial) d); target.PropertyChanged(SpecularPowerProperty); } #region Public Properties /// <summary> /// Color - Color. Default value is Colors.White. /// </summary> public Color Color { get { return (Color) GetValue(ColorProperty); } set { SetValueInternal(ColorProperty, value); } } /// <summary> /// Brush - Brush. Default value is null. /// </summary> public Brush Brush { get { return (Brush) GetValue(BrushProperty); } set { SetValueInternal(BrushProperty, value); } } /// <summary> /// SpecularPower - double. Default value is 40.0. /// </summary> public double SpecularPower { get { return (double) GetValue(SpecularPowerProperty); } set { SetValueInternal(SpecularPowerProperty, value); } } #endregion Public Properties //------------------------------------------------------ // // Protected Methods // //------------------------------------------------------ #region Protected Methods /// <summary> /// Implementation of <see cref="System.Windows.Freezable.CreateInstanceCore">Freezable.CreateInstanceCore</see>. /// </summary> /// <returns>The new Freezable.</returns> protected override Freezable CreateInstanceCore() { return new SpecularMaterial(); } #endregion ProtectedMethods //------------------------------------------------------ // // Internal Methods // //------------------------------------------------------ #region Internal Methods /// <SecurityNote> /// Critical: This code calls into an unsafe code block /// TreatAsSafe: This code does not return any critical data.It is ok to expose /// Channels are safe to call into and do not go cross domain and cross process /// </SecurityNote> [SecurityCritical,SecurityTreatAsSafe] internal override void UpdateResource(DUCE.Channel channel, bool skipOnChannelCheck) { // If we're told we can skip the channel check, then we must be on channel Debug.Assert(!skipOnChannelCheck || _duceResource.IsOnChannel(channel)); if (skipOnChannelCheck || _duceResource.IsOnChannel(channel)) { base.UpdateResource(channel, skipOnChannelCheck); // Read values of properties into local variables Brush vBrush = Brush; // Obtain handles for properties that implement DUCE.IResource DUCE.ResourceHandle hBrush = vBrush != null ? 
((DUCE.IResource)vBrush).GetHandle(channel) : DUCE.ResourceHandle.Null; // Pack & send command packet DUCE.MILCMD_SPECULARMATERIAL data; unsafe { data.Type = MILCMD.MilCmdSpecularMaterial; data.Handle = _duceResource.GetHandle(channel); data.color = CompositionResourceManager.ColorToMilColorF(Color); data.hbrush = hBrush; data.specularPower = SpecularPower; // Send packed command structure channel.SendCommand( (byte*)&data, sizeof(DUCE.MILCMD_SPECULARMATERIAL)); } } } internal override DUCE.ResourceHandle AddRefOnChannelCore(DUCE.Channel channel) { if (_duceResource.CreateOrAddRefOnChannel(this, channel, System.Windows.Media.Composition.DUCE.ResourceType.TYPE_SPECULARMATERIAL)) { Brush vBrush = Brush; if (vBrush != null) ((DUCE.IResource)vBrush).AddRefOnChannel(channel); AddRefOnChannelAnimations(channel); UpdateResource(channel, true /* skip "on channel" check - we already know that we're on channel */ ); } return _duceResource.GetHandle(channel); } internal override void ReleaseOnChannelCore(DUCE.Channel channel) { Debug.Assert(_duceResource.IsOnChannel(channel)); if (_duceResource.ReleaseOnChannel(channel)) { Brush vBrush = Brush; if (vBrush != null) ((DUCE.IResource)vBrush).ReleaseOnChannel(channel); ReleaseOnChannelAnimations(channel); } } internal override DUCE.ResourceHandle GetHandleCore(DUCE.Channel channel) { // Note that we are in a lock here already. return _duceResource.GetHandle(channel); } internal override int GetChannelCountCore() { // must already be in composition lock here return _duceResource.GetChannelCount(); } internal override DUCE.Channel GetChannelCore(int index) { // Note that we are in a lock here already. return _duceResource.GetChannel(index); } #endregion Internal Methods //------------------------------------------------------ // // Internal Properties // //------------------------------------------------------ #region Internal Properties // // This property finds the correct initial size for the _effectiveValues store on the // current DependencyObject as a performance optimization // // This includes: // Brush // internal override int EffectiveValuesInitialSize { get { return 1; } } #endregion Internal Properties //------------------------------------------------------ // // Dependency Properties // //------------------------------------------------------ #region Dependency Properties /// <summary> /// The DependencyProperty for the SpecularMaterial.Color property. /// </summary> public static readonly DependencyProperty ColorProperty; /// <summary> /// The DependencyProperty for the SpecularMaterial.Brush property. /// </summary> public static readonly DependencyProperty BrushProperty; /// <summary> /// The DependencyProperty for the SpecularMaterial.SpecularPower property. 
/// </summary> public static readonly DependencyProperty SpecularPowerProperty; #endregion Dependency Properties //------------------------------------------------------ // // Internal Fields // //------------------------------------------------------ #region Internal Fields internal System.Windows.Media.Composition.DUCE.MultiChannelResource _duceResource = new System.Windows.Media.Composition.DUCE.MultiChannelResource(); internal static Color s_Color = Colors.White; internal static Brush s_Brush = null; internal const double c_SpecularPower = 40.0; #endregion Internal Fields #region Constructors //------------------------------------------------------ // // Constructors // //------------------------------------------------------ static SpecularMaterial() { // We check our static default fields which are of type Freezable // to make sure that they are not mutable, otherwise we will throw // if these get touched by more than one thread in the lifetime // of your app. (Windows OS Bug #947272) // Debug.Assert(s_Brush == null || s_Brush.IsFrozen, "Detected context bound default value SpecularMaterial.s_Brush (See OS Bug #947272)."); // Initializations Type typeofThis = typeof(SpecularMaterial); ColorProperty = RegisterProperty("Color", typeof(Color), typeofThis, Colors.White, new PropertyChangedCallback(ColorPropertyChanged), null, /* isIndependentlyAnimated = */ false, /* coerceValueCallback */ null); BrushProperty = RegisterProperty("Brush", typeof(Brush), typeofThis, null, new PropertyChangedCallback(BrushPropertyChanged), null, /* isIndependentlyAnimated = */ false, /* coerceValueCallback */ null); SpecularPowerProperty = RegisterProperty("SpecularPower", typeof(double), typeofThis, 40.0, new PropertyChangedCallback(SpecularPowerPropertyChanged), null, /* isIndependentlyAnimated = */ false, /* coerceValueCallback */ null); } #endregion Constructors } }
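// ---------------------------------------------------------------------------
// Usage sketch (illustrative only): combining a SpecularMaterial with a diffuse
// base material on a 3D model. Assumes a WPF application referencing
// PresentationCore/PresentationFramework; the geometry parameter is hypothetical.
// ---------------------------------------------------------------------------
using System.Windows.Media;
using System.Windows.Media.Media3D;

public static class SpecularMaterialUsageSketch
{
    public static GeometryModel3D BuildShinyModel(MeshGeometry3D geometry)
    {
        var materials = new MaterialGroup();
        materials.Children.Add(new DiffuseMaterial(Brushes.SteelBlue));

        // Brush supplies the highlight colour/texture; SpecularPower controls how
        // tight the highlight is (default 40.0, higher values give a sharper spot).
        materials.Children.Add(new SpecularMaterial
        {
            Brush = Brushes.White,
            Color = Colors.White,
            SpecularPower = 85.0
        });

        return new GeometryModel3D { Geometry = geometry, Material = materials };
    }
}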
// // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. using System; using System.Collections.Generic; using System.Linq; using Hyak.Common; using Microsoft.WindowsAzure.Management.Compute.Models; namespace Microsoft.WindowsAzure.Management.Compute.Models { /// <summary> /// The detailed Get Hosted Service operation response. /// </summary> public partial class HostedServiceGetDetailedResponse : HostedServiceGetResponse { private IList<HostedServiceGetDetailedResponse.Deployment> _deployments; /// <summary> /// Optional. The deployments that exist in the cloud service. /// </summary> public IList<HostedServiceGetDetailedResponse.Deployment> Deployments { get { return this._deployments; } set { this._deployments = value; } } /// <summary> /// Initializes a new instance of the HostedServiceGetDetailedResponse /// class. /// </summary> public HostedServiceGetDetailedResponse() { this.Deployments = new LazyList<HostedServiceGetDetailedResponse.Deployment>(); } /// <summary> /// A deployment that exists in the cloud service. /// </summary> public partial class Deployment { private string _configuration; /// <summary> /// Optional. The configuration file of the deployment. /// </summary> public string Configuration { get { return this._configuration; } set { this._configuration = value; } } private DateTime _createdTime; /// <summary> /// Optional. The time that the deployment was created. /// </summary> public DateTime CreatedTime { get { return this._createdTime; } set { this._createdTime = value; } } private DeploymentSlot _deploymentSlot; /// <summary> /// Optional. The deployment environment in which this deployment /// is running. /// </summary> public DeploymentSlot DeploymentSlot { get { return this._deploymentSlot; } set { this._deploymentSlot = value; } } private DnsSettings _dnsSettings; /// <summary> /// Optional. The custom DNS settings that are specified for the /// deployment. /// </summary> public DnsSettings DnsSettings { get { return this._dnsSettings; } set { this._dnsSettings = value; } } private IDictionary<string, string> _extendedProperties; /// <summary> /// Optional. Represents the name of an extended cloud service /// property. Each extended property must have a defined name and /// a value. You can have a maximum of 50 extended property name /// and value pairs. The maximum length of the name element is 64 /// characters, only alphanumeric characters and underscores are /// valid in the name, and it must start with a letter. Attempting /// to use other characters, starting with a non-letter character, /// or entering a name that is identical to that of another /// extended property owned by the same service will result in a /// status code 400 (Bad Request) error. Each extended property /// value has a maximum length of 255 characters. 
/// </summary> public IDictionary<string, string> ExtendedProperties { get { return this._extendedProperties; } set { this._extendedProperties = value; } } private string _label; /// <summary> /// Optional. The user-supplied name of the deployment. This name /// can be used identify the deployment for your tracking purposes. /// </summary> public string Label { get { return this._label; } set { this._label = value; } } private string _lastModifiedTime; /// <summary> /// Optional. The last time that the deployment was modified. /// </summary> public string LastModifiedTime { get { return this._lastModifiedTime; } set { this._lastModifiedTime = value; } } private bool _locked; /// <summary> /// Optional. Indicates whether the deployment is locked for new /// write operations because an existing operation is updating the /// deployment. /// </summary> public bool Locked { get { return this._locked; } set { this._locked = value; } } private string _name; /// <summary> /// Optional. The name of the deployment. /// </summary> public string Name { get { return this._name; } set { this._name = value; } } private PersistentVMDowntime _persistentVMDowntime; /// <summary> /// Optional. Specifies information about when the virtual machine /// has been started and stopped. /// </summary> public PersistentVMDowntime PersistentVMDowntime { get { return this._persistentVMDowntime; } set { this._persistentVMDowntime = value; } } private string _privateId; /// <summary> /// Optional. The unique identifier for this deployment. /// </summary> public string PrivateId { get { return this._privateId; } set { this._privateId = value; } } private IList<RoleInstance> _roleInstances; /// <summary> /// Optional. The list of role instances in the deployment. /// </summary> public IList<RoleInstance> RoleInstances { get { return this._roleInstances; } set { this._roleInstances = value; } } private IList<Role> _roles; /// <summary> /// Optional. The list of roles in the deployment. /// </summary> public IList<Role> Roles { get { return this._roles; } set { this._roles = value; } } private bool _rollbackAllowed; /// <summary> /// Optional. Indicates whether the Rollback Update Or Upgrade /// operation is allowed at this time. /// </summary> public bool RollbackAllowed { get { return this._rollbackAllowed; } set { this._rollbackAllowed = value; } } private string _sdkVersion; /// <summary> /// Optional. The version of the Azure SDK that was used to /// generate the .cspkg that created this deployment. The first /// two numerical components of the returned version represent the /// version of the SDK used to create the package. /// </summary> public string SdkVersion { get { return this._sdkVersion; } set { this._sdkVersion = value; } } private DeploymentStatus _status; /// <summary> /// Optional. The status of the deployment. /// </summary> public DeploymentStatus Status { get { return this._status; } set { this._status = value; } } private int _upgradeDomainCount; /// <summary> /// Optional. The number of upgrade domains available to this cloud /// service. /// </summary> public int UpgradeDomainCount { get { return this._upgradeDomainCount; } set { this._upgradeDomainCount = value; } } private UpgradeStatus _upgradeStatus; /// <summary> /// Optional. Specifies information about an update occurring on /// the deployment. /// </summary> public UpgradeStatus UpgradeStatus { get { return this._upgradeStatus; } set { this._upgradeStatus = value; } } private Uri _uri; /// <summary> /// Optional. 
The URL used to access the hosted service. For /// example, if the service name is MyService you could /// access the service by calling: http://MyService.cloudapp.net. /// </summary> public Uri Uri { get { return this._uri; } set { this._uri = value; } } private IList<VirtualIPAddress> _virtualIPAddresses; /// <summary> /// Optional. The virtual IP addresses that are specified for the /// deployment. /// </summary> public IList<VirtualIPAddress> VirtualIPAddresses { get { return this._virtualIPAddresses; } set { this._virtualIPAddresses = value; } } private string _virtualNetworkName; /// <summary> /// Optional. The name of the Virtual Network that the virtual /// machine connects to. /// </summary> public string VirtualNetworkName { get { return this._virtualNetworkName; } set { this._virtualNetworkName = value; } } /// <summary> /// Initializes a new instance of the Deployment class. /// </summary> public Deployment() { this.ExtendedProperties = new LazyDictionary<string, string>(); this.RoleInstances = new LazyList<RoleInstance>(); this.Roles = new LazyList<Role>(); this.VirtualIPAddresses = new LazyList<VirtualIPAddress>(); } } } }
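// ---------------------------------------------------------------------------
// Usage sketch (illustrative only): walking the deployments returned by a
// detailed Get Hosted Service call. The response parameter stands in for a
// value already obtained from the compute management client; only properties
// defined on HostedServiceGetDetailedResponse above are used here.
// ---------------------------------------------------------------------------
using System;
using Microsoft.WindowsAzure.Management.Compute.Models;

public static class HostedServiceResponseSketch
{
    public static void PrintDeployments(HostedServiceGetDetailedResponse response)
    {
        foreach (HostedServiceGetDetailedResponse.Deployment deployment in response.Deployments)
        {
            Console.WriteLine("{0} ({1}) - status {2}, {3} role(s), {4} instance(s)",
                deployment.Name,
                deployment.DeploymentSlot,
                deployment.Status,
                deployment.Roles.Count,
                deployment.RoleInstances.Count);

            // Extended properties are free-form name/value pairs attached to the deployment.
            foreach (var property in deployment.ExtendedProperties)
                Console.WriteLine("  {0} = {1}", property.Key, property.Value);
        }
    }
}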