Mirror of https://github.com/nsnail/Ocelot.git, synced 2025-04-22 09:52:50 +08:00
Merge pull request #35 from TomPallister/develop
Merge load balancing reroutes
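As purely illustrative background for the merge title above (not taken from the commit itself): load balancing a reroute means picking one of several downstream hosts per request, most simply round-robin. The sketch below shows that idea in C#; it is not Ocelot's actual load balancer and the type name is hypothetical.

```csharp
using System.Collections.Generic;
using System.Threading;

// Hypothetical illustration of round-robin selection over a reroute's downstream hosts.
public class RoundRobinHostSelector
{
    private readonly IReadOnlyList<string> _downstreamHosts;
    private int _last = -1;

    public RoundRobinHostSelector(IReadOnlyList<string> downstreamHosts)
    {
        _downstreamHosts = downstreamHosts;
    }

    // Each call returns the next host in the rotation; Interlocked keeps it thread-safe.
    public string Next()
    {
        var index = (uint)Interlocked.Increment(ref _last) % (uint)_downstreamHosts.Count;
        return _downstreamHosts[(int)index];
    }
}
```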
commit 3e2c410626
@@ -1,9 +1,9 @@
root = true

[*]
end_of_line = crlf
insert_final_newline = true

[*.cs]
indent_style = space
indent_size = 4
506  .gitignore  vendored
@@ -1,254 +1,254 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.

# User-specific files
*.suo
*.user
*.userosscache
*.sln.docstates

# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs

# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
build/
bld/
[Bb]in/
[Oo]bj/
results/

# Visual Studio 2015 cache/options directory
.vs/
.vscode/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
site/wwwroot/

# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*

# NUNIT
*.VisualState.xml
TestResult.xml

# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c

# DNX
project.lock.json
artifacts/

*_i.c
*_p.c
*_i.h
*.ilk
*.meta
*.obj
*.pch
*.pdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc

# Chutzpah Test files
_Chutzpah*

# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile

# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap

# TFS 2012 Local Workspace
$tf/

# Guidance Automation Toolkit
*.gpState

# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user

# JustCode is a .NET coding add-in
.JustCode

# TeamCity is a build add-in
_TeamCity*

# DotCover is a Code Coverage Tool
*.dotCover

# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*

# MightyMoose
*.mm.*
AutoTest.Net/

# Web workbench (sass)
.sass-cache/

# Installshield output folder
[Ee]xpress/

# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html

# Click-Once directory
publish/

# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# TODO: Comment the next line if you want to checkin your web deploy settings
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj

# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/packages/*
# except build/, which is used as an MSBuild target.
!**/packages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/packages/repositories.config

# Microsoft Azure Build Output
csx/
*.build.csdef

# Microsoft Azure Emulator
ecf/
rcf/

# Microsoft Azure ApplicationInsights config file
ApplicationInsights.config

# Windows Store app package directory
AppPackages/
BundleArtifacts/

# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!*.[Cc]ache/

# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.pfx
!idsrv3test.pfx
*.publishsettings
node_modules/
orleans.codegen.cs

# RIA/Silverlight projects
Generated_Code/

# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm

# SQL Server files
*.mdf
*.ldf

# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings

# Microsoft Fakes
FakesAssemblies/

# GhostDoc plugin setting file
*.GhostDoc.xml

# Node.js Tools for Visual Studio
.ntvs_analysis.dat

# Visual Studio 6 build log
*.plg

# Visual Studio 6 workspace options file
*.opt

# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions

# Paket dependency manager
.paket/paket.exe

# FAKE - F# Make
.fake/
!tools/packages.config
tools/

# MacOS
.DS_Store

# Ocelot acceptance test config
test/Ocelot.AcceptanceTests/configuration.json

# Read the docstates
_build/
_static/
_templates/

# JetBrains Rider
.idea/
@@ -1,4 +1,4 @@
mode: ContinuousDelivery
branches: {}
ignore:
  sha: []
14  LICENSE.md
@@ -1,8 +1,8 @@
The MIT License (MIT)
Copyright (c) 2016 Tom Pallister

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
174  Ocelot.sln
@@ -1,87 +1,87 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.26730.15
MinimumVisualStudioVersion = 10.0.40219.1
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{5CFB79B7-C9DC-45A4-9A75-625D92471702}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{3FA7C349-DBE8-4904-A2CE-015B8869CE6C}"
    ProjectSection(SolutionItems) = preProject
        .gitignore = .gitignore
        build-and-release-unstable.ps1 = build-and-release-unstable.ps1
        build-and-run-tests.ps1 = build-and-run-tests.ps1
        build.cake = build.cake
        build.ps1 = build.ps1
        GitVersion.yml = GitVersion.yml
        global.json = global.json
        LICENSE.md = LICENSE.md
        ocelot.postman_collection.json = ocelot.postman_collection.json
        README.md = README.md
        release.ps1 = release.ps1
        ReleaseNotes.md = ReleaseNotes.md
        run-acceptance-tests.ps1 = run-acceptance-tests.ps1
        run-benchmarks.ps1 = run-benchmarks.ps1
        run-unit-tests.ps1 = run-unit-tests.ps1
        version.ps1 = version.ps1
    EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "test", "test", "{5B401523-36DA-4491-B73A-7590A26E420B}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Ocelot", "src\Ocelot\Ocelot.csproj", "{D6DF4206-0DBA-41D8-884D-C3E08290FDBB}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Ocelot.UnitTests", "test\Ocelot.UnitTests\Ocelot.UnitTests.csproj", "{54E84F1A-E525-4443-96EC-039CBD50C263}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Ocelot.AcceptanceTests", "test\Ocelot.AcceptanceTests\Ocelot.AcceptanceTests.csproj", "{F8C224FE-36BE-45F5-9B0E-666D8F4A9B52}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Ocelot.ManualTest", "test\Ocelot.ManualTest\Ocelot.ManualTest.csproj", "{02BBF4C5-517E-4157-8D21-4B8B9E118B7A}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Ocelot.Benchmarks", "test\Ocelot.Benchmarks\Ocelot.Benchmarks.csproj", "{106B49E6-95F6-4A7B-B81C-96BFA74AF035}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Ocelot.IntegrationTests", "test\Ocelot.IntegrationTests\Ocelot.IntegrationTests.csproj", "{D4575572-99CA-4530-8737-C296EDA326F8}"
EndProject
Global
    GlobalSection(SolutionConfigurationPlatforms) = preSolution
        Debug|Any CPU = Debug|Any CPU
        Release|Any CPU = Release|Any CPU
    EndGlobalSection
    GlobalSection(ProjectConfigurationPlatforms) = postSolution
        {D6DF4206-0DBA-41D8-884D-C3E08290FDBB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {D6DF4206-0DBA-41D8-884D-C3E08290FDBB}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {D6DF4206-0DBA-41D8-884D-C3E08290FDBB}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {D6DF4206-0DBA-41D8-884D-C3E08290FDBB}.Release|Any CPU.Build.0 = Release|Any CPU
        {54E84F1A-E525-4443-96EC-039CBD50C263}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {54E84F1A-E525-4443-96EC-039CBD50C263}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {54E84F1A-E525-4443-96EC-039CBD50C263}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {54E84F1A-E525-4443-96EC-039CBD50C263}.Release|Any CPU.Build.0 = Release|Any CPU
        {F8C224FE-36BE-45F5-9B0E-666D8F4A9B52}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {F8C224FE-36BE-45F5-9B0E-666D8F4A9B52}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {F8C224FE-36BE-45F5-9B0E-666D8F4A9B52}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {F8C224FE-36BE-45F5-9B0E-666D8F4A9B52}.Release|Any CPU.Build.0 = Release|Any CPU
        {02BBF4C5-517E-4157-8D21-4B8B9E118B7A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {02BBF4C5-517E-4157-8D21-4B8B9E118B7A}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {02BBF4C5-517E-4157-8D21-4B8B9E118B7A}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {02BBF4C5-517E-4157-8D21-4B8B9E118B7A}.Release|Any CPU.Build.0 = Release|Any CPU
        {106B49E6-95F6-4A7B-B81C-96BFA74AF035}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {106B49E6-95F6-4A7B-B81C-96BFA74AF035}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {106B49E6-95F6-4A7B-B81C-96BFA74AF035}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {106B49E6-95F6-4A7B-B81C-96BFA74AF035}.Release|Any CPU.Build.0 = Release|Any CPU
        {D4575572-99CA-4530-8737-C296EDA326F8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {D4575572-99CA-4530-8737-C296EDA326F8}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {D4575572-99CA-4530-8737-C296EDA326F8}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {D4575572-99CA-4530-8737-C296EDA326F8}.Release|Any CPU.Build.0 = Release|Any CPU
    EndGlobalSection
    GlobalSection(SolutionProperties) = preSolution
        HideSolutionNode = FALSE
    EndGlobalSection
    GlobalSection(NestedProjects) = preSolution
        {D6DF4206-0DBA-41D8-884D-C3E08290FDBB} = {5CFB79B7-C9DC-45A4-9A75-625D92471702}
        {54E84F1A-E525-4443-96EC-039CBD50C263} = {5B401523-36DA-4491-B73A-7590A26E420B}
        {F8C224FE-36BE-45F5-9B0E-666D8F4A9B52} = {5B401523-36DA-4491-B73A-7590A26E420B}
        {02BBF4C5-517E-4157-8D21-4B8B9E118B7A} = {5B401523-36DA-4491-B73A-7590A26E420B}
        {106B49E6-95F6-4A7B-B81C-96BFA74AF035} = {5B401523-36DA-4491-B73A-7590A26E420B}
        {D4575572-99CA-4530-8737-C296EDA326F8} = {5B401523-36DA-4491-B73A-7590A26E420B}
    EndGlobalSection
    GlobalSection(ExtensibilityGlobals) = postSolution
        SolutionGuid = {21476EFF-778A-4F97-8A56-D1AF1CEC0C48}
    EndGlobalSection
EndGlobal
130  README.md
@@ -1,65 +1,65 @@
# Ocelot

[](https://ci.appveyor.com/project/TomPallister/ocelot-fcfpb)

[](https://coveralls.io/github/TomPallister/Ocelot?branch=develop)

Ocelot is a .NET API Gateway. This project is aimed at people using .NET who are running a microservices / service-oriented architecture and need a unified point of entry into their system.

In particular I want easy integration with IdentityServer reference and bearer tokens.

We have been unable to find this in my current workplace without having to write our own JavaScript middlewares to handle the IdentityServer reference tokens. We would rather use the IdentityServer code that already exists to do this.

Ocelot is a bunch of middlewares in a specific order.

Ocelot manipulates the HttpRequest object into a state specified by its configuration until it reaches a request builder middleware, where it creates an HttpRequestMessage object which is used to make a request to a downstream service. The middleware that makes the request is the last thing in the Ocelot pipeline; it does not call the next middleware. The response from the downstream service is stored in a per-request scoped repository and retrieved as the request goes back up the Ocelot pipeline. There is a piece of middleware that maps the HttpResponseMessage onto the HttpResponse object, and that is what is returned to the client. That is basically it, with a bunch of other features.

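To make the pipeline description above a little more concrete, here is a minimal sketch, added for illustration only, of a terminal "request builder" style middleware in ASP.NET Core. It is not Ocelot's actual implementation; the extension method name and the downstream base address are hypothetical.

```csharp
using System;
using System.Net.Http;
using Microsoft.AspNetCore.Builder;

public static class DownstreamProxyPipeline
{
    // Hypothetical terminal step: builds an HttpRequestMessage from the incoming HttpRequest,
    // calls the downstream service, and maps the HttpResponseMessage back onto the HttpResponse.
    // It never calls a "next" delegate, mirroring the last middleware described above.
    public static void UseSimpleDownstreamProxy(this IApplicationBuilder app, Uri downstreamBaseUri)
    {
        var client = new HttpClient();

        app.Run(async context =>
        {
            var downstreamRequest = new HttpRequestMessage(
                new HttpMethod(context.Request.Method),
                new Uri(downstreamBaseUri, context.Request.Path.ToString() + context.Request.QueryString));

            using (var downstreamResponse = await client.SendAsync(downstreamRequest))
            {
                // Copy the downstream response onto the response returned to the client.
                context.Response.StatusCode = (int)downstreamResponse.StatusCode;
                await downstreamResponse.Content.CopyToAsync(context.Response.Body);
            }
        });
    }
}
```
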
## How to install

Ocelot is designed to work with ASP.NET Core only and is currently built to netcoreapp2.0. [This](https://docs.microsoft.com/en-us/dotnet/articles/standard/library) documentation may prove helpful when working out whether Ocelot would be suitable for you.

Install Ocelot and its dependencies using NuGet:

`Install-Package Ocelot`

All versions can be found [here](https://www.nuget.org/packages/Ocelot/).

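Below is a minimal host wiring sketch, added for illustration. It assumes the AddOcelot/UseOcelot extension methods and the configuration.json reroute file described in the Ocelot documentation; exact method signatures vary between Ocelot versions, so treat it as a sketch rather than copy-paste setup.

```csharp
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Ocelot.DependencyInjection;
using Ocelot.Middleware;

public class Program
{
    public static void Main(string[] args)
    {
        new WebHostBuilder()
            .UseKestrel()
            // Reroute configuration is expected in configuration.json next to the app.
            .ConfigureAppConfiguration((context, config) => config.AddJsonFile("configuration.json"))
            .ConfigureServices((context, services) => services.AddOcelot(context.Configuration))
            .Configure(app => app.UseOcelot().Wait())
            .Build()
            .Run();
    }
}
```
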
## Documentation

Please click [here](http://ocelot.readthedocs.io/en/latest/) for the Ocelot documentation. This includes lots of information and will be helpful if you want to understand the features Ocelot currently offers.

## Coming up

You can see what we are working on [here](https://github.com/TomPallister/Ocelot/projects/1).

## Contributing

Pull requests, issues and commentary welcome! There is no special process: just open a pull request, or get in touch via Gitter or by creating an issue.

## Things that are currently annoying me

+ The base OcelotMiddleware lets you access things that are going to be null and doesn't check that the response is OK. I think the fact that you can even call stuff that isn't available is annoying, let alone it being null.

[ Get more details at **codescene.io**.](https://codescene.io/projects/697/jobs/latest-successful/results)

@@ -1,2 +1,2 @@
./build.ps1 -target BuildAndReleaseUnstable
exit $LASTEXITCODE
@@ -1,2 +1,2 @@
./build.ps1 -target RunTests
exit $LASTEXITCODE
952  build.cake
@@ -1,476 +1,476 @@
#tool "nuget:?package=GitVersion.CommandLine"
#tool "nuget:?package=GitReleaseNotes"
#addin nuget:?package=Cake.Json
#addin nuget:?package=Newtonsoft.Json&version=9.0.1
#tool "nuget:?package=OpenCover"
#tool "nuget:?package=ReportGenerator"
#tool coveralls.net
#addin Cake.Coveralls

// compile
var compileConfig = Argument("configuration", "Release");
var slnFile = "./Ocelot.sln";

// build artifacts
var artifactsDir = Directory("artifacts");

// unit testing
var artifactsForUnitTestsDir = artifactsDir + Directory("UnitTests");
var unitTestAssemblies = @"./test/Ocelot.UnitTests/Ocelot.UnitTests.csproj";
var minCodeCoverage = 76.4d;
var coverallsRepoToken = "coveralls-repo-token-ocelot";
var coverallsRepo = "https://coveralls.io/github/TomPallister/Ocelot";

// acceptance testing
var artifactsForAcceptanceTestsDir = artifactsDir + Directory("AcceptanceTests");
var acceptanceTestAssemblies = @"./test/Ocelot.AcceptanceTests/Ocelot.AcceptanceTests.csproj";

// integration testing
var artifactsForIntegrationTestsDir = artifactsDir + Directory("IntegrationTests");
var integrationTestAssemblies = @"./test/Ocelot.IntegrationTests/Ocelot.IntegrationTests.csproj";

// benchmark testing
var artifactsForBenchmarkTestsDir = artifactsDir + Directory("BenchmarkTests");
var benchmarkTestAssemblies = @"./test/Ocelot.Benchmarks";

// packaging
var packagesDir = artifactsDir + Directory("Packages");
var releaseNotesFile = packagesDir + File("releasenotes.md");
var artifactsFile = packagesDir + File("artifacts.txt");

// unstable releases
var nugetFeedUnstableKey = EnvironmentVariable("nuget-apikey-unstable");
var nugetFeedUnstableUploadUrl = "https://www.nuget.org/api/v2/package";
var nugetFeedUnstableSymbolsUploadUrl = "https://www.nuget.org/api/v2/package";

// stable releases
var tagsUrl = "https://api.github.com/repos/tompallister/ocelot/releases/tags/";
var nugetFeedStableKey = EnvironmentVariable("nuget-apikey-stable");
var nugetFeedStableUploadUrl = "https://www.nuget.org/api/v2/package";
var nugetFeedStableSymbolsUploadUrl = "https://www.nuget.org/api/v2/package";

// internal build variables - don't change these.
var releaseTag = "";
string committedVersion = "0.0.0-dev";
var buildVersion = committedVersion;
GitVersion versioning = null;
var nugetFeedUnstableBranchFilter = "^(develop)$|^(PullRequest/)";

var target = Argument("target", "Default");


Information("target is " + target);
Information("Build configuration is " + compileConfig);

Task("Default")
    .IsDependentOn("Build");

Task("Build")
    .IsDependentOn("RunTests")
    .IsDependentOn("CreatePackages");

Task("BuildAndReleaseUnstable")
    .IsDependentOn("Build")
    .IsDependentOn("ReleasePackagesToUnstableFeed");

Task("Clean")
    .Does(() =>
    {
        if (DirectoryExists(artifactsDir))
        {
            DeleteDirectory(artifactsDir, recursive:true);
        }
        CreateDirectory(artifactsDir);
    });

Task("Version")
    .Does(() =>
    {
        versioning = GetNuGetVersionForCommit();
        var nugetVersion = versioning.NuGetVersion;
        Information("SemVer version number: " + nugetVersion);

        if (AppVeyor.IsRunningOnAppVeyor)
        {
            Information("Persisting version number...");
            PersistVersion(committedVersion, nugetVersion);
            buildVersion = nugetVersion;
        }
        else
        {
            Information("We are not running on build server, so we won't persist the version number.");
        }
    });

Task("Compile")
    .IsDependentOn("Clean")
    .IsDependentOn("Version")
    .Does(() =>
    {
        var settings = new DotNetCoreBuildSettings
        {
            Configuration = compileConfig,
        };

        DotNetCoreBuild(slnFile, settings);
    });

Task("RunUnitTests")
    .IsDependentOn("Compile")
    .Does(() =>
    {
        if (IsRunningOnWindows())
        {
            var coverageSummaryFile = artifactsForUnitTestsDir + File("coverage.xml");

            EnsureDirectoryExists(artifactsForUnitTestsDir);

            OpenCover(tool =>
                {
                    tool.DotNetCoreTest(unitTestAssemblies);
                },
                new FilePath(coverageSummaryFile),
                new OpenCoverSettings()
                {
                    Register = "user",
                    ArgumentCustomization = args => args.Append(@"-oldstyle -returntargetcode -excludebyattribute:*.ExcludeFromCoverage*")
                }
                .WithFilter("+[Ocelot*]*")
                .WithFilter("-[xunit*]*")
                .WithFilter("-[Ocelot*Tests]*")
            );

            ReportGenerator(coverageSummaryFile, artifactsForUnitTestsDir);

            if (AppVeyor.IsRunningOnAppVeyor)
            {
                var repoToken = EnvironmentVariable(coverallsRepoToken);
                if (string.IsNullOrEmpty(repoToken))
                {
                    throw new Exception(string.Format("Coveralls repo token not found. Set environment variable '{0}'", coverallsRepoToken));
                }

                Information(string.Format("Uploading test coverage to {0}", coverallsRepo));
                CoverallsNet(coverageSummaryFile, CoverallsNetReportType.OpenCover, new CoverallsNetSettings()
                {
                    RepoToken = repoToken
                });
            }
            else
            {
                Information("We are not running on the build server so we won't publish the coverage report to coveralls.io");
            }

            var sequenceCoverage = XmlPeek(coverageSummaryFile, "//CoverageSession/Summary/@sequenceCoverage");
            var branchCoverage = XmlPeek(coverageSummaryFile, "//CoverageSession/Summary/@branchCoverage");

            Information("Sequence Coverage: " + sequenceCoverage);

            if (double.Parse(sequenceCoverage) < minCodeCoverage)
            {
                var whereToCheck = !AppVeyor.IsRunningOnAppVeyor ? coverallsRepo : artifactsForUnitTestsDir;
                throw new Exception(string.Format("Code coverage fell below the threshold of {0}%. You can find the code coverage report at {1}", minCodeCoverage, whereToCheck));
            };
        }
        else
        {
            var settings = new DotNetCoreTestSettings
            {
                Configuration = compileConfig,
            };

            EnsureDirectoryExists(artifactsForUnitTestsDir);
            DotNetCoreTest(unitTestAssemblies, settings);
        }
    });

Task("RunAcceptanceTests")
    .IsDependentOn("Compile")
    .Does(() =>
    {
        var settings = new DotNetCoreTestSettings
        {
            Configuration = compileConfig,
            ArgumentCustomization = args => args
                .Append("--no-restore")
                .Append("--no-build")
        };

        EnsureDirectoryExists(artifactsForAcceptanceTestsDir);
        DotNetCoreTest(acceptanceTestAssemblies, settings);
    });

Task("RunIntegrationTests")
    .IsDependentOn("Compile")
    .Does(() =>
    {
        var settings = new DotNetCoreTestSettings
        {
            Configuration = compileConfig,
            ArgumentCustomization = args => args
                .Append("--no-restore")
                .Append("--no-build")
        };

        EnsureDirectoryExists(artifactsForIntegrationTestsDir);
        DotNetCoreTest(integrationTestAssemblies, settings);
    });

Task("RunTests")
    .IsDependentOn("RunUnitTests")
    .IsDependentOn("RunAcceptanceTests")
    .IsDependentOn("RunIntegrationTests");

Task("CreatePackages")
    .IsDependentOn("Compile")
    .Does(() =>
    {
        EnsureDirectoryExists(packagesDir);
        CopyFiles("./src/**/Ocelot.*.nupkg", packagesDir);

        //GenerateReleaseNotes(releaseNotesFile);

        System.IO.File.WriteAllLines(artifactsFile, new[]{
            "nuget:Ocelot." + buildVersion + ".nupkg",
            //"releaseNotes:releasenotes.md"
        });

        if (AppVeyor.IsRunningOnAppVeyor)
        {
            var path = packagesDir.ToString() + @"/**/*";

            foreach (var file in GetFiles(path))
            {
                AppVeyor.UploadArtifact(file.FullPath);
            }
        }
    });

Task("ReleasePackagesToUnstableFeed")
    .IsDependentOn("CreatePackages")
    .Does(() =>
    {
        if (ShouldPublishToUnstableFeed(nugetFeedUnstableBranchFilter, versioning.BranchName))
        {
            PublishPackages(packagesDir, artifactsFile, nugetFeedUnstableKey, nugetFeedUnstableUploadUrl, nugetFeedUnstableSymbolsUploadUrl);
        }
    });

Task("EnsureStableReleaseRequirements")
    .Does(() =>
    {
        Information("Check if stable release...");

        if (!AppVeyor.IsRunningOnAppVeyor)
        {
            throw new Exception("Stable release should happen via appveyor");
        }

        Information("Running on AppVeyor...");

        Information("IsTag = " + AppVeyor.Environment.Repository.Tag.IsTag);

        Information("Name = " + AppVeyor.Environment.Repository.Tag.Name);

        var isTag =
            AppVeyor.Environment.Repository.Tag.IsTag &&
            !string.IsNullOrWhiteSpace(AppVeyor.Environment.Repository.Tag.Name);

        if (!isTag)
        {
            throw new Exception("Stable release should happen from a published GitHub release");
        }

        Information("Release is stable...");
    });

Task("UpdateVersionInfo")
    .IsDependentOn("EnsureStableReleaseRequirements")
    .Does(() =>
    {
        releaseTag = AppVeyor.Environment.Repository.Tag.Name;
        AppVeyor.UpdateBuildVersion(releaseTag);
    });

Task("DownloadGitHubReleaseArtifacts")
    .IsDependentOn("UpdateVersionInfo")
    .Does(() =>
    {
        try
        {
            Information("DownloadGitHubReleaseArtifacts");

            EnsureDirectoryExists(packagesDir);

            Information("Directory exists...");

            var releaseUrl = tagsUrl + releaseTag;

            Information("Release url " + releaseUrl);

            //var releaseJson = Newtonsoft.Json.Linq.JObject.Parse(GetResource(releaseUrl));

            var assets_url = Newtonsoft.Json.Linq.JObject.Parse(GetResource(releaseUrl))
                .GetValue("assets_url")
                .Value<string>();

            Information("Assets url " + assets_url);

            var assets = GetResource(assets_url);

            Information("Assets " + assets_url);

            foreach (var asset in Newtonsoft.Json.JsonConvert.DeserializeObject<JArray>(assets))
            {
                Information("In the loop..");

                var file = packagesDir + File(asset.Value<string>("name"));

                Information("Downloading " + file);

                DownloadFile(asset.Value<string>("browser_download_url"), file);
            }

            Information("Out of the loop...");
        }
        catch (Exception exception)
        {
            Information("There was an exception " + exception);
            throw;
        }
    });

Task("ReleasePackagesToStableFeed")
    .IsDependentOn("DownloadGitHubReleaseArtifacts")
    .Does(() =>
    {
        PublishPackages(packagesDir, artifactsFile, nugetFeedStableKey, nugetFeedStableUploadUrl, nugetFeedStableSymbolsUploadUrl);
    });

Task("Release")
    .IsDependentOn("ReleasePackagesToStableFeed");

RunTarget(target);

/// Gets unique nuget version for this commit
private GitVersion GetNuGetVersionForCommit()
{
    GitVersion(new GitVersionSettings{
        UpdateAssemblyInfo = false,
        OutputType = GitVersionOutput.BuildServer
    });

    return GitVersion(new GitVersionSettings{ OutputType = GitVersionOutput.Json });
}

/// Updates project version in all of our projects
private void PersistVersion(string committedVersion, string newVersion)
{
    Information(string.Format("We'll search all csproj files for {0} and replace with {1}...", committedVersion, newVersion));

    var projectFiles = GetFiles("./**/*.csproj");

    foreach (var projectFile in projectFiles)
    {
        var file = projectFile.ToString();

        Information(string.Format("Updating {0}...", file));

        var updatedProjectFile = System.IO.File.ReadAllText(file)
            .Replace(committedVersion, newVersion);

        System.IO.File.WriteAllText(file, updatedProjectFile);
    }
}

/// Generates release notes based on issues closed in GitHub since the last release
private void GenerateReleaseNotes(ConvertableFilePath file)
{
    if (!IsRunningOnWindows())
    {
        Warning("We are not running on Windows so we cannot generate release notes.");
        return;
    }

    Information("Generating release notes at " + file);

    var releaseNotesExitCode = StartProcess(
        @"tools/GitReleaseNotes/tools/gitreleasenotes.exe",
        new ProcessSettings { Arguments = ". /o " + file });

    if (string.IsNullOrEmpty(System.IO.File.ReadAllText(file)))
    {
        System.IO.File.WriteAllText(file, "No issues closed since last release");
    }

    if (releaseNotesExitCode != 0)
    {
        throw new Exception("Failed to generate release notes");
    }
}

/// Publishes code and symbols packages to nuget feed, based on contents of artifacts file
private void PublishPackages(ConvertableDirectoryPath packagesDir, ConvertableFilePath artifactsFile, string feedApiKey, string codeFeedUrl, string symbolFeedUrl)
{
    var artifacts = System.IO.File
        .ReadAllLines(artifactsFile)
        .Select(l => l.Split(':'))
        .ToDictionary(v => v[0], v => v[1]);

    var codePackage = packagesDir + File(artifacts["nuget"]);

    Information("Pushing package " + codePackage);

    NuGetPush(
        codePackage,
        new NuGetPushSettings {
            ApiKey = feedApiKey,
            Source = codeFeedUrl
        });
}

/// Gets the resource from the specified url
private string GetResource(string url)
{
    try
    {
        Information("Getting resource from " + url);

        var assetsRequest = System.Net.WebRequest.CreateHttp(url);
        assetsRequest.Method = "GET";
        assetsRequest.Accept = "application/vnd.github.v3+json";
        assetsRequest.UserAgent = "BuildScript";

        using (var assetsResponse = assetsRequest.GetResponse())
        {
            var assetsStream = assetsResponse.GetResponseStream();
            var assetsReader = new StreamReader(assetsStream);
            var response = assetsReader.ReadToEnd();

            Information("Response is " + response);

            return response;
        }
    }
    catch (Exception exception)
    {
        Information("There was an exception " + exception);
        throw;
    }
}

private bool ShouldPublishToUnstableFeed(string filter, string branchName)
{
    var regex = new System.Text.RegularExpressions.Regex(filter);
    var publish = regex.IsMatch(branchName);
    if (publish)
    {
        Information("Branch " + branchName + " will be published to the unstable feed");
    }
    else
    {
        Information("Branch " + branchName + " will not be published to the unstable feed");
    }
    return publish;
}
#tool "nuget:?package=GitVersion.CommandLine"
|
||||
#tool "nuget:?package=GitReleaseNotes"
|
||||
#addin nuget:?package=Cake.Json
|
||||
#addin nuget:?package=Newtonsoft.Json&version=9.0.1
|
||||
#tool "nuget:?package=OpenCover"
|
||||
#tool "nuget:?package=ReportGenerator"
|
||||
#tool coveralls.net
|
||||
#addin Cake.Coveralls
|
||||
|
||||
// compile
|
||||
var compileConfig = Argument("configuration", "Release");
|
||||
var slnFile = "./Ocelot.sln";
|
||||
|
||||
// build artifacts
|
||||
var artifactsDir = Directory("artifacts");
|
||||
|
||||
// unit testing
|
||||
var artifactsForUnitTestsDir = artifactsDir + Directory("UnitTests");
|
||||
var unitTestAssemblies = @"./test/Ocelot.UnitTests/Ocelot.UnitTests.csproj";
|
||||
var minCodeCoverage = 76.4d;
|
||||
var coverallsRepoToken = "coveralls-repo-token-ocelot";
|
||||
var coverallsRepo = "https://coveralls.io/github/TomPallister/Ocelot";
|
||||
|
||||
// acceptance testing
|
||||
var artifactsForAcceptanceTestsDir = artifactsDir + Directory("AcceptanceTests");
|
||||
var acceptanceTestAssemblies = @"./test/Ocelot.AcceptanceTests/Ocelot.AcceptanceTests.csproj";
|
||||
|
||||
// integration testing
|
||||
var artifactsForIntegrationTestsDir = artifactsDir + Directory("IntegrationTests");
|
||||
var integrationTestAssemblies = @"./test/Ocelot.IntegrationTests/Ocelot.IntegrationTests.csproj";
|
||||
|
||||
// benchmark testing
|
||||
var artifactsForBenchmarkTestsDir = artifactsDir + Directory("BenchmarkTests");
|
||||
var benchmarkTestAssemblies = @"./test/Ocelot.Benchmarks";
|
||||
|
||||
// packaging
|
||||
var packagesDir = artifactsDir + Directory("Packages");
|
||||
var releaseNotesFile = packagesDir + File("releasenotes.md");
|
||||
var artifactsFile = packagesDir + File("artifacts.txt");
|
||||
|
||||
// unstable releases
|
||||
var nugetFeedUnstableKey = EnvironmentVariable("nuget-apikey-unstable");
|
||||
var nugetFeedUnstableUploadUrl = "https://www.nuget.org/api/v2/package";
|
||||
var nugetFeedUnstableSymbolsUploadUrl = "https://www.nuget.org/api/v2/package";
|
||||
|
||||
// stable releases
|
||||
var tagsUrl = "https://api.github.com/repos/tompallister/ocelot/releases/tags/";
|
||||
var nugetFeedStableKey = EnvironmentVariable("nuget-apikey-stable");
|
||||
var nugetFeedStableUploadUrl = "https://www.nuget.org/api/v2/package";
|
||||
var nugetFeedStableSymbolsUploadUrl = "https://www.nuget.org/api/v2/package";
|
||||
|
||||
// internal build variables - don't change these.
|
||||
var releaseTag = "";
|
||||
string committedVersion = "0.0.0-dev";
|
||||
var buildVersion = committedVersion;
|
||||
GitVersion versioning = null;
|
||||
var nugetFeedUnstableBranchFilter = "^(develop)$|^(PullRequest/)";
|
||||
|
||||
var target = Argument("target", "Default");
|
||||
|
||||
|
||||
Information("target is " +target);
|
||||
Information("Build configuration is " + compileConfig);
|
||||
|
||||
Task("Default")
|
||||
.IsDependentOn("Build");
|
||||
|
||||
Task("Build")
|
||||
.IsDependentOn("RunTests")
|
||||
.IsDependentOn("CreatePackages");
|
||||
|
||||
Task("BuildAndReleaseUnstable")
|
||||
.IsDependentOn("Build")
|
||||
.IsDependentOn("ReleasePackagesToUnstableFeed");
|
||||
|
||||
Task("Clean")
|
||||
.Does(() =>
|
||||
{
|
||||
if (DirectoryExists(artifactsDir))
|
||||
{
|
||||
DeleteDirectory(artifactsDir, recursive:true);
|
||||
}
|
||||
CreateDirectory(artifactsDir);
|
||||
});
|
||||
|
||||
Task("Version")
|
||||
.Does(() =>
|
||||
{
|
||||
versioning = GetNuGetVersionForCommit();
|
||||
var nugetVersion = versioning.NuGetVersion;
|
||||
Information("SemVer version number: " + nugetVersion);
|
||||
|
||||
if (AppVeyor.IsRunningOnAppVeyor)
|
||||
{
|
||||
Information("Persisting version number...");
|
||||
PersistVersion(committedVersion, nugetVersion);
|
||||
buildVersion = nugetVersion;
|
||||
}
|
||||
else
|
||||
{
|
||||
Information("We are not running on build server, so we won't persist the version number.");
|
||||
}
|
||||
});
|
||||
|
||||
Task("Compile")
|
||||
.IsDependentOn("Clean")
|
||||
.IsDependentOn("Version")
|
||||
.Does(() =>
|
||||
{
|
||||
var settings = new DotNetCoreBuildSettings
|
||||
{
|
||||
Configuration = compileConfig,
|
||||
};
|
||||
|
||||
DotNetCoreBuild(slnFile, settings);
|
||||
});
|
||||
|
||||
Task("RunUnitTests")
|
||||
.IsDependentOn("Compile")
|
||||
.Does(() =>
|
||||
{
|
||||
if (IsRunningOnWindows())
|
||||
{
|
||||
var coverageSummaryFile = artifactsForUnitTestsDir + File("coverage.xml");
|
||||
|
||||
EnsureDirectoryExists(artifactsForUnitTestsDir);
|
||||
|
||||
OpenCover(tool =>
|
||||
{
|
||||
tool.DotNetCoreTest(unitTestAssemblies);
|
||||
},
|
||||
new FilePath(coverageSummaryFile),
|
||||
new OpenCoverSettings()
|
||||
{
|
||||
Register="user",
|
||||
ArgumentCustomization=args=>args.Append(@"-oldstyle -returntargetcode -excludebyattribute:*.ExcludeFromCoverage*")
|
||||
}
|
||||
.WithFilter("+[Ocelot*]*")
|
||||
.WithFilter("-[xunit*]*")
|
||||
.WithFilter("-[Ocelot*Tests]*")
|
||||
);
|
||||
|
||||
ReportGenerator(coverageSummaryFile, artifactsForUnitTestsDir);
|
||||
|
||||
if (AppVeyor.IsRunningOnAppVeyor)
|
||||
{
|
||||
var repoToken = EnvironmentVariable(coverallsRepoToken);
|
||||
if (string.IsNullOrEmpty(repoToken))
|
||||
{
|
||||
throw new Exception(string.Format("Coveralls repo token not found. Set environment variable '{0}'", coverallsRepoToken));
|
||||
}
|
||||
|
||||
Information(string.Format("Uploading test coverage to {0}", coverallsRepo));
|
||||
CoverallsNet(coverageSummaryFile, CoverallsNetReportType.OpenCover, new CoverallsNetSettings()
|
||||
{
|
||||
RepoToken = repoToken
|
||||
});
|
||||
}
|
||||
else
|
||||
{
|
||||
Information("We are not running on the build server so we won't publish the coverage report to coveralls.io");
|
||||
}
|
||||
|
||||
var sequenceCoverage = XmlPeek(coverageSummaryFile, "//CoverageSession/Summary/@sequenceCoverage");
|
||||
var branchCoverage = XmlPeek(coverageSummaryFile, "//CoverageSession/Summary/@branchCoverage");
|
||||
|
||||
Information("Sequence Coverage: " + sequenceCoverage);
|
||||
|
||||
if(double.Parse(sequenceCoverage) < minCodeCoverage)
|
||||
{
|
||||
var whereToCheck = !AppVeyor.IsRunningOnAppVeyor ? coverallsRepo : artifactsForUnitTestsDir;
|
||||
throw new Exception(string.Format("Code coverage fell below the threshold of {0}%. You can find the code coverage report at {1}", minCodeCoverage, whereToCheck));
|
||||
};
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
var settings = new DotNetCoreTestSettings
|
||||
{
|
||||
Configuration = compileConfig,
|
||||
};
|
||||
|
||||
EnsureDirectoryExists(artifactsForUnitTestsDir);
|
||||
DotNetCoreTest(unitTestAssemblies, settings);
|
||||
}
|
||||
});
|
||||
|
||||
Task("RunAcceptanceTests")
|
||||
.IsDependentOn("Compile")
|
||||
.Does(() =>
|
||||
{
|
||||
var settings = new DotNetCoreTestSettings
|
||||
{
|
||||
Configuration = compileConfig,
|
||||
ArgumentCustomization = args => args
|
||||
.Append("--no-restore")
|
||||
.Append("--no-build")
|
||||
};
|
||||
|
||||
EnsureDirectoryExists(artifactsForAcceptanceTestsDir);
|
||||
DotNetCoreTest(acceptanceTestAssemblies, settings);
|
||||
});
|
||||
|
||||
Task("RunIntegrationTests")
|
||||
.IsDependentOn("Compile")
|
||||
.Does(() =>
|
||||
{
|
||||
var settings = new DotNetCoreTestSettings
|
||||
{
|
||||
Configuration = compileConfig,
|
||||
ArgumentCustomization = args => args
|
||||
.Append("--no-restore")
|
||||
.Append("--no-build")
|
||||
};
|
||||
|
||||
EnsureDirectoryExists(artifactsForIntegrationTestsDir);
|
||||
DotNetCoreTest(integrationTestAssemblies, settings);
|
||||
});
|
||||
|
||||
Task("RunTests")
|
||||
.IsDependentOn("RunUnitTests")
|
||||
.IsDependentOn("RunAcceptanceTests")
|
||||
.IsDependentOn("RunIntegrationTests");
|
||||
|
||||
Task("CreatePackages")
|
||||
.IsDependentOn("Compile")
|
||||
.Does(() =>
|
||||
{
|
||||
EnsureDirectoryExists(packagesDir);
|
||||
CopyFiles("./src/**/Ocelot.*.nupkg", packagesDir);
|
||||
|
||||
//GenerateReleaseNotes(releaseNotesFile);
|
||||
|
||||
System.IO.File.WriteAllLines(artifactsFile, new[]{
|
||||
"nuget:Ocelot." + buildVersion + ".nupkg",
|
||||
//"releaseNotes:releasenotes.md"
|
||||
});
|
||||
|
||||
if (AppVeyor.IsRunningOnAppVeyor)
|
||||
{
|
||||
var path = packagesDir.ToString() + @"/**/*";
|
||||
|
||||
foreach (var file in GetFiles(path))
|
||||
{
|
||||
AppVeyor.UploadArtifact(file.FullPath);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Task("ReleasePackagesToUnstableFeed")
|
||||
.IsDependentOn("CreatePackages")
|
||||
.Does(() =>
|
||||
{
|
||||
if (ShouldPublishToUnstableFeed(nugetFeedUnstableBranchFilter, versioning.BranchName))
|
||||
{
|
||||
PublishPackages(packagesDir, artifactsFile, nugetFeedUnstableKey, nugetFeedUnstableUploadUrl, nugetFeedUnstableSymbolsUploadUrl);
|
||||
}
|
||||
});
|
||||
|
||||
Task("EnsureStableReleaseRequirements")
|
||||
.Does(() =>
|
||||
{
|
||||
Information("Check if stable release...");
|
||||
|
||||
if (!AppVeyor.IsRunningOnAppVeyor)
|
||||
{
|
||||
throw new Exception("Stable release should happen via appveyor");
|
||||
}
|
||||
|
||||
Information("Running on AppVeyor...");
|
||||
|
||||
Information("IsTag = " + AppVeyor.Environment.Repository.Tag.IsTag);
|
||||
|
||||
Information("Name = " + AppVeyor.Environment.Repository.Tag.Name);
|
||||
|
||||
var isTag =
|
||||
AppVeyor.Environment.Repository.Tag.IsTag &&
|
||||
!string.IsNullOrWhiteSpace(AppVeyor.Environment.Repository.Tag.Name);
|
||||
|
||||
if (!isTag)
|
||||
{
|
||||
throw new Exception("Stable release should happen from a published GitHub release");
|
||||
}
|
||||
|
||||
Information("Release is stable...");
|
||||
});
|
||||
|
||||
Task("UpdateVersionInfo")
|
||||
.IsDependentOn("EnsureStableReleaseRequirements")
|
||||
.Does(() =>
|
||||
{
|
||||
releaseTag = AppVeyor.Environment.Repository.Tag.Name;
|
||||
AppVeyor.UpdateBuildVersion(releaseTag);
|
||||
});
|
||||
|
||||
Task("DownloadGitHubReleaseArtifacts")
|
||||
.IsDependentOn("UpdateVersionInfo")
|
||||
.Does(() =>
|
||||
{
|
||||
try
|
||||
{
|
||||
Information("DownloadGitHubReleaseArtifacts");
|
||||
|
||||
EnsureDirectoryExists(packagesDir);
|
||||
|
||||
Information("Directory exists...");
|
||||
|
||||
var releaseUrl = tagsUrl + releaseTag;
|
||||
|
||||
Information("Release url " + releaseUrl);
|
||||
|
||||
//var releaseJson = Newtonsoft.Json.Linq.JObject.Parse(GetResource(releaseUrl));
|
||||
|
||||
var assets_url = Newtonsoft.Json.Linq.JObject.Parse(GetResource(releaseUrl))
|
||||
.GetValue("assets_url")
|
||||
.Value<string>();
|
||||
|
||||
Information("Assets url " + assets_url);
|
||||
|
||||
var assets = GetResource(assets_url);
|
||||
|
||||
Information("Assets " + assets_url);
|
||||
|
||||
foreach(var asset in Newtonsoft.Json.JsonConvert.DeserializeObject<JArray>(assets))
|
||||
{
|
||||
Information("In the loop..");
|
||||
|
||||
var file = packagesDir + File(asset.Value<string>("name"));
|
||||
|
||||
Information("Downloading " + file);
|
||||
|
||||
DownloadFile(asset.Value<string>("browser_download_url"), file);
|
||||
}
|
||||
|
||||
Information("Out of the loop...");
|
||||
}
|
||||
catch(Exception exception)
|
||||
{
|
||||
Information("There was an exception " + exception);
|
||||
throw;
|
||||
}
|
||||
});
|
||||
|
||||
Task("ReleasePackagesToStableFeed")
|
||||
.IsDependentOn("DownloadGitHubReleaseArtifacts")
|
||||
.Does(() =>
|
||||
{
|
||||
PublishPackages(packagesDir, artifactsFile, nugetFeedStableKey, nugetFeedStableUploadUrl, nugetFeedStableSymbolsUploadUrl);
|
||||
});
|
||||
|
||||
Task("Release")
|
||||
.IsDependentOn("ReleasePackagesToStableFeed");
|
||||
|
||||
RunTarget(target);
|
||||
|
||||
/// Gets nuique nuget version for this commit
|
||||
private GitVersion GetNuGetVersionForCommit()
|
||||
{
|
||||
GitVersion(new GitVersionSettings{
|
||||
UpdateAssemblyInfo = false,
|
||||
OutputType = GitVersionOutput.BuildServer
|
||||
});
|
||||
|
||||
return GitVersion(new GitVersionSettings{ OutputType = GitVersionOutput.Json });
|
||||
}
|
||||
|
||||
/// Updates project version in all of our projects
|
||||
private void PersistVersion(string committedVersion, string newVersion)
|
||||
{
|
||||
Information(string.Format("We'll search all csproj files for {0} and replace with {1}...", committedVersion, newVersion));
|
||||
|
||||
var projectFiles = GetFiles("./**/*.csproj");
|
||||
|
||||
foreach(var projectFile in projectFiles)
|
||||
{
|
||||
var file = projectFile.ToString();
|
||||
|
||||
Information(string.Format("Updating {0}...", file));
|
||||
|
||||
var updatedProjectFile = System.IO.File.ReadAllText(file)
|
||||
.Replace(committedVersion, newVersion);
|
||||
|
||||
System.IO.File.WriteAllText(file, updatedProjectFile);
|
||||
}
|
||||
}
|
||||
|
||||
/// generates release notes based on issues closed in GitHub since the last release
|
||||
private void GenerateReleaseNotes(ConvertableFilePath file)
|
||||
{
|
||||
if(!IsRunningOnWindows())
|
||||
{
|
||||
Warning("We are not running on Windows so we cannot generate release notes.");
|
||||
return;
|
||||
}
|
||||
|
||||
Information("Generating release notes at " + file);
|
||||
|
||||
var releaseNotesExitCode = StartProcess(
|
||||
@"tools/GitReleaseNotes/tools/gitreleasenotes.exe",
|
||||
new ProcessSettings { Arguments = ". /o " + file });
|
||||
|
||||
if (string.IsNullOrEmpty(System.IO.File.ReadAllText(file)))
|
||||
{
|
||||
System.IO.File.WriteAllText(file, "No issues closed since last release");
|
||||
}
|
||||
|
||||
if (releaseNotesExitCode != 0)
|
||||
{
|
||||
throw new Exception("Failed to generate release notes");
|
||||
}
|
||||
}
|
||||
|
||||
/// Publishes code and symbols packages to nuget feed, based on contents of artifacts file
|
||||
private void PublishPackages(ConvertableDirectoryPath packagesDir, ConvertableFilePath artifactsFile, string feedApiKey, string codeFeedUrl, string symbolFeedUrl)
|
||||
{
|
||||
var artifacts = System.IO.File
|
||||
.ReadAllLines(artifactsFile)
|
||||
.Select(l => l.Split(':'))
|
||||
.ToDictionary(v => v[0], v => v[1]);
|
||||
|
||||
var codePackage = packagesDir + File(artifacts["nuget"]);
|
||||
|
||||
Information("Pushing package " + codePackage);
|
||||
|
||||
NuGetPush(
|
||||
codePackage,
|
||||
new NuGetPushSettings {
|
||||
ApiKey = feedApiKey,
|
||||
Source = codeFeedUrl
|
||||
});
|
||||
}
|
||||
|
||||
/// gets the resource from the specified url
|
||||
private string GetResource(string url)
|
||||
{
|
||||
try
|
||||
{
|
||||
Information("Getting resource from " + url);
|
||||
|
||||
var assetsRequest = System.Net.WebRequest.CreateHttp(url);
|
||||
assetsRequest.Method = "GET";
|
||||
assetsRequest.Accept = "application/vnd.github.v3+json";
|
||||
assetsRequest.UserAgent = "BuildScript";
|
||||
|
||||
using (var assetsResponse = assetsRequest.GetResponse())
|
||||
{
|
||||
var assetsStream = assetsResponse.GetResponseStream();
|
||||
var assetsReader = new StreamReader(assetsStream);
|
||||
var response = assetsReader.ReadToEnd();
|
||||
|
||||
Information("Response is " + response);
|
||||
|
||||
return response;
|
||||
}
|
||||
}
|
||||
catch(Exception exception)
|
||||
{
|
||||
Information("There was an exception " + exception);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
private bool ShouldPublishToUnstableFeed(string filter, string branchName)
|
||||
{
|
||||
var regex = new System.Text.RegularExpressions.Regex(filter);
|
||||
var publish = regex.IsMatch(branchName);
|
||||
if (publish)
|
||||
{
|
||||
Information("Branch " + branchName + " will be published to the unstable feed");
|
||||
}
|
||||
else
|
||||
{
|
||||
Information("Branch " + branchName + " will not be published to the unstable feed");
|
||||
}
|
||||
return publish;
|
||||
}
|
||||
|
468
build.ps1
468
build.ps1
@ -1,234 +1,234 @@
|
||||
##########################################################################
|
||||
# This is the Cake bootstrapper script for PowerShell.
|
||||
# This file was downloaded from https://github.com/cake-build/resources
|
||||
# Feel free to change this file to fit your needs.
|
||||
##########################################################################
|
||||
|
||||
<#
|
||||
|
||||
.SYNOPSIS
|
||||
This is a Powershell script to bootstrap a Cake build.
|
||||
|
||||
.DESCRIPTION
|
||||
This Powershell script will download NuGet if missing, restore NuGet tools (including Cake)
|
||||
and execute your Cake build script with the parameters you provide.
|
||||
|
||||
.PARAMETER Script
|
||||
The build script to execute.
|
||||
.PARAMETER Target
|
||||
The build script target to run.
|
||||
.PARAMETER Configuration
|
||||
The build configuration to use.
|
||||
.PARAMETER Verbosity
|
||||
Specifies the amount of information to be displayed.
|
||||
.PARAMETER ShowDescription
|
||||
Shows description about tasks.
|
||||
.PARAMETER DryRun
|
||||
Performs a dry run.
|
||||
.PARAMETER Experimental
|
||||
Uses the nightly builds of the Roslyn script engine.
|
||||
.PARAMETER Mono
|
||||
Uses the Mono Compiler rather than the Roslyn script engine.
|
||||
.PARAMETER SkipToolPackageRestore
|
||||
Skips restoring of packages.
|
||||
.PARAMETER ScriptArgs
|
||||
Remaining arguments are added here.
|
||||
|
||||
.LINK
|
||||
https://cakebuild.net
|
||||
|
||||
#>
|
||||
|
||||
[CmdletBinding()]
|
||||
Param(
|
||||
[string]$Script = "build.cake",
|
||||
[string]$Target,
|
||||
[string]$Configuration,
|
||||
[ValidateSet("Quiet", "Minimal", "Normal", "Verbose", "Diagnostic")]
|
||||
[string]$Verbosity,
|
||||
[switch]$ShowDescription,
|
||||
[Alias("WhatIf", "Noop")]
|
||||
[switch]$DryRun,
|
||||
[switch]$Experimental,
|
||||
[switch]$Mono,
|
||||
[switch]$SkipToolPackageRestore,
|
||||
[Parameter(Position=0,Mandatory=$false,ValueFromRemainingArguments=$true)]
|
||||
[string[]]$ScriptArgs
|
||||
)
|
||||
|
||||
[Reflection.Assembly]::LoadWithPartialName("System.Security") | Out-Null
|
||||
function MD5HashFile([string] $filePath)
|
||||
{
|
||||
if ([string]::IsNullOrEmpty($filePath) -or !(Test-Path $filePath -PathType Leaf))
|
||||
{
|
||||
return $null
|
||||
}
|
||||
|
||||
[System.IO.Stream] $file = $null;
|
||||
[System.Security.Cryptography.MD5] $md5 = $null;
|
||||
try
|
||||
{
|
||||
$md5 = [System.Security.Cryptography.MD5]::Create()
|
||||
$file = [System.IO.File]::OpenRead($filePath)
|
||||
return [System.BitConverter]::ToString($md5.ComputeHash($file))
|
||||
}
|
||||
finally
|
||||
{
|
||||
if ($file -ne $null)
|
||||
{
|
||||
$file.Dispose()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function GetProxyEnabledWebClient
|
||||
{
|
||||
$wc = New-Object System.Net.WebClient
|
||||
$proxy = [System.Net.WebRequest]::GetSystemWebProxy()
|
||||
$proxy.Credentials = [System.Net.CredentialCache]::DefaultCredentials
|
||||
$wc.Proxy = $proxy
|
||||
return $wc
|
||||
}
|
||||
|
||||
Write-Host "Preparing to run build script..."
|
||||
|
||||
if(!$PSScriptRoot){
|
||||
$PSScriptRoot = Split-Path $MyInvocation.MyCommand.Path -Parent
|
||||
}
|
||||
|
||||
$TOOLS_DIR = Join-Path $PSScriptRoot "tools"
|
||||
$ADDINS_DIR = Join-Path $TOOLS_DIR "Addins"
|
||||
$MODULES_DIR = Join-Path $TOOLS_DIR "Modules"
|
||||
$NUGET_EXE = Join-Path $TOOLS_DIR "nuget.exe"
|
||||
$CAKE_EXE = Join-Path $TOOLS_DIR "Cake/Cake.exe"
|
||||
$NUGET_URL = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe"
|
||||
$PACKAGES_CONFIG = Join-Path $TOOLS_DIR "packages.config"
|
||||
$PACKAGES_CONFIG_MD5 = Join-Path $TOOLS_DIR "packages.config.md5sum"
|
||||
$ADDINS_PACKAGES_CONFIG = Join-Path $ADDINS_DIR "packages.config"
|
||||
$MODULES_PACKAGES_CONFIG = Join-Path $MODULES_DIR "packages.config"
|
||||
|
||||
# Make sure tools folder exists
|
||||
if ((Test-Path $PSScriptRoot) -and !(Test-Path $TOOLS_DIR)) {
|
||||
Write-Verbose -Message "Creating tools directory..."
|
||||
New-Item -Path $TOOLS_DIR -Type directory | out-null
|
||||
}
|
||||
|
||||
# Make sure that packages.config exist.
|
||||
if (!(Test-Path $PACKAGES_CONFIG)) {
|
||||
Write-Verbose -Message "Downloading packages.config..."
|
||||
try {
|
||||
$wc = GetProxyEnabledWebClient
|
||||
$wc.DownloadFile("https://cakebuild.net/download/bootstrapper/packages", $PACKAGES_CONFIG) } catch {
|
||||
Throw "Could not download packages.config."
|
||||
}
|
||||
}
|
||||
|
||||
# Try find NuGet.exe in path if not exists
|
||||
if (!(Test-Path $NUGET_EXE)) {
|
||||
Write-Verbose -Message "Trying to find nuget.exe in PATH..."
|
||||
$existingPaths = $Env:Path -Split ';' | Where-Object { (![string]::IsNullOrEmpty($_)) -and (Test-Path $_ -PathType Container) }
|
||||
$NUGET_EXE_IN_PATH = Get-ChildItem -Path $existingPaths -Filter "nuget.exe" | Select -First 1
|
||||
if ($NUGET_EXE_IN_PATH -ne $null -and (Test-Path $NUGET_EXE_IN_PATH.FullName)) {
|
||||
Write-Verbose -Message "Found in PATH at $($NUGET_EXE_IN_PATH.FullName)."
|
||||
$NUGET_EXE = $NUGET_EXE_IN_PATH.FullName
|
||||
}
|
||||
}
|
||||
|
||||
# Try download NuGet.exe if not exists
|
||||
if (!(Test-Path $NUGET_EXE)) {
|
||||
Write-Verbose -Message "Downloading NuGet.exe..."
|
||||
try {
|
||||
$wc = GetProxyEnabledWebClient
|
||||
$wc.DownloadFile($NUGET_URL, $NUGET_EXE)
|
||||
} catch {
|
||||
Throw "Could not download NuGet.exe."
|
||||
}
|
||||
}
|
||||
|
||||
# Save nuget.exe path to environment to be available to child processed
|
||||
$ENV:NUGET_EXE = $NUGET_EXE
|
||||
|
||||
# Restore tools from NuGet?
|
||||
if(-Not $SkipToolPackageRestore.IsPresent) {
|
||||
Push-Location
|
||||
Set-Location $TOOLS_DIR
|
||||
|
||||
# Check for changes in packages.config and remove installed tools if true.
|
||||
[string] $md5Hash = MD5HashFile($PACKAGES_CONFIG)
|
||||
if((!(Test-Path $PACKAGES_CONFIG_MD5)) -Or
|
||||
($md5Hash -ne (Get-Content $PACKAGES_CONFIG_MD5 ))) {
|
||||
Write-Verbose -Message "Missing or changed package.config hash..."
|
||||
Remove-Item * -Recurse -Exclude packages.config,nuget.exe
|
||||
}
|
||||
|
||||
Write-Verbose -Message "Restoring tools from NuGet..."
|
||||
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$TOOLS_DIR`""
|
||||
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Throw "An error occured while restoring NuGet tools."
|
||||
}
|
||||
else
|
||||
{
|
||||
$md5Hash | Out-File $PACKAGES_CONFIG_MD5 -Encoding "ASCII"
|
||||
}
|
||||
Write-Verbose -Message ($NuGetOutput | out-string)
|
||||
|
||||
Pop-Location
|
||||
}
|
||||
|
||||
# Restore addins from NuGet
|
||||
if (Test-Path $ADDINS_PACKAGES_CONFIG) {
|
||||
Push-Location
|
||||
Set-Location $ADDINS_DIR
|
||||
|
||||
Write-Verbose -Message "Restoring addins from NuGet..."
|
||||
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$ADDINS_DIR`""
|
||||
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Throw "An error occured while restoring NuGet addins."
|
||||
}
|
||||
|
||||
Write-Verbose -Message ($NuGetOutput | out-string)
|
||||
|
||||
Pop-Location
|
||||
}
|
||||
|
||||
# Restore modules from NuGet
|
||||
if (Test-Path $MODULES_PACKAGES_CONFIG) {
|
||||
Push-Location
|
||||
Set-Location $MODULES_DIR
|
||||
|
||||
Write-Verbose -Message "Restoring modules from NuGet..."
|
||||
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$MODULES_DIR`""
|
||||
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Throw "An error occured while restoring NuGet modules."
|
||||
}
|
||||
|
||||
Write-Verbose -Message ($NuGetOutput | out-string)
|
||||
|
||||
Pop-Location
|
||||
}
|
||||
|
||||
# Make sure that Cake has been installed.
|
||||
if (!(Test-Path $CAKE_EXE)) {
|
||||
Throw "Could not find Cake.exe at $CAKE_EXE"
|
||||
}
|
||||
|
||||
|
||||
|
||||
# Build Cake arguments
|
||||
$cakeArguments = @("$Script");
|
||||
if ($Target) { $cakeArguments += "-target=$Target" }
|
||||
if ($Configuration) { $cakeArguments += "-configuration=$Configuration" }
|
||||
if ($Verbosity) { $cakeArguments += "-verbosity=$Verbosity" }
|
||||
if ($ShowDescription) { $cakeArguments += "-showdescription" }
|
||||
if ($DryRun) { $cakeArguments += "-dryrun" }
|
||||
if ($Experimental) { $cakeArguments += "-experimental" }
|
||||
if ($Mono) { $cakeArguments += "-mono" }
|
||||
$cakeArguments += $ScriptArgs
|
||||
|
||||
# Start Cake
|
||||
Write-Host "Running build script..."
|
||||
&$CAKE_EXE $cakeArguments
|
||||
exit $LASTEXITCODE
|
||||
##########################################################################
|
||||
# This is the Cake bootstrapper script for PowerShell.
|
||||
# This file was downloaded from https://github.com/cake-build/resources
|
||||
# Feel free to change this file to fit your needs.
|
||||
##########################################################################
|
||||
|
||||
<#
|
||||
|
||||
.SYNOPSIS
|
||||
This is a Powershell script to bootstrap a Cake build.
|
||||
|
||||
.DESCRIPTION
|
||||
This Powershell script will download NuGet if missing, restore NuGet tools (including Cake)
|
||||
and execute your Cake build script with the parameters you provide.
|
||||
|
||||
.PARAMETER Script
|
||||
The build script to execute.
|
||||
.PARAMETER Target
|
||||
The build script target to run.
|
||||
.PARAMETER Configuration
|
||||
The build configuration to use.
|
||||
.PARAMETER Verbosity
|
||||
Specifies the amount of information to be displayed.
|
||||
.PARAMETER ShowDescription
|
||||
Shows description about tasks.
|
||||
.PARAMETER DryRun
|
||||
Performs a dry run.
|
||||
.PARAMETER Experimental
|
||||
Uses the nightly builds of the Roslyn script engine.
|
||||
.PARAMETER Mono
|
||||
Uses the Mono Compiler rather than the Roslyn script engine.
|
||||
.PARAMETER SkipToolPackageRestore
|
||||
Skips restoring of packages.
|
||||
.PARAMETER ScriptArgs
|
||||
Remaining arguments are added here.
|
||||
|
||||
.LINK
|
||||
https://cakebuild.net
|
||||
|
||||
#>
|
||||
|
||||
[CmdletBinding()]
|
||||
Param(
|
||||
[string]$Script = "build.cake",
|
||||
[string]$Target,
|
||||
[string]$Configuration,
|
||||
[ValidateSet("Quiet", "Minimal", "Normal", "Verbose", "Diagnostic")]
|
||||
[string]$Verbosity,
|
||||
[switch]$ShowDescription,
|
||||
[Alias("WhatIf", "Noop")]
|
||||
[switch]$DryRun,
|
||||
[switch]$Experimental,
|
||||
[switch]$Mono,
|
||||
[switch]$SkipToolPackageRestore,
|
||||
[Parameter(Position=0,Mandatory=$false,ValueFromRemainingArguments=$true)]
|
||||
[string[]]$ScriptArgs
|
||||
)
|
||||
|
||||
[Reflection.Assembly]::LoadWithPartialName("System.Security") | Out-Null
|
||||
function MD5HashFile([string] $filePath)
|
||||
{
|
||||
if ([string]::IsNullOrEmpty($filePath) -or !(Test-Path $filePath -PathType Leaf))
|
||||
{
|
||||
return $null
|
||||
}
|
||||
|
||||
[System.IO.Stream] $file = $null;
|
||||
[System.Security.Cryptography.MD5] $md5 = $null;
|
||||
try
|
||||
{
|
||||
$md5 = [System.Security.Cryptography.MD5]::Create()
|
||||
$file = [System.IO.File]::OpenRead($filePath)
|
||||
return [System.BitConverter]::ToString($md5.ComputeHash($file))
|
||||
}
|
||||
finally
|
||||
{
|
||||
if ($file -ne $null)
|
||||
{
|
||||
$file.Dispose()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function GetProxyEnabledWebClient
|
||||
{
|
||||
$wc = New-Object System.Net.WebClient
|
||||
$proxy = [System.Net.WebRequest]::GetSystemWebProxy()
|
||||
$proxy.Credentials = [System.Net.CredentialCache]::DefaultCredentials
|
||||
$wc.Proxy = $proxy
|
||||
return $wc
|
||||
}
|
||||
|
||||
Write-Host "Preparing to run build script..."
|
||||
|
||||
if(!$PSScriptRoot){
|
||||
$PSScriptRoot = Split-Path $MyInvocation.MyCommand.Path -Parent
|
||||
}
|
||||
|
||||
$TOOLS_DIR = Join-Path $PSScriptRoot "tools"
|
||||
$ADDINS_DIR = Join-Path $TOOLS_DIR "Addins"
|
||||
$MODULES_DIR = Join-Path $TOOLS_DIR "Modules"
|
||||
$NUGET_EXE = Join-Path $TOOLS_DIR "nuget.exe"
|
||||
$CAKE_EXE = Join-Path $TOOLS_DIR "Cake/Cake.exe"
|
||||
$NUGET_URL = "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe"
|
||||
$PACKAGES_CONFIG = Join-Path $TOOLS_DIR "packages.config"
|
||||
$PACKAGES_CONFIG_MD5 = Join-Path $TOOLS_DIR "packages.config.md5sum"
|
||||
$ADDINS_PACKAGES_CONFIG = Join-Path $ADDINS_DIR "packages.config"
|
||||
$MODULES_PACKAGES_CONFIG = Join-Path $MODULES_DIR "packages.config"
|
||||
|
||||
# Make sure tools folder exists
|
||||
if ((Test-Path $PSScriptRoot) -and !(Test-Path $TOOLS_DIR)) {
|
||||
Write-Verbose -Message "Creating tools directory..."
|
||||
New-Item -Path $TOOLS_DIR -Type directory | out-null
|
||||
}
|
||||
|
||||
# Make sure that packages.config exist.
|
||||
if (!(Test-Path $PACKAGES_CONFIG)) {
|
||||
Write-Verbose -Message "Downloading packages.config..."
|
||||
try {
|
||||
$wc = GetProxyEnabledWebClient
|
||||
$wc.DownloadFile("https://cakebuild.net/download/bootstrapper/packages", $PACKAGES_CONFIG) } catch {
|
||||
Throw "Could not download packages.config."
|
||||
}
|
||||
}
|
||||
|
||||
# Try find NuGet.exe in path if not exists
|
||||
if (!(Test-Path $NUGET_EXE)) {
|
||||
Write-Verbose -Message "Trying to find nuget.exe in PATH..."
|
||||
$existingPaths = $Env:Path -Split ';' | Where-Object { (![string]::IsNullOrEmpty($_)) -and (Test-Path $_ -PathType Container) }
|
||||
$NUGET_EXE_IN_PATH = Get-ChildItem -Path $existingPaths -Filter "nuget.exe" | Select -First 1
|
||||
if ($NUGET_EXE_IN_PATH -ne $null -and (Test-Path $NUGET_EXE_IN_PATH.FullName)) {
|
||||
Write-Verbose -Message "Found in PATH at $($NUGET_EXE_IN_PATH.FullName)."
|
||||
$NUGET_EXE = $NUGET_EXE_IN_PATH.FullName
|
||||
}
|
||||
}
|
||||
|
||||
# Try download NuGet.exe if not exists
|
||||
if (!(Test-Path $NUGET_EXE)) {
|
||||
Write-Verbose -Message "Downloading NuGet.exe..."
|
||||
try {
|
||||
$wc = GetProxyEnabledWebClient
|
||||
$wc.DownloadFile($NUGET_URL, $NUGET_EXE)
|
||||
} catch {
|
||||
Throw "Could not download NuGet.exe."
|
||||
}
|
||||
}
|
||||
|
||||
# Save nuget.exe path to environment to be available to child processed
|
||||
$ENV:NUGET_EXE = $NUGET_EXE
|
||||
|
||||
# Restore tools from NuGet?
|
||||
if(-Not $SkipToolPackageRestore.IsPresent) {
|
||||
Push-Location
|
||||
Set-Location $TOOLS_DIR
|
||||
|
||||
# Check for changes in packages.config and remove installed tools if true.
|
||||
[string] $md5Hash = MD5HashFile($PACKAGES_CONFIG)
|
||||
if((!(Test-Path $PACKAGES_CONFIG_MD5)) -Or
|
||||
($md5Hash -ne (Get-Content $PACKAGES_CONFIG_MD5 ))) {
|
||||
Write-Verbose -Message "Missing or changed package.config hash..."
|
||||
Remove-Item * -Recurse -Exclude packages.config,nuget.exe
|
||||
}
|
||||
|
||||
Write-Verbose -Message "Restoring tools from NuGet..."
|
||||
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$TOOLS_DIR`""
|
||||
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Throw "An error occured while restoring NuGet tools."
|
||||
}
|
||||
else
|
||||
{
|
||||
$md5Hash | Out-File $PACKAGES_CONFIG_MD5 -Encoding "ASCII"
|
||||
}
|
||||
Write-Verbose -Message ($NuGetOutput | out-string)
|
||||
|
||||
Pop-Location
|
||||
}
|
||||
|
||||
# Restore addins from NuGet
|
||||
if (Test-Path $ADDINS_PACKAGES_CONFIG) {
|
||||
Push-Location
|
||||
Set-Location $ADDINS_DIR
|
||||
|
||||
Write-Verbose -Message "Restoring addins from NuGet..."
|
||||
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$ADDINS_DIR`""
|
||||
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Throw "An error occured while restoring NuGet addins."
|
||||
}
|
||||
|
||||
Write-Verbose -Message ($NuGetOutput | out-string)
|
||||
|
||||
Pop-Location
|
||||
}
|
||||
|
||||
# Restore modules from NuGet
|
||||
if (Test-Path $MODULES_PACKAGES_CONFIG) {
|
||||
Push-Location
|
||||
Set-Location $MODULES_DIR
|
||||
|
||||
Write-Verbose -Message "Restoring modules from NuGet..."
|
||||
$NuGetOutput = Invoke-Expression "&`"$NUGET_EXE`" install -ExcludeVersion -OutputDirectory `"$MODULES_DIR`""
|
||||
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Throw "An error occured while restoring NuGet modules."
|
||||
}
|
||||
|
||||
Write-Verbose -Message ($NuGetOutput | out-string)
|
||||
|
||||
Pop-Location
|
||||
}
|
||||
|
||||
# Make sure that Cake has been installed.
|
||||
if (!(Test-Path $CAKE_EXE)) {
|
||||
Throw "Could not find Cake.exe at $CAKE_EXE"
|
||||
}
|
||||
|
||||
|
||||
|
||||
# Build Cake arguments
|
||||
$cakeArguments = @("$Script");
|
||||
if ($Target) { $cakeArguments += "-target=$Target" }
|
||||
if ($Configuration) { $cakeArguments += "-configuration=$Configuration" }
|
||||
if ($Verbosity) { $cakeArguments += "-verbosity=$Verbosity" }
|
||||
if ($ShowDescription) { $cakeArguments += "-showdescription" }
|
||||
if ($DryRun) { $cakeArguments += "-dryrun" }
|
||||
if ($Experimental) { $cakeArguments += "-experimental" }
|
||||
if ($Mono) { $cakeArguments += "-mono" }
|
||||
$cakeArguments += $ScriptArgs
|
||||
|
||||
# Start Cake
|
||||
Write-Host "Running build script..."
|
||||
&$CAKE_EXE $cakeArguments
|
||||
exit $LASTEXITCODE
|
||||
|
450
docs/Makefile
450
docs/Makefile
@ -1,225 +1,225 @@
|
||||
# Makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS =
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
BUILDDIR = _build
|
||||
|
||||
# Internal variables.
|
||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||
PAPEROPT_letter = -D latex_paper_size=letter
|
||||
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
# the i18n builder cannot share the environment and doctrees with the others
|
||||
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
|
||||
.PHONY: help
|
||||
help:
|
||||
@echo "Please use \`make <target>' where <target> is one of"
|
||||
@echo " html to make standalone HTML files"
|
||||
@echo " dirhtml to make HTML files named index.html in directories"
|
||||
@echo " singlehtml to make a single large HTML file"
|
||||
@echo " pickle to make pickle files"
|
||||
@echo " json to make JSON files"
|
||||
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||
@echo " qthelp to make HTML files and a qthelp project"
|
||||
@echo " applehelp to make an Apple Help Book"
|
||||
@echo " devhelp to make HTML files and a Devhelp project"
|
||||
@echo " epub to make an epub"
|
||||
@echo " epub3 to make an epub3"
|
||||
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
|
||||
@echo " text to make text files"
|
||||
@echo " man to make manual pages"
|
||||
@echo " texinfo to make Texinfo files"
|
||||
@echo " info to make Texinfo files and run them through makeinfo"
|
||||
@echo " gettext to make PO message catalogs"
|
||||
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||
@echo " xml to make Docutils-native XML files"
|
||||
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
|
||||
@echo " linkcheck to check all external links for integrity"
|
||||
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||
@echo " coverage to run coverage check of the documentation (if enabled)"
|
||||
@echo " dummy to check syntax errors of document sources"
|
||||
|
||||
.PHONY: clean
|
||||
clean:
|
||||
rm -rf $(BUILDDIR)/*
|
||||
|
||||
.PHONY: html
|
||||
html:
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
.PHONY: dirhtml
|
||||
dirhtml:
|
||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||
|
||||
.PHONY: singlehtml
|
||||
singlehtml:
|
||||
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||
|
||||
.PHONY: pickle
|
||||
pickle:
|
||||
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||
@echo
|
||||
@echo "Build finished; now you can process the pickle files."
|
||||
|
||||
.PHONY: json
|
||||
json:
|
||||
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||
@echo
|
||||
@echo "Build finished; now you can process the JSON files."
|
||||
|
||||
.PHONY: htmlhelp
|
||||
htmlhelp:
|
||||
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||
|
||||
.PHONY: qthelp
|
||||
qthelp:
|
||||
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Ocelot.qhcp"
|
||||
@echo "To view the help file:"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Ocelot.qhc"
|
||||
|
||||
.PHONY: applehelp
|
||||
applehelp:
|
||||
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
|
||||
@echo
|
||||
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
|
||||
@echo "N.B. You won't be able to view it unless you put it in" \
|
||||
"~/Library/Documentation/Help or install it in your application" \
|
||||
"bundle."
|
||||
|
||||
.PHONY: devhelp
|
||||
devhelp:
|
||||
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||
@echo
|
||||
@echo "Build finished."
|
||||
@echo "To view the help file:"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/Ocelot"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Ocelot"
|
||||
@echo "# devhelp"
|
||||
|
||||
.PHONY: epub
|
||||
epub:
|
||||
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||
@echo
|
||||
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||
|
||||
.PHONY: epub3
|
||||
epub3:
|
||||
$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
|
||||
@echo
|
||||
@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
|
||||
|
||||
.PHONY: latex
|
||||
latex:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo
|
||||
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||
"(use \`make latexpdf' here to do that automatically)."
|
||||
|
||||
.PHONY: latexpdf
|
||||
latexpdf:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through pdflatex..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
.PHONY: latexpdfja
|
||||
latexpdfja:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through platex and dvipdfmx..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
.PHONY: text
|
||||
text:
|
||||
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||
@echo
|
||||
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||
|
||||
.PHONY: man
|
||||
man:
|
||||
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||
@echo
|
||||
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||
|
||||
.PHONY: texinfo
|
||||
texinfo:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo
|
||||
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||
"(use \`make info' here to do that automatically)."
|
||||
|
||||
.PHONY: info
|
||||
info:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo "Running Texinfo files through makeinfo..."
|
||||
make -C $(BUILDDIR)/texinfo info
|
||||
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||
|
||||
.PHONY: gettext
|
||||
gettext:
|
||||
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||
@echo
|
||||
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||
|
||||
.PHONY: changes
|
||||
changes:
|
||||
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||
@echo
|
||||
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||
|
||||
.PHONY: linkcheck
|
||||
linkcheck:
|
||||
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||
@echo
|
||||
@echo "Link check complete; look for any errors in the above output " \
|
||||
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||
|
||||
.PHONY: doctest
|
||||
doctest:
|
||||
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||
@echo "Testing of doctests in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/doctest/output.txt."
|
||||
|
||||
.PHONY: coverage
|
||||
coverage:
|
||||
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
|
||||
@echo "Testing of coverage in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/coverage/python.txt."
|
||||
|
||||
.PHONY: xml
|
||||
xml:
|
||||
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
|
||||
@echo
|
||||
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
|
||||
|
||||
.PHONY: pseudoxml
|
||||
pseudoxml:
|
||||
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
|
||||
@echo
|
||||
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
|
||||
|
||||
.PHONY: dummy
|
||||
dummy:
|
||||
$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
|
||||
@echo
|
||||
@echo "Build finished. Dummy builder generates no files."
|
||||
# Makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS =
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
BUILDDIR = _build
|
||||
|
||||
# Internal variables.
|
||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||
PAPEROPT_letter = -D latex_paper_size=letter
|
||||
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
# the i18n builder cannot share the environment and doctrees with the others
|
||||
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
|
||||
.PHONY: help
|
||||
help:
|
||||
@echo "Please use \`make <target>' where <target> is one of"
|
||||
@echo " html to make standalone HTML files"
|
||||
@echo " dirhtml to make HTML files named index.html in directories"
|
||||
@echo " singlehtml to make a single large HTML file"
|
||||
@echo " pickle to make pickle files"
|
||||
@echo " json to make JSON files"
|
||||
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||
@echo " qthelp to make HTML files and a qthelp project"
|
||||
@echo " applehelp to make an Apple Help Book"
|
||||
@echo " devhelp to make HTML files and a Devhelp project"
|
||||
@echo " epub to make an epub"
|
||||
@echo " epub3 to make an epub3"
|
||||
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
|
||||
@echo " text to make text files"
|
||||
@echo " man to make manual pages"
|
||||
@echo " texinfo to make Texinfo files"
|
||||
@echo " info to make Texinfo files and run them through makeinfo"
|
||||
@echo " gettext to make PO message catalogs"
|
||||
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||
@echo " xml to make Docutils-native XML files"
|
||||
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
|
||||
@echo " linkcheck to check all external links for integrity"
|
||||
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||
@echo " coverage to run coverage check of the documentation (if enabled)"
|
||||
@echo " dummy to check syntax errors of document sources"
|
||||
|
||||
.PHONY: clean
|
||||
clean:
|
||||
rm -rf $(BUILDDIR)/*
|
||||
|
||||
.PHONY: html
|
||||
html:
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
.PHONY: dirhtml
|
||||
dirhtml:
|
||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||
|
||||
.PHONY: singlehtml
|
||||
singlehtml:
|
||||
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||
|
||||
.PHONY: pickle
|
||||
pickle:
|
||||
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||
@echo
|
||||
@echo "Build finished; now you can process the pickle files."
|
||||
|
||||
.PHONY: json
|
||||
json:
|
||||
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||
@echo
|
||||
@echo "Build finished; now you can process the JSON files."
|
||||
|
||||
.PHONY: htmlhelp
|
||||
htmlhelp:
|
||||
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||
|
||||
.PHONY: qthelp
|
||||
qthelp:
|
||||
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Ocelot.qhcp"
|
||||
@echo "To view the help file:"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Ocelot.qhc"
|
||||
|
||||
.PHONY: applehelp
|
||||
applehelp:
|
||||
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
|
||||
@echo
|
||||
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
|
||||
@echo "N.B. You won't be able to view it unless you put it in" \
|
||||
"~/Library/Documentation/Help or install it in your application" \
|
||||
"bundle."
|
||||
|
||||
.PHONY: devhelp
|
||||
devhelp:
|
||||
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||
@echo
|
||||
@echo "Build finished."
|
||||
@echo "To view the help file:"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/Ocelot"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Ocelot"
|
||||
@echo "# devhelp"
|
||||
|
||||
.PHONY: epub
|
||||
epub:
|
||||
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||
@echo
|
||||
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||
|
||||
.PHONY: epub3
|
||||
epub3:
|
||||
$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
|
||||
@echo
|
||||
@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
|
||||
|
||||
.PHONY: latex
|
||||
latex:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo
|
||||
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||
"(use \`make latexpdf' here to do that automatically)."
|
||||
|
||||
.PHONY: latexpdf
|
||||
latexpdf:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through pdflatex..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
.PHONY: latexpdfja
|
||||
latexpdfja:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through platex and dvipdfmx..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
.PHONY: text
|
||||
text:
|
||||
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||
@echo
|
||||
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||
|
||||
.PHONY: man
|
||||
man:
|
||||
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||
@echo
|
||||
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||
|
||||
.PHONY: texinfo
|
||||
texinfo:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo
|
||||
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||
"(use \`make info' here to do that automatically)."
|
||||
|
||||
.PHONY: info
|
||||
info:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo "Running Texinfo files through makeinfo..."
|
||||
make -C $(BUILDDIR)/texinfo info
|
||||
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||
|
||||
.PHONY: gettext
|
||||
gettext:
|
||||
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||
@echo
|
||||
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||
|
||||
.PHONY: changes
|
||||
changes:
|
||||
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||
@echo
|
||||
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||
|
||||
.PHONY: linkcheck
|
||||
linkcheck:
|
||||
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||
@echo
|
||||
@echo "Link check complete; look for any errors in the above output " \
|
||||
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||
|
||||
.PHONY: doctest
|
||||
doctest:
|
||||
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||
@echo "Testing of doctests in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/doctest/output.txt."
|
||||
|
||||
.PHONY: coverage
|
||||
coverage:
|
||||
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
|
||||
@echo "Testing of coverage in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/coverage/python.txt."
|
||||
|
||||
.PHONY: xml
|
||||
xml:
|
||||
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
|
||||
@echo
|
||||
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
|
||||
|
||||
.PHONY: pseudoxml
|
||||
pseudoxml:
|
||||
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
|
||||
@echo
|
||||
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
|
||||
|
||||
.PHONY: dummy
|
||||
dummy:
|
||||
$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
|
||||
@echo
|
||||
@echo "Build finished. Dummy builder generates no files."
|
||||
|
@ -1,12 +1,12 @@
|
||||
Building
|
||||
========
|
||||
|
||||
* You'll generally want to run the `./build.ps1` script. This will compile, run unit and acceptance tests and build the output packages locally. Output will got to the `./artifacts` directory.
|
||||
|
||||
* You can view the current commit's `SemVer <http://semver.org/>`_ build information by running `./version.ps1`.
|
||||
|
||||
* The other `./*.ps1` scripts perform subsets of the build process, if you don't want to run the full build.
|
||||
|
||||
* The release process works best with GitFlow branching; this allows us to publish every development commit to an unstable feed with a unique SemVer version, and then choose when to release to a stable feed.
|
||||
|
||||
Building
|
||||
========
|
||||
|
||||
* You'll generally want to run the `./build.ps1` script. This will compile, run unit and acceptance tests and build the output packages locally. Output will got to the `./artifacts` directory.
|
||||
|
||||
* You can view the current commit's `SemVer <http://semver.org/>`_ build information by running `./version.ps1`.
|
||||
|
||||
* The other `./*.ps1` scripts perform subsets of the build process, if you don't want to run the full build.
|
||||
|
||||
* The release process works best with GitFlow branching; this allows us to publish every development commit to an unstable feed with a unique SemVer version, and then choose when to release to a stable feed.
|
||||
|
||||
* Alternatively you can build the project in VS2017 with the latest .NET Core SDK.
|
@ -1,4 +1,4 @@
|
||||
Overview
|
||||
========
|
||||
|
||||
Overview
|
||||
========
|
||||
|
||||
This document summarises the build and release process for the project. The build scripts are written using `Cake <http://cakebuild.net/>`_, and are defined in `./build.cake`. The scripts have been designed to be run by either developers locally or by a build server (currently `AppVeyor <https://www.appveyor.com/>`_), with minimal logic defined in the build server itself.
|
@ -1,23 +1,23 @@
|
||||
Release process
|
||||
===============
|
||||
|
||||
This section defines the release process for the maintainers of the project.
|
||||
* Merge pull requests to the `release` branch.
|
||||
|
||||
* Every commit pushed to the Origin repo will kick off the `ocelot-build <https://ci.appveyor.com/project/TomPallister/ocelot-fcfpb>`_ project in AppVeyor. This performs the same tasks as the command line build, and in addition pushes the packages to the unstable nuget feed.
|
||||
|
||||
* When you're ready for a release, create a release branch. You'll probably want to update the committed `./ReleaseNotes.md` based on the contents of the equivalent file in the `./artifacts` directory.
|
||||
|
||||
* When the `release` branch has built successfully in Appveyor, select the build and then Deploy to the `GitHub Release` environment. This will create a new release in GitHub.
|
||||
|
||||
* In Github, navigate to the `release <https://github.com/TomPallister/Ocelot/releases>`_. Modify the release name and tag as desired.
|
||||
|
||||
* When you're ready, publish the release. This will tag the commit with the specified release number.
|
||||
|
||||
* The `ocelot-release <https://ci.appveyor.com/project/TomPallister/ocelot-ayj4w>`_ project will detect the newly created tag and kick off the release process. This will download the artifacts from GitHub, and publish the packages to the stable nuget feed.
|
||||
|
||||
* When you have a final stable release build, merge the `release` branch into `master` and `develop`. Deploy the master branch to github and following the full release process as described above. Don't forget to uncheck the "This is a pre-release" checkbox in GitHub before publishing.
|
||||
|
||||
* Note - because the release builds are initiated by tagging a commit, if for some reason a release build fails in AppVeyor you'll need to delete the tag from the repo and republish the release in GitHub.
|
||||
|
||||
|
||||
Release process
|
||||
===============
|
||||
|
||||
This section defines the release process for the maintainers of the project.
|
||||
* Merge pull requests to the `release` branch.
|
||||
|
||||
* Every commit pushed to the Origin repo will kick off the `ocelot-build <https://ci.appveyor.com/project/TomPallister/ocelot-fcfpb>`_ project in AppVeyor. This performs the same tasks as the command line build, and in addition pushes the packages to the unstable nuget feed.
|
||||
|
||||
* When you're ready for a release, create a release branch. You'll probably want to update the committed `./ReleaseNotes.md` based on the contents of the equivalent file in the `./artifacts` directory.
|
||||
|
||||
* When the `release` branch has built successfully in Appveyor, select the build and then Deploy to the `GitHub Release` environment. This will create a new release in GitHub.
|
||||
|
||||
* In Github, navigate to the `release <https://github.com/TomPallister/Ocelot/releases>`_. Modify the release name and tag as desired.
|
||||
|
||||
* When you're ready, publish the release. This will tag the commit with the specified release number.
|
||||
|
||||
* The `ocelot-release <https://ci.appveyor.com/project/TomPallister/ocelot-ayj4w>`_ project will detect the newly created tag and kick off the release process. This will download the artifacts from GitHub, and publish the packages to the stable nuget feed.
|
||||
|
||||
* When you have a final stable release build, merge the `release` branch into `master` and `develop`. Deploy the master branch to github and following the full release process as described above. Don't forget to uncheck the "This is a pre-release" checkbox in GitHub before publishing.
|
||||
|
||||
* Note - because the release builds are initiated by tagging a commit, if for some reason a release build fails in AppVeyor you'll need to delete the tag from the repo and republish the release in GitHub.
|
||||
|
||||
|
||||
|
@ -1,24 +1,24 @@
|
||||
Tests
|
||||
=====
|
||||
|
||||
The tests should all just run and work apart from the integration tests which need the following
|
||||
environmental variables setting. This is a manual step at the moment.
|
||||
|
||||
``OCELOT_USERNAME=admin``
|
||||
|
||||
``OCELOT_HASH=kE/mxd1hO9h9Sl2VhGhwJUd9xZEv4NP6qXoN39nIqM4=``
|
||||
|
||||
``OCELOT_SALT=zzWITpnDximUNKYLiUam/w==``
|
||||
|
||||
On windows you can use..
|
||||
|
||||
``SETX OCELOT_USERNAME admin``
|
||||
|
||||
On mac..
|
||||
|
||||
``export OCELOT_USERNAME=admin``
|
||||
|
||||
I need to work out a nicer way of doing this in the future.
|
||||
|
||||
|
||||
|
||||
Tests
|
||||
=====
|
||||
|
||||
The tests should all just run and work apart from the integration tests which need the following
|
||||
environmental variables setting. This is a manual step at the moment.
|
||||
|
||||
``OCELOT_USERNAME=admin``
|
||||
|
||||
``OCELOT_HASH=kE/mxd1hO9h9Sl2VhGhwJUd9xZEv4NP6qXoN39nIqM4=``
|
||||
|
||||
``OCELOT_SALT=zzWITpnDximUNKYLiUam/w==``
|
||||
|
||||
On windows you can use..
|
||||
|
||||
``SETX OCELOT_USERNAME admin``
|
||||
|
||||
On mac..
|
||||
|
||||
``export OCELOT_USERNAME=admin``
|
||||
|
||||
I need to work out a nicer way of doing this in the future.
|
||||
|
||||
|
||||
|
||||
|
718
docs/conf.py
718
docs/conf.py
@ -1,359 +1,359 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Ocelot documentation build configuration file, created by
|
||||
# sphinx-quickstart on Wed Jul 20 08:57:27 2016.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#
|
||||
# import os
|
||||
# import sys
|
||||
# sys.path.insert(0, os.path.abspath('.'))
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#
|
||||
# needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = []
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix(es) of source filenames.
|
||||
# You can specify multiple suffix as a list of string:
|
||||
#
|
||||
# source_suffix = ['.rst', '.md']
|
||||
|
||||
# markdown support
|
||||
#from recommonmark.parser import CommonMarkParser
|
||||
|
||||
#source_parsers = {
|
||||
# '.md': CommonMarkParser,
|
||||
#}
|
||||
|
||||
source_suffix = ['.rst']
|
||||
|
||||
|
||||
# The encoding of source files.
|
||||
#
|
||||
# source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = 'Ocelot'
|
||||
copyright = '2016, Tom Pallister'
|
||||
author = 'Tom Pallister'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = '1.0.0'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = '1.0.0'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#
|
||||
# This is also used if you do content translation via gettext catalogs.
|
||||
# Usually you set "language" from the command line for these cases.
|
||||
language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#
|
||||
# today = ''
|
||||
#
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#
|
||||
# today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
# This patterns also effect to html_static_path and html_extra_path
|
||||
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
#
|
||||
# default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#
|
||||
# add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
#
|
||||
# add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
#
|
||||
# show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
highlight_language = 'csharp'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
# modindex_common_prefix = []
|
||||
|
||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||
# keep_warnings = False
|
||||
|
||||
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||
todo_include_todos = False
|
||||
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
#
|
||||
|
||||
# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org
|
||||
import os
|
||||
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
|
||||
|
||||
if not on_rtd: # only import and set the theme if we're building docs locally
|
||||
import sphinx_rtd_theme
|
||||
html_theme = 'sphinx_rtd_theme'
|
||||
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||
|
||||
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#
|
||||
# html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
# html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents.
|
||||
# "<project> v<release> documentation" by default.
|
||||
#
|
||||
# html_title = 'Ocelot v1.0.0'
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#
|
||||
# html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#
|
||||
# html_logo = None
|
||||
|
||||
# The name of an image file (relative to this directory) to use as a favicon of
|
||||
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#
|
||||
html_favicon = 'favicon.ico'
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
# directly to the root of the documentation.
|
||||
#
|
||||
# html_extra_path = []
|
||||
|
||||
# If not None, a 'Last updated on:' timestamp is inserted at every page
|
||||
# bottom, using the given strftime format.
|
||||
# The empty string is equivalent to '%b %d, %Y'.
|
||||
#
|
||||
# html_last_updated_fmt = None
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#
|
||||
# html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#
|
||||
# html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#
|
||||
# html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#
|
||||
# html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#
|
||||
# html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#
|
||||
# html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#
|
||||
# html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#
|
||||
# html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#
|
||||
# html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#
|
||||
# html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
# html_file_suffix = None
|
||||
|
||||
# Language to be used for generating the HTML full-text search index.
|
||||
# Sphinx supports the following languages:
|
||||
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
|
||||
#
|
||||
# html_search_language = 'en'
|
||||
|
||||
# A dictionary with options for the search language support, empty by default.
|
||||
# 'ja' uses this config value.
|
||||
# 'zh' user can custom change `jieba` dictionary path.
|
||||
#
|
||||
# html_search_options = {'type': 'default'}
|
||||
|
||||
# The name of a javascript file (relative to the configuration directory) that
|
||||
# implements a search results scorer. If empty, the default will be used.
|
||||
#
|
||||
# html_search_scorer = 'scorer.js'
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'Ocelotdoc'
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#
|
||||
# 'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#
|
||||
# 'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#
|
||||
# 'preamble': '',
|
||||
|
||||
# Latex figure (float) alignment
|
||||
#
|
||||
# 'figure_align': 'htbp',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
(master_doc, 'Ocelot.tex', 'Ocelot Documentation',
|
||||
'Tom Pallister', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#
|
||||
# latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#
|
||||
# latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#
|
||||
# latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#
|
||||
# latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#
|
||||
# latex_appendices = []
|
||||
|
||||
# If false, will not define \strong, \code, \titleref, \crossref ... but only
|
||||
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
|
||||
# packages.
|
||||
#
|
||||
# latex_keep_old_macro_names = True
|
||||
|
||||
# If false, no module index is generated.
|
||||
#
|
||||
# latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
(master_doc, 'Ocelot', 'Ocelot Documentation',
|
||||
[author], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#
|
||||
# man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
(master_doc, 'Ocelot', 'Ocelot Documentation',
|
||||
author, 'Ocelot', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#
|
||||
# texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#
|
||||
# texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#
|
||||
# texinfo_show_urls = 'footnote'
|
||||
|
||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||
#
|
||||
# texinfo_no_detailmenu = False
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Ocelot documentation build configuration file, created by
|
||||
# sphinx-quickstart on Wed Jul 20 08:57:27 2016.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#
|
||||
# import os
|
||||
# import sys
|
||||
# sys.path.insert(0, os.path.abspath('.'))
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#
|
||||
# needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = []
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix(es) of source filenames.
|
||||
# You can specify multiple suffix as a list of string:
|
||||
#
|
||||
# source_suffix = ['.rst', '.md']
|
||||
|
||||
# markdown support
|
||||
#from recommonmark.parser import CommonMarkParser
|
||||
|
||||
#source_parsers = {
|
||||
# '.md': CommonMarkParser,
|
||||
#}
|
||||
|
||||
source_suffix = ['.rst']
|
||||
|
||||
|
||||
# The encoding of source files.
|
||||
#
|
||||
# source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = 'Ocelot'
|
||||
copyright = '2016, Tom Pallister'
|
||||
author = 'Tom Pallister'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = '1.0.0'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = '1.0.0'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#
|
||||
# This is also used if you do content translation via gettext catalogs.
|
||||
# Usually you set "language" from the command line for these cases.
|
||||
language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#
|
||||
# today = ''
|
||||
#
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#
|
||||
# today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
# This patterns also effect to html_static_path and html_extra_path
|
||||
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
#
|
||||
# default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#
|
||||
# add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
#
|
||||
# add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
#
|
||||
# show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
highlight_language = 'csharp'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
# modindex_common_prefix = []
|
||||
|
||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||
# keep_warnings = False
|
||||
|
||||
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||
todo_include_todos = False
|
||||
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
#
|
||||
|
||||
# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org
|
||||
import os
|
||||
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
|
||||
|
||||
if not on_rtd: # only import and set the theme if we're building docs locally
|
||||
import sphinx_rtd_theme
|
||||
html_theme = 'sphinx_rtd_theme'
|
||||
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||
|
||||
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#
|
||||
# html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
# html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents.
|
||||
# "<project> v<release> documentation" by default.
|
||||
#
|
||||
# html_title = 'Ocelot v1.0.0'
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#
|
||||
# html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#
|
||||
# html_logo = None
|
||||
|
||||
# The name of an image file (relative to this directory) to use as a favicon of
|
||||
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#
|
||||
html_favicon = 'favicon.ico'
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
# directly to the root of the documentation.
|
||||
#
|
||||
# html_extra_path = []
|
||||
|
||||
# If not None, a 'Last updated on:' timestamp is inserted at every page
|
||||
# bottom, using the given strftime format.
|
||||
# The empty string is equivalent to '%b %d, %Y'.
|
||||
#
|
||||
# html_last_updated_fmt = None
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#
|
||||
# html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#
|
||||
# html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#
|
||||
# html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#
|
||||
# html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#
|
||||
# html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#
|
||||
# html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#
|
||||
# html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#
|
||||
# html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#
|
||||
# html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#
|
||||
# html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
# html_file_suffix = None
|
||||
|
||||
# Language to be used for generating the HTML full-text search index.
|
||||
# Sphinx supports the following languages:
|
||||
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
|
||||
#
|
||||
# html_search_language = 'en'
|
||||
|
||||
# A dictionary with options for the search language support, empty by default.
|
||||
# 'ja' uses this config value.
|
||||
# 'zh' user can custom change `jieba` dictionary path.
|
||||
#
|
||||
# html_search_options = {'type': 'default'}
|
||||
|
||||
# The name of a javascript file (relative to the configuration directory) that
|
||||
# implements a search results scorer. If empty, the default will be used.
|
||||
#
|
||||
# html_search_scorer = 'scorer.js'
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'Ocelotdoc'
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#
|
||||
# 'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#
|
||||
# 'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#
|
||||
# 'preamble': '',
|
||||
|
||||
# Latex figure (float) alignment
|
||||
#
|
||||
# 'figure_align': 'htbp',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
(master_doc, 'Ocelot.tex', 'Ocelot Documentation',
|
||||
'Tom Pallister', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#
|
||||
# latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#
|
||||
# latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#
|
||||
# latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#
|
||||
# latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#
|
||||
# latex_appendices = []
|
||||
|
||||
# If false, will not define \strong, \code, \titleref, \crossref ... but only
|
||||
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
|
||||
# packages.
|
||||
#
|
||||
# latex_keep_old_macro_names = True
|
||||
|
||||
# If false, no module index is generated.
|
||||
#
|
||||
# latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
(master_doc, 'Ocelot', 'Ocelot Documentation',
|
||||
[author], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#
|
||||
# man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
(master_doc, 'Ocelot', 'Ocelot Documentation',
|
||||
author, 'Ocelot', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#
|
||||
# texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#
|
||||
# texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#
|
||||
# texinfo_show_urls = 'footnote'
|
||||
|
||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||
#
|
||||
# texinfo_no_detailmenu = False
|
||||
|
@@ -1,81 +1,81 @@
|
||||
Administration
|
||||
==============
|
||||
|
||||
Ocelot supports changing configuration during runtime via an authenticated HTTP API. The API is authenticated
|
||||
using bearer tokens that you request from Ocelot itself. This is provided by the amazing
|
||||
`Identity Server <https://github.com/IdentityServer/IdentityServer4>`_ project that I have been using for a few years now. Check them out.
|
||||
|
||||
In order to enable the administration section you need to do a few things. First of all add this to your
|
||||
initial Startup.cs.
|
||||
|
||||
The path can be anything you want and it is obviously recommended that you don't use
a URL you would like to route through with Ocelot, as this will not work. The administration section uses the
MapWhen functionality of ASP.NET Core and all requests to {root}/administration will be sent there, not
to the Ocelot middleware.
|
||||
|
||||
The secret is the client secret that Ocelot's internal IdentityServer will use to authenticate requests to the administration API. This can be whatever you want it to be!
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public virtual void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
services
|
||||
.AddOcelot(Configuration)
|
||||
.AddAdministration("/administration", "secret");
|
||||
}
|
||||
|
||||
Now if you went with the configuration options above and want to access the API you can use the postman scripts
|
||||
called ocelot.postman_collection.json in the solution to change the Ocelot configuration. Obviously these
|
||||
will need to be changed if you are running Ocelot on a different url to http://localhost:5000.
|
||||
|
||||
The scripts show you how to request a bearer token from ocelot and then use it to GET the existing configuration and POST
|
||||
a configuration.
|
||||
|
||||
Administration running multiple Ocelots
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
If you are running multiple Ocelots in a cluster then you need to use a certificate to sign the bearer tokens used to access the administration API.
|
||||
|
||||
In order to do this you need to add two more environment variables for each Ocelot in the cluster.
|
||||
|
||||
``OCELOT_CERTIFICATE``
|
||||
The path to a certificate that can be used to sign the tokens. The certificate needs to be of the type X509 and obviously Ocelot needs to be able to access it.
|
||||
``OCELOT_CERTIFICATE_PASSWORD``
|
||||
The password for the certificate.
|
||||
|
||||
Normally Ocelot just uses temporary signing credentials, but if you set these environment variables then it will use the certificate. If all the other Ocelots in the cluster have the same certificate then you are good!
|
||||
|
||||
Administration API
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
**POST {adminPath}/connect/token**
|
||||
|
||||
This gets a token for use with the admin area using the client credentials we talk about setting above. Under the hood this calls into an IdentityServer hosted within Ocelot.
|
||||
|
||||
The body of the request is form-data as follows
|
||||
|
||||
``client_id`` set as admin
|
||||
|
||||
``client_secret`` set as whatever you used when setting up the administration services.
|
||||
|
||||
``scope`` set as admin
|
||||
|
||||
``grant_type`` set as client_credentials
|
||||
|
||||
**GET {adminPath}/configuration**
|
||||
|
||||
|
||||
This gets the current Ocelot configuration. It is exactly the same JSON we use to set Ocelot up with in the first place.
|
||||
|
||||
**POST {adminPath}/configuration**
|
||||
|
||||
|
||||
This overwrites the existing configuration (it should probably be a PUT!). I recommend getting your config from the GET endpoint, making any changes and posting it back... simples.
|
||||
|
||||
The body of the request is JSON and it is the same format as the FileConfiguration.cs that we use to set up
|
||||
Ocelot on a file system.
|
||||
|
||||
**DELETE {adminPath}/outputcache/{region}**
|
||||
|
||||
This clears a region of the cache. If you are using a backplane it will clear all instances of the cache, giving you the ability to run a cluster of Ocelots, cache over all of them in memory and clear them all at the same time, or just use a distributed cache.
|
||||
|
||||
The region is whatever you set against the Region field in the FileCacheOptions section of the Ocelot configuration.
|
||||
Administration
|
||||
==============
|
||||
|
||||
Ocelot supports changing configuration during runtime via an authenticated HTTP API. The API is authenticated
|
||||
using bearer tokens that you request from Ocelot itself. This is provided by the amazing
|
||||
`Identity Server <https://github.com/IdentityServer/IdentityServer4>`_ project that I have been using for a few years now. Check them out.
|
||||
|
||||
In order to enable the administration section you need to do a few things. First of all add this to your
|
||||
initial Startup.cs.
|
||||
|
||||
The path can be anything you want and it is obviously recommended that you don't use
a URL you would like to route through with Ocelot, as this will not work. The administration section uses the
MapWhen functionality of ASP.NET Core and all requests to {root}/administration will be sent there, not
to the Ocelot middleware.
|
||||
|
||||
The secret is the client secret that Ocelot's internal IdentityServer will use to authenticate requests to the administration API. This can be whatever you want it to be!
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public virtual void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
services
|
||||
.AddOcelot(Configuration)
|
||||
.AddAdministration("/administration", "secret");
|
||||
}
|
||||
|
||||
Now if you went with the configuration options above and want to access the API you can use the postman scripts
|
||||
called ocelot.postman_collection.json in the solution to change the Ocelot configuration. Obviously these
|
||||
will need to be changed if you are running Ocelot on a different url to http://localhost:5000.
|
||||
|
||||
The scripts show you how to request a bearer token from ocelot and then use it to GET the existing configuration and POST
|
||||
a configuration.
|
||||
|
||||
Administration running multiple Ocelots
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
If you are running multiple Ocelots in a cluster then you need to use a certificate to sign the bearer tokens used to access the administration API.
|
||||
|
||||
In order to do this you need to add two more environment variables for each Ocelot in the cluster.
|
||||
|
||||
``OCELOT_CERTIFICATE``
|
||||
The path to a certificate that can be used to sign the tokens. The certificate needs to be of the type X509 and obviously Ocelot needs to be able to access it.
|
||||
``OCELOT_CERTIFICATE_PASSWORD``
|
||||
The password for the certificate.
|
||||
|
||||
Normally Ocelot just uses temporary signing credentials, but if you set these environment variables then it will use the certificate. If all the other Ocelots in the cluster have the same certificate then you are good!
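
If you want to sanity check that each node in the cluster can actually load the shared certificate, a minimal sketch along the following lines can help. This is not Ocelot's own code, just an illustrative check that reads the same two environment variables described above.

.. code-block:: csharp

    using System;
    using System.Security.Cryptography.X509Certificates;

    public static class SigningCertificateCheck
    {
        // Illustrative only: confirm the certificate Ocelot will use to sign
        // bearer tokens can be loaded on this node.
        public static void Verify()
        {
            var path = Environment.GetEnvironmentVariable("OCELOT_CERTIFICATE");
            var password = Environment.GetEnvironmentVariable("OCELOT_CERTIFICATE_PASSWORD");

            var certificate = new X509Certificate2(path, password);

            Console.WriteLine($"Loaded {certificate.Subject}, valid until {certificate.NotAfter}");
        }
    }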
|
||||
|
||||
Administration API
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
**POST {adminPath}/connect/token**
|
||||
|
||||
This gets a token for use with the admin area using the client credentials we talk about setting above. Under the hood this calls into an IdentityServer hosted within Ocelot.
|
||||
|
||||
The body of the request is form-data as follows
|
||||
|
||||
``client_id`` set as admin
|
||||
|
||||
``client_secret`` set as whatever you used when setting up the administration services.
|
||||
|
||||
``scope`` set as admin
|
||||
|
||||
``grant_type`` set as client_credentials
|
||||
|
||||
**GET {adminPath}/configuration**
|
||||
|
||||
|
||||
This gets the current Ocelot configuration. It is exactly the same JSON we use to set Ocelot up with in the first place.
|
||||
|
||||
**POST {adminPath}/configuration**
|
||||
|
||||
|
||||
This overwrites the existing configuration (it should probably be a PUT!). I recommend getting your config from the GET endpoint, making any changes and posting it back... simples.
|
||||
|
||||
The body of the request is JSON and it is the same format as the FileConfiguration.cs that we use to set up
|
||||
Ocelot on a file system.
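
As a rough illustration of the flow described above (request a token, GET the configuration, change it, then POST it back), here is a minimal HttpClient sketch. The base address, administration path and client secret are assumptions matching the earlier examples on this page, so adjust them to your own setup.

.. code-block:: csharp

    using System;
    using System.Collections.Generic;
    using System.Net.Http;
    using System.Text;
    using System.Threading.Tasks;
    using Newtonsoft.Json.Linq;

    public static class OcelotAdminClient
    {
        public static async Task UpdateConfigurationAsync()
        {
            using (var client = new HttpClient { BaseAddress = new Uri("http://localhost:5000") })
            {
                // 1. Request a bearer token using the client credentials form-data described above.
                var tokenResponse = await client.PostAsync("/administration/connect/token",
                    new FormUrlEncodedContent(new Dictionary<string, string>
                    {
                        ["client_id"] = "admin",
                        ["client_secret"] = "secret",
                        ["scope"] = "admin",
                        ["grant_type"] = "client_credentials"
                    }));

                var token = JObject.Parse(await tokenResponse.Content.ReadAsStringAsync())["access_token"].ToString();
                client.DefaultRequestHeaders.Add("Authorization", $"Bearer {token}");

                // 2. GET the current configuration, make your changes, then POST it back.
                var configuration = await client.GetStringAsync("/administration/configuration");

                await client.PostAsync("/administration/configuration",
                    new StringContent(configuration, Encoding.UTF8, "application/json"));
            }
        }
    }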
|
||||
|
||||
**DELETE {adminPath}/outputcache/{region}**
|
||||
|
||||
This clears a region of the cache. If you are using a backplane it will clear all instances of the cache, giving you the ability to run a cluster of Ocelots, cache over all of them in memory and clear them all at the same time, or just use a distributed cache.
|
||||
|
||||
The region is whatever you set against the Region field in the FileCacheOptions section of the Ocelot configuration.
|
||||
|
@@ -1,125 +1,137 @@
|
||||
Authentication
|
||||
==============
|
||||
|
||||
In order to authenticate ReRoutes and subsequently use any of Ocelot's claims based features, such as authorisation or modifying the request with values from the token, users must register authentication services in their Startup.cs as usual, but they must provide a scheme (authentication provider key) with each registration, e.g.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
var authenticationProviderKey = "TestKey";
|
||||
|
||||
services.AddAuthentication()
|
||||
.AddJwtBearer(authenticationProviderKey, x =>
|
||||
{
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
In this example TestKey is the scheme that this provider has been registered with.
|
||||
We then map this to a ReRoute in the configuration e.g.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"ReRoutes": [{
|
||||
"DownstreamPathTemplate": "/",
|
||||
"UpstreamPathTemplate": "/",
|
||||
"UpstreamHttpMethod": ["Post"],
|
||||
"ReRouteIsCaseSensitive": false,
|
||||
"DownstreamScheme": "http",
|
||||
"DownstreamHost": "localhost",
|
||||
"DownstreamPort": 51876,
|
||||
"AuthenticationOptions": {
|
||||
"AuthenticationProviderKey": "TestKey",
|
||||
"AllowedScopes": []
|
||||
}
|
||||
}]
|
||||
|
||||
When Ocelot runs it will look at this ReRoutes AuthenticationOptions.AuthenticationProviderKey
|
||||
and check that there is an Authentication provider registered with the given key. If there isn't then Ocelot
|
||||
will not start up, if there is then the ReRoute will use that provider when it executes.
|
||||
|
||||
If a ReRoute is authenticated Ocelot will invoke whatever scheme is associated with it while executing the authentication middleware. If the request fails authentication Ocelot returns a http status code 401.
|
||||
|
||||
JWT Tokens
|
||||
^^^^^^^^^^
|
||||
|
||||
If you want to authenticate using JWT tokens maybe from a provider like Auth0 you can register your authentication middleware as normal e.g.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
var authenticationProviderKey = "TestKey";
|
||||
|
||||
services.AddAuthentication()
|
||||
.AddJwtBearer(authenticationProviderKey, x =>
|
||||
{
|
||||
x.Authority = "test";
|
||||
x.Audience = "test";
|
||||
});
|
||||
|
||||
services.AddOcelot(Configuration);
|
||||
}
|
||||
|
||||
Then map the authentication provider key to a ReRoute in your configuration e.g.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"ReRoutes": [{
|
||||
"DownstreamPathTemplate": "/",
|
||||
"UpstreamPathTemplate": "/",
|
||||
"UpstreamHttpMethod": ["Post"],
|
||||
"ReRouteIsCaseSensitive": false,
|
||||
"DownstreamScheme": "http",
|
||||
"DownstreamHost": "localhost",
|
||||
"DownstreamPort": 51876,
|
||||
"AuthenticationOptions": {
|
||||
"AuthenticationProviderKey": "TestKey",
|
||||
"AllowedScopes": []
|
||||
}
|
||||
}]
|
||||
|
||||
|
||||
|
||||
Identity Server Bearer Tokens
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
In order to use IdentityServer bearer tokens, register your IdentityServer services as usual in ConfigureServices with a scheme (key). If you don't understand how to do this please consult the IdentityServer documentation.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
var authenticationProviderKey = "TestKey";
|
||||
Action<IdentityServerAuthenticationOptions> options = o =>
|
||||
{
|
||||
o.Authority = "https://whereyouridentityserverlives.com";
|
||||
o.ApiName = "api";
|
||||
o.SupportedTokens = SupportedTokens.Both;
|
||||
o.ApiSecret = "secret";
|
||||
};
|
||||
|
||||
services.AddAuthentication()
|
||||
.AddIdentityServerAuthentication(authenticationProviderKey, options);
|
||||
|
||||
services.AddOcelot(Configuration);
|
||||
}
|
||||
|
||||
Then map the authentication provider key to a ReRoute in your configuration e.g.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"ReRoutes": [{
|
||||
"DownstreamPathTemplate": "/",
|
||||
"UpstreamPathTemplate": "/",
|
||||
"UpstreamHttpMethod": ["Post"],
|
||||
"ReRouteIsCaseSensitive": false,
|
||||
"DownstreamScheme": "http",
|
||||
"DownstreamHost": "localhost",
|
||||
"DownstreamPort": 51876,
|
||||
"AuthenticationOptions": {
|
||||
"AuthenticationProviderKey": "TestKey",
|
||||
"AllowedScopes": []
|
||||
}
|
||||
}]
|
||||
Authentication
|
||||
==============
|
||||
|
||||
In order to authenticate ReRoutes and subsequently use any of Ocelot's claims based features, such as authorisation or modifying the request with values from the token, users must register authentication services in their Startup.cs as usual, but they must provide a scheme (authentication provider key) with each registration, e.g.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
var authenticationProviderKey = "TestKey";
|
||||
|
||||
services.AddAuthentication()
|
||||
.AddJwtBearer(authenticationProviderKey, x =>
|
||||
{
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
In this example TestKey is the scheme that this provider has been registered with.
|
||||
We then map this to a ReRoute in the configuration e.g.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"ReRoutes": [{
|
||||
"DownstreamHostAndPorts": [
|
||||
{
|
||||
"Host": "localhost",
|
||||
"Port": 51876,
|
||||
}
|
||||
],
|
||||
"DownstreamPathTemplate": "/",
|
||||
"UpstreamPathTemplate": "/",
|
||||
"UpstreamHttpMethod": ["Post"],
|
||||
"ReRouteIsCaseSensitive": false,
|
||||
"DownstreamScheme": "http",
|
||||
"AuthenticationOptions": {
|
||||
"AuthenticationProviderKey": "TestKey",
|
||||
"AllowedScopes": []
|
||||
}
|
||||
}]
|
||||
|
||||
When Ocelot runs it will look at this ReRoutes AuthenticationOptions.AuthenticationProviderKey
|
||||
and check that there is an Authentication provider registered with the given key. If there isn't then Ocelot
|
||||
will not start up, if there is then the ReRoute will use that provider when it executes.
|
||||
|
||||
If a ReRoute is authenticated Ocelot will invoke whatever scheme is associated with it while executing the authentication middleware. If the request fails authentication Ocelot returns a http status code 401.
|
||||
|
||||
JWT Tokens
|
||||
^^^^^^^^^^
|
||||
|
||||
If you want to authenticate using JWT tokens maybe from a provider like Auth0 you can register your authentication middleware as normal e.g.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
var authenticationProviderKey = "TestKey";
|
||||
|
||||
services.AddAuthentication()
|
||||
.AddJwtBearer(authenticationProviderKey, x =>
|
||||
{
|
||||
x.Authority = "test";
|
||||
x.Audience = "test";
|
||||
});
|
||||
|
||||
services.AddOcelot(Configuration);
|
||||
}
|
||||
|
||||
Then map the authentication provider key to a ReRoute in your configuration e.g.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"ReRoutes": [{
|
||||
"DownstreamHostAndPorts": [
|
||||
{
|
||||
"Host": "localhost",
|
||||
"Port": 51876,
|
||||
}
|
||||
],
|
||||
"DownstreamPathTemplate": "/",
|
||||
"UpstreamPathTemplate": "/",
|
||||
"UpstreamHttpMethod": ["Post"],
|
||||
"ReRouteIsCaseSensitive": false,
|
||||
"DownstreamScheme": "http",
|
||||
"AuthenticationOptions": {
|
||||
"AuthenticationProviderKey": "TestKey",
|
||||
"AllowedScopes": []
|
||||
}
|
||||
}]
|
||||
|
||||
|
||||
|
||||
Identity Server Bearer Tokens
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
In order to use IdentityServer bearer tokens, register your IdentityServer services as usual in ConfigureServices with a scheme (key). If you don't understand how to do this please consult the IdentityServer documentation.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
var authenticationProviderKey = "TestKey";
|
||||
Action<IdentityServerAuthenticationOptions> options = o =>
|
||||
{
|
||||
o.Authority = "https://whereyouridentityserverlives.com";
|
||||
o.ApiName = "api";
|
||||
o.SupportedTokens = SupportedTokens.Both;
|
||||
o.ApiSecret = "secret";
|
||||
};
|
||||
|
||||
services.AddAuthentication()
|
||||
.AddIdentityServerAuthentication(authenticationProviderKey, options);
|
||||
|
||||
services.AddOcelot(Configuration);
|
||||
}
|
||||
|
||||
Then map the authentication provider key to a ReRoute in your configuration e.g.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"ReRoutes": [{
|
||||
"DownstreamHostAndPorts": [
|
||||
{
|
||||
"Host": "localhost",
|
||||
"Port": 51876,
|
||||
}
|
||||
],
|
||||
"DownstreamPathTemplate": "/",
|
||||
"UpstreamPathTemplate": "/",
|
||||
"UpstreamHttpMethod": ["Post"],
|
||||
"ReRouteIsCaseSensitive": false,
|
||||
"DownstreamScheme": "http",
|
||||
"AuthenticationOptions": {
|
||||
"AuthenticationProviderKey": "TestKey",
|
||||
"AllowedScopes": []
|
||||
}
|
||||
}]
|
||||
|
@@ -1,18 +1,18 @@
|
||||
Authorisation
|
||||
=============
|
||||
|
||||
Ocelot supports claims based authorisation which is run post authentication. This means if
|
||||
you have a route you want to authorise you can add the following to your ReRoute configuration.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"RouteClaimsRequirement": {
|
||||
"UserType": "registered"
|
||||
}
|
||||
|
||||
In this example when the authorisation middleware is called Ocelot will check to see
|
||||
if the user has the claim type UserType and if the value of that claim is registered.
|
||||
If it isn't then the user will not be authorised and the response will be 403 forbidden.
|
||||
|
||||
|
||||
|
||||
Authorisation
|
||||
=============
|
||||
|
||||
Ocelot supports claims based authorisation which is run post authentication. This means if
|
||||
you have a route you want to authorise you can add the following to your ReRoute configuration.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"RouteClaimsRequirement": {
|
||||
"UserType": "registered"
|
||||
}
|
||||
|
||||
In this example when the authorisation middleware is called Ocelot will check to see
|
||||
if the user has the claim type UserType and if the value of that claim is registered.
|
||||
If it isn't then the user will not be authorised and the response will be 403 forbidden.
|
||||
|
||||
|
||||
|
||||
|
@@ -1,21 +1,21 @@
|
||||
Caching
|
||||
=======
|
||||
|
||||
Ocelot supports some very rudimentary caching at the moment, provided by
the `CacheManager <http://cachemanager.net/>`_ project. This is an amazing project
that is solving a lot of caching problems. I would recommend using this package to
cache with Ocelot. If you look at the example `here <https://github.com/TomPallister/Ocelot/blob/develop/test/Ocelot.ManualTest/Startup.cs>`_ you can see how the cache manager is set up and then passed into the Ocelot
AddOcelotOutputCaching configuration method. You can use any settings supported by
the CacheManager package and just pass them in.
|
||||
|
||||
Anyway Ocelot currently supports caching on the URL of the downstream service
|
||||
and setting a TTL in seconds to expire the cache. You can also clear the cache for a region
|
||||
by calling Ocelot's administration API.
|
||||
|
||||
In order to use caching on a route in your ReRoute configuration add this setting.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"FileCacheOptions": { "TtlSeconds": 15, "Region": "somename" }
|
||||
|
||||
In this example TtlSeconds is set to 15, which means the cache will expire after 15 seconds.
|
||||
Caching
|
||||
=======
|
||||
|
||||
Ocelot supports some very rudimentary caching at the moment, provided by
the `CacheManager <http://cachemanager.net/>`_ project. This is an amazing project
that is solving a lot of caching problems. I would recommend using this package to
cache with Ocelot. If you look at the example `here <https://github.com/TomPallister/Ocelot/blob/develop/test/Ocelot.ManualTest/Startup.cs>`_ you can see how the cache manager is set up and then passed into the Ocelot
AddOcelotOutputCaching configuration method. You can use any settings supported by
the CacheManager package and just pass them in.
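
For reference, a registration along these lines is roughly what that example boils down to. This is only a sketch: ``ConfigurationBuilderCachePart`` and ``WithDictionaryHandle`` come from the CacheManager.Core package, and you can swap in whatever CacheManager handles and settings you prefer.

.. code-block:: csharp

    public void ConfigureServices(IServiceCollection services)
    {
        // Sketch: build CacheManager settings (here a simple in-memory dictionary handle)
        // and pass them to Ocelot's output caching before registering Ocelot itself.
        Action<CacheManager.Core.ConfigurationBuilderCachePart> settings = x =>
        {
            x.WithDictionaryHandle();
        };

        services.AddOcelotOutputCaching(settings);

        services.AddOcelot(Configuration);
    }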
|
||||
|
||||
Anyway Ocelot currently supports caching on the URL of the downstream service
|
||||
and setting a TTL in seconds to expire the cache. You can also clear the cache for a region
|
||||
by calling Ocelot's administration API.
|
||||
|
||||
In order to use caching on a route in your ReRoute configuration add this setting.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"FileCacheOptions": { "TtlSeconds": 15, "Region": "somename" }
|
||||
|
||||
In this example TtlSeconds is set to 15, which means the cache will expire after 15 seconds.
|
||||
|
@@ -1,72 +1,72 @@
|
||||
Claims Transformation
|
||||
=====================
|
||||
|
||||
Ocelot allows the user to access claims and transform them into headers, query string
|
||||
parameters and other claims. This is only available once a user has been authenticated.
|
||||
|
||||
After the user is authenticated we run the claims to claims transformation middleware.
|
||||
This allows the user to transform claims before the authorisation middleware is called.
|
||||
After the user is authorised, first we call the claims to headers middleware and finally
the claims to query string parameters middleware.
|
||||
|
||||
The syntax for performing the transforms is the same for each process. In the ReRoute
|
||||
configuration a json dictionary is added with a specific name either AddClaimsToRequest,
|
||||
AddHeadersToRequest, AddQueriesToRequest.
|
||||
|
||||
Note I'm not a hotshot programmer so have no idea if this syntax is good..
|
||||
|
||||
Within this dictionary the entries specify how Ocelot should transform things!
|
||||
The key to the dictionary is going to become the key of either a claim, header
|
||||
or query parameter.
|
||||
|
||||
The value of the entry is parsed to logic that will perform the transform. First of
|
||||
all a dictionary accessor is specified e.g. Claims[CustomerId]. This means we want
|
||||
to access the claims and get the CustomerId claim type. Next is a greater than (>)
|
||||
symbol which is just used to split the string. The next entry is either value or value with
an indexer. If value is specified Ocelot will just take the value and add it to the
|
||||
transform. If the value has an indexer Ocelot will look for a delimiter which is provided
|
||||
after another greater than symbol. Ocelot will then split the value on the delimiter
|
||||
and add whatever was at the index requested to the transform.
|
||||
|
||||
Claims to Claims Transformation
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Below is an example configuration that will transform claims to claims
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"AddClaimsToRequest": {
|
||||
"UserType": "Claims[sub] > value[0] > |",
|
||||
"UserId": "Claims[sub] > value[1] > |"
|
||||
}
|
||||
|
||||
This shows a transform where Ocelot looks at the user's sub claim and transforms it into
|
||||
UserType and UserId claims. Assuming the sub looks like this "usertypevalue|useridvalue".
|
||||
|
||||
Claims to Headers Transformation
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Below is an example configuration that will transform claims to headers
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"AddHeadersToRequest": {
|
||||
"CustomerId": "Claims[sub] > value[1] > |"
|
||||
}
|
||||
|
||||
This shows a transform where Ocelot looks at the user's sub claim and transforms it into a
|
||||
CustomerId header. Assuming the sub looks like this "usertypevalue|useridvalue".
|
||||
|
||||
Claims to Query String Parameters Transformation
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Below is an example configuration that will transform claims to query string parameters
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"AddQueriesToRequest": {
|
||||
"LocationId": "Claims[LocationId] > value",
|
||||
}
|
||||
|
||||
This shows a transform where Ocelot looks at the user's LocationId claim and adds it as a query string parameter to be forwarded onto the downstream service.
|
||||
Claims Transformation
|
||||
=====================
|
||||
|
||||
Ocelot allows the user to access claims and transform them into headers, query string
|
||||
parameters and other claims. This is only available once a user has been authenticated.
|
||||
|
||||
After the user is authenticated we run the claims to claims transformation middleware.
|
||||
This allows the user to transform claims before the authorisation middleware is called.
|
||||
After the user is authorised, first we call the claims to headers middleware and finally
the claims to query string parameters middleware.
|
||||
|
||||
The syntax for performing the transforms is the same for each process. In the ReRoute
|
||||
configuration a json dictionary is added with a specific name either AddClaimsToRequest,
|
||||
AddHeadersToRequest, AddQueriesToRequest.
|
||||
|
||||
Note I'm not a hotshot programmer so have no idea if this syntax is good..
|
||||
|
||||
Within this dictionary the entries specify how Ocelot should transform things!
|
||||
The key to the dictionary is going to become the key of either a claim, header
|
||||
or query parameter.
|
||||
|
||||
The value of the entry is parsed to logic that will perform the transform. First of
|
||||
all a dictionary accessor is specified e.g. Claims[CustomerId]. This means we want
|
||||
to access the claims and get the CustomerId claim type. Next is a greater than (>)
|
||||
symbol which is just used to split the string. The next entry is either value or value with
an indexer. If value is specified Ocelot will just take the value and add it to the
|
||||
transform. If the value has an indexer Ocelot will look for a delimiter which is provided
|
||||
after another greater than symbol. Ocelot will then split the value on the delimiter
|
||||
and add whatever was at the index requested to the transform.
|
||||
|
||||
Claims to Claims Transformation
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Below is an example configuration that will transform claims to claims
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"AddClaimsToRequest": {
|
||||
"UserType": "Claims[sub] > value[0] > |",
|
||||
"UserId": "Claims[sub] > value[1] > |"
|
||||
}
|
||||
|
||||
This shows a transform where Ocelot looks at the user's sub claim and transforms it into
|
||||
UserType and UserId claims. Assuming the sub looks like this "usertypevalue|useridvalue".
|
||||
|
||||
Claims to Headers Transformation
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Below is an example configuration that will transform claims to headers
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"AddHeadersToRequest": {
|
||||
"CustomerId": "Claims[sub] > value[1] > |"
|
||||
}
|
||||
|
||||
This shows a transform where Ocelot looks at the user's sub claim and transforms it into a
|
||||
CustomerId header. Assuming the sub looks like this "usertypevalue|useridvalue".
|
||||
|
||||
Claims to Query String Parameters Transformation
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Below is an example configuration that will transform claims to query string parameters
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"AddQueriesToRequest": {
|
||||
"LocationId": "Claims[LocationId] > value",
|
||||
}
|
||||
|
||||
This shows a transform where Ocelot looks at the user's LocationId claim and adds it as
a query string parameter to be forwarded onto the downstream service.
|
@@ -1,102 +1,106 @@
|
||||
Configuration
|
||||
=============
|
||||
|
||||
An example configuration can be found `here <https://github.com/TomPallister/Ocelot/blob/develop/test/Ocelot.ManualTest/configuration.json>`_.
|
||||
There are two sections to the configuration. An array of ReRoutes and a GlobalConfiguration.
|
||||
The ReRoutes are the objects that tell Ocelot how to treat an upstream request. The Global
|
||||
configuration is a bit hacky and allows overrides of ReRoute specific settings. It's useful
|
||||
if you don't want to manage lots of ReRoute specific settings.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"ReRoutes": [],
|
||||
"GlobalConfiguration": {}
|
||||
}
|
||||
|
||||
Follow Redirects / Use CookieContainer
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Use HttpHandlerOptions in ReRoute configuration to set up HttpHandler behavior:
|
||||
- ``AllowAutoRedirect`` is a value that indicates whether the request should follow redirection responses.
  Set it to true if the request should automatically follow redirection responses from the downstream resource; otherwise false. The default value is true.
- ``UseCookieContainer`` is a value that indicates whether the handler uses the CookieContainer property to store server cookies and uses these cookies when sending requests.
  The default value is true.
|
||||
|
||||
Here is an example ReRoute configuration. You don't need to set all of these things, but this is everything that is available at the moment:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"DownstreamPathTemplate": "/",
|
||||
"UpstreamPathTemplate": "/",
|
||||
"UpstreamHttpMethod": [
|
||||
"Get"
|
||||
],
|
||||
"AddHeadersToRequest": {},
|
||||
"AddClaimsToRequest": {},
|
||||
"RouteClaimsRequirement": {},
|
||||
"AddQueriesToRequest": {},
|
||||
"RequestIdKey": "",
|
||||
"FileCacheOptions": {
|
||||
"TtlSeconds": 0,
|
||||
"Region": ""
|
||||
},
|
||||
"ReRouteIsCaseSensitive": false,
|
||||
"ServiceName": "",
|
||||
"DownstreamScheme": "http",
|
||||
"DownstreamHost": "localhost",
|
||||
"DownstreamPort": 51779,
|
||||
"QoSOptions": {
|
||||
"ExceptionsAllowedBeforeBreaking": 0,
|
||||
"DurationOfBreak": 0,
|
||||
"TimeoutValue": 0
|
||||
},
|
||||
"LoadBalancer": "",
|
||||
"RateLimitOptions": {
|
||||
"ClientWhitelist": [],
|
||||
"EnableRateLimiting": false,
|
||||
"Period": "",
|
||||
"PeriodTimespan": 0,
|
||||
"Limit": 0
|
||||
},
|
||||
"AuthenticationOptions": {
|
||||
"AuthenticationProviderKey": "",
|
||||
"AllowedScopes": []
|
||||
},
|
||||
"HttpHandlerOptions": {
|
||||
"AllowAutoRedirect": true,
|
||||
"UseCookieContainer": true
|
||||
},
|
||||
"UseServiceDiscovery": false
|
||||
}
|
||||
|
||||
More information on how to use these options is below.
|
||||
|
||||
Store configuration in consul
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you add the following when you register your services, Ocelot will attempt to store and retrieve its configuration in the Consul KV store.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
services
|
||||
.AddOcelot(Configuration)
|
||||
.AddStoreOcelotConfigurationInConsul();
|
||||
|
||||
You also need to add the following to your configuration.json. This is how Ocelot
|
||||
finds your Consul agent and interacts to load and store the configuration from Consul.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"GlobalConfiguration": {
|
||||
"ServiceDiscoveryProvider": {
|
||||
"Host": "localhost",
|
||||
"Port": 9500
|
||||
}
|
||||
}
|
||||
|
||||
I decided to create this feature after working on the Raft consensus algorithm and finding out it's super hard. Why not take advantage of the fact that Consul already gives you this!
I guess it means if you want to use Ocelot to its fullest you take on Consul as a dependency for now.
|
||||
|
||||
Configuration
|
||||
=============
|
||||
|
||||
An example configuration can be found `here <https://github.com/TomPallister/Ocelot/blob/develop/test/Ocelot.ManualTest/configuration.json>`_.
|
||||
There are two sections to the configuration. An array of ReRoutes and a GlobalConfiguration.
|
||||
The ReRoutes are the objects that tell Ocelot how to treat an upstream request. The Global
|
||||
configuration is a bit hacky and allows overrides of ReRoute specific settings. It's useful
|
||||
if you don't want to manage lots of ReRoute specific settings.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"ReRoutes": [],
|
||||
"GlobalConfiguration": {}
|
||||
}
|
||||
|
||||
Here is an example ReRoute configuration. You don't need to set all of these things, but this is everything that is available at the moment:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"DownstreamPathTemplate": "/",
|
||||
"UpstreamPathTemplate": "/",
|
||||
"UpstreamHttpMethod": [
|
||||
"Get"
|
||||
],
|
||||
"AddHeadersToRequest": {},
|
||||
"AddClaimsToRequest": {},
|
||||
"RouteClaimsRequirement": {},
|
||||
"AddQueriesToRequest": {},
|
||||
"RequestIdKey": "",
|
||||
"FileCacheOptions": {
|
||||
"TtlSeconds": 0,
|
||||
"Region": ""
|
||||
},
|
||||
"ReRouteIsCaseSensitive": false,
|
||||
"ServiceName": "",
|
||||
"DownstreamScheme": "http",
|
||||
"DownstreamHostAndPorts": [
|
||||
{
|
||||
"Host": "localhost",
|
||||
"Port": 51876,
|
||||
}
|
||||
],
|
||||
"QoSOptions": {
|
||||
"ExceptionsAllowedBeforeBreaking": 0,
|
||||
"DurationOfBreak": 0,
|
||||
"TimeoutValue": 0
|
||||
},
|
||||
"LoadBalancer": "",
|
||||
"RateLimitOptions": {
|
||||
"ClientWhitelist": [],
|
||||
"EnableRateLimiting": false,
|
||||
"Period": "",
|
||||
"PeriodTimespan": 0,
|
||||
"Limit": 0
|
||||
},
|
||||
"AuthenticationOptions": {
|
||||
"AuthenticationProviderKey": "",
|
||||
"AllowedScopes": []
|
||||
},
|
||||
"HttpHandlerOptions": {
|
||||
"AllowAutoRedirect": true,
|
||||
"UseCookieContainer": true
|
||||
},
|
||||
"UseServiceDiscovery": false
|
||||
}
|
||||
|
||||
More information on how to use these options is below.
|
||||
|
||||
Follow Redirects / Use CookieContainer
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Use HttpHandlerOptions in ReRoute configuration to set up HttpHandler behavior:
|
||||
- ``AllowAutoRedirect`` is a value that indicates whether the request should follow redirection responses.
  Set it to true if the request should automatically follow redirection responses from the downstream resource; otherwise false. The default value is true.
- ``UseCookieContainer`` is a value that indicates whether the handler uses the CookieContainer property to store server cookies and uses these cookies when sending requests.
  The default value is true.
|
||||
|
||||
Store configuration in consul
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you add the following when you register your services, Ocelot will attempt to store and retrieve its configuration in the Consul KV store.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
services
|
||||
.AddOcelot(Configuration)
|
||||
.AddStoreOcelotConfigurationInConsul();
|
||||
|
||||
You also need to add the following to your configuration.json. This is how Ocelot
|
||||
finds your Consul agent and interacts to load and store the configuration from Consul.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"GlobalConfiguration": {
|
||||
"ServiceDiscoveryProvider": {
|
||||
"Host": "localhost",
|
||||
"Port": 9500
|
||||
}
|
||||
}
|
||||
|
||||
I decided to create this feature after working on the Raft consensus algorithm and finding out it's super hard. Why not take advantage of the fact that Consul already gives you this!
I guess it means if you want to use Ocelot to its fullest you take on Consul as a dependency for now.
|
||||
|
||||
This feature has a 3 second TTL cache before making a new request to your local Consul agent.
|
@@ -1,70 +1,97 @@
|
||||
Headers Transformation
|
||||
======================
|
||||
|
||||
Ocelot allows the user to transform headers pre and post downstream request. At the moment Ocelot only supports find and replace. This feature was requested `GitHub #190 <https://github.com/TomPallister/Ocelot/issues/190>`_ and I decided that it was going to be useful in various ways.
|
||||
|
||||
Syntax
|
||||
^^^^^^
|
||||
|
||||
In order to transform a header first we specify the header key and then the type of transform we want e.g.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"Test": "http://www.bbc.co.uk/, http://ocelot.com/"
|
||||
|
||||
The key is "Test" and the value is "http://www.bbc.co.uk/, http://ocelot.com/". The value is saying replace http://www.bbc.co.uk/ with http://ocelot.com/. The syntax is {find}, {replace}. Hopefully pretty simple. There are examples below that explain more.
|
||||
|
||||
Pre Downstream Request
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Add the following to a ReRoute in configuration.json in order to replace http://www.bbc.co.uk/ with http://ocelot.com/. This header will be changed before the request downstream and will be sent to the downstream server.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"UpstreamHeaderTransform": {
|
||||
"Test": "http://www.bbc.co.uk/, http://ocelot.com/"
|
||||
},
|
||||
|
||||
Post Downstream Request
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Add the following to a ReRoute in configuration.json in order to replace http://www.bbc.co.uk/ with http://ocelot.com/. This transformation will take place after Ocelot has received the response from the downstream service.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"DownstreamHeaderTransform": {
|
||||
"Test": "http://www.bbc.co.uk/, http://ocelot.com/"
|
||||
},
|
||||
|
||||
Placeholders
|
||||
^^^^^^^^^^^^
|
||||
|
||||
Ocelot allows placeholders that can be used in header transformation. At the moment there is only one placeholder.
|
||||
|
||||
{BaseUrl} - This will use Ocelot's base url e.g. http://localhost:5000 as its value.
|
||||
|
||||
Handling 302 Redirects
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
Ocelot will by default automatically follow redirects; however, if you want to return the Location header to the client, you might want to change the location to be Ocelot rather than the downstream service. Ocelot allows this with the following configuration.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"DownstreamHeaderTransform": {
|
||||
"Location": "http://www.bbc.co.uk/, http://ocelot.com/"
|
||||
},
|
||||
"HttpHandlerOptions": {
|
||||
"AllowAutoRedirect": false,
|
||||
},
|
||||
|
||||
or you could use the BaseUrl placeholder.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"DownstreamHeaderTransform": {
|
||||
"Location": "http://localhost:6773, {BaseUrl}"
|
||||
},
|
||||
"HttpHandlerOptions": {
|
||||
"AllowAutoRedirect": false,
|
||||
},
|
||||
|
||||
Ocelot will not try and replace the location header returned by the downstream service with its own URL.
|
||||
Headers Transformation
|
||||
======================
|
||||
|
||||
Ocelot allows the user to transform headers pre and post downstream request. At the moment Ocelot only supports find and replace. This feature was requested `GitHub #190 <https://github.com/TomPallister/Ocelot/issues/190>`_ and I decided that it was going to be useful in various ways.
|
||||
|
||||
Syntax
|
||||
^^^^^^
|
||||
|
||||
In order to transform a header first we specify the header key and then the type of transform we want e.g.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"Test": "http://www.bbc.co.uk/, http://ocelot.com/"
|
||||
|
||||
The key is "Test" and the value is "http://www.bbc.co.uk/, http://ocelot.com/". The value is saying replace http://www.bbc.co.uk/ with http://ocelot.com/. The syntax is {find}, {replace}. Hopefully pretty simple. There are examples below that explain more.
|
||||
|
||||
Pre Downstream Request
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Add the following to a ReRoute in configuration.json in order to replace http://www.bbc.co.uk/ with http://ocelot.com/. This header will be changed before the request downstream and will be sent to the downstream server.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"UpstreamHeaderTransform": {
|
||||
"Test": "http://www.bbc.co.uk/, http://ocelot.com/"
|
||||
},
|
||||
|
||||
Post Downstream Request
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Add the following to a ReRoute in configuration.json in order to replace http://www.bbc.co.uk/ with http://ocelot.com/. This transformation will take place after Ocelot has received the response from the downstream service.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"DownstreamHeaderTransform": {
|
||||
"Test": "http://www.bbc.co.uk/, http://ocelot.com/"
|
||||
},
|
||||
|
||||
Placeholders
|
||||
^^^^^^^^^^^^
|
||||
|
||||
Ocelot allows placeholders that can be used in header transformation.
|
||||
|
||||
{BaseUrl} - This will use Ocelot's base url e.g. http://localhost:5000 as its value.
|
||||
{DownstreamBaseUrl} - This will use the downstream services base url e.g. http://localhost:5000 as its value. This only works for DownstreamHeaderTransform at the moment.
|
||||
|
||||
Handling 302 Redirects
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
Ocelot will automatically follow redirects by default, however if you want to return the Location header to the client you might want to change the location to be Ocelot rather than the downstream service. Ocelot allows this with the following configuration.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"DownstreamHeaderTransform": {
|
||||
"Location": "http://www.bbc.co.uk/, http://ocelot.com/"
|
||||
},
|
||||
"HttpHandlerOptions": {
|
||||
"AllowAutoRedirect": false,
|
||||
},
|
||||
|
||||
or you could use the BaseUrl placeholder.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"DownstreamHeaderTransform": {
|
||||
"Location": "http://localhost:6773, {BaseUrl}"
|
||||
},
|
||||
"HttpHandlerOptions": {
|
||||
"AllowAutoRedirect": false,
|
||||
},
|
||||
|
||||
Finally, if you are using a load balancer with Ocelot you will get multiple downstream base urls so the above would not work. In this case you can do the following.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"DownstreamHeaderTransform": {
|
||||
"Location": "{DownstreamBaseUrl}, {BaseUrl}"
|
||||
},
|
||||
"HttpHandlerOptions": {
|
||||
"AllowAutoRedirect": false,
|
||||
},
|
||||
|
||||
Future
|
||||
^^^^^^
|
||||
|
||||
Ideally this feature would be able to support the fact that a header can have multiple values. At the moment it just assumes one.
|
||||
It would also be nice if it could do multiple find and replaces, e.g.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"DownstreamHeaderTransform": {
|
||||
"Location": "[{one,one},{two,two}"
|
||||
},
|
||||
"HttpHandlerOptions": {
|
||||
"AllowAutoRedirect": false,
|
||||
},
|
||||
|
||||
If anyone wants to have a go at this please help yourself!!
|
60
docs/features/loadbalancer.rst
Normal file
60
docs/features/loadbalancer.rst
Normal file
@ -0,0 +1,60 @@
|
||||
Load Balancer
|
||||
=============
|
||||
|
||||
Ocelot can load balance across available downstream services for each ReRoute. This means you can scale your downstream services and Ocelot can use them effectively.
|
||||
|
||||
The types of load balancer available are:
|
||||
|
||||
LeastConnection - tracks which services are dealing with requests and sends new requests to the service with the fewest existing requests. The algorithm state is not distributed across a cluster of Ocelots.
|
||||
|
||||
RoundRobin - loops through available services and sends requests. The algorithm state is not distributed across a cluster of Ocelots.
|
||||
|
||||
NoLoadBalancer - takes the first available service from config or service discovery.
|
||||
|
||||
You must choose in your configuration which load balancer to use.
|
||||
|
||||
Configuration
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
The following shows how to set up multiple downstream services for a ReRoute using configuration.json and then select the LeastConnection load balancer. This is the simplest way to get load balancing set up.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"DownstreamPathTemplate": "/api/posts/{postId}",
|
||||
"DownstreamScheme": "https",
|
||||
"DownstreamHostAndPorts": [
|
||||
{
|
||||
"Host": "10.0.1.10",
|
||||
"Port": 5000,
|
||||
},
|
||||
{
|
||||
"Host": "10.0.1.11",
|
||||
"Port": 5000,
|
||||
}
|
||||
],
|
||||
"UpstreamPathTemplate": "/posts/{postId}",
|
||||
"LoadBalancer": "LeastConnection",
|
||||
"UpstreamHttpMethod": [ "Put", "Delete" ]
|
||||
}
|
||||
|
||||
|
||||
Service Discovery
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
The following shows how to set up a ReRoute using service discovery then select the LeastConnection load balancer.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"DownstreamPathTemplate": "/api/posts/{postId}",
|
||||
"DownstreamScheme": "https",
|
||||
"UpstreamPathTemplate": "/posts/{postId}",
|
||||
"UpstreamHttpMethod": [ "Put" ],
|
||||
"ServiceName": "product",
|
||||
"LoadBalancer": "LeastConnection",
|
||||
"UseServiceDiscovery": true
|
||||
}
|
||||
|
||||
When this is set up Ocelot will look up the downstream host and port from the service discovery provider and load balance requests across any available services. If you add and remove services from the
|
||||
service discovery provider (Consul) then Ocelot should respect this and stop calling services that have been removed and start calling services that have been added.
|
@ -1,14 +1,14 @@
|
||||
Logging
|
||||
=======
|
||||
|
||||
Ocelot uses the standard logging interfaces ILoggerFactory / ILogger<T> at the moment.
|
||||
This is encapsulated in IOcelotLogger / IOcelotLoggerFactory with an implementation
|
||||
for the standard ASP.NET Core logging at the moment. This is because Ocelot adds some extra info to the logs such as the request id if it is configured.
|
||||
|
||||
There is a global error handler that should catch any exceptions thrown and log them as errors.
|
||||
|
||||
Finally if logging is set to trace level Ocelot will log starting, finishing and any middlewares that throw an exception which can be quite useful.
|
||||
|
||||
The reason for not just using bog standard framework logging is that I could not
|
||||
work out how to override the request id that gets logged when setting IncludeScopes
|
||||
Logging
|
||||
=======
|
||||
|
||||
Ocelot uses the standard logging interfaces ILoggerFactory / ILogger<T> at the moment.
|
||||
This is encapsulated in IOcelotLogger / IOcelotLoggerFactory with an implementation
|
||||
for the standard ASP.NET Core logging at the moment. This is because Ocelot adds some extra info to the logs such as the request id if it is configured.
|
||||
|
||||
There is a global error handler that should catch any exceptions thrown and log them as errors.
|
||||
|
||||
Finally if logging is set to trace level Ocelot will log starting, finishing and any middlewares that throw an exception which can be quite useful.
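If you want to see those trace messages, below is a minimal sketch of a logging setup that surfaces them. It simply extends the ConfigureLogging call shown in the getting started docs; the console provider and Trace level are illustrative choices, and IncludeScopes is assumed to live in your "Logging" configuration section.

.. code-block:: csharp

    .ConfigureLogging((hostingContext, logging) =>
    {
        // pick up the "Logging" section (where IncludeScopes can be set) from configuration
        logging.AddConfiguration(hostingContext.Configuration.GetSection("Logging"));
        logging.AddConsole();
        // Trace surfaces Ocelot's starting/finishing middleware messages mentioned above
        logging.SetMinimumLevel(LogLevel.Trace);
    })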
|
||||
|
||||
The reason for not just using bog standard framework logging is that I could not
|
||||
work out how to override the request id that gets logged when setting IncludeScopes
|
||||
to true for logging settings. Nicely onto the next feature.
|
@ -1,41 +1,41 @@
|
||||
Middleware Injection and Overrides
|
||||
==================================
|
||||
|
||||
Warning: use with caution. If you are seeing any exceptions or strange behavior in your middleware
|
||||
pipeline and you are using any of the following, remove them and try again!
|
||||
|
||||
When setting up Ocelot in your Startup.cs you can provide some additional middleware
|
||||
and override middleware. This is done as follows.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
var configuration = new OcelotMiddlewareConfiguration
|
||||
{
|
||||
PreErrorResponderMiddleware = async (ctx, next) =>
|
||||
{
|
||||
await next.Invoke();
|
||||
}
|
||||
};
|
||||
|
||||
app.UseOcelot(configuration);
|
||||
|
||||
In the example above the provided function will run before the first piece of Ocelot middleware.
|
||||
This allows a user to supply any behaviours they want before and after the Ocelot pipeline has run.
|
||||
This means you can break everything so use at your own pleasure!
|
||||
|
||||
The user can set functions against the following.
|
||||
|
||||
* PreErrorResponderMiddleware - Already explained above.
|
||||
|
||||
* PreAuthenticationMiddleware - This allows the user to run pre authentication logic and then call Ocelot's authentication middleware.
|
||||
|
||||
* AuthenticationMiddleware - This overrides Ocelot's authentication middleware.
|
||||
|
||||
* PreAuthorisationMiddleware - This allows the user to run pre authorisation logic and then call Ocelot's authorisation middleware.
|
||||
|
||||
* AuthorisationMiddleware - This overrides Ocelot's authorisation middleware.
|
||||
|
||||
* PreQueryStringBuilderMiddleware - This allows the user to manipulate the query string on the http request before it is passed to Ocelot's request creator.
|
||||
|
||||
Obviously you can just add middleware as normal before the call to app.UseOcelot(). It cannot be added
|
||||
Middleware Injection and Overrides
|
||||
==================================
|
||||
|
||||
Warning: use with caution. If you are seeing any exceptions or strange behavior in your middleware
|
||||
pipeline and you are using any of the following, remove them and try again!
|
||||
|
||||
When setting up Ocelot in your Startup.cs you can provide some additional middleware
|
||||
and override middleware. This is done as follows.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
var configuration = new OcelotMiddlewareConfiguration
|
||||
{
|
||||
PreErrorResponderMiddleware = async (ctx, next) =>
|
||||
{
|
||||
await next.Invoke();
|
||||
}
|
||||
};
|
||||
|
||||
app.UseOcelot(configuration);
|
||||
|
||||
In the example above the provided function will run before the first piece of Ocelot middleware.
|
||||
This allows a user to supply any behaviours they want before and after the Ocelot pipeline has run.
|
||||
This means you can break everything so use at your own pleasure!
|
||||
|
||||
The user can set functions against the following.
|
||||
|
||||
* PreErrorResponderMiddleware - Already explained above.
|
||||
|
||||
* PreAuthenticationMiddleware - This allows the user to run pre authentication logic and then call Ocelot's authentication middleware.
|
||||
|
||||
* AuthenticationMiddleware - This overrides Ocelot's authentication middleware.
|
||||
|
||||
* PreAuthorisationMiddleware - This allows the user to run pre authorisation logic and then call Ocelot's authorisation middleware.
|
||||
|
||||
* AuthorisationMiddleware - This overrides Ocelot's authorisation middleware.
|
||||
|
||||
* PreQueryStringBuilderMiddleware - This allows the user to manipulate the query string on the http request before it is passed to Ocelot's request creator.
|
||||
|
||||
Obviously you can just add middleware as normal before the call to app.UseOcelot(). It cannot be added
|
||||
after as Ocelot does not call the next middleware.
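As a rough sketch, overriding or wrapping a couple of these hooks might look like the following. The delegate shape is assumed to match the PreErrorResponderMiddleware example above; treat it as illustrative rather than a definitive API reference.

.. code-block:: csharp

    var configuration = new OcelotMiddlewareConfiguration
    {
        // run custom logic and then hand over to Ocelot's authentication middleware
        PreAuthenticationMiddleware = async (ctx, next) =>
        {
            // e.g. inspect or enrich the incoming request via ctx here
            await next.Invoke();
        },
        // tweak the query string before Ocelot builds the downstream request
        PreQueryStringBuilderMiddleware = async (ctx, next) =>
        {
            await next.Invoke();
        }
    };

    app.UseOcelot(configuration);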
|
@ -1,22 +1,22 @@
|
||||
Quality of Service
|
||||
==================
|
||||
|
||||
Ocelot supports one QoS capability at the current time. You can set on a per ReRoute basis if you
|
||||
want to use a circuit breaker when making requests to a downstream service. This uses an awesome
|
||||
.NET library called Polly, check them out `here <https://github.com/App-vNext/Polly>`_.
|
||||
|
||||
Add the following section to a ReRoute configuration.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"QoSOptions": {
|
||||
"ExceptionsAllowedBeforeBreaking":3,
|
||||
"DurationOfBreak":5,
|
||||
"TimeoutValue":5000
|
||||
}
|
||||
|
||||
You must set a number greater than 0 against ExceptionsAllowedBeforeBreaking for this rule to be
|
||||
implemented. Duration of break is how long the circuit breaker will stay open for after it is tripped.
|
||||
TimeoutValue means if a request takes more than 5 seconds it will automatically be timed out.
|
||||
|
||||
Quality of Service
|
||||
==================
|
||||
|
||||
Ocelot supports one QoS capability at the current time. You can set on a per ReRoute basis if you
|
||||
want to use a circuit breaker when making requests to a downstream service. This uses an awesome
|
||||
.NET library called Polly, check them out `here <https://github.com/App-vNext/Polly>`_.
|
||||
|
||||
Add the following section to a ReRoute configuration.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"QoSOptions": {
|
||||
"ExceptionsAllowedBeforeBreaking":3,
|
||||
"DurationOfBreak":5,
|
||||
"TimeoutValue":5000
|
||||
}
|
||||
|
||||
You must set a number greater than 0 against ExceptionsAllowedBeforeBreaking for this rule to be
|
||||
implemented. Duration of break is how long the circuit breaker will stay open for after it is tripped.
|
||||
TimeoutValue means if a request takes more than 5 seconds it will automatically be timed out.
|
||||
|
||||
If you do not add a QoS section QoS will not be used.
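For context only, the three settings map loosely onto Polly policies along the lines of the sketch below. This is not Ocelot's internal code, and the unit for DurationOfBreak is assumed to be seconds here purely for illustration.

.. code-block:: csharp

    using Polly;
    using System;

    // roughly what the QoSOptions above express
    var circuitBreaker = Policy
        .Handle<Exception>()
        .CircuitBreakerAsync(3, TimeSpan.FromSeconds(5)); // ExceptionsAllowedBeforeBreaking, DurationOfBreak

    var timeout = Policy.TimeoutAsync(TimeSpan.FromMilliseconds(5000)); // TimeoutValue

    // a downstream call would have to pass through both policies
    var policy = Policy.WrapAsync(circuitBreaker, timeout);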
|
@ -1,45 +1,45 @@
|
||||
Raft (EXPERIMENTAL DO NOT USE IN PRODUCTION)
|
||||
============================================
|
||||
|
||||
Ocelot has recently integrated `Rafty <https://github.com/TomPallister/Rafty>`_ which is an implementation of Raft that I have also been working on over the last year. This project is very experimental so please do not use this feature of Ocelot in production until I think it's OK.
|
||||
|
||||
Raft is a distributed consensus algorithm that allows a cluster of servers (Ocelots) to maintain local state without having a centralised database for storing state (e.g. SQL Server).
|
||||
|
||||
In order to enable Rafty in Ocelot you must make the following changes to your Startup.cs.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public virtual void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
services
|
||||
.AddOcelot(Configuration)
|
||||
.AddAdministration("/administration", "secret")
|
||||
.AddRafty();
|
||||
}
|
||||
|
||||
In addition to this you must add a file called peers.json to your main project and it will look as follows
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"Peers": [{
|
||||
"HostAndPort": "http://localhost:5000"
|
||||
},
|
||||
{
|
||||
"HostAndPort": "http://localhost:5002"
|
||||
},
|
||||
{
|
||||
"HostAndPort": "http://localhost:5003"
|
||||
},
|
||||
{
|
||||
"HostAndPort": "http://localhost:5004"
|
||||
},
|
||||
{
|
||||
"HostAndPort": "http://localhost:5001"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Each instance of Ocelot must have its address in the array so that they can communicate using Rafty.
|
||||
|
||||
Once you have made these configuration changes you must deploy and start each instance of Ocelot using the addresses in the peers.json file. The servers should then start communicating with each other! You can test if everything is working by posting a configuration update and checking it has replicated to all servers by getting their configuration.
|
||||
Raft (EXPERIMENTAL DO NOT USE IN PRODUCTION)
|
||||
============================================
|
||||
|
||||
Ocelot has recently integrated `Rafty <https://github.com/TomPallister/Rafty>`_ which is an implementation of Raft that I have also been working on over the last year. This project is very experimental so please do not use this feature of Ocelot in production until I think it's OK.
|
||||
|
||||
Raft is a distributed consensus algorithm that allows a cluster of servers (Ocelots) to maintain local state without having a centralised database for storing state (e.g. SQL Server).
|
||||
|
||||
In order to enable Rafty in Ocelot you must make the following changes to your Startup.cs.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public virtual void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
services
|
||||
.AddOcelot(Configuration)
|
||||
.AddAdministration("/administration", "secret")
|
||||
.AddRafty();
|
||||
}
|
||||
|
||||
In addition to this you must add a file called peers.json to your main project and it will look as follows
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"Peers": [{
|
||||
"HostAndPort": "http://localhost:5000"
|
||||
},
|
||||
{
|
||||
"HostAndPort": "http://localhost:5002"
|
||||
},
|
||||
{
|
||||
"HostAndPort": "http://localhost:5003"
|
||||
},
|
||||
{
|
||||
"HostAndPort": "http://localhost:5004"
|
||||
},
|
||||
{
|
||||
"HostAndPort": "http://localhost:5001"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Each instance of Ocelot must have its address in the array so that they can communicate using Rafty.
|
||||
|
||||
Once you have made these configuration changes you must deploy and start each instance of Ocelot using the addresses in the peers.json file. The servers should then start communicating with each other! You can test if everything is working by posting a configuration update and checking it has replicated to all servers by getting their configuration.
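A minimal sketch of that check is below. It assumes you already have an administration bearer token (see the administration docs) and that each peer exposes its configuration at /administration/configuration; adjust the path if your admin area is mounted elsewhere.

.. code-block:: csharp

    using System;
    using System.Net.Http;
    using System.Net.Http.Headers;
    using System.Threading.Tasks;

    public static class ReplicationCheck
    {
        // fetch the configuration from every peer; once Rafty has replicated an update
        // the bodies should all be identical
        public static async Task CheckAsync(string[] peers, string adminToken)
        {
            using (var client = new HttpClient())
            {
                client.DefaultRequestHeaders.Authorization =
                    new AuthenticationHeaderValue("Bearer", adminToken);

                foreach (var peer in peers)
                {
                    var response = await client.GetAsync($"{peer}/administration/configuration");
                    var body = await response.Content.ReadAsStringAsync();
                    Console.WriteLine($"{peer}: {body}");
                }
            }
        }
    }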
|
||||
|
@ -1,60 +1,60 @@
|
||||
Request Id / Correlation Id
|
||||
===========================
|
||||
|
||||
Ocelot supports a client sending a request id in the form of a header. If set Ocelot will
|
||||
use the request id for logging as soon as it becomes available in the middleware pipeline.
|
||||
Ocelot will also forward the request id with the specified header to the downstream service.
|
||||
|
||||
You can still get the asp.net core request id in the logs if you set
|
||||
IncludeScopes true in your logging config.
|
||||
|
||||
In order to use the request id feature you have two options.
|
||||
|
||||
*Global*
|
||||
|
||||
In your configuration.json set the following in the GlobalConfiguration section. This will be used for all requests into Ocelot.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"GlobalConfiguration": {
|
||||
"RequestIdKey": "OcRequestId"
|
||||
}
|
||||
|
||||
I recommend using the GlobalConfiguration unless you really need it to be ReRoute specific.
|
||||
|
||||
*ReRoute*
|
||||
|
||||
If you want to override this for a specific ReRoute add the following to configuration.json for the specific ReRoute.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"RequestIdKey": "OcRequestId"
|
||||
|
||||
Once Ocelot has identified the incoming request's matching ReRoute object it will set the request id based on the ReRoute configuration.
|
||||
|
||||
This can lead to a small gotcha. If you set a GlobalConfiguration it is possible to get one request id until the ReRoute is identified and then another after that because the request id key can change. This is by design and is the best solution I can think of at the moment. In this case the OcelotLogger will show the request id and previous request id in the logs.
|
||||
|
||||
Below is an example of the logging when set at Debug level for a normal request.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
dbug: Ocelot.Errors.Middleware.ExceptionHandlerMiddleware[0]
|
||||
requestId: asdf, previousRequestId: no previous request id, message: ocelot pipeline started,
|
||||
dbug: Ocelot.DownstreamRouteFinder.Middleware.DownstreamRouteFinderMiddleware[0]
|
||||
requestId: asdf, previousRequestId: no previous request id, message: upstream url path is {upstreamUrlPath},
|
||||
dbug: Ocelot.DownstreamRouteFinder.Middleware.DownstreamRouteFinderMiddleware[0]
|
||||
requestId: asdf, previousRequestId: no previous request id, message: downstream template is {downstreamRoute.Data.ReRoute.DownstreamPath},
|
||||
dbug: Ocelot.RateLimit.Middleware.ClientRateLimitMiddleware[0]
|
||||
requestId: asdf, previousRequestId: no previous request id, message: EndpointRateLimiting is not enabled for Ocelot.Values.PathTemplate,
|
||||
dbug: Ocelot.Authorisation.Middleware.AuthorisationMiddleware[0]
|
||||
requestId: 1234, previousRequestId: asdf, message: /posts/{postId} route does not require user to be authorised,
|
||||
dbug: Ocelot.DownstreamUrlCreator.Middleware.DownstreamUrlCreatorMiddleware[0]
|
||||
requestId: 1234, previousRequestId: asdf, message: downstream url is {downstreamUrl.Data.Value},
|
||||
dbug: Ocelot.Request.Middleware.HttpRequestBuilderMiddleware[0]
|
||||
requestId: 1234, previousRequestId: asdf, message: setting upstream request,
|
||||
dbug: Ocelot.Requester.Middleware.HttpRequesterMiddleware[0]
|
||||
requestId: 1234, previousRequestId: asdf, message: setting http response message,
|
||||
dbug: Ocelot.Responder.Middleware.ResponderMiddleware[0]
|
||||
requestId: 1234, previousRequestId: asdf, message: no pipeline errors, setting and returning completed response,
|
||||
dbug: Ocelot.Errors.Middleware.ExceptionHandlerMiddleware[0]
|
||||
requestId: 1234, previousRequestId: asdf, message: ocelot pipeline finished,
|
||||
Request Id / Correlation Id
|
||||
===========================
|
||||
|
||||
Ocelot supports a client sending a request id in the form of a header. If set Ocelot will
|
||||
use the request id for logging as soon as it becomes available in the middleware pipeline.
|
||||
Ocelot will also forward the request id with the specified header to the downstream service.
|
||||
|
||||
You can still get the asp.net core request id in the logs if you set
|
||||
IncludeScopes true in your logging config.
|
||||
|
||||
In order to use the request id feature you have two options.
|
||||
|
||||
*Global*
|
||||
|
||||
In your configuration.json set the following in the GlobalConfiguration section. This will be used for all requests into Ocelot.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"GlobalConfiguration": {
|
||||
"RequestIdKey": "OcRequestId"
|
||||
}
|
||||
|
||||
I recommend using the GlobalConfiguration unless you really need it to be ReRoute specific.
|
||||
|
||||
*ReRoute*
|
||||
|
||||
If you want to override this for a specific ReRoute add the following to configuration.json for the specific ReRoute.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"RequestIdKey": "OcRequestId"
|
||||
|
||||
Once Ocelot has identified the incoming request's matching ReRoute object it will set the request id based on the ReRoute configuration.
|
||||
|
||||
This can lead to a small gotcha. If you set a GlobalConfiguration it is possible to get one request id until the ReRoute is identified and then another after that because the request id key can change. This is by design and is the best solution I can think of at the moment. In this case the OcelotLogger will show the request id and previous request id in the logs.
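For completeness, below is a minimal sketch of a client supplying its own request id. The header name must match whatever you configured as the RequestIdKey ("OcRequestId" in the examples above) and the URL is just a placeholder.

.. code-block:: csharp

    using System;
    using System.Net.Http;
    using System.Threading.Tasks;

    public static class RequestIdClient
    {
        public static async Task CallThroughOcelotAsync()
        {
            using (var client = new HttpClient())
            {
                // must match the RequestIdKey configured above
                client.DefaultRequestHeaders.Add("OcRequestId", Guid.NewGuid().ToString());

                // hypothetical Ocelot base url and upstream path
                var response = await client.GetAsync("http://localhost:5000/posts/1");
                Console.WriteLine(response.StatusCode);
            }
        }
    }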
|
||||
|
||||
Below is an example of the logging when set at Debug level for a normal request.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
dbug: Ocelot.Errors.Middleware.ExceptionHandlerMiddleware[0]
|
||||
requestId: asdf, previousRequestId: no previous request id, message: ocelot pipeline started,
|
||||
dbug: Ocelot.DownstreamRouteFinder.Middleware.DownstreamRouteFinderMiddleware[0]
|
||||
requestId: asdf, previousRequestId: no previous request id, message: upstream url path is {upstreamUrlPath},
|
||||
dbug: Ocelot.DownstreamRouteFinder.Middleware.DownstreamRouteFinderMiddleware[0]
|
||||
requestId: asdf, previousRequestId: no previous request id, message: downstream template is {downstreamRoute.Data.ReRoute.DownstreamPath},
|
||||
dbug: Ocelot.RateLimit.Middleware.ClientRateLimitMiddleware[0]
|
||||
requestId: asdf, previousRequestId: no previous request id, message: EndpointRateLimiting is not enabled for Ocelot.Values.PathTemplate,
|
||||
dbug: Ocelot.Authorisation.Middleware.AuthorisationMiddleware[0]
|
||||
requestId: 1234, previousRequestId: asdf, message: /posts/{postId} route does not require user to be authorised,
|
||||
dbug: Ocelot.DownstreamUrlCreator.Middleware.DownstreamUrlCreatorMiddleware[0]
|
||||
requestId: 1234, previousRequestId: asdf, message: downstream url is {downstreamUrl.Data.Value},
|
||||
dbug: Ocelot.Request.Middleware.HttpRequestBuilderMiddleware[0]
|
||||
requestId: 1234, previousRequestId: asdf, message: setting upstream request,
|
||||
dbug: Ocelot.Requester.Middleware.HttpRequesterMiddleware[0]
|
||||
requestId: 1234, previousRequestId: asdf, message: setting http response message,
|
||||
dbug: Ocelot.Responder.Middleware.ResponderMiddleware[0]
|
||||
requestId: 1234, previousRequestId: asdf, message: no pipeline errors, setting and returning completed response,
|
||||
dbug: Ocelot.Errors.Middleware.ExceptionHandlerMiddleware[0]
|
||||
requestId: 1234, previousRequestId: asdf, message: ocelot pipeline finished,
|
||||
|
@ -1,94 +1,112 @@
|
||||
Routing
|
||||
=======
|
||||
|
||||
Ocelot's primary functionality is to take incoming http requests and forward them on
|
||||
to a downstream service. At the moment this is in the form of another http request (in the future
|
||||
this could be any transport mechanism).
|
||||
|
||||
Ocelot describes the routing of one request to another as a ReRoute. In order to get
|
||||
anything working in Ocelot you need to set up a ReRoute in the configuration.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"ReRoutes": [
|
||||
]
|
||||
}
|
||||
|
||||
In order to set up a ReRoute you need to add one to the json array called ReRoutes like
|
||||
the following.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"DownstreamPathTemplate": "/api/posts/{postId}",
|
||||
"DownstreamScheme": "https",
|
||||
"DownstreamPort": 80,
|
||||
"DownstreamHost":"localhost",
|
||||
"UpstreamPathTemplate": "/posts/{postId}",
|
||||
"UpstreamHttpMethod": [ "Put", "Delete" ]
|
||||
}
|
||||
|
||||
The DownstreamPathTemplate, Scheme, Port and Host make the URL that this request will be forwarded to.
|
||||
The UpstreamPathTemplate is the URL that Ocelot will use to identify which
|
||||
DownstreamPathTemplate to use for a given request. Finally the UpstreamHttpMethod is used so
|
||||
Ocelot can distinguish between requests to the same URL and is obviously needed to work :)
|
||||
You can set a specific list of HTTP Methods or set an empty list to allow any of them. In Ocelot you can add placeholders for variables to your Templates in the form of {something}.
|
||||
The placeholder needs to be in both the DownstreamPathTemplate and UpstreamPathTemplate. If it is,
|
||||
Ocelot will attempt to replace the placeholder with the correct variable value from the
|
||||
Upstream URL when the request comes in.
|
||||
|
||||
You can also do a catch all type of ReRoute e.g.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"DownstreamPathTemplate": "/api/{everything}",
|
||||
"DownstreamScheme": "https",
|
||||
"DownstreamPort": 80,
|
||||
"DownstreamHost":"localhost",
|
||||
"UpstreamPathTemplate": "/{everything}",
|
||||
"UpstreamHttpMethod": [ "Get", "Post" ]
|
||||
}
|
||||
|
||||
This will forward any path + query string combinations to the downstream service after the path /api.
|
||||
|
||||
At the moment without any configuration Ocelot will default to all ReRoutes being case insensitive.
|
||||
In order to change this you can specify on a per ReRoute basis the following setting.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"ReRouteIsCaseSensitive": true
|
||||
|
||||
This means that when Ocelot tries to match the incoming upstream url with an upstream template the
|
||||
evaluation will be case sensitive. This setting defaults to false so my advice is to only set it if you want
|
||||
the ReRoute to be case sensitive!
|
||||
|
||||
Catch All
|
||||
^^^^^^^^^
|
||||
|
||||
Ocelot's routing also supports a catch all style of routing where the user can specify that they want to match all traffic. If you set up your config like below the request will be proxied straight through (it doesn't have to be url, any placeholder name will work).
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"DownstreamPathTemplate": "/{url}",
|
||||
"DownstreamScheme": "https",
|
||||
"DownstreamPort": 80,
|
||||
"DownstreamHost":"localhost",
|
||||
"UpstreamPathTemplate": "/{url}",
|
||||
"UpstreamHttpMethod": [ "Get" ]
|
||||
}
|
||||
|
||||
The catch all has a lower priority than any other ReRoute. If you also have the ReRoute below in your config then Ocelot would match it before the catch all.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"DownstreamPathTemplate": "/",
|
||||
"DownstreamScheme": "https",
|
||||
"DownstreamPort": 80,
|
||||
"DownstreamHost":"10.0.10.1",
|
||||
"UpstreamPathTemplate": "/",
|
||||
"UpstreamHttpMethod": [ "Get" ]
|
||||
}
|
||||
Routing
|
||||
=======
|
||||
|
||||
Ocelot's primary functionality is to take incoming http requests and forward them on
|
||||
to a downstream service. At the moment this is in the form of another http request (in the future
|
||||
this could be any transport mechanism).
|
||||
|
||||
Ocelot describes the routing of one request to another as a ReRoute. In order to get
|
||||
anything working in Ocelot you need to set up a ReRoute in the configuration.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"ReRoutes": [
|
||||
]
|
||||
}
|
||||
|
||||
In order to set up a ReRoute you need to add one to the json array called ReRoutes like
|
||||
the following.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"DownstreamPathTemplate": "/api/posts/{postId}",
|
||||
"DownstreamScheme": "https",
|
||||
"DownstreamHostAndPorts": [
|
||||
{
|
||||
"Host": "localhost",
|
||||
"Port": 80,
|
||||
}
|
||||
],
|
||||
"UpstreamPathTemplate": "/posts/{postId}",
|
||||
"UpstreamHttpMethod": [ "Put", "Delete" ]
|
||||
}
|
||||
|
||||
The DownstreamPathTemplate, Scheme and DownstreamHostAndPorts make the URL that this request will be forwarded to.
|
||||
|
||||
DownstreamHostAndPorts is an array that contains the host and port of any downstream services that you wish to forward requests to. Usually this will just contain one entry but sometimes you might want to load balance
|
||||
requests to your downstream services and Ocelot lets you add more than one entry and then select a load balancer.
|
||||
|
||||
The UpstreamPathTemplate is the URL that Ocelot will use to identify which DownstreamPathTemplate to use for a given request. Finally the UpstreamHttpMethod is used so
|
||||
Ocelot can distinguish between requests to the same URL and is obviously needed to work :)
|
||||
|
||||
You can set a specific list of HTTP Methods or set an empty list to allow any of them. In Ocelot you can add placeholders for variables to your Templates in the form of {something}.
|
||||
The placeholder needs to be in both the DownstreamPathTemplate and UpstreamPathTemplate. If it is, Ocelot will attempt to replace the placeholder with the correct variable value from the Upstream URL when the request comes in.
|
||||
|
||||
You can also do a catch all type of ReRoute e.g.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"DownstreamPathTemplate": "/api/{everything}",
|
||||
"DownstreamScheme": "https",
|
||||
"DownstreamHostAndPorts": [
|
||||
{
|
||||
"Host": "localhost",
|
||||
"Port": 80,
|
||||
}
|
||||
],
|
||||
"UpstreamPathTemplate": "/{everything}",
|
||||
"UpstreamHttpMethod": [ "Get", "Post" ]
|
||||
}
|
||||
|
||||
This will forward any path + query string combinations to the downstream service after the path /api.
|
||||
|
||||
At the moment without any configuration Ocelot will default to all ReRoutes being case insensitive.
|
||||
In order to change this you can specify on a per ReRoute basis the following setting.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"ReRouteIsCaseSensitive": true
|
||||
|
||||
This means that when Ocelot tries to match the incoming upstream url with an upstream template the
|
||||
evaluation will be case sensitive. This setting defaults to false so my advice is to only set it if you want
|
||||
the ReRoute to be case sensitive!
|
||||
|
||||
Catch All
|
||||
^^^^^^^^^
|
||||
|
||||
Ocelot's routing also supports a catch all style of routing where the user can specify that they want to match all traffic. If you set up your config like below the request will be proxied straight through (it doesn't have to be url, any placeholder name will work).
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"DownstreamPathTemplate": "/{url}",
|
||||
"DownstreamScheme": "https",
|
||||
"DownstreamHostAndPorts": [
|
||||
{
|
||||
"Host": "localhost",
|
||||
"Port": 80,
|
||||
}
|
||||
],
|
||||
"UpstreamPathTemplate": "/{url}",
|
||||
"UpstreamHttpMethod": [ "Get" ]
|
||||
}
|
||||
|
||||
The catch all has a lower priority than any other ReRoute. If you also have the ReRoute below in your config then Ocelot would match it before the catch all.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"DownstreamPathTemplate": "/",
|
||||
"DownstreamScheme": "https",
|
||||
"DownstreamHostAndPorts": [
|
||||
{
|
||||
"Host": "10.0.10.1",
|
||||
"Port": 80,
|
||||
}
|
||||
],
|
||||
"UpstreamPathTemplate": "/",
|
||||
"UpstreamHttpMethod": [ "Get" ]
|
||||
}
|
||||
|
@ -1,38 +1,38 @@
|
||||
Service Discovery
|
||||
=================
|
||||
|
||||
Ocelot allows you to specify a service discovery provider and will use this to find the host and port
|
||||
for the downstream service Ocelot is forwarding a request to. At the moment this is only supported in the
|
||||
GlobalConfiguration section which means the same service discovery provider will be used for all ReRoutes
|
||||
you specify a ServiceName for at ReRoute level.
|
||||
|
||||
At the moment the only supported service discovery provider is Consul. The following is required in the
|
||||
GlobalConfiguration. The Provider is required and if you do not specify a host and port the Consul default
|
||||
will be used.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"ServiceDiscoveryProvider": {
|
||||
"Host": "localhost",
|
||||
"Port": 9500
|
||||
}
|
||||
|
||||
In the future we can add a feature that allows ReRoute specific configuration.
|
||||
|
||||
In order to tell Ocelot a ReRoute is to use the service discovery provider for its host and port you must add the
|
||||
ServiceName, UseServiceDiscovery and load balancer you wish to use when making requests downstream. At the moment Ocelot has a RoundRobin
|
||||
and LeastConnection algorithm you can use. If no load balancer is specified Ocelot will not load balance requests.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"DownstreamPathTemplate": "/api/posts/{postId}",
|
||||
"DownstreamScheme": "https",
|
||||
"UpstreamPathTemplate": "/posts/{postId}",
|
||||
"UpstreamHttpMethod": [ "Put" ],
|
||||
"ServiceName": "product",
|
||||
"LoadBalancer": "LeastConnection",
|
||||
"UseServiceDiscovery": false
|
||||
}
|
||||
|
||||
Service Discovery
|
||||
=================
|
||||
|
||||
Ocelot allows you to specify a service discovery provider and will use this to find the host and port
|
||||
for the downstream service Ocelot is forwarding a request to. At the moment this is only supported in the
|
||||
GlobalConfiguration section which means the same service discovery provider will be used for all ReRoutes
|
||||
you specify a ServiceName for at ReRoute level.
|
||||
|
||||
At the moment the only supported service discovery provider is Consul. The following is required in the
|
||||
GlobalConfiguration. The Provider is required and if you do not specify a host and port the Consul default
|
||||
will be used.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
"ServiceDiscoveryProvider": {
|
||||
"Host": "localhost",
|
||||
"Port": 9500
|
||||
}
|
||||
|
||||
In the future we can add a feature that allows ReRoute specific configuration.
|
||||
|
||||
In order to tell Ocelot a ReRoute is to use the service discovery provider for its host and port you must add the
|
||||
ServiceName, UseServiceDiscovery and load balancer you wish to use when making requests downstream. At the moment Ocelot has a RoundRobin
|
||||
and LeastConnection algorithm you can use. If no load balancer is specified Ocelot will not load balance requests.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"DownstreamPathTemplate": "/api/posts/{postId}",
|
||||
"DownstreamScheme": "https",
|
||||
"UpstreamPathTemplate": "/posts/{postId}",
|
||||
"UpstreamHttpMethod": [ "Put" ],
|
||||
"ServiceName": "product",
|
||||
"LoadBalancer": "LeastConnection",
|
||||
"UseServiceDiscovery": true
|
||||
}
|
||||
|
||||
When this is set up Ocelot will look up the downstream host and port from the service discovery provider and load balance requests across any available services.
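How services get into Consul is outside Ocelot's scope, but for context the sketch below shows a downstream service registering itself using the Consul NuGet client so Ocelot can find it by the ServiceName above. The service id, host and port are made up for the example.

.. code-block:: csharp

    using System.Threading.Tasks;
    using Consul;

    public static class ConsulRegistration
    {
        public static async Task RegisterAsync()
        {
            // defaults to http://localhost:8500 - point it at your agent if it lives elsewhere
            using (var client = new ConsulClient())
            {
                await client.Agent.ServiceRegister(new AgentServiceRegistration
                {
                    ID = "product-1",      // unique instance id (made up)
                    Name = "product",      // must match the ReRoute's ServiceName
                    Address = "10.0.1.10", // made up host
                    Port = 5000            // made up port
                });
            }
        }
    }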
|
@ -1,47 +1,48 @@
|
||||
Welcome to Ocelot
|
||||
=================
|
||||
|
||||
Thanks for taking a look at the Ocelot documentation. Please use the left hand nav to get around. I would suggest taking a look at introduction first.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:hidden:
|
||||
:caption: Introduction
|
||||
|
||||
introduction/bigpicture
|
||||
introduction/gettingstarted
|
||||
introduction/contributing
|
||||
introduction/notsupported
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:hidden:
|
||||
:caption: Features
|
||||
|
||||
features/routing
|
||||
features/configuration
|
||||
features/servicediscovery
|
||||
features/authentication
|
||||
features/authorisation
|
||||
features/administration
|
||||
features/raft
|
||||
features/caching
|
||||
features/qualityofservice
|
||||
features/headerstransformation
|
||||
features/claimstransformation
|
||||
features/logging
|
||||
features/requestid
|
||||
features/middlewareinjection
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:hidden:
|
||||
:caption: Building Ocelot
|
||||
|
||||
building/overview
|
||||
building/building
|
||||
building/tests
|
||||
building/releaseprocess
|
||||
|
||||
|
||||
|
||||
Welcome to Ocelot
|
||||
=================
|
||||
|
||||
Thanks for taking a look at the Ocelot documentation. Please use the left hand nav to get around. I would suggest taking a look at introduction first.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:hidden:
|
||||
:caption: Introduction
|
||||
|
||||
introduction/bigpicture
|
||||
introduction/gettingstarted
|
||||
introduction/contributing
|
||||
introduction/notsupported
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:hidden:
|
||||
:caption: Features
|
||||
|
||||
features/routing
|
||||
features/configuration
|
||||
features/servicediscovery
|
||||
features/authentication
|
||||
features/authorisation
|
||||
features/administration
|
||||
features/raft
|
||||
features/caching
|
||||
features/qualityofservice
|
||||
features/headerstransformation
|
||||
features/claimstransformation
|
||||
features/logging
|
||||
features/requestid
|
||||
features/middlewareinjection
|
||||
features/loadbalancer
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:hidden:
|
||||
:caption: Building Ocelot
|
||||
|
||||
building/overview
|
||||
building/building
|
||||
building/tests
|
||||
building/releaseprocess
|
||||
|
||||
|
||||
|
||||
|
@ -1,38 +1,38 @@
|
||||
Big Picture
|
||||
===========
|
||||
|
||||
Ocelot is aimed at people using .NET running
|
||||
a micro services / service orientated architecture
|
||||
that need a unified point of entry into their system.
|
||||
|
||||
In particular I want easy integration with
|
||||
IdentityServer reference and bearer tokens.
|
||||
|
||||
Ocelot is a bunch of middlewares in a specific order.
|
||||
|
||||
Ocelot manipulates the HttpRequest object into a state specified by its configuration until
|
||||
it reaches a request builder middleware where it creates a HttpRequestMessage object which is
|
||||
used to make a request to a downstream service. The middleware that makes the request is
|
||||
the last thing in the Ocelot pipeline. It does not call the next middleware.
|
||||
The response from the downstream service is stored in a per request scoped repository
|
||||
and retrieved as the request goes back up the Ocelot pipeline. There is a piece of middleware
|
||||
that maps the HttpResponseMessage onto the HttpResponse object and that is returned to the client.
|
||||
That is basically it with a bunch of other features.
|
||||
|
||||
The following are configurations that you use when deploying Ocelot.
|
||||
|
||||
Basic Implementation
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
.. image:: ../images/OcelotBasic.jpg
|
||||
|
||||
With IdentityServer
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
.. image:: ../images/OcelotIndentityServer.jpg
|
||||
|
||||
Multiple Instances
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
.. image:: ../images/OcelotMultipleInstances.jpg
|
||||
|
||||
With Consul
|
||||
^^^^^^^^^^^
|
||||
.. image:: ../images/OcelotMultipleInstancesConsul.jpg
|
||||
Big Picture
|
||||
===========
|
||||
|
||||
Ocelot is aimed at people using .NET running
|
||||
a micro services / service orientated architecture
|
||||
that need a unified point of entry into their system.
|
||||
|
||||
In particular I want easy integration with
|
||||
IdentityServer reference and bearer tokens.
|
||||
|
||||
Ocelot is a bunch of middlewares in a specific order.
|
||||
|
||||
Ocelot manipulates the HttpRequest object into a state specified by its configuration until
|
||||
it reaches a request builder middleware where it creates a HttpRequestMessage object which is
|
||||
used to make a request to a downstream service. The middleware that makes the request is
|
||||
the last thing in the Ocelot pipeline. It does not call the next middleware.
|
||||
The response from the downstream service is stored in a per request scoped repository
|
||||
and retrieved as the request goes back up the Ocelot pipeline. There is a piece of middleware
|
||||
that maps the HttpResponseMessage onto the HttpResponse object and that is returned to the client.
|
||||
That is basically it with a bunch of other features.
|
||||
|
||||
The following are configurations that you use when deploying Ocelot.
|
||||
|
||||
Basic Implementation
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
.. image:: ../images/OcelotBasic.jpg
|
||||
|
||||
With IdentityServer
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
.. image:: ../images/OcelotIndentityServer.jpg
|
||||
|
||||
Multiple Instances
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
.. image:: ../images/OcelotMultipleInstances.jpg
|
||||
|
||||
With Consul
|
||||
^^^^^^^^^^^
|
||||
.. image:: ../images/OcelotMultipleInstancesConsul.jpg
|
||||
|
@ -1,5 +1,5 @@
|
||||
Contributing
|
||||
============
|
||||
|
||||
Pull requests, issues and commentary welcome! No special process, just create a request and get in
|
||||
Contributing
|
||||
============
|
||||
|
||||
Pull requests, issues and commentary welcome! No special process, just create a request and get in
|
||||
touch either via gitter or create an issue.
|
@ -1,172 +1,172 @@
|
||||
Getting Started
|
||||
===============
|
||||
|
||||
Ocelot is designed to work with .NET Core only and is currently
|
||||
built to netcoreapp2.0. `This <https://docs.microsoft.com/en-us/dotnet/articles/standard/library>`_ documentation may prove helpful when working out if Ocelot would be suitable for you.
|
||||
|
||||
.NET Core 2.0
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
**Install NuGet package**
|
||||
|
||||
Install Ocelot and its dependencies using NuGet. You will need to create a netcoreapp2.0 project and bring the package into it. Then follow the Startup below and :doc:`../features/configuration` sections
|
||||
to get up and running.
|
||||
|
||||
``Install-Package Ocelot``
|
||||
|
||||
All versions can be found `here <https://www.nuget.org/packages/Ocelot/>`_.
|
||||
|
||||
**Configuration**
|
||||
|
||||
The following is a very basic configuration.json. It won't do anything but should get Ocelot starting.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"ReRoutes": [],
|
||||
"GlobalConfiguration": {}
|
||||
}
|
||||
|
||||
**Program**
|
||||
|
||||
Then in your Program.cs you will want to have the following. This can be changed if you
|
||||
don't want to use the default url e.g. UseUrls(someUrls) and should work as long as you keep the WebHostBuilder registration.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public class Program
|
||||
{
|
||||
public static void Main(string[] args)
|
||||
{
|
||||
IWebHostBuilder builder = new WebHostBuilder();
|
||||
builder.ConfigureServices(s => {
|
||||
s.AddSingleton(builder);
|
||||
});
|
||||
builder.UseKestrel()
|
||||
.UseContentRoot(Directory.GetCurrentDirectory())
|
||||
.ConfigureAppConfiguration((hostingContext, config) =>
|
||||
{
|
||||
config.SetBasePath(hostingContext.HostingEnvironment.ContentRootPath);
|
||||
var env = hostingContext.HostingEnvironment;
|
||||
config.AddJsonFile("appsettings.json", optional: true, reloadOnChange: true)
|
||||
.AddJsonFile($"appsettings.{env.EnvironmentName}.json", optional: true, reloadOnChange: true);
|
||||
config.AddJsonFile("configuration.json");
|
||||
config.AddEnvironmentVariables();
|
||||
})
|
||||
.ConfigureLogging((hostingContext, logging) =>
|
||||
{
|
||||
logging.AddConfiguration(hostingContext.Configuration.GetSection("Logging"));
|
||||
logging.AddConsole();
|
||||
})
|
||||
.UseIISIntegration()
|
||||
.UseStartup<ManualTestStartup>();
|
||||
var host = builder.Build();
|
||||
host.Run();
|
||||
}
|
||||
}
|
||||
|
||||
Sadly we need to inject the IWebHostBuilder interface to get the application's scheme, url and port later. I cannot find a better way of doing this at the moment without setting this in a static or some kind of config.
|
||||
|
||||
**Startup**
|
||||
|
||||
An example startup using a json file for configuration can be seen below. This is the most basic startup and Ocelot has quite a few more options, detailed in the rest of these docs! If you get stuck a good place to look is at the ManualTests project in the source code.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public class Startup
|
||||
{
|
||||
public void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
services.AddOcelot();
|
||||
}
|
||||
|
||||
public void Configure(IApplicationBuilder app)
|
||||
{
|
||||
app.UseOcelot().Wait();
|
||||
}
|
||||
}
|
||||
|
||||
.NET Core 1.0
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
**Install NuGet package**
|
||||
|
||||
Install Ocelot and its dependencies using NuGet. You will need to create a netcoreapp1.0+ project and bring the package into it. Then follow the Startup below and :doc:`../features/configuration` sections
|
||||
to get up and running. Please note you will need to choose one of the Ocelot packages from the NuGet feed.
|
||||
|
||||
All versions can be found `here <https://www.nuget.org/packages/Ocelot/>`_.
|
||||
|
||||
**Configuration**
|
||||
|
||||
The following is a very basic configuration.json. It won't do anything but should get Ocelot starting.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"ReRoutes": [],
|
||||
"GlobalConfiguration": {}
|
||||
}
|
||||
|
||||
**Program**
|
||||
|
||||
Then in your Program.cs you will want to have the following. This can be changed if you
|
||||
don't want to use the default url e.g. UseUrls(someUrls) and should work as long as you keep the WebHostBuilder registration.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public class Program
|
||||
{
|
||||
public static void Main(string[] args)
|
||||
{
|
||||
IWebHostBuilder builder = new WebHostBuilder();
|
||||
|
||||
builder.ConfigureServices(s => {
|
||||
s.AddSingleton(builder);
|
||||
});
|
||||
|
||||
builder.UseKestrel()
|
||||
.UseContentRoot(Directory.GetCurrentDirectory())
|
||||
.UseStartup<Startup>();
|
||||
|
||||
var host = builder.Build();
|
||||
|
||||
host.Run();
|
||||
}
|
||||
}
|
||||
|
||||
Sadly we need to inject the IWebHostBuilder interface to get the application's scheme, url and port later. I cannot find a better way of doing this at the moment without setting this in a static or some kind of config.
|
||||
|
||||
**Startup**
|
||||
|
||||
An example startup using a json file for configuration can be seen below.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public class Startup
|
||||
{
|
||||
public Startup(IHostingEnvironment env)
|
||||
{
|
||||
var builder = new ConfigurationBuilder()
|
||||
.SetBasePath(env.ContentRootPath)
|
||||
.AddJsonFile("appsettings.json", optional: true, reloadOnChange: true)
|
||||
.AddJsonFile($"appsettings.{env.EnvironmentName}.json", optional: true)
|
||||
.AddJsonFile("configuration.json")
|
||||
.AddEnvironmentVariables();
|
||||
|
||||
Configuration = builder.Build();
|
||||
}
|
||||
|
||||
public IConfigurationRoot Configuration { get; }
|
||||
|
||||
public void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
services.AddOcelot(Configuration);
|
||||
}
|
||||
|
||||
public void Configure(IApplicationBuilder app)
|
||||
{
|
||||
app.UseOcelot().Wait();
|
||||
}
|
||||
}
|
||||
|
||||
Getting Started
|
||||
===============
|
||||
|
||||
Ocelot is designed to work with .NET Core only and is currently
|
||||
built to netcoreapp2.0. `This <https://docs.microsoft.com/en-us/dotnet/articles/standard/library>`_ documentation may prove helpful when working out if Ocelot would be suitable for you.
|
||||
|
||||
.NET Core 2.0
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
**Install NuGet package**
|
||||
|
||||
Install Ocelot and its dependencies using NuGet. You will need to create a netcoreapp2.0 project and bring the package into it. Then follow the Startup below and :doc:`../features/configuration` sections
|
||||
to get up and running.
|
||||
|
||||
``Install-Package Ocelot``
|
||||
|
||||
All versions can be found `here <https://www.nuget.org/packages/Ocelot/>`_.
|
||||
|
||||
**Configuration**
|
||||
|
||||
The following is a very basic configuration.json. It won't do anything but should get Ocelot starting.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"ReRoutes": [],
|
||||
"GlobalConfiguration": {}
|
||||
}
|
||||
|
||||
**Program**
|
||||
|
||||
Then in your Program.cs you will want to have the following. This can be changed if you
|
||||
don't want to use the default url e.g. UseUrls(someUrls) and should work as long as you keep the WebHostBuilder registration.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public class Program
|
||||
{
|
||||
public static void Main(string[] args)
|
||||
{
|
||||
IWebHostBuilder builder = new WebHostBuilder();
|
||||
builder.ConfigureServices(s => {
|
||||
s.AddSingleton(builder);
|
||||
});
|
||||
builder.UseKestrel()
|
||||
.UseContentRoot(Directory.GetCurrentDirectory())
|
||||
.ConfigureAppConfiguration((hostingContext, config) =>
|
||||
{
|
||||
config.SetBasePath(hostingContext.HostingEnvironment.ContentRootPath);
|
||||
var env = hostingContext.HostingEnvironment;
|
||||
config.AddJsonFile("appsettings.json", optional: true, reloadOnChange: true)
|
||||
.AddJsonFile($"appsettings.{env.EnvironmentName}.json", optional: true, reloadOnChange: true);
|
||||
config.AddJsonFile("configuration.json");
|
||||
config.AddEnvironmentVariables();
|
||||
})
|
||||
.ConfigureLogging((hostingContext, logging) =>
|
||||
{
|
||||
logging.AddConfiguration(hostingContext.Configuration.GetSection("Logging"));
|
||||
logging.AddConsole();
|
||||
})
|
||||
.UseIISIntegration()
|
||||
.UseStartup<ManualTestStartup>();
|
||||
var host = builder.Build();
|
||||
host.Run();
|
||||
}
|
||||
}
|
||||
|
||||
Sadly we need to inject the IWebHostBuilder interface to get the application's scheme, url and port later. I cannot find a better way of doing this at the moment without setting this in a static or some kind of config.
|
||||
|
||||
**Startup**
|
||||
|
||||
An example startup using a json file for configuration can be seen below. This is the most basic startup and Ocelot has quite a few more options, detailed in the rest of these docs! If you get stuck a good place to look is at the ManualTests project in the source code.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public class Startup
|
||||
{
|
||||
public void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
services.AddOcelot();
|
||||
}
|
||||
|
||||
public void Configure(IApplicationBuilder app)
|
||||
{
|
||||
app.UseOcelot().Wait();
|
||||
}
|
||||
}
|
||||
|
||||
.NET Core 1.0
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
**Install NuGet package**
|
||||
|
||||
Install Ocelot and its dependencies using NuGet. You will need to create a netcoreapp1.0+ project and bring the package into it. Then follow the Startup below and :doc:`../features/configuration` sections
|
||||
to get up and running. Please note you will need to choose one of the Ocelot packages from the NuGet feed.
|
||||
|
||||
All versions can be found `here <https://www.nuget.org/packages/Ocelot/>`_.
|
||||
|
||||
**Configuration**
|
||||
|
||||
The following is a very basic configuration.json. It won't do anything but should get Ocelot starting.
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"ReRoutes": [],
|
||||
"GlobalConfiguration": {}
|
||||
}
|
||||
|
||||
**Program**
|
||||
|
||||
Then in your Program.cs you will want to have the following. This can be changed if you
|
||||
don't want to use the default url e.g. UseUrls(someUrls) and should work as long as you keep the WebHostBuilder registration.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public class Program
|
||||
{
|
||||
public static void Main(string[] args)
|
||||
{
|
||||
IWebHostBuilder builder = new WebHostBuilder();
|
||||
|
||||
builder.ConfigureServices(s => {
|
||||
s.AddSingleton(builder);
|
||||
});
|
||||
|
||||
builder.UseKestrel()
|
||||
.UseContentRoot(Directory.GetCurrentDirectory())
|
||||
.UseStartup<Startup>();
|
||||
|
||||
var host = builder.Build();
|
||||
|
||||
host.Run();
|
||||
}
|
||||
}
|
||||
|
||||
Sadly we need to inject the IWebHostBuilder interface to get the application's scheme, url and port later. I cannot find a better way of doing this at the moment without setting this in a static or some kind of config.
|
||||
|
||||
**Startup**
|
||||
|
||||
An example startup using a json file for configuration can be seen below.
|
||||
|
||||
.. code-block:: csharp
|
||||
|
||||
public class Startup
|
||||
{
|
||||
public Startup(IHostingEnvironment env)
|
||||
{
|
||||
var builder = new ConfigurationBuilder()
|
||||
.SetBasePath(env.ContentRootPath)
|
||||
.AddJsonFile("appsettings.json", optional: true, reloadOnChange: true)
|
||||
.AddJsonFile($"appsettings.{env.EnvironmentName}.json", optional: true)
|
||||
.AddJsonFile("configuration.json")
|
||||
.AddEnvironmentVariables();
|
||||
|
||||
Configuration = builder.Build();
|
||||
}
|
||||
|
||||
public IConfigurationRoot Configuration { get; }
|
||||
|
||||
public void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
services.AddOcelot(Configuration);
|
||||
}
|
||||
|
||||
public void Configure(IApplicationBuilder app)
|
||||
{
|
||||
app.UseOcelot().Wait();
|
||||
}
|
||||
}
|
||||
|
||||
This is pretty much all you need to get going.
|
@ -1,8 +1,8 @@
|
||||
Not Supported
|
||||
=============
|
||||
|
||||
Ocelot does not support...
|
||||
|
||||
* Chunked Encoding - Ocelot will always get the body size and return Content-Length header. Sorry if this doesn't work for your use case!
|
||||
|
||||
Not Supported
|
||||
=============
|
||||
|
||||
Ocelot does not support...
|
||||
|
||||
* Chunked Encoding - Ocelot will always get the body size and return Content-Length header. Sorry if this doesn't work for your use case!
|
||||
|
||||
* Forwarding a host header - The host header that you send to Ocelot will not be forwarded to the downstream service. Obviously this would break everything :(
|
562
docs/make.bat
562
docs/make.bat
@ -1,281 +1,281 @@
|
||||
@ECHO OFF
|
||||
|
||||
REM Command file for Sphinx documentation
|
||||
|
||||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set BUILDDIR=_build
|
||||
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
|
||||
set I18NSPHINXOPTS=%SPHINXOPTS% .
|
||||
if NOT "%PAPER%" == "" (
|
||||
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
|
||||
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
|
||||
)
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
if "%1" == "help" (
|
||||
:help
|
||||
echo.Please use `make ^<target^>` where ^<target^> is one of
|
||||
echo. html to make standalone HTML files
|
||||
echo. dirhtml to make HTML files named index.html in directories
|
||||
echo. singlehtml to make a single large HTML file
|
||||
echo. pickle to make pickle files
|
||||
echo. json to make JSON files
|
||||
echo. htmlhelp to make HTML files and a HTML help project
|
||||
echo. qthelp to make HTML files and a qthelp project
|
||||
echo. devhelp to make HTML files and a Devhelp project
|
||||
echo. epub to make an epub
|
||||
echo. epub3 to make an epub3
|
||||
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
|
||||
echo. text to make text files
|
||||
echo. man to make manual pages
|
||||
echo. texinfo to make Texinfo files
|
||||
echo. gettext to make PO message catalogs
|
||||
echo. changes to make an overview over all changed/added/deprecated items
|
||||
echo. xml to make Docutils-native XML files
|
||||
echo. pseudoxml to make pseudoxml-XML files for display purposes
|
||||
echo. linkcheck to check all external links for integrity
|
||||
echo. doctest to run all doctests embedded in the documentation if enabled
|
||||
echo. coverage to run coverage check of the documentation if enabled
|
||||
echo. dummy to check syntax errors of document sources
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "clean" (
|
||||
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
|
||||
del /q /s %BUILDDIR%\*
|
||||
goto end
|
||||
)
|
||||
|
||||
|
||||
REM Check if sphinx-build is available and fallback to Python version if any
|
||||
%SPHINXBUILD% 1>NUL 2>NUL
|
||||
if errorlevel 9009 goto sphinx_python
|
||||
goto sphinx_ok
|
||||
|
||||
:sphinx_python
|
||||
|
||||
set SPHINXBUILD=python -m sphinx.__init__
|
||||
%SPHINXBUILD% 2> nul
|
||||
if errorlevel 9009 (
|
||||
echo.
|
||||
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||
echo.may add the Sphinx directory to PATH.
|
||||
echo.
|
||||
echo.If you don't have Sphinx installed, grab it from
|
||||
echo.http://sphinx-doc.org/
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
:sphinx_ok
|
||||
|
||||
|
||||
if "%1" == "html" (
|
||||
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "dirhtml" (
|
||||
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "singlehtml" (
|
||||
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "pickle" (
|
||||
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can process the pickle files.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "json" (
|
||||
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can process the JSON files.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "htmlhelp" (
|
||||
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can run HTML Help Workshop with the ^
|
||||
.hhp project file in %BUILDDIR%/htmlhelp.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "qthelp" (
|
||||
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can run "qcollectiongenerator" with the ^
|
||||
.qhcp project file in %BUILDDIR%/qthelp, like this:
|
||||
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Ocelot.qhcp
|
||||
echo.To view the help file:
|
||||
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Ocelot.ghc
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "devhelp" (
|
||||
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "epub" (
|
||||
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The epub file is in %BUILDDIR%/epub.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "epub3" (
|
||||
%SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The epub3 file is in %BUILDDIR%/epub3.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latex" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latexpdf" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
cd %BUILDDIR%/latex
|
||||
make all-pdf
|
||||
cd %~dp0
|
||||
echo.
|
||||
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latexpdfja" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
cd %BUILDDIR%/latex
|
||||
make all-pdf-ja
|
||||
cd %~dp0
|
||||
echo.
|
||||
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "text" (
|
||||
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The text files are in %BUILDDIR%/text.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "man" (
|
||||
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The manual pages are in %BUILDDIR%/man.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "texinfo" (
|
||||
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "gettext" (
|
||||
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "changes" (
|
||||
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.The overview file is in %BUILDDIR%/changes.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "linkcheck" (
|
||||
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Link check complete; look for any errors in the above output ^
|
||||
or in %BUILDDIR%/linkcheck/output.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "doctest" (
|
||||
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Testing of doctests in the sources finished, look at the ^
|
||||
results in %BUILDDIR%/doctest/output.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "coverage" (
|
||||
%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Testing of coverage in the sources finished, look at the ^
|
||||
results in %BUILDDIR%/coverage/python.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "xml" (
|
||||
%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The XML files are in %BUILDDIR%/xml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "pseudoxml" (
|
||||
%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "dummy" (
|
||||
%SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. Dummy builder generates no files.
|
||||
goto end
|
||||
)
|
||||
|
||||
:end
|
||||
@ECHO OFF
|
||||
|
||||
REM Command file for Sphinx documentation
|
||||
|
||||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set BUILDDIR=_build
|
||||
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
|
||||
set I18NSPHINXOPTS=%SPHINXOPTS% .
|
||||
if NOT "%PAPER%" == "" (
|
||||
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
|
||||
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
|
||||
)
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
if "%1" == "help" (
|
||||
:help
|
||||
echo.Please use `make ^<target^>` where ^<target^> is one of
|
||||
echo. html to make standalone HTML files
|
||||
echo. dirhtml to make HTML files named index.html in directories
|
||||
echo. singlehtml to make a single large HTML file
|
||||
echo. pickle to make pickle files
|
||||
echo. json to make JSON files
|
||||
echo. htmlhelp to make HTML files and a HTML help project
|
||||
echo. qthelp to make HTML files and a qthelp project
|
||||
echo. devhelp to make HTML files and a Devhelp project
|
||||
echo. epub to make an epub
|
||||
echo. epub3 to make an epub3
|
||||
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
|
||||
echo. text to make text files
|
||||
echo. man to make manual pages
|
||||
echo. texinfo to make Texinfo files
|
||||
echo. gettext to make PO message catalogs
|
||||
echo. changes to make an overview over all changed/added/deprecated items
|
||||
echo. xml to make Docutils-native XML files
|
||||
echo. pseudoxml to make pseudoxml-XML files for display purposes
|
||||
echo. linkcheck to check all external links for integrity
|
||||
echo. doctest to run all doctests embedded in the documentation if enabled
|
||||
echo. coverage to run coverage check of the documentation if enabled
|
||||
echo. dummy to check syntax errors of document sources
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "clean" (
|
||||
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
|
||||
del /q /s %BUILDDIR%\*
|
||||
goto end
|
||||
)
|
||||
|
||||
|
||||
REM Check if sphinx-build is available and fallback to Python version if any
|
||||
%SPHINXBUILD% 1>NUL 2>NUL
|
||||
if errorlevel 9009 goto sphinx_python
|
||||
goto sphinx_ok
|
||||
|
||||
:sphinx_python
|
||||
|
||||
set SPHINXBUILD=python -m sphinx.__init__
|
||||
%SPHINXBUILD% 2> nul
|
||||
if errorlevel 9009 (
|
||||
echo.
|
||||
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||
echo.may add the Sphinx directory to PATH.
|
||||
echo.
|
||||
echo.If you don't have Sphinx installed, grab it from
|
||||
echo.http://sphinx-doc.org/
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
:sphinx_ok
|
||||
|
||||
|
||||
if "%1" == "html" (
|
||||
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "dirhtml" (
|
||||
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "singlehtml" (
|
||||
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "pickle" (
|
||||
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can process the pickle files.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "json" (
|
||||
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can process the JSON files.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "htmlhelp" (
|
||||
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can run HTML Help Workshop with the ^
|
||||
.hhp project file in %BUILDDIR%/htmlhelp.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "qthelp" (
|
||||
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can run "qcollectiongenerator" with the ^
|
||||
.qhcp project file in %BUILDDIR%/qthelp, like this:
|
||||
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Ocelot.qhcp
|
||||
echo.To view the help file:
|
||||
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Ocelot.ghc
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "devhelp" (
|
||||
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "epub" (
|
||||
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The epub file is in %BUILDDIR%/epub.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "epub3" (
|
||||
%SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The epub3 file is in %BUILDDIR%/epub3.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latex" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latexpdf" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
cd %BUILDDIR%/latex
|
||||
make all-pdf
|
||||
cd %~dp0
|
||||
echo.
|
||||
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latexpdfja" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
cd %BUILDDIR%/latex
|
||||
make all-pdf-ja
|
||||
cd %~dp0
|
||||
echo.
|
||||
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "text" (
|
||||
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The text files are in %BUILDDIR%/text.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "man" (
|
||||
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The manual pages are in %BUILDDIR%/man.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "texinfo" (
|
||||
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "gettext" (
|
||||
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "changes" (
|
||||
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.The overview file is in %BUILDDIR%/changes.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "linkcheck" (
|
||||
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Link check complete; look for any errors in the above output ^
|
||||
or in %BUILDDIR%/linkcheck/output.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "doctest" (
|
||||
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Testing of doctests in the sources finished, look at the ^
|
||||
results in %BUILDDIR%/doctest/output.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "coverage" (
|
||||
%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Testing of coverage in the sources finished, look at the ^
|
||||
results in %BUILDDIR%/coverage/python.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "xml" (
|
||||
%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The XML files are in %BUILDDIR%/xml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "pseudoxml" (
|
||||
%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "dummy" (
|
||||
%SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. Dummy builder generates no files.
|
||||
goto end
|
||||
)
|
||||
|
||||
:end
|
||||
|
562
docs/make.sh
562
docs/make.sh
@ -1,281 +1,281 @@
|
||||
@ECHO OFF
|
||||
|
||||
REM Command file for Sphinx documentation
|
||||
|
||||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set BUILDDIR=_build
|
||||
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
|
||||
set I18NSPHINXOPTS=%SPHINXOPTS% .
|
||||
if NOT "%PAPER%" == "" (
|
||||
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
|
||||
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
|
||||
)
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
if "%1" == "help" (
|
||||
:help
|
||||
echo.Please use `make ^<target^>` where ^<target^> is one of
|
||||
echo. html to make standalone HTML files
|
||||
echo. dirhtml to make HTML files named index.html in directories
|
||||
echo. singlehtml to make a single large HTML file
|
||||
echo. pickle to make pickle files
|
||||
echo. json to make JSON files
|
||||
echo. htmlhelp to make HTML files and a HTML help project
|
||||
echo. qthelp to make HTML files and a qthelp project
|
||||
echo. devhelp to make HTML files and a Devhelp project
|
||||
echo. epub to make an epub
|
||||
echo. epub3 to make an epub3
|
||||
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
|
||||
echo. text to make text files
|
||||
echo. man to make manual pages
|
||||
echo. texinfo to make Texinfo files
|
||||
echo. gettext to make PO message catalogs
|
||||
echo. changes to make an overview over all changed/added/deprecated items
|
||||
echo. xml to make Docutils-native XML files
|
||||
echo. pseudoxml to make pseudoxml-XML files for display purposes
|
||||
echo. linkcheck to check all external links for integrity
|
||||
echo. doctest to run all doctests embedded in the documentation if enabled
|
||||
echo. coverage to run coverage check of the documentation if enabled
|
||||
echo. dummy to check syntax errors of document sources
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "clean" (
|
||||
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
|
||||
del /q /s %BUILDDIR%\*
|
||||
goto end
|
||||
)
|
||||
|
||||
|
||||
REM Check if sphinx-build is available and fallback to Python version if any
|
||||
%SPHINXBUILD% 1>NUL 2>NUL
|
||||
if errorlevel 9009 goto sphinx_python
|
||||
goto sphinx_ok
|
||||
|
||||
:sphinx_python
|
||||
|
||||
set SPHINXBUILD=python -m sphinx.__init__
|
||||
%SPHINXBUILD% 2> nul
|
||||
if errorlevel 9009 (
|
||||
echo.
|
||||
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||
echo.may add the Sphinx directory to PATH.
|
||||
echo.
|
||||
echo.If you don't have Sphinx installed, grab it from
|
||||
echo.http://sphinx-doc.org/
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
:sphinx_ok
|
||||
|
||||
|
||||
if "%1" == "html" (
|
||||
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "dirhtml" (
|
||||
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "singlehtml" (
|
||||
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "pickle" (
|
||||
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can process the pickle files.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "json" (
|
||||
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can process the JSON files.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "htmlhelp" (
|
||||
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can run HTML Help Workshop with the ^
|
||||
.hhp project file in %BUILDDIR%/htmlhelp.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "qthelp" (
|
||||
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can run "qcollectiongenerator" with the ^
|
||||
.qhcp project file in %BUILDDIR%/qthelp, like this:
|
||||
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Ocelot.qhcp
|
||||
echo.To view the help file:
|
||||
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Ocelot.ghc
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "devhelp" (
|
||||
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "epub" (
|
||||
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The epub file is in %BUILDDIR%/epub.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "epub3" (
|
||||
%SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The epub3 file is in %BUILDDIR%/epub3.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latex" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latexpdf" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
cd %BUILDDIR%/latex
|
||||
make all-pdf
|
||||
cd %~dp0
|
||||
echo.
|
||||
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latexpdfja" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
cd %BUILDDIR%/latex
|
||||
make all-pdf-ja
|
||||
cd %~dp0
|
||||
echo.
|
||||
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "text" (
|
||||
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The text files are in %BUILDDIR%/text.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "man" (
|
||||
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The manual pages are in %BUILDDIR%/man.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "texinfo" (
|
||||
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "gettext" (
|
||||
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "changes" (
|
||||
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.The overview file is in %BUILDDIR%/changes.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "linkcheck" (
|
||||
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Link check complete; look for any errors in the above output ^
|
||||
or in %BUILDDIR%/linkcheck/output.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "doctest" (
|
||||
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Testing of doctests in the sources finished, look at the ^
|
||||
results in %BUILDDIR%/doctest/output.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "coverage" (
|
||||
%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Testing of coverage in the sources finished, look at the ^
|
||||
results in %BUILDDIR%/coverage/python.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "xml" (
|
||||
%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The XML files are in %BUILDDIR%/xml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "pseudoxml" (
|
||||
%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "dummy" (
|
||||
%SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. Dummy builder generates no files.
|
||||
goto end
|
||||
)
|
||||
|
||||
:end
|
||||
@ECHO OFF
|
||||
|
||||
REM Command file for Sphinx documentation
|
||||
|
||||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set BUILDDIR=_build
|
||||
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
|
||||
set I18NSPHINXOPTS=%SPHINXOPTS% .
|
||||
if NOT "%PAPER%" == "" (
|
||||
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
|
||||
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
|
||||
)
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
if "%1" == "help" (
|
||||
:help
|
||||
echo.Please use `make ^<target^>` where ^<target^> is one of
|
||||
echo. html to make standalone HTML files
|
||||
echo. dirhtml to make HTML files named index.html in directories
|
||||
echo. singlehtml to make a single large HTML file
|
||||
echo. pickle to make pickle files
|
||||
echo. json to make JSON files
|
||||
echo. htmlhelp to make HTML files and a HTML help project
|
||||
echo. qthelp to make HTML files and a qthelp project
|
||||
echo. devhelp to make HTML files and a Devhelp project
|
||||
echo. epub to make an epub
|
||||
echo. epub3 to make an epub3
|
||||
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
|
||||
echo. text to make text files
|
||||
echo. man to make manual pages
|
||||
echo. texinfo to make Texinfo files
|
||||
echo. gettext to make PO message catalogs
|
||||
echo. changes to make an overview over all changed/added/deprecated items
|
||||
echo. xml to make Docutils-native XML files
|
||||
echo. pseudoxml to make pseudoxml-XML files for display purposes
|
||||
echo. linkcheck to check all external links for integrity
|
||||
echo. doctest to run all doctests embedded in the documentation if enabled
|
||||
echo. coverage to run coverage check of the documentation if enabled
|
||||
echo. dummy to check syntax errors of document sources
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "clean" (
|
||||
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
|
||||
del /q /s %BUILDDIR%\*
|
||||
goto end
|
||||
)
|
||||
|
||||
|
||||
REM Check if sphinx-build is available and fallback to Python version if any
|
||||
%SPHINXBUILD% 1>NUL 2>NUL
|
||||
if errorlevel 9009 goto sphinx_python
|
||||
goto sphinx_ok
|
||||
|
||||
:sphinx_python
|
||||
|
||||
set SPHINXBUILD=python -m sphinx.__init__
|
||||
%SPHINXBUILD% 2> nul
|
||||
if errorlevel 9009 (
|
||||
echo.
|
||||
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||
echo.may add the Sphinx directory to PATH.
|
||||
echo.
|
||||
echo.If you don't have Sphinx installed, grab it from
|
||||
echo.http://sphinx-doc.org/
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
:sphinx_ok
|
||||
|
||||
|
||||
if "%1" == "html" (
|
||||
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "dirhtml" (
|
||||
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "singlehtml" (
|
||||
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "pickle" (
|
||||
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can process the pickle files.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "json" (
|
||||
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can process the JSON files.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "htmlhelp" (
|
||||
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can run HTML Help Workshop with the ^
|
||||
.hhp project file in %BUILDDIR%/htmlhelp.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "qthelp" (
|
||||
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can run "qcollectiongenerator" with the ^
|
||||
.qhcp project file in %BUILDDIR%/qthelp, like this:
|
||||
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Ocelot.qhcp
|
||||
echo.To view the help file:
|
||||
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Ocelot.ghc
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "devhelp" (
|
||||
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "epub" (
|
||||
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The epub file is in %BUILDDIR%/epub.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "epub3" (
|
||||
%SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The epub3 file is in %BUILDDIR%/epub3.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latex" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latexpdf" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
cd %BUILDDIR%/latex
|
||||
make all-pdf
|
||||
cd %~dp0
|
||||
echo.
|
||||
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latexpdfja" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
cd %BUILDDIR%/latex
|
||||
make all-pdf-ja
|
||||
cd %~dp0
|
||||
echo.
|
||||
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "text" (
|
||||
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The text files are in %BUILDDIR%/text.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "man" (
|
||||
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The manual pages are in %BUILDDIR%/man.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "texinfo" (
|
||||
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "gettext" (
|
||||
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "changes" (
|
||||
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.The overview file is in %BUILDDIR%/changes.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "linkcheck" (
|
||||
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Link check complete; look for any errors in the above output ^
|
||||
or in %BUILDDIR%/linkcheck/output.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "doctest" (
|
||||
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Testing of doctests in the sources finished, look at the ^
|
||||
results in %BUILDDIR%/doctest/output.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "coverage" (
|
||||
%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Testing of coverage in the sources finished, look at the ^
|
||||
results in %BUILDDIR%/coverage/python.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "xml" (
|
||||
%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The XML files are in %BUILDDIR%/xml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "pseudoxml" (
|
||||
%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "dummy" (
|
||||
%SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. Dummy builder generates no files.
|
||||
goto end
|
||||
)
|
||||
|
||||
:end
|
||||
|
@ -1,14 +1,14 @@
|
||||
# Ocelot documentation
|
||||
|
||||
The folder contains the documentation for Ocelot.
|
||||
|
||||
We are using [Read the docs](https://readthedocs.org/) to host the documentation and the rendered version
|
||||
can be found [here](https://ocelot.readthedocs.io).
|
||||
|
||||
Doc pages are authored in ReStructuredText (RST) - you can find a primer [here](http://www.sphinx-doc.org/en/stable/rest.html).
|
||||
|
||||
You can find more information about RTD and Sphinx under the following links:
|
||||
|
||||
* [Read the Docs documentation](https://docs.readthedocs.io/en/latest/index.html)
|
||||
* [Sphinx](http://www.sphinx-doc.org/)
|
||||
* [Getting started Screencast](https://www.youtube.com/watch?feature=player_embedded&v=oJsUvBQyHBs)
|
||||
# Ocelot documentation
|
||||
|
||||
The folder contains the documentation for Ocelot.
|
||||
|
||||
We are using [Read the docs](https://readthedocs.org/) to host the documentation and the rendered version
|
||||
can be found [here](https://ocelot.readthedocs.io).
|
||||
|
||||
Doc pages are authored in ReStructuredText (RST) - you can find a primer [here](http://www.sphinx-doc.org/en/stable/rest.html).
|
||||
|
||||
You can find more information about RTD and Sphinx under the following links:
|
||||
|
||||
* [Read the Docs documentation](https://docs.readthedocs.io/en/latest/index.html)
|
||||
* [Sphinx](http://www.sphinx-doc.org/)
|
||||
* [Getting started Screencast](https://www.youtube.com/watch?feature=player_embedded&v=oJsUvBQyHBs)
|
||||
|
10
global.json
10
global.json
@ -1,6 +1,6 @@
|
||||
{
|
||||
"projects": [ "src", "test" ],
|
||||
"sdk": {
|
||||
"version": "2.0.2"
|
||||
}
|
||||
{
|
||||
"projects": [ "src", "test" ],
|
||||
"sdk": {
|
||||
"version": "2.0.2"
|
||||
}
|
||||
}
|
@ -1,314 +1,314 @@
|
||||
{
|
||||
"id": "4dbde9fe-89f5-be35-bb9f-d3b438e16375",
|
||||
"name": "Ocelot",
|
||||
"description": "",
|
||||
"order": [
|
||||
"a1c95935-ed18-d5dc-bcb8-a3db8ba1934f",
|
||||
"ea0ed57a-2cb9-8acc-47dd-006b8db2f1b2",
|
||||
"c4494401-3985-a5bf-71fb-6e4171384ac6",
|
||||
"09af8dda-a9cb-20d2-5ee3-0a3023773a1a",
|
||||
"e8825dc3-4137-99a7-0000-ef5786610dc3",
|
||||
"fddfc4fa-5114-69e3-4744-203ed71a526b",
|
||||
"c45d30d7-d9c4-fa05-8110-d6e769bb6ff9",
|
||||
"4684c2fa-f38c-c193-5f55-bf563a1978c6",
|
||||
"5f308240-79e3-cf74-7a6b-fe462f0d54f1",
|
||||
"178f16da-c61b-c881-1c33-9d64a56851a4",
|
||||
"26a08569-85f6-7f9a-726f-61be419c7a34"
|
||||
],
|
||||
"folders": [],
|
||||
"timestamp": 0,
|
||||
"owner": "212120",
|
||||
"public": false,
|
||||
"requests": [
|
||||
{
|
||||
"folder": null,
|
||||
"id": "09af8dda-a9cb-20d2-5ee3-0a3023773a1a",
|
||||
"name": "GET http://localhost:5000/comments?postId=1",
|
||||
"dataMode": "params",
|
||||
"data": null,
|
||||
"rawModeData": null,
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "GET",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/comments?postId=1",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375"
|
||||
},
|
||||
{
|
||||
"id": "178f16da-c61b-c881-1c33-9d64a56851a4",
|
||||
"headers": "Authorization: Bearer {{AccessToken}}\n",
|
||||
"url": "http://localhost:5000/administration/configuration",
|
||||
"preRequestScript": null,
|
||||
"pathVariables": {},
|
||||
"method": "GET",
|
||||
"data": null,
|
||||
"dataMode": "params",
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"time": 1508914722969,
|
||||
"name": "GET http://localhost:5000/admin/configuration",
|
||||
"description": "",
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375"
|
||||
},
|
||||
{
|
||||
"id": "26a08569-85f6-7f9a-726f-61be419c7a34",
|
||||
"headers": "",
|
||||
"url": "http://localhost:5000/administration/connect/token",
|
||||
"preRequestScript": null,
|
||||
"pathVariables": {},
|
||||
"method": "POST",
|
||||
"data": [
|
||||
{
|
||||
"key": "client_id",
|
||||
"value": "raft",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "client_secret",
|
||||
"value": "REALLYHARDPASSWORD",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "scope",
|
||||
"value": "admin raft ",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "username",
|
||||
"value": "admin",
|
||||
"type": "text",
|
||||
"enabled": false
|
||||
},
|
||||
{
|
||||
"key": "password",
|
||||
"value": "secret",
|
||||
"type": "text",
|
||||
"enabled": false
|
||||
},
|
||||
{
|
||||
"key": "grant_type",
|
||||
"value": "client_credentials",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
}
|
||||
],
|
||||
"dataMode": "params",
|
||||
"tests": "var jsonData = JSON.parse(responseBody);\npostman.setGlobalVariable(\"AccessToken\", jsonData.access_token);\npostman.setGlobalVariable(\"RefreshToken\", jsonData.refresh_token);",
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"time": 1513240031907,
|
||||
"name": "POST http://localhost:5000/admin/connect/token copy copy",
|
||||
"description": "",
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375"
|
||||
},
|
||||
{
|
||||
"folder": null,
|
||||
"id": "4684c2fa-f38c-c193-5f55-bf563a1978c6",
|
||||
"name": "DELETE http://localhost:5000/posts/1",
|
||||
"dataMode": "params",
|
||||
"data": null,
|
||||
"rawModeData": null,
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "DELETE",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/posts/1",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375"
|
||||
},
|
||||
{
|
||||
"id": "5f308240-79e3-cf74-7a6b-fe462f0d54f1",
|
||||
"headers": "Authorization: Bearer {{AccessToken}}\n",
|
||||
"url": "http://localhost:5000/administration/.well-known/openid-configuration",
|
||||
"preRequestScript": null,
|
||||
"pathVariables": {},
|
||||
"method": "GET",
|
||||
"data": null,
|
||||
"dataMode": "params",
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": "{}",
|
||||
"time": 1488038888813,
|
||||
"name": "GET http://localhost:5000/admin/.well-known/openid-configuration",
|
||||
"description": "",
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375",
|
||||
"folder": null,
|
||||
"rawModeData": null,
|
||||
"descriptionFormat": null,
|
||||
"queryParams": [],
|
||||
"headerData": [
|
||||
{
|
||||
"key": "Authorization",
|
||||
"value": "Bearer {{AccessToken}}",
|
||||
"description": "",
|
||||
"enabled": true
|
||||
}
|
||||
],
|
||||
"pathVariableData": []
|
||||
},
|
||||
{
|
||||
"id": "a1c95935-ed18-d5dc-bcb8-a3db8ba1934f",
|
||||
"folder": null,
|
||||
"name": "GET http://localhost:5000/posts",
|
||||
"dataMode": "params",
|
||||
"data": [
|
||||
{
|
||||
"key": "client_id",
|
||||
"value": "admin",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "client_secret",
|
||||
"value": "secret",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "scope",
|
||||
"value": "admin",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "username",
|
||||
"value": "admin",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "password",
|
||||
"value": "admin",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "grant_type",
|
||||
"value": "password",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
}
|
||||
],
|
||||
"rawModeData": null,
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "POST",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/admin/configuration",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": "{}",
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375"
|
||||
},
|
||||
{
|
||||
"folder": null,
|
||||
"id": "c4494401-3985-a5bf-71fb-6e4171384ac6",
|
||||
"name": "GET http://localhost:5000/posts/1/comments",
|
||||
"dataMode": "params",
|
||||
"data": null,
|
||||
"rawModeData": null,
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "GET",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/posts/1/comments",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375"
|
||||
},
|
||||
{
|
||||
"folder": null,
|
||||
"id": "c45d30d7-d9c4-fa05-8110-d6e769bb6ff9",
|
||||
"name": "PATCH http://localhost:5000/posts/1",
|
||||
"dataMode": "raw",
|
||||
"data": [],
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "PATCH",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/posts/1",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375",
|
||||
"rawModeData": "{\n \"title\": \"gfdgsgsdgsdfgsdfgdfg\",\n}"
|
||||
},
|
||||
{
|
||||
"folder": null,
|
||||
"id": "e8825dc3-4137-99a7-0000-ef5786610dc3",
|
||||
"name": "POST http://localhost:5000/posts/1",
|
||||
"dataMode": "raw",
|
||||
"data": [],
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "POST",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/posts",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375",
|
||||
"rawModeData": "{\n \"userId\": 1,\n \"title\": \"test\",\n \"body\": \"test\"\n}"
|
||||
},
|
||||
{
|
||||
"folder": null,
|
||||
"id": "ea0ed57a-2cb9-8acc-47dd-006b8db2f1b2",
|
||||
"name": "GET http://localhost:5000/posts/1",
|
||||
"dataMode": "params",
|
||||
"data": null,
|
||||
"rawModeData": null,
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "GET",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/posts/1",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375"
|
||||
},
|
||||
{
|
||||
"folder": null,
|
||||
"id": "fddfc4fa-5114-69e3-4744-203ed71a526b",
|
||||
"name": "PUT http://localhost:5000/posts/1",
|
||||
"dataMode": "raw",
|
||||
"data": [],
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "PUT",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/posts/1",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375",
|
||||
"rawModeData": "{\n \"userId\": 1,\n \"title\": \"test\",\n \"body\": \"test\"\n}"
|
||||
}
|
||||
]
|
||||
{
|
||||
"id": "4dbde9fe-89f5-be35-bb9f-d3b438e16375",
|
||||
"name": "Ocelot",
|
||||
"description": "",
|
||||
"order": [
|
||||
"a1c95935-ed18-d5dc-bcb8-a3db8ba1934f",
|
||||
"ea0ed57a-2cb9-8acc-47dd-006b8db2f1b2",
|
||||
"c4494401-3985-a5bf-71fb-6e4171384ac6",
|
||||
"09af8dda-a9cb-20d2-5ee3-0a3023773a1a",
|
||||
"e8825dc3-4137-99a7-0000-ef5786610dc3",
|
||||
"fddfc4fa-5114-69e3-4744-203ed71a526b",
|
||||
"c45d30d7-d9c4-fa05-8110-d6e769bb6ff9",
|
||||
"4684c2fa-f38c-c193-5f55-bf563a1978c6",
|
||||
"5f308240-79e3-cf74-7a6b-fe462f0d54f1",
|
||||
"178f16da-c61b-c881-1c33-9d64a56851a4",
|
||||
"26a08569-85f6-7f9a-726f-61be419c7a34"
|
||||
],
|
||||
"folders": [],
|
||||
"timestamp": 0,
|
||||
"owner": "212120",
|
||||
"public": false,
|
||||
"requests": [
|
||||
{
|
||||
"folder": null,
|
||||
"id": "09af8dda-a9cb-20d2-5ee3-0a3023773a1a",
|
||||
"name": "GET http://localhost:5000/comments?postId=1",
|
||||
"dataMode": "params",
|
||||
"data": null,
|
||||
"rawModeData": null,
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "GET",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/comments?postId=1",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375"
|
||||
},
|
||||
{
|
||||
"id": "178f16da-c61b-c881-1c33-9d64a56851a4",
|
||||
"headers": "Authorization: Bearer {{AccessToken}}\n",
|
||||
"url": "http://localhost:5000/administration/configuration",
|
||||
"preRequestScript": null,
|
||||
"pathVariables": {},
|
||||
"method": "GET",
|
||||
"data": null,
|
||||
"dataMode": "params",
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"time": 1508914722969,
|
||||
"name": "GET http://localhost:5000/admin/configuration",
|
||||
"description": "",
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375"
|
||||
},
|
||||
{
|
||||
"id": "26a08569-85f6-7f9a-726f-61be419c7a34",
|
||||
"headers": "",
|
||||
"url": "http://localhost:5000/administration/connect/token",
|
||||
"preRequestScript": null,
|
||||
"pathVariables": {},
|
||||
"method": "POST",
|
||||
"data": [
|
||||
{
|
||||
"key": "client_id",
|
||||
"value": "raft",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "client_secret",
|
||||
"value": "REALLYHARDPASSWORD",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "scope",
|
||||
"value": "admin raft ",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "username",
|
||||
"value": "admin",
|
||||
"type": "text",
|
||||
"enabled": false
|
||||
},
|
||||
{
|
||||
"key": "password",
|
||||
"value": "secret",
|
||||
"type": "text",
|
||||
"enabled": false
|
||||
},
|
||||
{
|
||||
"key": "grant_type",
|
||||
"value": "client_credentials",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
}
|
||||
],
|
||||
"dataMode": "params",
|
||||
"tests": "var jsonData = JSON.parse(responseBody);\npostman.setGlobalVariable(\"AccessToken\", jsonData.access_token);\npostman.setGlobalVariable(\"RefreshToken\", jsonData.refresh_token);",
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"time": 1513240031907,
|
||||
"name": "POST http://localhost:5000/admin/connect/token copy copy",
|
||||
"description": "",
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375"
|
||||
},
|
||||
{
|
||||
"folder": null,
|
||||
"id": "4684c2fa-f38c-c193-5f55-bf563a1978c6",
|
||||
"name": "DELETE http://localhost:5000/posts/1",
|
||||
"dataMode": "params",
|
||||
"data": null,
|
||||
"rawModeData": null,
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "DELETE",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/posts/1",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375"
|
||||
},
|
||||
{
|
||||
"id": "5f308240-79e3-cf74-7a6b-fe462f0d54f1",
|
||||
"headers": "Authorization: Bearer {{AccessToken}}\n",
|
||||
"url": "http://localhost:5000/administration/.well-known/openid-configuration",
|
||||
"preRequestScript": null,
|
||||
"pathVariables": {},
|
||||
"method": "GET",
|
||||
"data": null,
|
||||
"dataMode": "params",
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": "{}",
|
||||
"time": 1488038888813,
|
||||
"name": "GET http://localhost:5000/admin/.well-known/openid-configuration",
|
||||
"description": "",
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375",
|
||||
"folder": null,
|
||||
"rawModeData": null,
|
||||
"descriptionFormat": null,
|
||||
"queryParams": [],
|
||||
"headerData": [
|
||||
{
|
||||
"key": "Authorization",
|
||||
"value": "Bearer {{AccessToken}}",
|
||||
"description": "",
|
||||
"enabled": true
|
||||
}
|
||||
],
|
||||
"pathVariableData": []
|
||||
},
|
||||
{
|
||||
"id": "a1c95935-ed18-d5dc-bcb8-a3db8ba1934f",
|
||||
"folder": null,
|
||||
"name": "GET http://localhost:5000/posts",
|
||||
"dataMode": "params",
|
||||
"data": [
|
||||
{
|
||||
"key": "client_id",
|
||||
"value": "admin",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "client_secret",
|
||||
"value": "secret",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "scope",
|
||||
"value": "admin",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "username",
|
||||
"value": "admin",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "password",
|
||||
"value": "admin",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
},
|
||||
{
|
||||
"key": "grant_type",
|
||||
"value": "password",
|
||||
"type": "text",
|
||||
"enabled": true
|
||||
}
|
||||
],
|
||||
"rawModeData": null,
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "POST",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/admin/configuration",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": "{}",
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375"
|
||||
},
|
||||
{
|
||||
"folder": null,
|
||||
"id": "c4494401-3985-a5bf-71fb-6e4171384ac6",
|
||||
"name": "GET http://localhost:5000/posts/1/comments",
|
||||
"dataMode": "params",
|
||||
"data": null,
|
||||
"rawModeData": null,
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "GET",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/posts/1/comments",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375"
|
||||
},
|
||||
{
|
||||
"folder": null,
|
||||
"id": "c45d30d7-d9c4-fa05-8110-d6e769bb6ff9",
|
||||
"name": "PATCH http://localhost:5000/posts/1",
|
||||
"dataMode": "raw",
|
||||
"data": [],
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "PATCH",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/posts/1",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375",
|
||||
"rawModeData": "{\n \"title\": \"gfdgsgsdgsdfgsdfgdfg\",\n}"
|
||||
},
|
||||
{
|
||||
"folder": null,
|
||||
"id": "e8825dc3-4137-99a7-0000-ef5786610dc3",
|
||||
"name": "POST http://localhost:5000/posts/1",
|
||||
"dataMode": "raw",
|
||||
"data": [],
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "POST",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/posts",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375",
|
||||
"rawModeData": "{\n \"userId\": 1,\n \"title\": \"test\",\n \"body\": \"test\"\n}"
|
||||
},
|
||||
{
|
||||
"folder": null,
|
||||
"id": "ea0ed57a-2cb9-8acc-47dd-006b8db2f1b2",
|
||||
"name": "GET http://localhost:5000/posts/1",
|
||||
"dataMode": "params",
|
||||
"data": null,
|
||||
"rawModeData": null,
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "GET",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/posts/1",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375"
|
||||
},
|
||||
{
|
||||
"folder": null,
|
||||
"id": "fddfc4fa-5114-69e3-4744-203ed71a526b",
|
||||
"name": "PUT http://localhost:5000/posts/1",
|
||||
"dataMode": "raw",
|
||||
"data": [],
|
||||
"descriptionFormat": "html",
|
||||
"description": "",
|
||||
"headers": "",
|
||||
"method": "PUT",
|
||||
"pathVariables": {},
|
||||
"url": "http://localhost:5000/posts/1",
|
||||
"preRequestScript": null,
|
||||
"tests": null,
|
||||
"currentHelper": "normal",
|
||||
"helperAttributes": {},
|
||||
"collectionId": "4dbde9fe-89f5-be35-bb9f-d3b438e16375",
|
||||
"rawModeData": "{\n \"userId\": 1,\n \"title\": \"test\",\n \"body\": \"test\"\n}"
|
||||
}
|
||||
]
|
||||
}
|
@ -1,2 +1,2 @@
|
||||
./build.ps1 -target Release
|
||||
./build.ps1 -target Release
|
||||
exit $LASTEXITCODE
|
@ -1,2 +1,2 @@
|
||||
./build -target RunAcceptanceTests
|
||||
./build -target RunAcceptanceTests
|
||||
exit $LASTEXITCODE
|
@ -1,2 +1,2 @@
|
||||
./build.ps1 -target RunBenchmarkTests
|
||||
./build.ps1 -target RunBenchmarkTests
|
||||
exit $LASTEXITCODE
|
@ -1,2 +1,2 @@
./build.ps1 -target RunUnitTests
exit $LASTEXITCODE
@ -1,16 +1,16 @@
using Newtonsoft.Json;

namespace Ocelot.Authentication
{
    class BearerToken
    {
        [JsonProperty("access_token")]
        public string AccessToken { get; set; }

        [JsonProperty("expires_in")]
        public int ExpiresIn { get; set; }

        [JsonProperty("token_type")]
        public string TokenType { get; set; }
    }
}
@ -1,8 +1,8 @@
namespace Ocelot.Authentication.Handler
{
    public enum SupportedAuthenticationProviders
    {
        IdentityServer,
        Jwt
    }
}
@ -1,12 +1,12 @@
using Microsoft.AspNetCore.Builder;

namespace Ocelot.Authentication.Middleware
{
    public static class AuthenticationMiddlewareMiddlewareExtensions
    {
        public static IApplicationBuilder UseAuthenticationMiddleware(this IApplicationBuilder builder)
        {
            return builder.UseMiddleware<AuthenticationMiddleware>(builder);
        }
    }
}
@ -1,12 +1,12 @@
using Ocelot.Errors;

namespace Ocelot.Authorisation
{
    public class ClaimValueNotAuthorisedError : Error
    {
        public ClaimValueNotAuthorisedError(string message)
            : base(message, OcelotErrorCode.ClaimValueNotAuthorisedError)
        {
        }
    }
}
@ -1,53 +1,53 @@
using System.Collections.Generic;
using System.Security.Claims;
using Ocelot.Errors;
using Ocelot.Responses;

namespace Ocelot.Authorisation
{
    using Infrastructure.Claims.Parser;

    public class ClaimsAuthoriser : IClaimsAuthoriser
    {
        private readonly IClaimsParser _claimsParser;

        public ClaimsAuthoriser(IClaimsParser claimsParser)
        {
            _claimsParser = claimsParser;
        }

        public Response<bool> Authorise(ClaimsPrincipal claimsPrincipal, Dictionary<string, string> routeClaimsRequirement)
        {
            foreach (var required in routeClaimsRequirement)
            {
                var value = _claimsParser.GetValue(claimsPrincipal.Claims, required.Key, string.Empty, 0);

                if (value.IsError)
                {
                    return new ErrorResponse<bool>(value.Errors);
                }

                if (value.Data != null)
                {
                    var authorised = value.Data == required.Value;
                    if (!authorised)
                    {
                        return new ErrorResponse<bool>(new List<Error>
                        {
                            new ClaimValueNotAuthorisedError(
                                $"claim value: {value.Data} is not the same as required value: {required.Value} for type: {required.Key}")
                        });
                    }
                }
                else
                {
                    return new ErrorResponse<bool>(new List<Error>
                    {
                        new UserDoesNotHaveClaimError($"user does not have claim {required.Key}")
                    });
                }
            }
            return new OkResponse<bool>(true);
        }
    }
}
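
For context, the claim check above compares each required key/value pair against the user's claims via the claims parser. A minimal standalone sketch of the same comparison, using ClaimsPrincipal directly instead of IClaimsParser (the helper name and sample values are illustrative, not Ocelot's own):

using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Claims;

public static class ClaimsCheckSketch
{
    // Simplified stand-in for the parser-backed check: compares the first claim
    // of each required type directly against the required value.
    public static bool Satisfies(ClaimsPrincipal user, Dictionary<string, string> routeClaimsRequirement)
    {
        foreach (var required in routeClaimsRequirement)
        {
            var claim = user.Claims.FirstOrDefault(c => c.Type == required.Key);
            if (claim == null || claim.Value != required.Value)
            {
                return false;
            }
        }
        return true;
    }

    public static void Main()
    {
        var identity = new ClaimsIdentity(new[] { new Claim("UserType", "registered") }, "test");
        var user = new ClaimsPrincipal(identity);
        var requirement = new Dictionary<string, string> { ["UserType"] = "registered" };
        Console.WriteLine(Satisfies(user, requirement)); // True
    }
}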
@ -1,12 +1,12 @@
using System.Security.Claims;
using Ocelot.Responses;

namespace Ocelot.Authorisation
{
    using System.Collections.Generic;

    public interface IClaimsAuthoriser
    {
        Response<bool> Authorise(ClaimsPrincipal claimsPrincipal, Dictionary<string, string> routeClaimsRequirement);
    }
}
@ -1,12 +1,12 @@
using System.Security.Claims;
using Ocelot.Responses;

namespace Ocelot.Authorisation
{
    using System.Collections.Generic;

    public interface IScopesAuthoriser
    {
        Response<bool> Authorise(ClaimsPrincipal claimsPrincipal, List<string> routeAllowedScopes);
    }
}
@ -1,117 +1,117 @@
using Ocelot.Infrastructure.RequestData;
using Ocelot.Logging;
using Ocelot.Responses;
using Ocelot.Configuration;

namespace Ocelot.Authorisation.Middleware
{
    using System.Collections.Generic;
    using System.Threading.Tasks;
    using Errors;
    using Microsoft.AspNetCore.Http;
    using Ocelot.Middleware;

    public class AuthorisationMiddleware : OcelotMiddleware
    {
        private readonly RequestDelegate _next;
        private readonly IClaimsAuthoriser _claimsAuthoriser;
        private readonly IScopesAuthoriser _scopesAuthoriser;
        private readonly IOcelotLogger _logger;

        public AuthorisationMiddleware(RequestDelegate next,
            IRequestScopedDataRepository requestScopedDataRepository,
            IClaimsAuthoriser claimsAuthoriser,
            IScopesAuthoriser scopesAuthoriser,
            IOcelotLoggerFactory loggerFactory)
            : base(requestScopedDataRepository)
        {
            _next = next;
            _claimsAuthoriser = claimsAuthoriser;
            _scopesAuthoriser = scopesAuthoriser;
            _logger = loggerFactory.CreateLogger<AuthorisationMiddleware>();
        }

        public async Task Invoke(HttpContext context)
        {
            if (IsAuthenticatedRoute(DownstreamRoute.ReRoute))
            {
                _logger.LogDebug("route is authenticated scopes must be checked");

                var authorised = _scopesAuthoriser.Authorise(context.User, DownstreamRoute.ReRoute.AuthenticationOptions.AllowedScopes);

                if (authorised.IsError)
                {
                    _logger.LogDebug("error authorising user scopes");

                    SetPipelineError(authorised.Errors);
                    return;
                }

                if (IsAuthorised(authorised))
                {
                    _logger.LogDebug("user scopes is authorised calling next authorisation checks");
                }
                else
                {
                    _logger.LogDebug("user scopes is not authorised setting pipeline error");

                    SetPipelineError(new List<Error>
                    {
                        new UnauthorisedError(
                            $"{context.User.Identity.Name} unable to access {DownstreamRoute.ReRoute.UpstreamPathTemplate.Value}")
                    });
                }
            }

            if (IsAuthorisedRoute(DownstreamRoute.ReRoute))
            {
                _logger.LogDebug("route is authorised");

                var authorised = _claimsAuthoriser.Authorise(context.User, DownstreamRoute.ReRoute.RouteClaimsRequirement);

                if (authorised.IsError)
                {
                    _logger.LogDebug($"Error whilst authorising {context.User.Identity.Name} for {DownstreamRoute.ReRoute.UpstreamPathTemplate.Value}. Setting pipeline error");

                    SetPipelineError(authorised.Errors);
                    return;
                }

                if (IsAuthorised(authorised))
                {
                    _logger.LogDebug($"{context.User.Identity.Name} has successfully been authorised for {DownstreamRoute.ReRoute.UpstreamPathTemplate.Value}. Calling next middleware");
                    await _next.Invoke(context);
                }
                else
                {
                    _logger.LogDebug($"{context.User.Identity.Name} is not authorised to access {DownstreamRoute.ReRoute.UpstreamPathTemplate.Value}. Setting pipeline error");

                    SetPipelineError(new List<Error>
                    {
                        new UnauthorisedError($"{context.User.Identity.Name} is not authorised to access {DownstreamRoute.ReRoute.UpstreamPathTemplate.Value}")
                    });
                }
            }
            else
            {
                _logger.LogDebug($"{DownstreamRoute.ReRoute.DownstreamPathTemplate.Value} route does not require user to be authorised");
                await _next.Invoke(context);
            }
        }

        private static bool IsAuthorised(Response<bool> authorised)
        {
            return authorised.Data;
        }

        private static bool IsAuthenticatedRoute(ReRoute reRoute)
        {
            return reRoute.IsAuthenticated;
        }

        private static bool IsAuthorisedRoute(ReRoute reRoute)
        {
            return reRoute.IsAuthorised;
        }
    }
}
@ -1,12 +1,12 @@
namespace Ocelot.Authorisation.Middleware
{
    using Microsoft.AspNetCore.Builder;

    public static class AuthorisationMiddlewareMiddlewareExtensions
    {
        public static IApplicationBuilder UseAuthorisationMiddleware(this IApplicationBuilder builder)
        {
            return builder.UseMiddleware<AuthorisationMiddleware>();
        }
    }
}
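
These extension methods slot into an ASP.NET Core pipeline like any other middleware registration. A minimal sketch of the hook-up follows; the ordering shown is an assumption for illustration, not the order Ocelot itself wires up internally.

using Microsoft.AspNetCore.Builder;
using Ocelot.Authentication.Middleware;
using Ocelot.Authorisation.Middleware;
using Ocelot.Claims.Middleware;
using Ocelot.Cache.Middleware;

public class StartupSketch
{
    // Hypothetical Configure method composing the middleware extensions from this change.
    public void Configure(IApplicationBuilder app)
    {
        app.UseAuthenticationMiddleware();   // authenticate the caller
        app.UseAuthorisationMiddleware();    // check scopes and route claims
        app.UseClaimsBuilderMiddleware();    // project claims onto the downstream request
        app.UseOutputCacheMiddleware();      // serve cached downstream responses
    }
}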
@ -1,12 +1,12 @@
using Ocelot.Errors;

namespace Ocelot.Authorisation
{
    public class ScopeNotAuthorisedError : Error
    {
        public ScopeNotAuthorisedError(string message)
            : base(message, OcelotErrorCode.ScopeNotAuthorisedError)
        {
        }
    }
}
@ -1,51 +1,51 @@
using IdentityModel;
using Ocelot.Errors;
using Ocelot.Responses;
using System.Collections.Generic;
using System.Security.Claims;
using System.Linq;

namespace Ocelot.Authorisation
{
    using Infrastructure.Claims.Parser;

    public class ScopesAuthoriser : IScopesAuthoriser
    {
        private readonly IClaimsParser _claimsParser;

        public ScopesAuthoriser(IClaimsParser claimsParser)
        {
            _claimsParser = claimsParser;
        }

        public Response<bool> Authorise(ClaimsPrincipal claimsPrincipal, List<string> routeAllowedScopes)
        {
            if (routeAllowedScopes == null || routeAllowedScopes.Count == 0)
            {
                return new OkResponse<bool>(true);
            }

            var values = _claimsParser.GetValuesByClaimType(claimsPrincipal.Claims, JwtClaimTypes.Scope);

            if (values.IsError)
            {
                return new ErrorResponse<bool>(values.Errors);
            }

            var userScopes = values.Data;

            List<string> matchesScopes = routeAllowedScopes.Intersect(userScopes).ToList();

            if (matchesScopes == null || matchesScopes.Count == 0)
            {
                return new ErrorResponse<bool>(new List<Error>
                {
                    new ScopeNotAuthorisedError(
                        $"none of the user's scopes: '{string.Join(",", userScopes)}' match the allowed scopes: '{string.Join(",", routeAllowedScopes)}'")
                });
            }

            return new OkResponse<bool>(true);
        }
    }
}
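
The scope check above reduces to a set intersection between the user's "scope" claims and the route's allowed scopes: at least one match means authorised. A standalone sketch of that comparison with illustrative scope values:

using System;
using System.Collections.Generic;
using System.Linq;

public static class ScopeIntersectionSketch
{
    public static void Main()
    {
        var userScopes = new List<string> { "openid", "api.read" };
        var routeAllowedScopes = new List<string> { "api.read", "api.write" };

        // Authorised as soon as at least one user scope appears in the allowed list.
        var matches = routeAllowedScopes.Intersect(userScopes).ToList();
        Console.WriteLine(matches.Count > 0 ? "authorised" : "not authorised"); // authorised
    }
}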
@ -1,12 +1,12 @@
using Ocelot.Errors;

namespace Ocelot.Authorisation
{
    public class UnauthorisedError : Error
    {
        public UnauthorisedError(string message)
            : base(message, OcelotErrorCode.UnauthorizedError)
        {
        }
    }
}
@ -1,12 +1,12 @@
using Ocelot.Errors;

namespace Ocelot.Authorisation
{
    public class UserDoesNotHaveClaimError : Error
    {
        public UserDoesNotHaveClaimError(string message)
            : base(message, OcelotErrorCode.UserDoesNotHaveClaimError)
        {
        }
    }
}
@ -1,25 +1,25 @@
using System.Collections.Generic;
using System.Net;

namespace Ocelot.Cache
{
    public class CachedResponse
    {
        public CachedResponse(
            HttpStatusCode statusCode = HttpStatusCode.OK,
            Dictionary<string, IEnumerable<string>> headers = null,
            string body = null
            )
        {
            StatusCode = statusCode;
            Headers = headers ?? new Dictionary<string, IEnumerable<string>>();
            Body = body ?? "";
        }

        public HttpStatusCode StatusCode { get; private set; }

        public Dictionary<string, IEnumerable<string>> Headers { get; private set; }

        public string Body { get; private set; }
    }
}
@ -1,13 +1,13 @@
using System;
using System.Collections.Generic;

namespace Ocelot.Cache
{
    public interface IOcelotCache<T>
    {
        void Add(string key, T value, TimeSpan ttl, string region);
        void AddAndDelete(string key, T value, TimeSpan ttl, string region);
        T Get(string key, string region);
        void ClearRegion(string region);
    }
}
@ -1,9 +1,9 @@
using Ocelot.Configuration.File;

namespace Ocelot.Cache
{
    public interface IRegionCreator
    {
        string Create(FileReRoute reRoute);
    }
}
@ -1,116 +1,116 @@
using System;
using System.Linq;
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Ocelot.Infrastructure.RequestData;
using Ocelot.Logging;
using Ocelot.Middleware;
using System.IO;

namespace Ocelot.Cache.Middleware
{
    public class OutputCacheMiddleware : OcelotMiddleware
    {
        private readonly RequestDelegate _next;
        private readonly IOcelotLogger _logger;
        private readonly IOcelotCache<CachedResponse> _outputCache;
        private readonly IRegionCreator _regionCreator;

        public OutputCacheMiddleware(RequestDelegate next,
            IOcelotLoggerFactory loggerFactory,
            IRequestScopedDataRepository scopedDataRepository,
            IOcelotCache<CachedResponse> outputCache,
            IRegionCreator regionCreator)
            : base(scopedDataRepository)
        {
            _next = next;
            _outputCache = outputCache;
            _logger = loggerFactory.CreateLogger<OutputCacheMiddleware>();
            _regionCreator = regionCreator;
        }

        public async Task Invoke(HttpContext context)
        {
            if (!DownstreamRoute.ReRoute.IsCached)
            {
                await _next.Invoke(context);
                return;
            }

            var downstreamUrlKey = $"{DownstreamRequest.Method.Method}-{DownstreamRequest.RequestUri.OriginalString}";

            _logger.LogDebug("started checking cache for {downstreamUrlKey}", downstreamUrlKey);

            var cached = _outputCache.Get(downstreamUrlKey, DownstreamRoute.ReRoute.CacheOptions.Region);

            if (cached != null)
            {
                _logger.LogDebug("cache entry exists for {downstreamUrlKey}", downstreamUrlKey);

                var response = CreateHttpResponseMessage(cached);
                SetHttpResponseMessageThisRequest(response);

                _logger.LogDebug("finished returned cached response for {downstreamUrlKey}", downstreamUrlKey);

                return;
            }

            _logger.LogDebug("no response cached for {downstreamUrlKey}", downstreamUrlKey);

            await _next.Invoke(context);

            if (PipelineError)
            {
                _logger.LogDebug("there was a pipeline error for {downstreamUrlKey}", downstreamUrlKey);

                return;
            }

            cached = await CreateCachedResponse(HttpResponseMessage);

            _outputCache.Add(downstreamUrlKey, cached, TimeSpan.FromSeconds(DownstreamRoute.ReRoute.CacheOptions.TtlSeconds), DownstreamRoute.ReRoute.CacheOptions.Region);

            _logger.LogDebug("finished response added to cache for {downstreamUrlKey}", downstreamUrlKey);
        }

        internal HttpResponseMessage CreateHttpResponseMessage(CachedResponse cached)
        {
            if (cached == null)
            {
                return null;
            }

            var response = new HttpResponseMessage(cached.StatusCode);
            foreach (var header in cached.Headers)
            {
                response.Headers.Add(header.Key, header.Value);
            }
            var content = new MemoryStream(Convert.FromBase64String(cached.Body));
            response.Content = new StreamContent(content);

            return response;
        }

        internal async Task<CachedResponse> CreateCachedResponse(HttpResponseMessage response)
        {
            if (response == null)
            {
                return null;
            }

            var statusCode = response.StatusCode;
            var headers = response.Headers.ToDictionary(v => v.Key, v => v.Value);
            string body = null;

            if (response.Content != null)
            {
                var content = await response.Content.ReadAsByteArrayAsync();
                body = Convert.ToBase64String(content);
            }

            var cached = new CachedResponse(statusCode, headers, body);
            return cached;
        }
    }
}
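
The middleware stores the downstream body as a base64 string inside CachedResponse and later rebuilds an HttpResponseMessage from it. A standalone sketch of that round trip, using only the BCL types the middleware itself uses (the body value is illustrative):

using System;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;

public static class CachedBodyRoundTripSketch
{
    public static async Task Main()
    {
        // Serialise a body the way CreateCachedResponse does.
        var originalBytes = Encoding.UTF8.GetBytes("{\"id\":1}");
        string cachedBody = Convert.ToBase64String(originalBytes);

        // Rebuild the response the way CreateHttpResponseMessage does.
        var response = new HttpResponseMessage(HttpStatusCode.OK)
        {
            Content = new StreamContent(new MemoryStream(Convert.FromBase64String(cachedBody)))
        };

        Console.WriteLine(await response.Content.ReadAsStringAsync()); // {"id":1}
    }
}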
@ -1,12 +1,12 @@
using Microsoft.AspNetCore.Builder;

namespace Ocelot.Cache.Middleware
{
    public static class OutputCacheMiddlewareExtensions
    {
        public static IApplicationBuilder UseOutputCacheMiddleware(this IApplicationBuilder builder)
        {
            return builder.UseMiddleware<OutputCacheMiddleware>();
        }
    }
}
@ -1,44 +1,44 @@
using System;
using System.Collections.Generic;
using System.Linq;
using CacheManager.Core;

namespace Ocelot.Cache
{
    public class OcelotCacheManagerCache<T> : IOcelotCache<T>
    {
        private readonly ICacheManager<T> _cacheManager;

        public OcelotCacheManagerCache(ICacheManager<T> cacheManager)
        {
            _cacheManager = cacheManager;
        }

        public void Add(string key, T value, TimeSpan ttl, string region)
        {
            _cacheManager.Add(new CacheItem<T>(key, region, value, ExpirationMode.Absolute, ttl));
        }

        public void AddAndDelete(string key, T value, TimeSpan ttl, string region)
        {
            var exists = _cacheManager.Get(key);

            if (exists != null)
            {
                _cacheManager.Remove(key);
            }

            Add(key, value, ttl, region);
        }

        public T Get(string key, string region)
        {
            return _cacheManager.Get<T>(key, region);
        }

        public void ClearRegion(string region)
        {
            _cacheManager.ClearRegion(region);
        }
    }
}
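
OcelotCacheManagerCache wraps a CacheManager.Core ICacheManager. A minimal sketch of wiring one up with an in-memory dictionary handle; the configuration shown is an assumption for illustration, not Ocelot's own registration.

using System;
using CacheManager.Core;

public static class CacheManagerSketch
{
    public static void Main()
    {
        // Build an in-memory cache backend (assumed CacheManager configuration).
        ICacheManager<string> backend = CacheFactory.Build<string>(settings =>
            settings.WithDictionaryHandle());

        var cache = new Ocelot.Cache.OcelotCacheManagerCache<string>(backend);

        cache.Add("GET-http://localhost:5000/posts/1", "cached body", TimeSpan.FromSeconds(30), "region1");
        Console.WriteLine(cache.Get("GET-http://localhost:5000/posts/1", "region1")); // cached body
    }
}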
@ -1,29 +1,29 @@
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Ocelot.Cache;
using Ocelot.Configuration.Provider;

namespace Ocelot.Cache
{
    [Authorize]
    [Route("outputcache")]
    public class OutputCacheController : Controller
    {
        private IOcelotCache<CachedResponse> _cache;

        public OutputCacheController(IOcelotCache<CachedResponse> cache)
        {
            _cache = cache;
        }

        [HttpDelete]
        [Route("{region}")]
        public IActionResult Delete(string region)
        {
            _cache.ClearRegion(region);
            return new NoContentResult();
        }
    }
}
@ -1,24 +1,24 @@
using System.Linq;
using Ocelot.Configuration;
using Ocelot.Configuration.File;

namespace Ocelot.Cache
{

    public class RegionCreator : IRegionCreator
    {
        public string Create(FileReRoute reRoute)
        {
            if (!string.IsNullOrEmpty(reRoute?.FileCacheOptions?.Region))
            {
                return reRoute?.FileCacheOptions?.Region;
            }

            var methods = string.Join("", reRoute.UpstreamHttpMethod.Select(m => m));

            var region = $"{methods}{reRoute.UpstreamPathTemplate.Replace("/", "")}";

            return region;
        }
    }
}
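
When no explicit region is configured, the region name is derived from the route's upstream HTTP methods plus its upstream path template with the slashes stripped. The same string manipulation on plain values, with a hypothetical route for illustration:

using System;
using System.Collections.Generic;
using System.Linq;

public static class RegionNameSketch
{
    public static void Main()
    {
        var upstreamHttpMethod = new List<string> { "Get", "Post" };
        var upstreamPathTemplate = "/api/posts/{postId}";

        // Concatenate the methods, then append the path template without slashes.
        var methods = string.Join("", upstreamHttpMethod.Select(m => m));
        var region = $"{methods}{upstreamPathTemplate.Replace("/", "")}";

        Console.WriteLine(region); // GetPostapiposts{postId}
    }
}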
@ -1,13 +1,13 @@
using System.Collections.Generic;

namespace Ocelot.Cache
{
    public class Regions
    {
        public Regions(List<string> value)
        {
            Value = value;
        }

        public List<string> Value { get; private set; }
    }
}
@ -1,46 +1,46 @@
using System.Collections.Generic;
using System.Linq;
using System.Security.Claims;
using Microsoft.AspNetCore.Http;
using Ocelot.Configuration;
using Ocelot.Infrastructure.Claims.Parser;
using Ocelot.Responses;

namespace Ocelot.Claims
{
    public class AddClaimsToRequest : IAddClaimsToRequest
    {
        private readonly IClaimsParser _claimsParser;

        public AddClaimsToRequest(IClaimsParser claimsParser)
        {
            _claimsParser = claimsParser;
        }

        public Response SetClaimsOnContext(List<ClaimToThing> claimsToThings, HttpContext context)
        {
            foreach (var config in claimsToThings)
            {
                var value = _claimsParser.GetValue(context.User.Claims, config.NewKey, config.Delimiter, config.Index);

                if (value.IsError)
                {
                    return new ErrorResponse(value.Errors);
                }

                var exists = context.User.Claims.FirstOrDefault(x => x.Type == config.ExistingKey);

                var identity = context.User.Identity as ClaimsIdentity;

                if (exists != null)
                {
                    identity?.RemoveClaim(exists);
                }

                identity?.AddClaim(new System.Security.Claims.Claim(config.ExistingKey, value.Data));
            }

            return new OkResponse();
        }
    }
}
@ -1,13 +1,13 @@
using System.Collections.Generic;
using Microsoft.AspNetCore.Http;
using Ocelot.Configuration;
using Ocelot.Responses;

namespace Ocelot.Claims
{
    public interface IAddClaimsToRequest
    {
        Response SetClaimsOnContext(List<ClaimToThing> claimsToThings,
            HttpContext context);
    }
}
@ -1,46 +1,46 @@
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Ocelot.Infrastructure.RequestData;
using Ocelot.Logging;
using Ocelot.Middleware;

namespace Ocelot.Claims.Middleware
{
    public class ClaimsBuilderMiddleware : OcelotMiddleware
    {
        private readonly RequestDelegate _next;
        private readonly IAddClaimsToRequest _addClaimsToRequest;
        private readonly IOcelotLogger _logger;

        public ClaimsBuilderMiddleware(RequestDelegate next,
            IRequestScopedDataRepository requestScopedDataRepository,
            IOcelotLoggerFactory loggerFactory,
            IAddClaimsToRequest addClaimsToRequest)
            : base(requestScopedDataRepository)
        {
            _next = next;
            _addClaimsToRequest = addClaimsToRequest;
            _logger = loggerFactory.CreateLogger<ClaimsBuilderMiddleware>();
        }

        public async Task Invoke(HttpContext context)
        {
            if (DownstreamRoute.ReRoute.ClaimsToClaims.Any())
            {
                _logger.LogDebug("this route has instructions to convert claims to other claims");

                var result = _addClaimsToRequest.SetClaimsOnContext(DownstreamRoute.ReRoute.ClaimsToClaims, context);

                if (result.IsError)
                {
                    _logger.LogDebug("error converting claims to other claims, setting pipeline error");

                    SetPipelineError(result.Errors);
                    return;
                }
            }
            await _next.Invoke(context);
        }
    }
}
@ -1,12 +1,12 @@
using Microsoft.AspNetCore.Builder;

namespace Ocelot.Claims.Middleware
{
    public static class ClaimsBuilderMiddlewareExtensions
    {
        public static IApplicationBuilder UseClaimsBuilderMiddleware(this IApplicationBuilder builder)
        {
            return builder.UseMiddleware<ClaimsBuilderMiddleware>();
        }
    }
}
@ -1,22 +1,22 @@
using System;
using Microsoft.AspNetCore.Cryptography.KeyDerivation;

namespace Ocelot.Configuration.Authentication
{
    public class HashMatcher : IHashMatcher
    {
        public bool Match(string password, string salt, string hash)
        {
            byte[] s = Convert.FromBase64String(salt);

            string hashed = Convert.ToBase64String(KeyDerivation.Pbkdf2(
                password: password,
                salt: s,
                prf: KeyDerivationPrf.HMACSHA256,
                iterationCount: 10000,
                numBytesRequested: 256 / 8));

            return hashed == hash;
        }
    }
}
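
HashMatcher only verifies; it expects the stored salt and hash to have been produced with the same PBKDF2 parameters. A sketch of generating a compatible salt/hash pair (the password value is illustrative):

using System;
using System.Security.Cryptography;
using Microsoft.AspNetCore.Cryptography.KeyDerivation;

public static class HashGenerationSketch
{
    public static void Main()
    {
        var password = "secret";

        // Random 128-bit salt, stored alongside the hash as base64.
        byte[] saltBytes = new byte[128 / 8];
        using (var rng = RandomNumberGenerator.Create())
        {
            rng.GetBytes(saltBytes);
        }
        string salt = Convert.ToBase64String(saltBytes);

        // Same derivation parameters as HashMatcher.Match.
        string hash = Convert.ToBase64String(KeyDerivation.Pbkdf2(
            password: password,
            salt: saltBytes,
            prf: KeyDerivationPrf.HMACSHA256,
            iterationCount: 10000,
            numBytesRequested: 256 / 8));

        var matcher = new Ocelot.Configuration.Authentication.HashMatcher();
        Console.WriteLine(matcher.Match(password, salt, hash)); // True
    }
}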
@ -1,7 +1,7 @@
namespace Ocelot.Configuration.Authentication
{
    public interface IHashMatcher
    {
        bool Match(string password, string salt, string hash);
    }
}
@ -1,16 +1,16 @@
using System.Collections.Generic;

namespace Ocelot.Configuration
{
    public class AuthenticationOptions
    {
        public AuthenticationOptions(List<string> allowedScopes, string authenticationProviderKey)
        {
            AllowedScopes = allowedScopes;
            AuthenticationProviderKey = authenticationProviderKey;
        }

        public List<string> AllowedScopes { get; private set; }
        public string AuthenticationProviderKey { get; private set; }
    }
}
@ -1,27 +1,27 @@
using System.Collections.Generic;

namespace Ocelot.Configuration.Builder
{
    public class AuthenticationOptionsBuilder
    {
        private List<string> _allowedScopes = new List<string>();
        private string _authenticationProviderKey;

        public AuthenticationOptionsBuilder WithAllowedScopes(List<string> allowedScopes)
        {
            _allowedScopes = allowedScopes;
            return this;
        }

        public AuthenticationOptionsBuilder WithAuthenticationProviderKey(string authenticationProviderKey)
        {
            _authenticationProviderKey = authenticationProviderKey;
            return this;
        }

        public AuthenticationOptions Build()
        {
            return new AuthenticationOptions(_allowedScopes, _authenticationProviderKey);
        }
    }
}
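
A quick sketch of the fluent usage this builder is designed for; the scope and provider key values are illustrative.

using System;
using System.Collections.Generic;
using Ocelot.Configuration.Builder;

public static class AuthenticationOptionsBuilderSketch
{
    public static void Main()
    {
        var options = new AuthenticationOptionsBuilder()
            .WithAllowedScopes(new List<string> { "api.read" })
            .WithAuthenticationProviderKey("TestKey")
            .Build();

        Console.WriteLine(options.AuthenticationProviderKey); // TestKey
    }
}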
@ -1,34 +1,34 @@
namespace Ocelot.Configuration.Builder
{
    public class QoSOptionsBuilder
    {
        private int _exceptionsAllowedBeforeBreaking;

        private int _durationOfBreak;

        private int _timeoutValue;

        public QoSOptionsBuilder WithExceptionsAllowedBeforeBreaking(int exceptionsAllowedBeforeBreaking)
        {
            _exceptionsAllowedBeforeBreaking = exceptionsAllowedBeforeBreaking;
            return this;
        }

        public QoSOptionsBuilder WithDurationOfBreak(int durationOfBreak)
        {
            _durationOfBreak = durationOfBreak;
            return this;
        }

        public QoSOptionsBuilder WithTimeoutValue(int timeoutValue)
        {
            _timeoutValue = timeoutValue;
            return this;
        }

        public QoSOptions Build()
        {
            return new QoSOptions(_exceptionsAllowedBeforeBreaking, _durationOfBreak, _timeoutValue);
        }
    }
}
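
The same fluent pattern applies to the QoS builder. The values below are illustrative, and treating duration and timeout as milliseconds is an assumption for the example.

using Ocelot.Configuration.Builder;

public static class QoSOptionsBuilderSketch
{
    public static void Main()
    {
        // Trip the circuit after 3 exceptions, break and time out after 5000 (assumed milliseconds).
        var qosOptions = new QoSOptionsBuilder()
            .WithExceptionsAllowedBeforeBreaking(3)
            .WithDurationOfBreak(5000)
            .WithTimeoutValue(5000)
            .Build();
    }
}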
@ -1,71 +1,71 @@
using System.Collections.Generic;

namespace Ocelot.Configuration.Builder
{
    public class RateLimitOptionsBuilder
    {
        private bool _enableRateLimiting;
        private string _clientIdHeader;
        private List<string> _clientWhitelist;
        private bool _disableRateLimitHeaders;
        private string _quotaExceededMessage;
        private string _rateLimitCounterPrefix;
        private RateLimitRule _rateLimitRule;
        private int _httpStatusCode;

        public RateLimitOptionsBuilder WithEnableRateLimiting(bool enableRateLimiting)
        {
            _enableRateLimiting = enableRateLimiting;
            return this;
        }

        public RateLimitOptionsBuilder WithClientIdHeader(string clientIdheader)
        {
            _clientIdHeader = clientIdheader;
            return this;
        }

        public RateLimitOptionsBuilder WithClientWhiteList(List<string> clientWhitelist)
        {
            _clientWhitelist = clientWhitelist;
            return this;
        }

        public RateLimitOptionsBuilder WithDisableRateLimitHeaders(bool disableRateLimitHeaders)
        {
            _disableRateLimitHeaders = disableRateLimitHeaders;
            return this;
        }

        public RateLimitOptionsBuilder WithQuotaExceededMessage(string quotaExceededMessage)
        {
            _quotaExceededMessage = quotaExceededMessage;
            return this;
        }

        public RateLimitOptionsBuilder WithRateLimitCounterPrefix(string rateLimitCounterPrefix)
        {
            _rateLimitCounterPrefix = rateLimitCounterPrefix;
            return this;
        }

        public RateLimitOptionsBuilder WithRateLimitRule(RateLimitRule rateLimitRule)
        {
            _rateLimitRule = rateLimitRule;
            return this;
        }

        public RateLimitOptionsBuilder WithHttpStatusCode(int httpStatusCode)
        {
            _httpStatusCode = httpStatusCode;
            return this;
        }

        public RateLimitOptions Build()
        {
            return new RateLimitOptions(_enableRateLimiting, _clientIdHeader, _clientWhitelist,
                _disableRateLimitHeaders, _quotaExceededMessage, _rateLimitCounterPrefix,
                _rateLimitRule, _httpStatusCode);
        }
    }
}
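
A usage sketch for the rate limit builder, using only the With* methods shown above; all of the header, whitelist, and message values are illustrative, and the RateLimitRule is deliberately left unset because its constructor is not part of this change.

using System.Collections.Generic;
using Ocelot.Configuration.Builder;

public static class RateLimitOptionsBuilderSketch
{
    public static void Main()
    {
        var rateLimitOptions = new RateLimitOptionsBuilder()
            .WithEnableRateLimiting(true)
            .WithClientIdHeader("ClientId")
            .WithClientWhiteList(new List<string> { "admin-client" })
            .WithDisableRateLimitHeaders(false)
            .WithQuotaExceededMessage("API calls quota exceeded")
            .WithRateLimitCounterPrefix("ocelot")
            .WithHttpStatusCode(429)
            .Build();
    }
}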
@ -1,249 +1,239 @@
using System.Collections.Generic;
using System.Net.Http;
using Ocelot.Values;
using System.Linq;
using Ocelot.Configuration.Creator;

namespace Ocelot.Configuration.Builder
{
    public class ReRouteBuilder
    {
        private AuthenticationOptions _authenticationOptions;
        private string _loadBalancerKey;
        private string _downstreamPathTemplate;
        private string _upstreamTemplate;
        private UpstreamPathTemplate _upstreamTemplatePattern;
        private List<HttpMethod> _upstreamHttpMethod;
        private bool _isAuthenticated;
        private List<ClaimToThing> _configHeaderExtractorProperties;
        private List<ClaimToThing> _claimToClaims;
        private Dictionary<string, string> _routeClaimRequirement;
        private bool _isAuthorised;
        private List<ClaimToThing> _claimToQueries;
        private string _requestIdHeaderKey;
        private bool _isCached;
        private CacheOptions _fileCacheOptions;
        private string _downstreamScheme;
        private string _downstreamHost;
        private int _downstreamPort;
        private string _loadBalancer;
        private bool _useQos;
        private QoSOptions _qosOptions;
        private HttpHandlerOptions _httpHandlerOptions;
        public bool _enableRateLimiting;
        public RateLimitOptions _rateLimitOptions;
        private string _authenticationProviderKey;
        private bool _useServiceDiscovery;
        private string _serviceName;

        private List<HeaderFindAndReplace> _upstreamHeaderFindAndReplace;
        private List<HeaderFindAndReplace> _downstreamHeaderFindAndReplace;

        public ReRouteBuilder WithLoadBalancer(string loadBalancer)
        {
            _loadBalancer = loadBalancer;
            return this;
        }

        public ReRouteBuilder WithDownstreamScheme(string downstreamScheme)
        {
            _downstreamScheme = downstreamScheme;
            return this;
        }

        public ReRouteBuilder WithDownstreamHost(string downstreamHost)
        {
            _downstreamHost = downstreamHost;
            return this;
        }

        public ReRouteBuilder WithDownstreamPathTemplate(string input)
        {
            _downstreamPathTemplate = input;
            return this;
        }

        public ReRouteBuilder WithUpstreamPathTemplate(string input)
        {
            _upstreamTemplate = input;
            return this;
        }

        public ReRouteBuilder WithUpstreamTemplatePattern(UpstreamPathTemplate input)
        {
            _upstreamTemplatePattern = input;
            return this;
        }

        public ReRouteBuilder WithUpstreamHttpMethod(List<string> input)
        {
            _upstreamHttpMethod = (input.Count == 0) ? new List<HttpMethod>() : input.Select(x => new HttpMethod(x.Trim())).ToList();
            return this;
        }

        public ReRouteBuilder WithIsAuthenticated(bool input)
        {
            _isAuthenticated = input;
            return this;
        }

        public ReRouteBuilder WithIsAuthorised(bool input)
        {
            _isAuthorised = input;
            return this;
        }

        public ReRouteBuilder WithRequestIdKey(string input)
        {
            _requestIdHeaderKey = input;
            return this;
        }

        public ReRouteBuilder WithClaimsToHeaders(List<ClaimToThing> input)
        {
            _configHeaderExtractorProperties = input;
            return this;
        }

        public ReRouteBuilder WithClaimsToClaims(List<ClaimToThing> input)
        {
            _claimToClaims = input;
            return this;
        }

        public ReRouteBuilder WithRouteClaimsRequirement(Dictionary<string, string> input)
        {
            _routeClaimRequirement = input;
            return this;
        }

        public ReRouteBuilder WithClaimsToQueries(List<ClaimToThing> input)
        {
            _claimToQueries = input;
            return this;
        }

        public ReRouteBuilder WithIsCached(bool input)
        {
            _isCached = input;
            return this;
        }

        public ReRouteBuilder WithCacheOptions(CacheOptions input)
        {
            _fileCacheOptions = input;
            return this;
        }

        public ReRouteBuilder WithDownstreamPort(int port)
        {
            _downstreamPort = port;
            return this;
        }

        public ReRouteBuilder WithIsQos(bool input)
        {
            _useQos = input;
            return this;
        }

        public ReRouteBuilder WithQosOptions(QoSOptions input)
        {
            _qosOptions = input;
            return this;
        }

        public ReRouteBuilder WithReRouteKey(string loadBalancerKey)
        {
            _loadBalancerKey = loadBalancerKey;
            return this;
        }

        public ReRouteBuilder WithAuthenticationOptions(AuthenticationOptions authenticationOptions)
        {
            _authenticationOptions = authenticationOptions;
            return this;
        }

        public ReRouteBuilder WithEnableRateLimiting(bool input)
        {
            _enableRateLimiting = input;
            return this;
        }

        public ReRouteBuilder WithRateLimitOptions(RateLimitOptions input)
        {
            _rateLimitOptions = input;
            return this;
        }

        public ReRouteBuilder WithAuthenticationProviderKey(string authenticationProviderKey)
        {
            _authenticationProviderKey = authenticationProviderKey;
            return this;
        }

        public ReRouteBuilder WithHttpHandlerOptions(HttpHandlerOptions input)
        {
            _httpHandlerOptions = input;
            return this;
        }

        public ReRouteBuilder WithUseServiceDiscovery(bool useServiceDiscovery)
        {
            _useServiceDiscovery = useServiceDiscovery;
            return this;
        }

        public ReRouteBuilder WithServiceName(string serviceName)
        {
            _serviceName = serviceName;
            return this;
        }

        public ReRouteBuilder WithUpstreamHeaderFindAndReplace(List<HeaderFindAndReplace> upstreamHeaderFindAndReplace)
        {
            _upstreamHeaderFindAndReplace = upstreamHeaderFindAndReplace;
            return this;
        }

        public ReRouteBuilder WithDownstreamHeaderFindAndReplace(List<HeaderFindAndReplace> downstreamHeaderFindAndReplace)
        {
            _downstreamHeaderFindAndReplace = downstreamHeaderFindAndReplace;
            return this;
        }

        public ReRoute Build()
        {
            return new ReRoute(
                new PathTemplate(_downstreamPathTemplate),
                new PathTemplate(_upstreamTemplate),
                _upstreamHttpMethod,
                _upstreamTemplatePattern,
                _isAuthenticated,
                _authenticationOptions,
                _configHeaderExtractorProperties,
                _claimToClaims,
                _routeClaimRequirement,
                _isAuthorised,
                _claimToQueries,
                _requestIdHeaderKey,
                _isCached,
                _fileCacheOptions,
                _downstreamScheme,
                _loadBalancer,
                _downstreamHost,
                _downstreamPort,
                _loadBalancerKey,
                _useQos,
                _qosOptions,
                _enableRateLimiting,
                _rateLimitOptions,
                _httpHandlerOptions,
                _useServiceDiscovery,
                _serviceName,
                _upstreamHeaderFindAndReplace,
                _downstreamHeaderFindAndReplace);
        }
    }
}

using System.Collections.Generic;
using System.Net.Http;
using Ocelot.Values;
using System.Linq;
using Ocelot.Configuration.Creator;
using System;

namespace Ocelot.Configuration.Builder
{
    public class ReRouteBuilder
    {
        private AuthenticationOptions _authenticationOptions;
        private string _loadBalancerKey;
        private string _downstreamPathTemplate;
        private string _upstreamTemplate;
        private UpstreamPathTemplate _upstreamTemplatePattern;
        private List<HttpMethod> _upstreamHttpMethod;
        private bool _isAuthenticated;
        private List<ClaimToThing> _configHeaderExtractorProperties;
        private List<ClaimToThing> _claimToClaims;
        private Dictionary<string, string> _routeClaimRequirement;
        private bool _isAuthorised;
        private List<ClaimToThing> _claimToQueries;
        private string _requestIdHeaderKey;
        private bool _isCached;
        private CacheOptions _fileCacheOptions;
        private string _downstreamScheme;
        private string _loadBalancer;
        private bool _useQos;
        private QoSOptions _qosOptions;
        private HttpHandlerOptions _httpHandlerOptions;
        private bool _enableRateLimiting;
        private RateLimitOptions _rateLimitOptions;
        private bool _useServiceDiscovery;
        private string _serviceName;
        private List<HeaderFindAndReplace> _upstreamHeaderFindAndReplace;
        private List<HeaderFindAndReplace> _downstreamHeaderFindAndReplace;
        private readonly List<DownstreamHostAndPort> _downstreamAddresses;

        public ReRouteBuilder()
        {
            _downstreamAddresses = new List<DownstreamHostAndPort>();
        }

        public ReRouteBuilder WithDownstreamAddresses(List<DownstreamHostAndPort> downstreamAddresses)
        {
            _downstreamAddresses.AddRange(downstreamAddresses);
            return this;
        }

        public ReRouteBuilder WithLoadBalancer(string loadBalancer)
        {
            _loadBalancer = loadBalancer;
            return this;
        }

        public ReRouteBuilder WithDownstreamScheme(string downstreamScheme)
        {
            _downstreamScheme = downstreamScheme;
            return this;
        }

        public ReRouteBuilder WithDownstreamPathTemplate(string input)
        {
            _downstreamPathTemplate = input;
            return this;
        }

        public ReRouteBuilder WithUpstreamPathTemplate(string input)
        {
            _upstreamTemplate = input;
            return this;
        }

        public ReRouteBuilder WithUpstreamTemplatePattern(UpstreamPathTemplate input)
        {
            _upstreamTemplatePattern = input;
            return this;
        }

        public ReRouteBuilder WithUpstreamHttpMethod(List<string> input)
        {
            _upstreamHttpMethod = (input.Count == 0) ? new List<HttpMethod>() : input.Select(x => new HttpMethod(x.Trim())).ToList();
            return this;
        }

        public ReRouteBuilder WithIsAuthenticated(bool input)
        {
            _isAuthenticated = input;
            return this;
        }

        public ReRouteBuilder WithIsAuthorised(bool input)
        {
            _isAuthorised = input;
            return this;
        }

        public ReRouteBuilder WithRequestIdKey(string input)
        {
            _requestIdHeaderKey = input;
            return this;
        }

        public ReRouteBuilder WithClaimsToHeaders(List<ClaimToThing> input)
        {
            _configHeaderExtractorProperties = input;
            return this;
        }

        public ReRouteBuilder WithClaimsToClaims(List<ClaimToThing> input)
        {
            _claimToClaims = input;
            return this;
        }

        public ReRouteBuilder WithRouteClaimsRequirement(Dictionary<string, string> input)
        {
            _routeClaimRequirement = input;
            return this;
        }

        public ReRouteBuilder WithClaimsToQueries(List<ClaimToThing> input)
        {
            _claimToQueries = input;
            return this;
        }

        public ReRouteBuilder WithIsCached(bool input)
        {
            _isCached = input;
            return this;
        }

        public ReRouteBuilder WithCacheOptions(CacheOptions input)
        {
            _fileCacheOptions = input;
            return this;
        }

        public ReRouteBuilder WithIsQos(bool input)
        {
            _useQos = input;
            return this;
        }

        public ReRouteBuilder WithQosOptions(QoSOptions input)
        {
            _qosOptions = input;
            return this;
        }

        public ReRouteBuilder WithReRouteKey(string loadBalancerKey)
        {
            _loadBalancerKey = loadBalancerKey;
            return this;
        }

        public ReRouteBuilder WithAuthenticationOptions(AuthenticationOptions authenticationOptions)
        {
            _authenticationOptions = authenticationOptions;
            return this;
        }

        public ReRouteBuilder WithEnableRateLimiting(bool input)
        {
            _enableRateLimiting = input;
            return this;
        }

        public ReRouteBuilder WithRateLimitOptions(RateLimitOptions input)
        {
            _rateLimitOptions = input;
            return this;
        }

        public ReRouteBuilder WithHttpHandlerOptions(HttpHandlerOptions input)
        {
            _httpHandlerOptions = input;
            return this;
        }

        public ReRouteBuilder WithUseServiceDiscovery(bool useServiceDiscovery)
        {
            _useServiceDiscovery = useServiceDiscovery;
            return this;
        }

        public ReRouteBuilder WithServiceName(string serviceName)
        {
            _serviceName = serviceName;
            return this;
        }

        public ReRouteBuilder WithUpstreamHeaderFindAndReplace(List<HeaderFindAndReplace> upstreamHeaderFindAndReplace)
        {
            _upstreamHeaderFindAndReplace = upstreamHeaderFindAndReplace;
            return this;
        }

        public ReRouteBuilder WithDownstreamHeaderFindAndReplace(List<HeaderFindAndReplace> downstreamHeaderFindAndReplace)
        {
            _downstreamHeaderFindAndReplace = downstreamHeaderFindAndReplace;
            return this;
        }

        public ReRoute Build()
        {
            return new ReRoute(
                new PathTemplate(_downstreamPathTemplate),
                new PathTemplate(_upstreamTemplate),
                _upstreamHttpMethod,
                _upstreamTemplatePattern,
                _isAuthenticated,
                _authenticationOptions,
                _configHeaderExtractorProperties,
                _claimToClaims,
                _routeClaimRequirement,
                _isAuthorised,
                _claimToQueries,
                _requestIdHeaderKey,
                _isCached,
                _fileCacheOptions,
                _downstreamScheme,
                _loadBalancer,
                _loadBalancerKey,
                _useQos,
                _qosOptions,
                _enableRateLimiting,
                _rateLimitOptions,
                _httpHandlerOptions,
                _useServiceDiscovery,
                _serviceName,
                _upstreamHeaderFindAndReplace,
                _downstreamHeaderFindAndReplace,
                _downstreamAddresses);
        }
    }
}
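A minimal sketch of how the reworked builder might describe a load-balanced ReRoute (illustrative only, not part of this diff; the templates, hosts, ports and load balancer name are invented, and DownstreamHostAndPort is assumed to take host and port as constructor arguments, as in DownstreamAddressesCreator further down):

    // Hypothetical: one upstream template fanned out to two downstream hosts via round robin.
    var reRoute = new ReRouteBuilder()
        .WithUpstreamPathTemplate("/posts/{postId}")
        .WithDownstreamPathTemplate("/api/posts/{postId}")
        .WithUpstreamHttpMethod(new List<string> { "Get" })
        .WithDownstreamScheme("http")
        .WithLoadBalancer("RoundRobin")
        .WithDownstreamAddresses(new List<DownstreamHostAndPort>
        {
            new DownstreamHostAndPort("10.0.0.1", 5000),
            new DownstreamHostAndPort("10.0.0.2", 5000)
        })
        .Build();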
|
@ -1,46 +1,46 @@
namespace Ocelot.Configuration.Builder
{
    public class ReRouteOptionsBuilder
    {
        private bool _isAuthenticated;
        private bool _isAuthorised;
        private bool _isCached;
        private bool _isQoS;
        private bool _enableRateLimiting;

        public ReRouteOptionsBuilder WithIsCached(bool isCached)
        {
            _isCached = isCached;
            return this;
        }

        public ReRouteOptionsBuilder WithIsAuthenticated(bool isAuthenticated)
        {
            _isAuthenticated = isAuthenticated;
            return this;
        }

        public ReRouteOptionsBuilder WithIsAuthorised(bool isAuthorised)
        {
            _isAuthorised = isAuthorised;
            return this;
        }

        public ReRouteOptionsBuilder WithIsQos(bool isQoS)
        {
            _isQoS = isQoS;
            return this;
        }

        public ReRouteOptionsBuilder WithRateLimiting(bool enableRateLimiting)
        {
            _enableRateLimiting = enableRateLimiting;
            return this;
        }

        public ReRouteOptions Build()
        {
            return new ReRouteOptions(_isAuthenticated, _isAuthorised, _isCached, _isQoS, _enableRateLimiting);
        }
    }
}
@ -1,25 +1,25 @@
namespace Ocelot.Configuration.Builder
{
    public class ServiceProviderConfigurationBuilder
    {
        private string _serviceDiscoveryProviderHost;
        private int _serviceDiscoveryProviderPort;

        public ServiceProviderConfigurationBuilder WithServiceDiscoveryProviderHost(string serviceDiscoveryProviderHost)
        {
            _serviceDiscoveryProviderHost = serviceDiscoveryProviderHost;
            return this;
        }

        public ServiceProviderConfigurationBuilder WithServiceDiscoveryProviderPort(int serviceDiscoveryProviderPort)
        {
            _serviceDiscoveryProviderPort = serviceDiscoveryProviderPort;
            return this;
        }

        public ServiceProviderConfiguration Build()
        {
            return new ServiceProviderConfiguration(_serviceDiscoveryProviderHost,_serviceDiscoveryProviderPort);
        }
    }
}
@ -1,14 +1,14 @@
namespace Ocelot.Configuration
{
    public class CacheOptions
    {
        public CacheOptions(int ttlSeconds, string region)
        {
            TtlSeconds = ttlSeconds;
            Region = region;
        }

        public int TtlSeconds { get; private set; }
        public string Region { get; private set; }
    }
}
@ -1,18 +1,18 @@
namespace Ocelot.Configuration
{
    public class ClaimToThing
    {
        public ClaimToThing(string existingKey, string newKey, string delimiter, int index)
        {
            NewKey = newKey;
            Delimiter = delimiter;
            Index = index;
            ExistingKey = existingKey;
        }

        public string ExistingKey { get; private set; }
        public string NewKey { get; private set; }
        public string Delimiter { get; private set; }
        public int Index { get; private set; }
    }
}
@ -1,12 +1,12 @@
using Ocelot.Configuration.File;

namespace Ocelot.Configuration.Creator
{
    public class AuthenticationOptionsCreator : IAuthenticationOptionsCreator
    {
        public AuthenticationOptions Create(FileReRoute reRoute)
        {
            return new AuthenticationOptions(reRoute.AuthenticationOptions.AllowedScopes, reRoute.AuthenticationOptions.AuthenticationProviderKey);
        }
    }
}
@ -1,41 +1,41 @@
using System.Collections.Generic;
using Ocelot.Configuration.Parser;
using Ocelot.Logging;

namespace Ocelot.Configuration.Creator
{
    public class ClaimsToThingCreator : IClaimsToThingCreator
    {
        private readonly IClaimToThingConfigurationParser _claimToThingConfigParser;
        private readonly IOcelotLogger _logger;

        public ClaimsToThingCreator(IClaimToThingConfigurationParser claimToThingConfigurationParser,
            IOcelotLoggerFactory loggerFactory)
        {
            _logger = loggerFactory.CreateLogger<ClaimsToThingCreator>();
            _claimToThingConfigParser = claimToThingConfigurationParser;
        }

        public List<ClaimToThing> Create(Dictionary<string,string> inputToBeParsed)
        {
            var claimsToThings = new List<ClaimToThing>();

            foreach (var input in inputToBeParsed)
            {
                var claimToThing = _claimToThingConfigParser.Extract(input.Key, input.Value);

                if (claimToThing.IsError)
                {
                    _logger.LogDebug("ClaimsToThingCreator.BuildAddThingsToRequest",
                        $"Unable to extract configuration for key: {input.Key} and value: {input.Value} your configuration file is incorrect");
                }
                else
                {
                    claimsToThings.Add(claimToThing.Data);
                }
            }

            return claimsToThings;
        }
    }
}
@ -0,0 +1,14 @@
using System.Collections.Generic;
using System.Linq;
using Ocelot.Configuration.File;

namespace Ocelot.Configuration.Creator
{
    public class DownstreamAddressesCreator : IDownstreamAddressesCreator
    {
        public List<DownstreamHostAndPort> Create(FileReRoute reRoute)
        {
            return reRoute.DownstreamHostAndPorts.Select(hostAndPort => new DownstreamHostAndPort(hostAndPort.Host, hostAndPort.Port)).ToList();
        }
    }
}
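A sketch of the shape this new creator consumes (illustrative only; FileHostAndPort is an assumed name for the file-configuration element type, which the code above only shows as having Host and Port properties, and the host/port values are invented):

    // Hypothetical: two downstream hosts configured on one ReRoute become two DownstreamHostAndPort values.
    var fileReRoute = new FileReRoute
    {
        DownstreamHostAndPorts = new List<FileHostAndPort>
        {
            new FileHostAndPort { Host = "10.0.0.1", Port = 5000 },
            new FileHostAndPort { Host = "10.0.0.2", Port = 5000 }
        }
    };
    var downstreamAddresses = new DownstreamAddressesCreator().Create(fileReRoute);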
@ -38,6 +38,7 @@ namespace Ocelot.Configuration.Creator
        private readonly IHttpHandlerOptionsCreator _httpHandlerOptionsCreator;
        private readonly IAdministrationPath _adminPath;
        private readonly IHeaderFindAndReplaceCreator _headerFAndRCreator;
        private readonly IDownstreamAddressesCreator _downstreamAddressesCreator;

        public FileOcelotConfigurationCreator(
@ -55,9 +56,11 @@ namespace Ocelot.Configuration.Creator
            IRegionCreator regionCreator,
            IHttpHandlerOptionsCreator httpHandlerOptionsCreator,
            IAdministrationPath adminPath,
            IHeaderFindAndReplaceCreator headerFAndRCreator
            IHeaderFindAndReplaceCreator headerFAndRCreator,
            IDownstreamAddressesCreator downstreamAddressesCreator
            )
        {
            _downstreamAddressesCreator = downstreamAddressesCreator;
            _headerFAndRCreator = headerFAndRCreator;
            _adminPath = adminPath;
            _regionCreator = regionCreator;
@ -133,6 +136,8 @@ namespace Ocelot.Configuration.Creator

            var hAndRs = _headerFAndRCreator.Create(fileReRoute);

            var downstreamAddresses = _downstreamAddressesCreator.Create(fileReRoute);

            var reRoute = new ReRouteBuilder()
                .WithDownstreamPathTemplate(fileReRoute.DownstreamPathTemplate)
                .WithUpstreamPathTemplate(fileReRoute.UpstreamPathTemplate)
@ -150,8 +155,7 @@ namespace Ocelot.Configuration.Creator
                .WithCacheOptions(new CacheOptions(fileReRoute.FileCacheOptions.TtlSeconds, region))
                .WithDownstreamScheme(fileReRoute.DownstreamScheme)
                .WithLoadBalancer(fileReRoute.LoadBalancer)
                .WithDownstreamHost(fileReRoute.DownstreamHost)
                .WithDownstreamPort(fileReRoute.DownstreamPort)
                .WithDownstreamAddresses(downstreamAddresses)
                .WithReRouteKey(reRouteKey)
                .WithIsQos(fileReRouteOptions.IsQos)
                .WithQosOptions(qosOptions)
|
@ -1,68 +1,68 @@
using System;
using System.Collections.Generic;
using Ocelot.Configuration.File;
using Ocelot.Middleware;

namespace Ocelot.Configuration.Creator
{
    public class HeaderFindAndReplaceCreator : IHeaderFindAndReplaceCreator
    {
        private IBaseUrlFinder _finder;
        private Dictionary<string, Func<string>> _placeholders;

        public HeaderFindAndReplaceCreator(IBaseUrlFinder finder)
        {
            _finder = finder;
            _placeholders = new Dictionary<string, Func<string>>();
            _placeholders.Add("{BaseUrl}", () => {
                return _finder.Find();
            });
        }

        public HeaderTransformations Create(FileReRoute fileReRoute)
        {
            var upstream = new List<HeaderFindAndReplace>();

            foreach(var input in fileReRoute.UpstreamHeaderTransform)
            {
                var hAndr = Map(input);
                upstream.Add(hAndr);
            }

            var downstream = new List<HeaderFindAndReplace>();

            foreach(var input in fileReRoute.DownstreamHeaderTransform)
            {
                var hAndr = Map(input);
                downstream.Add(hAndr);
            }

            return new HeaderTransformations(upstream, downstream);
        }

        private HeaderFindAndReplace Map(KeyValuePair<string,string> input)
        {
            var findAndReplace = input.Value.Split(",");

            var replace = findAndReplace[1].TrimStart();

            var startOfPlaceholder = replace.IndexOf("{");
            if(startOfPlaceholder > -1)
            {
                var endOfPlaceholder = replace.IndexOf("}", startOfPlaceholder);

                var placeholder = replace.Substring(startOfPlaceholder, startOfPlaceholder + (endOfPlaceholder + 1));

                if(_placeholders.ContainsKey(placeholder))
                {
                    var value = _placeholders[placeholder].Invoke();
                    replace = replace.Replace(placeholder, value);
                }
            }

            var hAndr = new HeaderFindAndReplace(input.Key, findAndReplace[0], replace, 0);

            return hAndr;
        }
    }
}
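To illustrate the Map method above (illustrative only; the header name and URLs are invented): each transform entry is a "find, replace" pair, and a {BaseUrl} placeholder on the replace side is resolved through IBaseUrlFinder.

    // Hypothetical downstream transform entry: rewrite redirect locations to point at the gateway.
    var entry = new KeyValuePair<string, string>("Location", "http://10.0.0.1:5000, {BaseUrl}");
    // Map splits on ",", trims the replace side, resolves {BaseUrl} via _finder.Find(),
    // and yields new HeaderFindAndReplace("Location", "http://10.0.0.1:5000", "http://mygateway.com/", 0)
    // when the finder returns "http://mygateway.com/".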
@ -1,17 +1,17 @@
using System.Collections.Generic;

namespace Ocelot.Configuration.Creator
{
    public class HeaderTransformations
    {
        public HeaderTransformations(List<HeaderFindAndReplace> upstream, List<HeaderFindAndReplace> downstream)
        {
            Upstream = upstream;
            Downstream = downstream;
        }

        public List<HeaderFindAndReplace> Upstream { get; private set; }

        public List<HeaderFindAndReplace> Downstream { get; private set; }
    }
}
@ -1,10 +1,10 @@
using System.Collections.Generic;
using Ocelot.Configuration.File;

namespace Ocelot.Configuration.Creator
{
    public interface IAuthenticationOptionsCreator
    {
        AuthenticationOptions Create(FileReRoute reRoute);
    }
}
@ -1,9 +1,9 @@
using System.Collections.Generic;

namespace Ocelot.Configuration.Creator
{
    public interface IClaimsToThingCreator
    {
        List<ClaimToThing> Create(Dictionary<string,string> thingsBeingAdded);
    }
}
@ -0,0 +1,10 @@
using System.Collections.Generic;
using Ocelot.Configuration.File;

namespace Ocelot.Configuration.Creator
{
    public interface IDownstreamAddressesCreator
    {
        List<DownstreamHostAndPort> Create(FileReRoute reRoute);
    }
}
@ -1,10 +1,10 @@
using System.Collections.Generic;
using Ocelot.Configuration.File;

namespace Ocelot.Configuration.Creator
{
    public interface IHeaderFindAndReplaceCreator
    {
        HeaderTransformations Create(FileReRoute fileReRoute);
    }
}
@ -1,11 +1,11 @@
using System.Threading.Tasks;
using Ocelot.Configuration.File;
using Ocelot.Responses;

namespace Ocelot.Configuration.Creator
{
    public interface IOcelotConfigurationCreator
    {
        Task<Response<IOcelotConfiguration>> Create(FileConfiguration fileConfiguration);
    }
}
@ -1,9 +1,9 @@
using Ocelot.Configuration.File;

namespace Ocelot.Configuration.Creator
{
    public interface IQoSOptionsCreator
    {
        QoSOptions Create(FileReRoute fileReRoute);
    }
}
@ -1,9 +1,9 @@
using Ocelot.Configuration.File;

namespace Ocelot.Configuration.Creator
{
    public interface IRateLimitOptionsCreator
    {
        RateLimitOptions Create(FileReRoute fileReRoute, FileGlobalConfiguration globalConfiguration, bool enableRateLimiting);
    }
}
@ -1,9 +1,9 @@
using Ocelot.Configuration.File;

namespace Ocelot.Configuration.Creator
{
    public interface IReRouteOptionsCreator
    {
        ReRouteOptions Create(FileReRoute fileReRoute);
    }
}
@ -1,9 +1,9 @@
using Ocelot.Configuration.File;

namespace Ocelot.Configuration.Creator
{
    public interface IRequestIdKeyCreator
    {
        string Create(FileReRoute fileReRoute, FileGlobalConfiguration globalConfiguration);
    }
}
@ -1,9 +1,9 @@
using Ocelot.Configuration.File;

namespace Ocelot.Configuration.Creator
{
    public interface IServiceProviderConfigurationCreator
    {
        ServiceProviderConfiguration Create(FileGlobalConfiguration globalConfiguration);
    }
}
@ -1,10 +1,10 @@
using Ocelot.Configuration.File;
using Ocelot.Values;

namespace Ocelot.Configuration.Creator
{
    public interface IUpstreamTemplatePatternCreator
    {
        UpstreamPathTemplate Create(FileReRoute reRoute);
    }
}
@ -1,26 +1,26 @@
using System;
using System.Collections.Generic;
using IdentityServer4.AccessTokenValidation;
using IdentityServer4.Models;
using Ocelot.Configuration.Provider;

namespace Ocelot.Configuration.Creator
{
    public static class IdentityServerConfigurationCreator
    {
        public static IdentityServerConfiguration GetIdentityServerConfiguration(string secret)
        {
            var credentialsSigningCertificateLocation = Environment.GetEnvironmentVariable("OCELOT_CERTIFICATE");
            var credentialsSigningCertificatePassword = Environment.GetEnvironmentVariable("OCELOT_CERTIFICATE_PASSWORD");

            return new IdentityServerConfiguration(
                "admin",
                false,
                secret,
                new List<string> { "admin", "openid", "offline_access" },
                credentialsSigningCertificateLocation,
                credentialsSigningCertificatePassword
            );
        }
    }
}
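The creator above reads its signing certificate details from two environment variables; a minimal sketch of supplying them before the gateway starts (illustrative only; the path and password are invented):

    // Hypothetical: point the administration identity server at a signing certificate.
    Environment.SetEnvironmentVariable("OCELOT_CERTIFICATE", "/certs/idsrv.pfx");
    Environment.SetEnvironmentVariable("OCELOT_CERTIFICATE_PASSWORD", "not-a-real-password");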
|
Some files were not shown because too many files have changed in this diff.