Compare commits

...

18 Commits

Author SHA1 Message Date
ViViDboarder 26a70dce27 Refactor of scripts to add a target for auto creating a pr
I plan to use this for automated builds
2020-07-01 12:45:20 -07:00
ViViDboarder 771760f828 Update archive creation to make deploying changes easier 2020-07-01 10:17:27 -07:00
ViViDboarder 8c7f2225b0 Update simpler readme 2020-06-30 18:27:09 -07:00
ViViDboarder cd6afe7256 Update to gomod 2020-06-30 18:06:35 -07:00
ViViDboarder 99e8034f02 New Archival targets to make it easier to deploy 2018-01-06 09:04:44 -08:00
ViViDboarder 70ac28bb2f Make more idomatic 2018-01-05 22:09:09 -08:00
ViViDboarder 6f8135ce6b Simpler packaging 2018-01-05 12:50:13 -08:00
ViViDboarder be9e0b853e Add support for lightning and clean up some of the functions 2018-01-05 09:57:11 -08:00
ViViDboarder 7aae9915f0 Create build dir to reduce mess 2017-07-15 14:24:10 -07:00
ViViDboarder 0be33639b0 Includes more guides and better logging
Large-ish refactor
2017-07-15 13:36:28 -07:00
ViViDboarder 86d554f10a Improve logging a bit 2017-07-12 10:35:51 -07:00
ViViDboarder 9b268fb293 Update top level entry id for apexcode 2017-07-12 10:35:39 -07:00
ViViDboarder 7a89b82da9 Add dep locks and update makefile to build properly 2017-05-05 14:32:33 -07:00
ViViDboarder 7a87bcb34c fix again 2016-07-26 18:40:05 -07:00
ViViDboarder a501c6a12f Remove spaces in archive names and fix readme 2016-07-26 18:38:37 -07:00
ViViDboarder b82e18781b Add some stuff for version tracking as well as archival 2016-07-26 18:27:01 -07:00
ViViDboarder 80d6c867cd Small refactor 2016-07-26 18:26:47 -07:00
ViViDboarder e64ac5df22 Yay! Fixed. Also added some style changes 2016-07-26 17:21:56 -07:00
31 changed files with 1363 additions and 491 deletions

13
.gitignore vendored
View File

@ -5,3 +5,16 @@
*.html
Content/
docSet.dsidx
# Downloaded CSS files
docs.min.css
holygrail.min.css
syntax-highlighter.min.css
*-version.txt
*.tgz
# External Go dependencies
vendor/
build/
archive/
.DS_Store
docset-gen
repotmp/

27
Gopkg.lock generated Normal file
View File

@ -0,0 +1,27 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
branch = "master"
name = "github.com/coopernurse/gorp"
packages = ["."]
revision = "236e1383df4c6b402e251d3fe2dcea50dab1faf1"
[[projects]]
branch = "master"
name = "github.com/mattn/go-sqlite3"
packages = ["."]
revision = "cf7286f069c3ef596efcc87781a4653a2e7607bd"
[[projects]]
branch = "master"
name = "golang.org/x/net"
packages = ["context"]
revision = "feeb485667d1fdabe727840fe00adc22431bc86e"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "1f1d5928eb9c306d42831a9b9d96e1ef8d11095cd843a3222acd869ee999f650"
solver-name = "gps-cdcl"
solver-version = 1

7
Gopkg.toml Normal file
View File

@ -0,0 +1,7 @@
[[constraint]]
branch = "master"
name = "github.com/coopernurse/gorp"
[[constraint]]
branch = "master"
name = "github.com/mattn/go-sqlite3"

110
Makefile
View File

@ -1,44 +1,84 @@
# Default entry point: build all three docset packages.
.PHONY: default test
default: all

.PHONY: all
all: package-apex package-vf package-lightning

# run-* targets regenerate the sqlite index from scratch (clean-index)
# and download/build one deliverable's docs into ./build.
.PHONY: run-apex
run-apex: clean-index
	go run ./SFDashC/*.go apexcode

.PHONY: run-vf
run-vf: clean-index
	go run ./SFDashC/*.go pages

.PHONY: run-lightning
run-lightning: clean-index
	go run ./SFDashC/*.go lightning

# package-* targets wrap the built docs into a .docset bundle.
.PHONY: package-apex
package-apex: run-apex
	./scripts/package-docset.sh apexcode

.PHONY: package-vf
package-vf: run-vf
	./scripts/package-docset.sh pages

.PHONY: package-lightning
package-lightning: run-lightning
	./scripts/package-docset.sh lightning

# archive-* targets produce the distributable archives under ./archive.
.PHONY: archive-apex
archive-apex: package-apex
	./scripts/archive-docset.sh apexcode

./archive/Salesforce_Apex: archive-apex

.PHONY: archive-vf
archive-vf: package-vf
	./scripts/archive-docset.sh pages

./archive/Salesforce_Visualforce: archive-vf

.PHONY: archive-lightning
# Fix: target was previously named "./archive-lightning", which broke the
# .PHONY declaration above and the ./archive/Salesforce_Lightning dependency.
archive-lightning: package-lightning
	./scripts/archive-docset.sh lightning

./archive/Salesforce_Lightning: archive-lightning

.PHONY: archive-all
archive-all: archive-apex archive-vf # archive-lightning Lightning package isn't functional

./archive: archive-all

.PHONY: create-pr
create-pr: ./archive
	./scripts/create-pr.sh

# clean-* targets each remove one category of build output.
.PHONY: clean-index
clean-index:
	rm -f ./build/docSet.dsidx

.PHONY: clean-package
clean-package:
	rm -fr *.docset

.PHONY: clean-archive
clean-archive:
	rm -f *.tgz
	rm -fr ./archive

.PHONY: clean
clean: clean-index clean-package clean-archive

.PHONY: clean-build
clean-build:
	rm -fr ./build

.PHONY: clean-pr
clean-pr:
	rm -fr ./repotmp

.PHONY: clean-all
clean-all: clean clean-build clean-pr

View File

@ -3,17 +3,15 @@ SFDashC
SFDashC is a go application for downloading and constructing Dash docsets from the Salesforce online documentation
Everything is wrapped with a Makefile and can be completely built by simply executing:
Everything is wrapped with a Makefile and can be completely built by executing:
make
That's it!
It will generate 3 docsets: Salesforce Apex, Salesforce Visualforce, and Salesforce Combined
It will generate 3 docsets: Salesforce Apex, Salesforce Visualforce, and Salesforce Lightning
Dependencies
------------
To Do
-----
Currently these are not auto resolved. You must install the following:
* github.com/coopernurse/gorp
* github.com/mattn/go-sqlite3
- [ ] Now that new `ForceCascadeType` is available, some of the entries in `./SFDashC/supportedtypes.go` can be simplified

View File

@ -1,16 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleIdentifier</key>
<string>Salesforce Apex</string>
<key>CFBundleName</key>
<string>Salesforce Apex</string>
<key>DocSetPlatformFamily</key>
<string>apex</string>
<key>isDashDocset</key>
<true/>
<key>dashIndexFilePath</key>
<string>apexcode.html</string>
</dict>
</plist>

View File

@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleIdentifier</key>
<string>Salesforce</string>
<!-- Displayed in list view -->
<key>CFBundleName</key>
<string>Salesforce</string>
<key>DocSetPlatformFamily</key>
<string>sfdc</string>
<key>isDashDocset</key>
<true/>
<key>dashIndexFilePath</key>
<string>index.htm</string>
</dict>
</plist>

View File

@ -1,16 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleIdentifier</key>
<string>Salesforce Visualforce</string>
<key>CFBundleName</key>
<string>Salesforce Visualforce</string>
<key>DocSetPlatformFamily</key>
<string>vf</string>
<key>isDashDocset</key>
<true/>
<key>dashIndexFilePath</key>
<string>pages.html</string>
</dict>
</plist>

59
SFDashC/database.go Normal file
View File

@ -0,0 +1,59 @@
// database.go: sqlite-backed search index for the generated Dash docset.
package main

import (
	"database/sql"
	"github.com/coopernurse/gorp"
	_ "github.com/mattn/go-sqlite3" // registers the "sqlite3" driver with database/sql
	"os"
	"path/filepath"
)

// dbmap is the package-level handle to the index database; it is assigned
// by the caller of InitDb (see main).
var dbmap *gorp.DbMap

// dbName is the index filename Dash expects inside a docset's Resources dir.
var dbName = "docSet.dsidx"

// InitDb will initialize a new instance of a sqlite db for indexing.
// The database file is created at <buildDir>/docSet.dsidx; any existing
// rows are truncated so each run starts from an empty index.
func InitDb(buildDir string) *gorp.DbMap {
	dbPath := filepath.Join(buildDir, dbName)
	// Ensure the build directory exists before sqlite tries to create the file.
	err := os.MkdirAll(filepath.Dir(dbPath), 0755)
	ExitIfError(err)
	db, err := sql.Open("sqlite3", dbPath)
	ExitIfError(err)
	// Local dbmap deliberately shadows the package var; the caller stores
	// the returned map.
	dbmap := &gorp.DbMap{Db: db, Dialect: gorp.SqliteDialect{}}
	// Dash requires a table named "searchIndex" with an autoincrement ID key.
	dbmap.AddTableWithName(SearchIndex{}, "searchIndex").SetKeys(true, "ID")
	err = dbmap.CreateTablesIfNotExists()
	ExitIfError(err)
	err = dbmap.TruncateTables()
	ExitIfError(err)
	return dbmap
}

// SaveSearchIndex will index a particular entry into the sqlite3 database.
// Entries without an HTML link or without a valid type are skipped silently.
func SaveSearchIndex(dbmap *gorp.DbMap, entry TOCEntry, entryType SupportedType, toc *AtlasTOC) {
	if entry.LinkAttr.Href == "" || !entryType.IsValidType() {
		return
	}
	relLink := entry.GetContentFilepath(toc, false)
	name := entry.CleanTitle(entryType)
	if entryType.ShowNamespace && len(entryHierarchy) > 0 {
		// Show namespace for methods
		name = entryHierarchy[len(entryHierarchy)-1] + "." + name
	}
	si := SearchIndex{
		Name: name,
		Type: entryType.TypeName,
		Path: relLink,
	}
	err := dbmap.Insert(&si)
	ExitIfError(err)
	LogDebug("%s is indexed as a %s", entry.Text, entryType.TypeName)
}

View File

@ -1,30 +1,14 @@
package main
import (
"errors"
"fmt"
"os"
"log"
)
var shouldWarn = true
// Custom errors
type errorString struct {
message string
}
// Error retrievies the Error message from the error
func (err errorString) Error() string {
return err.message
}
// NoWarn disables all warning output
func WithoutWarning() {
shouldWarn = false
}
// NewCustomError creates a custom error using a string as the message
func NewCustomError(message string) error {
return &errorString{message}
return errors.New(message)
}
// NewFormatedError creates a new error using Sprintf
@ -34,20 +18,19 @@ func NewFormatedError(format string, a ...interface{}) error {
// NewTypeNotFoundError returns an error for a TOCEntry with an unknown type
func NewTypeNotFoundError(entry TOCEntry) error {
return NewFormatedError("Type not found : %s %s", entry.Text, entry.ID)
return NewFormatedError("Type not found: %s %s", entry.Text, entry.ID)
}
// ExitIfError is a helper function for terminating if an error is not nil
func ExitIfError(err error) {
if err != nil {
fmt.Println("ERROR :", err)
os.Exit(1)
log.Fatal(err)
}
}
// WarnIfError is a helper function for terminating if an error is not nil
func WarnIfError(err error) {
if err != nil && shouldWarn {
fmt.Println("WARNING :", err)
if err != nil {
LogWarning(err.Error())
}
}

View File

@ -1,5 +0,0 @@
<html>
<body>
This is a joint docset
</body>
</html>

80
SFDashC/logging.go Normal file
View File

@ -0,0 +1,80 @@
// logging.go: minimal leveled logger used throughout SFDashC.
package main

import (
	"fmt"
	"log"
)

// prefix tags every log line; the constants below form an ordered severity
// scale (ERROR is most severe, DEBUG most verbose).
const (
	prefix = "SFDashC"
	// Log Levels
	ERROR = iota
	WARNING = iota
	INFO = iota
	DEBUG = iota
)

// logLevel is the maximum level that will be printed.
var logLevel int

func init() {
	// Default verbosity; raised to DEBUG via SetLogLevel when -debug is set.
	logLevel = INFO
}

// levelText returns the display name for a given log level.
func levelText(level int) string {
	switch level {
	case ERROR:
		return "ERROR"
	case WARNING:
		return "WARNING"
	case INFO:
		return "INFO"
	case DEBUG:
		return "DEBUG"
	default:
		return "UNKNOWN"
	}
}

// getLevelText returns the display name of the currently configured
// maximum level (kept for compatibility with existing callers).
func getLevelText() string {
	return levelText(logLevel)
}

func getLogPrefix() string {
	return fmt.Sprintf("%s: %s:", prefix, getLevelText())
}

// SetLogLevel will set the maximum level to print
func SetLogLevel(level int) {
	logLevel = level
}

// Log will print a formatted message with a prefix for a specified level
// If the level is greater than the maximum log level, it will not print
func Log(level int, format string, a ...interface{}) {
	if level <= logLevel {
		message := fmt.Sprintf(format, a...)
		// Fix: label the line with the message's own level, not the
		// configured maximum (previously a WARNING emitted while
		// logLevel was DEBUG would be tagged "DEBUG").
		message = fmt.Sprintf("%s: %s: %s", prefix, levelText(level), message)
		log.Println(message)
	}
}

// LogError will print an error message
// If the level is greater than the maximum log level, it will not print
// It is recommended to use log.Fatal() instead since it will handle exits for you
func LogError(format string, a ...interface{}) {
	Log(ERROR, format, a...)
}

// LogWarning will print a warning message
// If the level is greater than the maximum log level, it will not print
func LogWarning(format string, a ...interface{}) {
	Log(WARNING, format, a...)
}

// LogInfo will print an info message
// If the level is greater than the maximum log level, it will not print
func LogInfo(format string, a ...interface{}) {
	Log(INFO, format, a...)
}

// LogDebug will print a debug message
// If the level is greater than the maximum log level, it will not print
func LogDebug(format string, a ...interface{}) {
	Log(DEBUG, format, a...)
}

View File

@ -1,114 +1,39 @@
package main
import (
"database/sql"
"encoding/json"
"flag"
"fmt"
"github.com/coopernurse/gorp"
_ "github.com/mattn/go-sqlite3"
"io"
"io/ioutil"
"net/http"
"os"
"path/filepath"
"strings"
"sync"
)
/*
TODO:
- Move structs to own file
- Move db stuff to own file
- Stylesheets
*/
// CSS Paths
var cssBasePath = "https://developer.salesforce.com/resource/stylesheets"
var cssFiles = []string{"docs.min.css"}
var cssBaseURL = "https://developer.salesforce.com/resource/stylesheets"
var cssFiles = []string{"holygrail.min.css", "docs.min.css", "syntax-highlighter.min.css"}
var buildDir = "build"
// JSON Structs
// AtlasTOC represents the meta documenation from Salesforce
type AtlasTOC struct {
AvailableVersions []VersionInfo `json:"available_versions"`
Content string
ContentDocumentID string `json:"content_document_id"`
Deliverable string
DocTitle string `json:"doc_title"`
Locale string
Language LanguageInfo
PDFUrl string `json:"pdf_url"`
TOCEntries []TOCEntry `json:"toc"`
Title string
Version VersionInfo
}
// LanguageInfo contains information for linking and displaying the language
type LanguageInfo struct {
Label string
Locale string
URL string
}
// VersionInfo representes a Salesforce documentation version
type VersionInfo struct {
DocVersion string `json:"doc_version"`
ReleaseVersion string `json:"release_version"`
VersionText string `json:"version_text"`
VersionURL string `json:"version_url"`
}
// TOCEntry represents a single Table of Contents item
type TOCEntry struct {
Text string
ID string
LinkAttr LinkAttr `json:"a_attr,omitempty"`
Children []TOCEntry
ComputedFirstTopic bool
ComputedResetPageLayout bool
}
// LinkAttr represents all attributes bound to a link
type LinkAttr struct {
Href string
}
// TOCContent contains content information for a piece of documenation
type TOCContent struct {
ID string
Title string
Content string
}
// Sqlite Struct
// SearchIndex is the database table that indexes the docs
type SearchIndex struct {
ID int64 `db:id`
Name string `db:name`
Type string `db:type`
Path string `db:path`
}
var dbmap *gorp.DbMap
var wg sync.WaitGroup
var throttle = make(chan int, maxConcurrency)
const maxConcurrency = 16
var throttle = make(chan int, maxConcurrency)
func parseFlags() (locale string, deliverables []string, silent bool) {
func parseFlags() (locale string, deliverables []string, debug bool) {
flag.StringVar(
&locale, "locale", "en-us",
"locale to use for documentation (default: en-us)",
)
flag.BoolVar(
&silent, "silent", false, "this flag supresses warning messages",
&debug, "debug", false, "this flag supresses warning messages",
)
flag.Parse()
// All other args are for deliverables
// apexcode or pages
// apexcode, pages, or lightening
deliverables = flag.Args()
return
}
@ -116,40 +41,46 @@ func parseFlags() (locale string, deliverables []string, silent bool) {
// getTOC Retrieves the TOC JSON and Unmarshals it
func getTOC(locale string, deliverable string) (toc *AtlasTOC, err error) {
var tocURL = fmt.Sprintf("https://developer.salesforce.com/docs/get_document/atlas.%s.%s.meta", locale, deliverable)
LogDebug("TOC URL: %s", tocURL)
resp, err := http.Get(tocURL)
if err != nil {
return
}
ExitIfError(err)
// Read the downloaded JSON
defer resp.Body.Close()
defer func() {
ExitIfError(resp.Body.Close())
}()
contents, err := ioutil.ReadAll(resp.Body)
if err != nil {
return
}
ExitIfError(err)
// Load into Struct
toc = new(AtlasTOC)
LogDebug("TOC JSON: %s", string(contents))
err = json.Unmarshal([]byte(contents), toc)
return
}
// verifyVersion ensures that the version retrieved is the latest
func verifyVersion(toc *AtlasTOC) error {
// jsonVersion, _ := json.Marshal(toc.Version)
// LogDebug("toc.Version" + string(jsonVersion))
currentVersion := toc.Version.DocVersion
// jsonAvailVersions, _ := json.Marshal(toc.AvailableVersions)
// LogDebug("toc.AvailableVersions" + string(jsonAvailVersions))
topVersion := toc.AvailableVersions[0].DocVersion
if currentVersion != topVersion {
return NewFormatedError("verifyVersion : retrieved version is not the latest. Found %s, latest is %s", currentVersion, topVersion)
return NewFormatedError("verifyVersion: retrieved version is not the latest. Found %s, latest is %s", currentVersion, topVersion)
}
return nil
}
func printSuccess(toc *AtlasTOC) {
fmt.Println("Success:", toc.DocTitle, "-", toc.Version.VersionText)
LogInfo("Success: %s - %s - %s", toc.DocTitle, toc.Version.VersionText, toc.Version.DocVersion)
}
func saveMainContent(toc *AtlasTOC) {
filePath := fmt.Sprintf("%s.html", toc.Deliverable)
// Prepend build dir
filePath = filepath.Join(buildDir, filePath)
// Make sure file doesn't exist first
if _, err := os.Stat(filePath); os.IsNotExist(err) {
content := toc.Content
@ -162,292 +93,157 @@ func saveMainContent(toc *AtlasTOC) {
ofile, err := os.Create(filePath)
ExitIfError(err)
defer ofile.Close()
_, err = ofile.WriteString(content)
defer func() {
ExitIfError(ofile.Close())
}()
_, err = ofile.WriteString(
"<meta http-equiv='Content-Type' content='text/html; charset=UTF-8' />" +
content,
)
ExitIfError(err)
}
}
func main() {
locale, deliverables, silent := parseFlags()
if silent {
WithoutWarning()
}
// Init the Sqlite db
dbmap = initDb()
err := dbmap.TruncateTables()
// saveContentVersion will retrieve the version number from the TOC and save that to a text file
func saveContentVersion(toc *AtlasTOC) {
filePath := fmt.Sprintf("%s-version.txt", toc.Deliverable)
// Prepend build dir
filePath = filepath.Join(buildDir, filePath)
err := os.MkdirAll(filepath.Dir(filePath), 0755)
ExitIfError(err)
for _, deliverable := range deliverables {
toc, err := getTOC(locale, deliverable)
ofile, err := os.Create(filePath)
ExitIfError(err)
defer func() {
ExitIfError(ofile.Close())
}()
_, err = ofile.WriteString(toc.Version.DocVersion)
ExitIfError(err)
}
// downloadCSS will download a CSS file using the CSS base URL
func downloadCSS(fileName string, wg *sync.WaitGroup) {
downloadFile(cssBaseURL+"/"+fileName, fileName, wg)
}
// downloadFile will download n aribtrary file to a given file path
// It will also handle throttling if a WaitGroup is provided
func downloadFile(url string, fileName string, wg *sync.WaitGroup) {
if wg != nil {
defer wg.Done()
}
filePath := filepath.Join(buildDir, fileName)
if _, err := os.Stat(filePath); os.IsNotExist(err) {
err = os.MkdirAll(filepath.Dir(filePath), 0755)
ExitIfError(err)
saveMainContent(toc)
ofile, err := os.Create(filePath)
ExitIfError(err)
defer func() {
ExitIfError(ofile.Close())
}()
err = verifyVersion(toc)
WarnIfError(err)
response, err := http.Get(url)
ExitIfError(err)
defer func() {
ExitIfError(response.Body.Close())
}()
// Download each entry
for _, entry := range toc.TOCEntries {
if entry.ID == "apex_reference" || entry.ID == "pages_compref" {
processChildReferences(entry, nil, toc)
}
}
printSuccess(toc)
_, err = io.Copy(ofile, response.Body)
ExitIfError(err)
}
// Download CSS
throttle <- 1
/*
* wg.Add(1)
* for _, cssUrl := range cssFiles {
* go downloadLink(cssBasePath+"/"+cssUrl, &wg)
* }
*/
wg.Wait()
if wg != nil {
<-throttle
}
}
// SupportedType contains information for generating indexes for types we care about
type SupportedType struct {
TypeName, TitleSuffix string
PushName, AppendParents, IsContainer, NoTrim, ShowNamespace, ParseContent bool
// getEntryType will return an entry type that should be used for a given entry and it's parent's type
func getEntryType(entry TOCEntry, parentType SupportedType) (SupportedType, error) {
if parentType.ForceCascadeType {
return parentType.CreateChildType(), nil
}
childType, err := lookupEntryType(entry)
if err != nil && parentType.ShouldCascade() {
childType = parentType.CreateChildType()
err = nil
}
return childType, err
}
var supportedTypes = []SupportedType{
SupportedType{
TypeName: "Method",
TitleSuffix: "Methods",
AppendParents: true,
IsContainer: true,
ShowNamespace: true,
},
SupportedType{
TypeName: "Constructor",
TitleSuffix: "Constructors",
AppendParents: true,
IsContainer: true,
ShowNamespace: false,
},
SupportedType{
TypeName: "Class",
TitleSuffix: "Class",
PushName: true,
AppendParents: true,
ShowNamespace: false,
},
SupportedType{
TypeName: "Namespace",
TitleSuffix: "Namespace",
PushName: true,
AppendParents: true,
ShowNamespace: false,
},
SupportedType{
TypeName: "Interface",
TitleSuffix: "Interface",
PushName: true,
AppendParents: true,
ShowNamespace: false,
},
SupportedType{
TypeName: "Statement",
TitleSuffix: "Statement",
ShowNamespace: false,
},
SupportedType{
TypeName: "Enum",
TitleSuffix: "Enum",
AppendParents: true,
ShowNamespace: false,
},
SupportedType{
TypeName: "Property",
TitleSuffix: "Properties",
AppendParents: true,
IsContainer: true,
ShowNamespace: false,
},
SupportedType{
TypeName: "Guide",
TitleSuffix: "Example Implementation",
NoTrim: true,
ShowNamespace: false,
},
SupportedType{
TypeName: "Statement",
TitleSuffix: "Statements",
NoTrim: true,
AppendParents: false,
IsContainer: true,
ShowNamespace: false,
},
SupportedType{
TypeName: "Field",
TitleSuffix: "Fields",
AppendParents: true,
PushName: true,
IsContainer: true,
ShowNamespace: false,
},
SupportedType{
TypeName: "Exception",
TitleSuffix: "Exceptions",
NoTrim: true,
AppendParents: true,
ShowNamespace: false,
ParseContent: true,
},
SupportedType{
TypeName: "Constant",
TitleSuffix: "Constants",
NoTrim: true,
AppendParents: true,
ShowNamespace: false,
ParseContent: true,
},
SupportedType{
TypeName: "Class",
TitleSuffix: "Class (Base Email Methods)",
PushName: true,
AppendParents: true,
ShowNamespace: false,
},
}
// IsType indicates that the TOCEntry is of a given SupportedType
// This is done by checking the suffix of the entry text
func (entry TOCEntry) IsType(t SupportedType) bool {
return strings.HasSuffix(entry.Text, t.TitleSuffix)
}
// CleanTitle trims known suffix from TOCEntry titles
func (entry TOCEntry) CleanTitle(t SupportedType) string {
if t.NoTrim {
return entry.Text
}
return strings.TrimSuffix(entry.Text, " "+t.TitleSuffix)
}
// GetRelLink extracts only the relative link from the Link Href
func (entry TOCEntry) GetRelLink() (relLink string) {
if entry.LinkAttr.Href == "" {
return
}
// Get the JSON file
relLink = entry.LinkAttr.Href
anchorIndex := strings.LastIndex(relLink, "#")
if anchorIndex > 0 {
relLink = relLink[0:anchorIndex]
}
return
}
// GetContent retrieves Content for this TOCEntry from the API
func (entry TOCEntry) GetContent(toc *AtlasTOC) (content *TOCContent, err error) {
relLink := entry.GetRelLink()
if relLink == "" {
return
}
url := fmt.Sprintf(
"https://developer.salesforce.com/docs/get_document_content/%s/%s/%s/%s",
toc.Deliverable,
relLink,
toc.Locale,
toc.Version.DocVersion,
)
// fmt.Println(url)
resp, err := http.Get(url)
if err != nil {
return
}
// Read the downloaded JSON
defer resp.Body.Close()
contents, err := ioutil.ReadAll(resp.Body)
// fmt.Println(string(contents))
if err != nil {
return
}
// Load into Struct
content = new(TOCContent)
err = json.Unmarshal([]byte(contents), content)
return
}
func getEntryType(entry TOCEntry) (*SupportedType, error) {
if strings.HasPrefix(entry.ID, "pages_compref_") {
return &SupportedType{
TypeName: "Tag",
NoTrim: true,
}, nil
}
for _, t := range supportedTypes {
// lookupEntryType returns the matching SupportedType for a given entry or returns an error
func lookupEntryType(entry TOCEntry) (SupportedType, error) {
for _, t := range SupportedTypes {
if entry.IsType(t) {
return &t, nil
return t, nil
}
}
return nil, NewTypeNotFoundError(entry)
return SupportedType{}, NewTypeNotFoundError(entry)
}
// processEntryReference downloads html and indexes a toc item
func processEntryReference(entry TOCEntry, entryType SupportedType, toc *AtlasTOC) {
LogDebug("Processing: %s", entry.Text)
throttle <- 1
wg.Add(1)
go downloadContent(entry, toc, &wg)
if entryType.ShouldSkipIndex() {
LogDebug("%s is a container or is hidden. Do not index", entry.Text)
} else if !entryType.IsValidType() {
LogDebug("No entry type for %s. Cannot index", entry.Text)
} else {
SaveSearchIndex(dbmap, entry, entryType, toc)
}
}
// entryHierarchy allows breadcrumb naming
var entryHierarchy []string
func processChildReferences(entry TOCEntry, entryType *SupportedType, toc *AtlasTOC) {
if entryType != nil && entryType.PushName {
entryHierarchy = append(entryHierarchy, entry.CleanTitle(*entryType))
// processChildReferences iterates through all child toc items, cascading types, and indexes them
func processChildReferences(entry TOCEntry, entryType SupportedType, toc *AtlasTOC) {
if entryType.PushName {
entryHierarchy = append(entryHierarchy, entry.CleanTitle(entryType))
}
for _, child := range entry.Children {
// fmt.Println("Processing: " + child.Text)
LogDebug("Reading child: %s", child.Text)
var err error
var childType *SupportedType
var childType SupportedType
// Skip anything without an HTML page
if child.LinkAttr.Href != "" {
throttle <- 1
wg.Add(1)
go downloadContent(child, toc, &wg)
childType, err = getEntryType(child)
if childType == nil && (entryType != nil && entryType.IsContainer) {
saveSearchIndex(dbmap, child, entryType, toc)
} else if childType != nil && !childType.IsContainer {
saveSearchIndex(dbmap, child, childType, toc)
childType, err = getEntryType(child, entryType)
if err == nil {
processEntryReference(child, childType, toc)
} else {
WarnIfError(err)
}
} else {
LogDebug("%s has no link. Skipping", child.Text)
}
if len(child.Children) > 0 {
processChildReferences(child, childType, toc)
}
}
// fmt.Println("Done processing children for " + entry.Text)
LogDebug("Done processing children for %s", entry.Text)
if entryType != nil && entryType.PushName {
if entryType.PushName {
entryHierarchy = entryHierarchy[:len(entryHierarchy)-1]
}
}
// GetContentFilepath returns the filepath that should be used for the content
func (entry TOCEntry) GetContentFilepath(toc *AtlasTOC) string {
relLink := entry.GetRelLink()
if relLink == "" {
ExitIfError(NewFormatedError("Link not found for %s", entry.ID))
}
return fmt.Sprintf("%s/%s/%s", toc.Version.VersionURL, toc.Deliverable, relLink)
}
// downloadContent will download the html file for a given entry
func downloadContent(entry TOCEntry, toc *AtlasTOC, wg *sync.WaitGroup) {
defer wg.Done()
filePath := entry.GetContentFilepath(toc)
filePath := entry.GetContentFilepath(toc, true)
// Prepend build dir
filePath = filepath.Join(buildDir, filePath)
// Make sure file doesn't exist first
if _, err := os.Stat(filePath); os.IsNotExist(err) {
content, err := entry.GetContent(toc)
@ -461,53 +257,66 @@ func downloadContent(entry TOCEntry, toc *AtlasTOC, wg *sync.WaitGroup) {
ofile, err := os.Create(filePath)
ExitIfError(err)
defer ofile.Close()
_, err = ofile.WriteString("<base href=\"../../\"/>\n" + content.Content)
header := "<meta http-equiv='Content-Type' content='text/html; charset=UTF-8' />" +
"<base href=\"../../\"/>\n"
for _, cssFile := range cssFiles {
header += fmt.Sprintf("<link rel=\"stylesheet\" type=\"text/css\" href=\"%s\">", cssFile)
}
header += "<style>body { padding: 15px; }</style>"
defer func() {
ExitIfError(ofile.Close())
}()
_, err = ofile.WriteString(
header + content.Content,
)
ExitIfError(err)
}
<-throttle
}
/**********************
Database
**********************/
func saveSearchIndex(dbmap *gorp.DbMap, entry TOCEntry, entryType *SupportedType, toc *AtlasTOC) {
if entry.LinkAttr.Href == "" || entryType == nil {
return
func main() {
LogInfo("Starting...")
locale, deliverables, debug := parseFlags()
if debug {
SetLogLevel(DEBUG)
}
relLink := entry.GetContentFilepath(toc)
name := entry.CleanTitle(*entryType)
if entryType.ShowNamespace && len(entryHierarchy) > 0 {
// Show namespace for methods
name = entryHierarchy[len(entryHierarchy)-1] + "." + name
// Download CSS
for _, cssFile := range cssFiles {
throttle <- 1
wg.Add(1)
go downloadCSS(cssFile, &wg)
}
// fmt.Println("Storing: " + name)
// Download icon
go downloadFile("https://developer.salesforce.com/resources2/favicon.ico", "icon.ico", nil)
si := SearchIndex{
Name: name,
Type: entryType.TypeName,
Path: relLink,
// Init the Sqlite db
dbmap = InitDb(buildDir)
err := dbmap.TruncateTables()
ExitIfError(err)
for _, deliverable := range deliverables {
toc, err := getTOC(locale, deliverable)
err = verifyVersion(toc)
WarnIfError(err)
saveMainContent(toc)
saveContentVersion(toc)
// Download each entry
for _, entry := range toc.TOCEntries {
entryType, err := lookupEntryType(entry)
if err == nil {
processEntryReference(entry, entryType, toc)
}
processChildReferences(entry, entryType, toc)
}
printSuccess(toc)
}
dbmap.Insert(&si)
}
func initDb() *gorp.DbMap {
db, err := sql.Open("sqlite3", "docSet.dsidx")
ExitIfError(err)
dbmap := &gorp.DbMap{Db: db, Dialect: gorp.SqliteDialect{}}
dbmap.AddTableWithName(SearchIndex{}, "searchIndex").SetKeys(true, "ID")
err = dbmap.CreateTablesIfNotExists()
ExitIfError(err)
err = dbmap.TruncateTables()
ExitIfError(err)
return dbmap
wg.Wait()
}

235
SFDashC/structs.go Normal file
View File

@ -0,0 +1,235 @@
package main
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"strings"
)
// JSON Structs
// AtlasTOC represents the meta documenation from Salesforce
type AtlasTOC struct {
AvailableVersions []VersionInfo `json:"available_versions"`
Content string
ContentDocumentID string `json:"content_document_id"`
Deliverable string
DocTitle string `json:"doc_title"`
Locale string
Language LanguageInfo
PDFUrl string `json:"pdf_url"`
TOCEntries []TOCEntry `json:"toc"`
Title string
Version VersionInfo
}
// LanguageInfo contains information for linking and displaying the language
type LanguageInfo struct {
Label string
Locale string
URL string
}
// VersionInfo representes a Salesforce documentation version
type VersionInfo struct {
DocVersion string `json:"doc_version"`
ReleaseVersion string `json:"release_version"`
VersionText string `json:"version_text"`
VersionURL string `json:"version_url"`
}
// TOCEntry represents a single Table of Contents item
type TOCEntry struct {
Text string
ID string
LinkAttr LinkAttr `json:"a_attr,omitempty"`
Children []TOCEntry
ComputedFirstTopic bool
ComputedResetPageLayout bool
}
// LinkAttr represents all attributes bound to a link
type LinkAttr struct {
Href string
}
// TOCContent contains content information for a piece of documenation
type TOCContent struct {
ID string
Title string
Content string
}
// SupportedType contains information for generating indexes for types we care about
type SupportedType struct {
	// Exact match against an id
	ID string
	// Match against a prefix for the id
	IDPrefix string
	// Match against a prefix for the title
	TitlePrefix string
	// Match against a suffix for the title
	TitleSuffix string
	// Override Title
	TitleOverride string
	// Docset type
	TypeName string
	// Not sure...
	AppendParents bool
	// Skip trimming of suffix from title
	NoTrim bool
	// Not sure...
	ParseContent bool
	// Should this name be pushed into the path for child entries. Eg. Class name prefix methods
	PushName bool
	// Should a namespace be prefixed to the database entry
	ShowNamespace bool
	// Indicates that this just contains other nodes and we don't want to index this node
	// This type will cascade down one level, but IsContainer itself is not hereditary
	IsContainer bool
	// Indicates that this and all nodes underneath should be hidden
	IsHidden bool
	// Should cascade type downwards unless the child has its own type
	CascadeType bool
	// Should cascade type downwards, even if children have their own type
	ForceCascadeType bool
}
// Sqlite Struct
// SearchIndex is the database table that indexes the docs
type SearchIndex struct {
ID int64 `db:id`
Name string `db:name`
Type string `db:type`
Path string `db:path`
}
// matchesTitle reports whether title matches this type's configured
// title prefix or title suffix. Empty patterns never match.
func (suppType SupportedType) matchesTitle(title string) bool {
	if suppType.TitlePrefix != "" && strings.HasPrefix(title, suppType.TitlePrefix) {
		return true
	}
	return suppType.TitleSuffix != "" && strings.HasSuffix(title, suppType.TitleSuffix)
}
// matchesID reports whether id matches this type, either exactly
// (ID) or by prefix (IDPrefix). An unequal exact ID still falls
// through to the prefix check.
func (suppType SupportedType) matchesID(id string) bool {
	if suppType.ID != "" && id == suppType.ID {
		return true
	}
	if suppType.IDPrefix == "" {
		return false
	}
	return strings.HasPrefix(id, suppType.IDPrefix)
}
// ShouldCascade reports whether this type should be cascaded down to
// child entries: containers always cascade, as do types explicitly
// marked CascadeType or ForceCascadeType.
func (suppType SupportedType) ShouldCascade() bool {
	return suppType.IsContainer || suppType.CascadeType || suppType.ForceCascadeType
}
// CreateChildType returns a copy of this type suitable for a child
// entry. Fields that do not cascade are reset.
func (suppType SupportedType) CreateChildType() SupportedType {
	child := suppType
	// IsContainer is not hereditary
	child.IsContainer = false
	return child
}
// ShouldSkipIndex returns whether this entry should be left out of the
// search index: containers only group other nodes and hidden entries
// are excluded entirely.
func (suppType SupportedType) ShouldSkipIndex() bool {
	return suppType.IsContainer || suppType.IsHidden
}
// IsValidType reports whether a docset type name has been assigned.
func (suppType SupportedType) IsValidType() bool {
	return len(suppType.TypeName) > 0
}
// IsType indicates that the TOCEntry is of the given SupportedType,
// matching either on the entry's title or on its ID.
func (entry TOCEntry) IsType(t SupportedType) bool {
	if t.matchesTitle(entry.Text) {
		return true
	}
	return t.matchesID(entry.ID)
}
// CleanTitle produces the display title for an entry: an explicit
// override wins, NoTrim keeps the raw text, otherwise the known
// type suffix (preceded by a space) is trimmed off.
func (entry TOCEntry) CleanTitle(t SupportedType) string {
	switch {
	case t.TitleOverride != "":
		return t.TitleOverride
	case t.NoTrim:
		return entry.Text
	default:
		return strings.TrimSuffix(entry.Text, " "+t.TitleSuffix)
	}
}
// GetRelLink extracts the relative link from the entry's Href,
// optionally stripping a trailing #anchor. Returns "" when the
// entry has no link.
func (entry TOCEntry) GetRelLink(removeAnchor bool) (relLink string) {
	relLink = entry.LinkAttr.Href
	if relLink == "" || !removeAnchor {
		return
	}
	// Drop everything from the last "#" on; an anchor at position 0
	// is deliberately left alone.
	if idx := strings.LastIndex(relLink, "#"); idx > 0 {
		relLink = relLink[:idx]
	}
	return
}
// GetContent retrieves Content for this TOCEntry from the Salesforce
// get_document_content API. It returns nil content and nil error when
// the entry has no link; any HTTP or decode failure is returned to the
// caller (after logging diagnostic details for decode failures).
func (entry TOCEntry) GetContent(toc *AtlasTOC) (content *TOCContent, err error) {
	relLink := entry.GetRelLink(true)
	if relLink == "" {
		// Nothing to fetch for link-less entries
		return
	}
	url := fmt.Sprintf(
		"https://developer.salesforce.com/docs/get_document_content/%s/%s/%s/%s",
		toc.Deliverable,
		relLink,
		toc.Locale,
		toc.Version.DocVersion,
	)
	resp, err := http.Get(url)
	if err != nil {
		return
	}
	// Read the downloaded JSON; closing the body is mandatory and any
	// close failure is treated as fatal.
	defer func() {
		ExitIfError(resp.Body.Close())
	}()
	contents, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return
	}
	// Load into struct. contents is already a []byte, so no conversion
	// is needed before unmarshalling.
	content = new(TOCContent)
	err = json.Unmarshal(contents, content)
	if err != nil {
		fmt.Println("Error reading JSON")
		fmt.Println(resp.Status)
		fmt.Println(url)
		fmt.Println(string(contents))
		return
	}
	return
}
// GetContentFilepath returns the relative path under which this
// entry's content should be written, derived from the TOC's locale
// and deliverable. Exits the program when the entry has no link.
func (entry TOCEntry) GetContentFilepath(toc *AtlasTOC, removeAnchor bool) string {
	link := entry.GetRelLink(removeAnchor)
	if link == "" {
		// A missing link is unrecoverable for file generation
		ExitIfError(NewFormatedError("Link not found for %s", entry.ID))
	}
	metaDir := fmt.Sprintf("atlas.%s.%s.meta", toc.Locale, toc.Deliverable)
	return fmt.Sprintf("%s/%s/%s", metaDir, toc.Deliverable, link)
}

296
SFDashC/supportedtypes.go Normal file
View File

@ -0,0 +1,296 @@
package main
// SupportedTypes defines, in priority order, which TOC entries are
// indexed into the docset and how each is treated. Earlier entries win,
// so exact-ID overrides precede the generic title-suffix matches.
var SupportedTypes = []SupportedType{
	// ID Based overrides should come first
	SupportedType{
		ID: "ref_tag_set_attr_intf",
		TypeName: "Guide",
	},
	SupportedType{
		ID: "namespaces_intro",
		TypeName: "Guide",
	},
	SupportedType{
		ID: "namespaces_using_organization",
		TypeName: "Guide",
	},
	SupportedType{
		TypeName: "Guide",
		ID: "apex_intro_get_started",
		CascadeType: true,
	},
	SupportedType{
		TypeName: "Guide",
		ID: "pages_flows_customize_runtime_ui",
	},
	SupportedType{
		TypeName: "Guide",
		ID: "pages_quick_start_controller_shell",
	},
	SupportedType{
		TypeName: "Guide",
		ID: "pages_email_custom_controller",
	},
	SupportedType{
		TypeName: "Guide",
		IDPrefix: "apex_qs_",
		CascadeType: true,
	},
	SupportedType{
		TypeName: "Guide",
		ID: "apex_process_plugin_using",
	},
	SupportedType{
		TypeName: "Guide",
		ID: "apex_platform_cache_builder",
	},
	SupportedType{
		TypeName: "Guide",
		ID: "apex_classes_restful_http_testing_httpcalloutmock",
	},
	SupportedType{
		TypeName: "Guide",
		ID: "apex_classes_namespaces_and_invoking_methods",
	},
	SupportedType{
		TypeName: "Guide",
		ID: "apex_classes_schema_namespace_using",
	},
	// Apex types
	SupportedType{
		TypeName: "Method",
		TitleSuffix: "Methods",
		AppendParents: true,
		IsContainer: true,
		ShowNamespace: true,
	},
	SupportedType{
		TypeName: "Constructor",
		TitleSuffix: "Constructors",
		AppendParents: true,
		IsContainer: true,
		ShowNamespace: false,
	},
	SupportedType{
		TypeName: "Class",
		TitleSuffix: "Class",
		PushName: true,
		AppendParents: true,
		ShowNamespace: false,
	},
	SupportedType{
		TypeName: "Namespace",
		TitleSuffix: "Namespace",
		PushName: true,
		AppendParents: true,
		ShowNamespace: false,
	},
	SupportedType{
		TypeName: "Interface",
		TitleSuffix: "Global Interface",
		PushName: true,
		AppendParents: true,
		ShowNamespace: false,
	},
	SupportedType{
		TypeName: "Interface",
		TitleSuffix: "Interface",
		PushName: true,
		AppendParents: true,
		ShowNamespace: false,
	},
	SupportedType{
		TypeName: "Statement",
		ID: "langCon_apex_SOQL_query_all_rows",
		TitleOverride: "ALL ROWS",
	},
	SupportedType{
		TypeName: "Statement",
		TitleSuffix: "Statement",
		ShowNamespace: false,
	},
	SupportedType{
		TypeName: "Enum",
		TitleSuffix: "Enum",
		AppendParents: true,
		ShowNamespace: true,
	},
	SupportedType{
		TypeName: "Property",
		TitleSuffix: "Properties",
		AppendParents: true,
		IsContainer: true,
		ShowNamespace: true,
	},
	SupportedType{
		TypeName: "Guide",
		TitleSuffix: "Example Implementation",
		NoTrim: true,
		ShowNamespace: false,
	},
	SupportedType{
		TypeName: "Statement",
		TitleSuffix: "Statements",
		NoTrim: true,
		AppendParents: false,
		IsContainer: true,
		ShowNamespace: false,
	},
	SupportedType{
		TypeName: "Field",
		TitleSuffix: "Fields",
		AppendParents: true,
		PushName: true,
		IsContainer: true,
		ShowNamespace: true,
	},
	SupportedType{
		TypeName: "Exception",
		TitleSuffix: "Exceptions",
		NoTrim: true,
		AppendParents: true,
		ShowNamespace: false,
		ParseContent: true,
	},
	SupportedType{
		TypeName: "Constant",
		TitleSuffix: "Constants",
		NoTrim: true,
		AppendParents: true,
		ShowNamespace: false,
		ParseContent: true,
	},
	SupportedType{
		TypeName: "Class",
		TitleSuffix: "Class (Base Email Methods)",
		PushName: true,
		AppendParents: true,
		ShowNamespace: false,
	},
	SupportedType{
		TypeName: "Guide",
		TitlePrefix: "Best Practices",
		TitleSuffix: "Best Practices",
		NoTrim: true,
		PushName: false,
		AppendParents: false,
		ShowNamespace: false,
	},
	// VF Types
	SupportedType{
		IDPrefix: "pages_compref_",
		TypeName: "Tag",
		NoTrim: true,
	},
	SupportedType{
		TypeName: "Guide",
		IDPrefix: "pages_maps",
		NoTrim: true,
	},
	SupportedType{
		TypeName: "Guide",
		IDPrefix: "pages_dynamic_vf",
		NoTrim: true,
	},
	SupportedType{
		TypeName: "Guide",
		IDPrefix: "pages_comp_cust",
		NoTrim: true,
	},
	SupportedType{
		TypeName: "Guide",
		IDPrefix: "pages_resources",
		NoTrim: true,
	},
	SupportedType{
		TypeName: "Guide",
		IDPrefix: "pages_controller",
		NoTrim: true,
	},
	SupportedType{
		TypeName: "Guide",
		IDPrefix: "pages_styling",
		NoTrim: true,
	},
	SupportedType{
		TypeName: "Guide",
		IDPrefix: "pages_security",
		NoTrim: true,
	},
	SupportedType{
		TypeName: "Variables",
		TitleSuffix: "Global Variables",
		NoTrim: true,
		AppendParents: true,
		ShowNamespace: false,
		ParseContent: true,
		IsContainer: true,
	},
	SupportedType{
		TypeName: "Guide",
		IDPrefix: "pages_variables_functions",
	},
	SupportedType{
		TypeName: "Guide",
		IDPrefix: "pages_variables_operators",
	},
	// Aurora components
	SupportedType{
		TypeName: "Tag",
		ID: "aura_compref",
		IsContainer: true,
		CascadeType: true,
	},
	SupportedType{
		TypeName: "Tag",
		ID: "ref_messaging",
		IsContainer: true,
		CascadeType: true,
	},
	SupportedType{
		TypeName: "Interface",
		ID: "ref_interfaces",
		IsContainer: true,
		CascadeType: true,
	},
	SupportedType{
		TypeName: "Event",
		ID: "ref_events",
		IsContainer: true,
		CascadeType: true,
	},
	SupportedType{
		TypeName: "Event",
		ID: "ref_events_aura",
		IsContainer: true,
		CascadeType: true,
	},
	SupportedType{
		TypeName: "Guide",
		ID: "debug_intro",
	},
	SupportedType{
		TypeName: "Guide",
		ID: "components_using",
	},
	SupportedType{
		TypeName: "Guide",
		ID: "components_overview",
	},
	SupportedType{
		TypeName: "Guide",
		IDPrefix: "qs_intro",
		IsContainer: true,
		CascadeType: true,
	},
	SupportedType{
		TypeName: "Guide",
		ID: "events_intro",
	},
	SupportedType{
		TypeName: "Guide",
		ID: "apps_intro",
		IsHidden: true,
		CascadeType: true,
	},
}

12
go.mod Normal file
View File

@ -0,0 +1,12 @@
module github.com/vividboarder/docset-sfdc/SFDashC
go 1.14
require (
github.com/coopernurse/gorp v1.6.2-0.20141208005834-236e1383df4c
github.com/go-sql-driver/mysql v1.5.0 // indirect
github.com/lib/pq v1.7.0 // indirect
github.com/mattn/go-sqlite3 v1.2.1-0.20170407154627-cf7286f069c3
github.com/ziutek/mymysql v1.5.4 // indirect
golang.org/x/net v0.0.0-20170503120255-feeb485667d1 // indirect
)

12
go.sum Normal file
View File

@ -0,0 +1,12 @@
github.com/coopernurse/gorp v1.6.2-0.20141208005834-236e1383df4c h1:ZPZPlyzvrXG672C6jiMkDqIQXK6ekn7rFAxpTU33Tt4=
github.com/coopernurse/gorp v1.6.2-0.20141208005834-236e1383df4c/go.mod h1:wkfIkQktc4uuBo0kLNE8tMMN9okbsTa2orfZvBaL9F8=
github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/lib/pq v1.7.0 h1:h93mCPfUSkaul3Ka/VG8uZdmW1uMHDGxzu0NWHuJmHY=
github.com/lib/pq v1.7.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/mattn/go-sqlite3 v1.2.1-0.20170407154627-cf7286f069c3 h1:e5KQ62MmaYQ0RqL4So5ssEDgNuVoXed4lfuhLBxsVWI=
github.com/mattn/go-sqlite3 v1.2.1-0.20170407154627-cf7286f069c3/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/ziutek/mymysql v1.5.4 h1:GB0qdRGsTwQSBVYuVShFBKaXSnSnYYC2d9knnE1LHFs=
github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0=
golang.org/x/net v0.0.0-20170503120255-feeb485667d1 h1:ED1ux2Gd2RrNOJ0GyENgOJEhK1yAo6Xx/+CNrzZlcig=
golang.org/x/net v0.0.0-20170503120255-feeb485667d1/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=

View File

@ -0,0 +1,13 @@
Salesforce DOCSET_NAME
===============
Created by [ViViDboarder](https://github.com/ViViDboarder)
## Generation
* Clone the repo for [docset-sfdc](https://github.com/ViViDboarder/docset-sfdc)
* Run `make` as described in the readme
## Dependencies
To avoid redundancy, the dependencies are listed in the README of the docset-sfdc repository linked above

18
resources/Info-Apex.plist Normal file
View File

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleIdentifier</key>
<string>Salesforce Apex</string>
<key>CFBundleName</key>
<string>Salesforce Apex</string>
<key>DocSetPlatformFamily</key>
<string>apex</string>
<key>isDashDocset</key>
<true/>
<key>dashIndexFilePath</key>
<string>apexcode.html</string>
<key>DashDocSetFallbackURL</key>
<string>https://developer.salesforce.com/docs/</string>
</dict>
</plist>

View File

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleIdentifier</key>
<string>Salesforce Lightning</string>
<key>CFBundleName</key>
<string>Salesforce Lightning</string>
<key>DocSetPlatformFamily</key>
<string>lightning</string>
<key>isDashDocset</key>
<true/>
<key>dashIndexFilePath</key>
<string>lightning.html</string>
<key>DashDocSetFallbackURL</key>
<string>https://developer.salesforce.com/docs/</string>
</dict>
</plist>

View File

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleIdentifier</key>
<string>Salesforce Visualforce</string>
<key>CFBundleName</key>
<string>Salesforce Visualforce</string>
<key>DocSetPlatformFamily</key>
<string>vf</string>
<key>isDashDocset</key>
<true/>
<key>dashIndexFilePath</key>
<string>pages.html</string>
<key>DashDocSetFallbackURL</key>
<string>https://developer.salesforce.com/docs/</string>
</dict>
</plist>

BIN
resources/bolt-icon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

BIN
resources/bolt-icon@2x.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.0 KiB

BIN
resources/cloud-icon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

BIN
resources/cloud-icon@2x.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 KiB

View File

@ -0,0 +1,15 @@
{
"name": "Salesforce Apex",
"version": "VERSION",
"archive": "Salesforce_Apex.tgz",
"author": {
"name": "ViViDboarder",
"link": "https://github.com/ViViDboarder"
},
"aliases": [
"apex",
"salesforce",
"sfdc"
],
"specific_versions": []
}

View File

@ -0,0 +1,15 @@
{
"name": "Salesforce Lightning",
"version": "VERSION",
"archive": "Salesforce_Lightning.tgz",
"author": {
"name": "ViViDboarder",
"link": "https://github.com/ViViDboarder"
},
"aliases": [
"lightning",
"salesforce",
"sfdc"
],
"specific_versions": []
}

View File

@ -0,0 +1,15 @@
{
"name": "Salesforce Visualforce",
"version": "VERSION",
"archive": "Salesforce_Visualforce.tgz",
"author": {
"name": "ViViDboarder",
"link": "https://github.com/ViViDboarder"
},
"aliases": [
"visualforce",
"salesforce",
"sfdc"
],
"specific_versions": []
}

62
scripts/archive-docset.sh Executable file
View File

@ -0,0 +1,62 @@
#! /bin/bash
# Packages a built docset into a Dash-User-Contributions style archive
# directory: tgz, docset.json, icons, and README.
set -e
# Static input/output locations
files_dir=./resources
build_dir=./build
out_dir=.
archive_dir=./archive
# Salesforce deliverable id, eg. apexcode, pages, lightning
deliverable=$1
# get_friendly_name maps a Salesforce deliverable id to a human-friendly
# docset name: apexcode -> Apex, pages -> Visualforce; anything else is
# simply capitalized.
function get_friendly_name {
    local deliverable=$1
    local name
    name="$(tr '[:lower:]' '[:upper:]' <<< "${deliverable:0:1}")${deliverable:1}"
    case "$deliverable" in
        "apexcode")
            name="Apex"
            ;;
        "pages")
            name="Visualforce"
            ;;
    esac
    # Quote the result to prevent word splitting/globbing (SC2086)
    echo "$name"
}
# get_icon_name selects the icon base name for a deliverable: lightning
# gets the bolt icon, everything else the cloud icon.
function get_icon_name {
    local icon="cloud-icon"
    case "$1" in
        "lightning")
            icon="bolt-icon"
            ;;
    esac
    # Quote the result to prevent word splitting/globbing (SC2086)
    echo "$icon"
}
# main assembles the archive directory for $deliverable: a tgz of the
# docset, a version-substituted docset.json, icons, and a README.
function main {
    local name
    name=$(get_friendly_name "$deliverable")
    local package="$out_dir/Salesforce $name.docset"
    # Distinct local name so the global $archive_dir is not shadowed
    local release_dir="$archive_dir/Salesforce_$name"
    local archive="$release_dir/Salesforce_$name.tgz"
    local icon
    icon=$(get_icon_name "$deliverable")

    mkdir -p "$release_dir"

    # Generate docset.json with the built docs version substituted in
    local version
    version=$(cat "$build_dir/$deliverable-version.txt")
    sed "s/VERSION/$version/" "$files_dir/docset-$deliverable.json" > "$release_dir/docset.json"

    # Generated tgz archive
    tar --exclude=".DS_Store" -czf "$archive" "$package"

    # Copy icons
    cp "$files_dir/$icon.png" "$release_dir/icon.png"
    cp "$files_dir/$icon@2x.png" "$release_dir/icon@2x.png"

    # Copy readme
    sed "s/DOCSET_NAME/$name/" "$files_dir/Archive_Readme.md" > "$release_dir/README.md"

    echo "Finished archive $archive"
}

main

124
scripts/create-pr.sh Executable file
View File

@ -0,0 +1,124 @@
#! /bin/bash
# Opens a pull request against Dash-User-Contributions containing
# freshly generated Salesforce docset archives.
set -eu
set -o pipefail

if ! git config --global user.name ;then
    # If no global git configs exist, let's set some temporary values
    export GIT_COMMITTER_NAME="${GIT_COMMITTER_NAME:-"ViViDboarder"}"
    export GIT_COMMITTER_EMAIL="${GIT_COMMITTER_EMAIL:-"ViViDboarder@gmail.com"}"
    export GIT_AUTHOR_NAME="$GIT_COMMITTER_NAME"
    export GIT_AUTHOR_EMAIL="$GIT_COMMITTER_EMAIL"
fi

# Get name of the fork and target repo
FORK_REPO="${FORK_REPO:-"ViViDboarder/Dash-User-Contributions"}"
TARGET_REPO="${TARGET_REPO:-"ViViDboarder/Dash-User-Contributions"}"

# If no github user is provided, take it from the fork name
if [ -z "${GITHUB_USER:-""}" ]; then
    GITHUB_USER="${FORK_REPO%%/*}"
fi

# Default to the empty string (not a placeholder) so validate()'s
# -z check can actually detect a missing token; a non-empty default
# like "default" would always pass validation. Keeps set -u happy.
GITHUB_TOKEN="${GITHUB_TOKEN:-}"

WORKDIR=$(pwd)
TMP_DIR="$WORKDIR/repotmp"
REPO_DIR="$TMP_DIR/Dash-User-Contributions"
# validate aborts unless a GitHub token was supplied, then announces
# what the script is about to do.
function validate() {
    if [ -z "$GITHUB_TOKEN" ]; then
        echo "Must provide \$GITHUB_TOKEN as an environment variable"
        exit 1
    fi

    echo "Creating PR for $GITHUB_USER to $TARGET_REPO"
}
# read_version prints the docs version shared by all three built
# docsets, or exits non-zero (after printing a diagnostic) when the
# built versions disagree.
function read_version() {
    local apex_version pages_version lightning_version
    apex_version="$(cat ./build/apexcode-version.txt)"
    pages_version="$(cat ./build/pages-version.txt)"
    lightning_version="$(cat ./build/lightning-version.txt)"

    if [ "$apex_version" != "$pages_version" ] || [ "$apex_version" != "$lightning_version" ]; then
        echo "Apex: $apex_version, Pages: $pages_version, Lightning: $lightning_version"
        echo "One of the doc versions doesn't match"
        exit 1
    fi

    # All versions match, return one of them
    echo "$apex_version"
}
# workdir_git runs a git command inside $REPO_DIR and always returns
# to $WORKDIR afterwards, even when the git command fails (in which
# case it returns 1).
function workdir_git() {
    cd "$REPO_DIR"
    local failed=0
    git "$@" || failed=1
    cd "$WORKDIR"
    return "$failed"
}
# shallow_clone_or_pull ensures a shallow clone of the fork exists in
# $REPO_DIR, fast-forwarding master when the clone is already present.
function shallow_clone_or_pull() {
    mkdir -p "$TMP_DIR"
    if [ ! -d "$REPO_DIR" ]; then
        git clone --depth 1 "https://$GITHUB_USER:$GITHUB_TOKEN@github.com/$FORK_REPO" "$REPO_DIR"
    else
        workdir_git checkout master
        workdir_git pull --ff-only origin master
    fi
}
# copy_release copies the generated archive directories into the
# contribution repo's docsets directory.
function copy_release() {
    cp -r archive/* "$REPO_DIR/docsets/"
}
# create_release_branch creates a fresh release branch, aborting when
# the branch already exists (which usually means the release was
# already published).
function create_release_branch() {
    local branch="$1"
    if workdir_git checkout -b "$branch" ; then
        return 0
    fi
    echo "Could not create release branch. Release likely already exists."
    exit 1
}
# create_pr commits the copied docsets on the release branch, pushes it
# to the fork, and opens a pull request against $TARGET_REPO via the
# GitHub REST API. Prints the PR URL on success; dumps the raw API
# response and exits 1 on failure.
function create_pr() {
    local version="$1"
    local branch="$2"
    local title="Update Salesforce docsets to $version"

    # Commit everything copied into the repo and push the branch
    workdir_git checkout "$branch"
    workdir_git add .
    workdir_git commit -m "$title"
    workdir_git push origin HEAD

    local result
    result=$(curl \
        -u "$GITHUB_USER:$GITHUB_TOKEN" \
        -X POST \
        -H "Accept: application/vnd.github.v3+json" \
        "https://api.github.com/repos/$TARGET_REPO/pulls" \
        -d "{\"title\":\"$title\",\"body\":\"This branch contains auto-generated updates to version $version\", \"head\":\"$GITHUB_USER:$branch\",\"base\":\"master\"}")

    # jq --exit-status fails when .html_url is missing/null, which is
    # how we detect an API error response
    local result_url
    if result_url="$(echo "$result" | jq --exit-status --raw-output .html_url)" ;then
        echo "Pull request created at $result_url"
    else
        echo "$result"
        exit 1
    fi
}
# main drives the whole flow: validate credentials, read the built
# version, sync the fork, copy archives in, and open the PR.
function main() {
    validate

    local version
    # On failure, $version holds read_version's captured output (its
    # diagnostic messages), which is echoed before exiting
    version="$(read_version)" || { echo "$version"; exit 1; }
    local branch="salesforce-$version"

    shallow_clone_or_pull
    copy_release
    create_release_branch "$branch"
    create_pr "$version" "$branch"
}

main

57
scripts/package-docset.sh Executable file
View File

@ -0,0 +1,57 @@
#! /bin/bash
# Assembles a .docset bundle from built HTML/CSS, the sqlite index,
# the plist, and icons.
set -e
# Static input/output locations
files_dir=./resources
build_dir=./build
out_dir=.
# Salesforce deliverable id, eg. apexcode, pages, lightning
deliverable=$1
# get-friendly-name maps a Salesforce deliverable id to a human-friendly
# docset name: apexcode -> Apex, pages -> Visualforce; anything else is
# simply capitalized.
function get-friendly-name {
    local deliverable=$1
    # Declare and assign separately so the assignment's exit status is
    # not masked (SC2155); quote the herestring to avoid splitting
    local name
    name="$(tr '[:lower:]' '[:upper:]' <<< "${deliverable:0:1}")${deliverable:1}"
    case "$deliverable" in
        "apexcode")
            name="Apex"
            ;;
        "pages")
            name="Visualforce"
            ;;
    esac
    # Quote the result to prevent word splitting/globbing (SC2086)
    echo "$name"
}
# get_icon_name selects the icon base name for a deliverable: lightning
# gets the bolt icon, everything else the cloud icon.
function get_icon_name {
    local icon="cloud-icon"
    case "$1" in
        "lightning")
            icon="bolt-icon"
            ;;
    esac
    # Quote the result to prevent word splitting/globbing (SC2086)
    echo "$icon"
}
# main builds the .docset bundle for $deliverable: meta HTML tree,
# entry HTML, CSS, Info.plist, sqlite index, and icons.
function main {
    # Declare and assign separately (SC2155); quote all expansions so
    # names containing spaces (eg. "Salesforce Apex.docset") are safe
    local name
    name=$(get-friendly-name "$deliverable")
    local package="$out_dir/Salesforce $name.docset"
    local icon
    icon=$(get_icon_name "$deliverable")

    mkdir -p "$package/Contents/Resources/Documents"

    # Copy all meta HTML
    cp -r "$build_dir/atlas.en-us.$deliverable.meta" "$package/Contents/Resources/Documents/"

    # Copy HTML and CSS
    cp "$build_dir/$deliverable.html" "$package/Contents/Resources/Documents/"
    cp "$build_dir"/*.css "$package/Contents/Resources/Documents/"

    # Copy plist
    cp "$files_dir/Info-$name.plist" "$package/Contents/Info.plist"

    # Copy index
    cp "$build_dir/docSet.dsidx" "$package/Contents/Resources/"

    # Copy icons
    cp "$files_dir/$icon.png" "$package/icon.png"
    cp "$files_dir/$icon@2x.png" "$package/icon@2x.png"

    echo "Finished building $package"
}

main