Mirror of https://github.com/hoernschen/dendrite.git, synced 2024-12-27 07:28:27 +00:00
Convert clientapi to using base component
commit 0f38a0be8a
parent 7f1aa47770
1 changed file with 40 additions and 87 deletions
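In summary: the hand-rolled setup in main() (flag parsing, config loading, logging, tracing, Kafka producer/consumer creation, database and roomserver API construction) is replaced by the shared basecomponent.BaseDendrite, and the HTTP route registration moves into a new SetupClientAPIComponent function. Assembled from the added lines in the diff below, the converted entry point looks roughly like this condensed sketch (not the verbatim file; SetupClientAPIComponent is the function this commit introduces in the same file):

package main

import (
    "github.com/matrix-org/dendrite/common/basecomponent"
    "github.com/matrix-org/dendrite/common/keydb"
)

func main() {
    // The base component now does the work the old main() did by hand:
    // config, logging, tracing, Kafka clients and the roomserver HTTP APIs.
    base := basecomponent.NewBaseDendrite("ClientAPI")
    defer base.Close() // nolint: errcheck

    // Databases and the federation client come from the base component.
    accountDB := base.CreateAccountsDB()
    deviceDB := base.CreateDeviceDB()
    keyDB := base.CreateKeyDB()
    federation := base.CreateFederationClient()
    keyRing := keydb.CreateKeyRing(federation.Client, keyDB)

    // Registers the client API routes on the shared mux (base.APIMux).
    SetupClientAPIComponent(base, deviceDB, accountDB, federation, &keyRing)

    // Serves the shared mux on the configured client API listen address.
    base.SetupAndServeHTTP(string(base.Cfg.Listen.ClientAPI))
}

Everything that previously talked to cfg, the Kafka clients or the roomserver HTTP APIs now goes through base (base.Cfg, base.KafkaProducer, base.KafkaConsumer, base.InputAPI(), base.QueryAPI(), base.AliasAPI()), as the hunk below shows.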
@@ -15,113 +15,66 @@
 package main

 import (
-    "flag"
-    "net/http"
-    "os"
-
-    "github.com/gorilla/mux"
     "github.com/matrix-org/dendrite/clientapi/auth/storage/accounts"
     "github.com/matrix-org/dendrite/clientapi/auth/storage/devices"
     "github.com/matrix-org/dendrite/clientapi/consumers"
     "github.com/matrix-org/dendrite/clientapi/producers"
     "github.com/matrix-org/dendrite/clientapi/routing"
-    "github.com/matrix-org/dendrite/common"
-    "github.com/matrix-org/dendrite/common/config"
+    "github.com/matrix-org/dendrite/common/basecomponent"
     "github.com/matrix-org/dendrite/common/keydb"
-    "github.com/matrix-org/dendrite/roomserver/api"
-
     "github.com/matrix-org/gomatrixserverlib"
-
-    log "github.com/sirupsen/logrus"
-    sarama "gopkg.in/Shopify/sarama.v1"
+    "github.com/sirupsen/logrus"
 )

-var (
-    logDir = os.Getenv("LOG_DIR")
-    configPath = flag.String("config", "dendrite.yaml", "The path to the config file, For more information see the config file in this repository")
-)
-
-func main() {
-    common.SetupLogging(logDir)
-
-    flag.Parse()
-
-    cfg, err := config.Load(*configPath)
-    if err != nil {
-        log.Fatalf("Invalid config file: %s", err)
-    }
-
-    closer, err := cfg.SetupTracing("DendriteClientAPI")
-    if err != nil {
-        log.WithError(err).Fatalf("Failed to start tracer")
-    }
-    defer closer.Close() // nolint: errcheck
-
-    queryAPI := api.NewRoomserverQueryAPIHTTP(cfg.RoomServerURL(), nil)
-    aliasAPI := api.NewRoomserverAliasAPIHTTP(cfg.RoomServerURL(), nil)
-    inputAPI := api.NewRoomserverInputAPIHTTP(cfg.RoomServerURL(), nil)
-
-    roomserverProducer := producers.NewRoomserverProducer(inputAPI)
-
-    kafkaProducer, err := sarama.NewSyncProducer(cfg.Kafka.Addresses, nil)
-    if err != nil {
-        log.WithFields(log.Fields{
-            log.ErrorKey: err,
-            "addresses": cfg.Kafka.Addresses,
-        }).Panic("Failed to setup kafka producers")
-    }
+// SetupClientAPIComponent sets up and registers HTTP handlers for the ClientAPI
+// component.
+func SetupClientAPIComponent(
+    base *basecomponent.BaseDendrite,
+    deviceDB *devices.Database,
+    accountsDB *accounts.Database,
+    federation *gomatrixserverlib.FederationClient,
+    keyRing *gomatrixserverlib.KeyRing,
+) {
+    roomserverProducer := producers.NewRoomserverProducer(base.InputAPI())

     userUpdateProducer := &producers.UserUpdateProducer{
-        Producer: kafkaProducer,
-        Topic: string(cfg.Kafka.Topics.UserUpdates),
+        Producer: base.KafkaProducer,
+        Topic: string(base.Cfg.Kafka.Topics.UserUpdates),
     }

     syncProducer := &producers.SyncAPIProducer{
-        Producer: kafkaProducer,
-        Topic: string(cfg.Kafka.Topics.OutputClientData),
+        Producer: base.KafkaProducer,
+        Topic: string(base.Cfg.Kafka.Topics.OutputClientData),
     }

-    federation := gomatrixserverlib.NewFederationClient(
-        cfg.Matrix.ServerName, cfg.Matrix.KeyID, cfg.Matrix.PrivateKey,
+    consumer := consumers.NewOutputRoomEventConsumer(
+        base.Cfg, base.KafkaConsumer, accountsDB, base.QueryAPI(),
     )
-
-    accountDB, err := accounts.NewDatabase(string(cfg.Database.Account), cfg.Matrix.ServerName)
-    if err != nil {
-        log.Panicf("Failed to setup account database(%q): %s", cfg.Database.Account, err.Error())
-    }
-    deviceDB, err := devices.NewDatabase(string(cfg.Database.Device), cfg.Matrix.ServerName)
-    if err != nil {
-        log.Panicf("Failed to setup device database(%q): %s", cfg.Database.Device, err.Error())
-    }
-    keyDB, err := keydb.NewDatabase(string(cfg.Database.ServerKey))
-    if err != nil {
-        log.Panicf("Failed to setup key database(%q): %s", cfg.Database.ServerKey, err.Error())
+    if err := consumer.Start(); err != nil {
+        logrus.WithError(err).Panicf("failed to start room server consumer")
     }

-    keyRing := keydb.CreateKeyRing(federation.Client, keyDB)
-
-    kafkaConsumer, err := sarama.NewConsumer(cfg.Kafka.Addresses, nil)
-    if err != nil {
-        log.WithFields(log.Fields{
-            log.ErrorKey: err,
-            "addresses": cfg.Kafka.Addresses,
-        }).Panic("Failed to setup kafka consumers")
-    }
-
-    consumer := consumers.NewOutputRoomEventConsumer(cfg, kafkaConsumer, accountDB, queryAPI)
-    if err = consumer.Start(); err != nil {
-        log.Panicf("startup: failed to start room server consumer")
-    }
-
-    log.Info("Starting client API server on ", cfg.Listen.ClientAPI)
-
-    api := mux.NewRouter()
     routing.Setup(
-        api, *cfg, roomserverProducer,
-        queryAPI, aliasAPI, accountDB, deviceDB, federation, keyRing,
+        base.APIMux, *base.Cfg, roomserverProducer,
+        base.QueryAPI(), base.AliasAPI(), accountsDB, deviceDB,
+        federation, *keyRing,
         userUpdateProducer, syncProducer,
     )
-    common.SetupHTTPAPI(http.DefaultServeMux, common.WrapHandlerInCORS(api))
-
-    log.Fatal(http.ListenAndServe(string(cfg.Listen.ClientAPI), nil))
 }
+
+func main() {
+    base := basecomponent.NewBaseDendrite("ClientAPI")
+    defer base.Close() // nolint: errcheck
+
+    accountDB := base.CreateAccountsDB()
+    deviceDB := base.CreateDeviceDB()
+    keyDB := base.CreateKeyDB()
+    federation := base.CreateFederationClient()
+    keyRing := keydb.CreateKeyRing(federation.Client, keyDB)
+
+    SetupClientAPIComponent(
+        base, deviceDB, accountDB, federation, &keyRing,
+    )
+
+    base.SetupAndServeHTTP(string(base.Cfg.Listen.ClientAPI))
+}