Add gosimple linter (#242)

Erik Johnston 2017-09-20 14:15:38 +01:00 committed by GitHub
parent b72142ace5
commit cc2f755cb3
16 changed files with 28 additions and 66 deletions

View file

@@ -16,6 +16,7 @@
     "misspell",
     "unparam",
     "errcheck",
-    "vet"
+    "vet",
+    "gosimple"
   ]
 }

View file

@@ -67,10 +67,7 @@ func (d *Database) CreateDevice(
 		}
 		dev, err = d.devices.insertDevice(ctx, txn, deviceID, localpart, accessToken)
-		if err != nil {
-			return err
-		}
-		return nil
+		return err
 	})
 	return
 }
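Note: this is the most common simplification in the commit. When the only remaining work is to return the error (or nil), the `if err != nil { return err }; return nil` tail collapses to a single `return err`. A standalone sketch of the same rewrite, using hypothetical names:

package main

import (
	"errors"
	"fmt"
)

// insertRecord is a hypothetical stand-in for a call like insertDevice.
func insertRecord(id string) error {
	if id == "" {
		return errors.New("empty id")
	}
	return nil
}

// saveVerbose is the shape gosimple flags: the branch adds nothing, because
// both paths just hand back the value of err.
func saveVerbose(id string) error {
	err := insertRecord(id)
	if err != nil {
		return err
	}
	return nil
}

// saveSimple is the equivalent form used throughout this commit.
func saveSimple(id string) error {
	err := insertRecord(id)
	return err
}

func main() {
	fmt.Println(saveVerbose(""), saveSimple("ok"))
}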

View file

@@ -45,9 +45,6 @@ func (p *SyncAPIProducer) SendData(userID string, roomID string, dataType string
 	m.Key = sarama.StringEncoder(userID)
 	m.Value = sarama.ByteEncoder(value)
-	if _, _, err := p.Producer.SendMessage(&m); err != nil {
-		return err
-	}
-	return nil
+	_, _, err = p.Producer.SendMessage(&m)
+	return err
 }

View file

@@ -57,9 +57,6 @@ func (p *UserUpdateProducer) SendUpdate(
 	}
 	m.Value = sarama.ByteEncoder(value)
-	if _, _, err := p.Producer.SendMessage(&m); err != nil {
-		return err
-	}
-	return nil
+	_, _, err = p.Producer.SendMessage(&m)
+	return err
 }

View file

@@ -116,11 +116,8 @@ func (s *serverKeyStatements) upsertServerKeys(
 		string(request.ServerName), string(request.KeyID), nameAndKeyID(request),
 		int64(keys.ValidUntilTS), keyJSON,
 	)
-	if err != nil {
-		return err
-	}
-	return nil
+	return err
 }
 
 func nameAndKeyID(request gomatrixserverlib.PublicKeyRequest) string {
 	return string(request.ServerName) + "\x1F" + string(request.KeyID)

View file

@@ -135,19 +135,16 @@ func NewMatrixKey(matrixKeyPath string) (err error) {
 		err = keyOut.Close()
 	})()
 
-	if err = pem.Encode(keyOut, &pem.Block{
+	err = pem.Encode(keyOut, &pem.Block{
 		Type: "MATRIX PRIVATE KEY",
 		Headers: map[string]string{
 			"Key-ID": "ed25519:" + base64.RawStdEncoding.EncodeToString(data[:3]),
 		},
 		Bytes: data[3:],
-	}); err != nil {
-		return err
-	}
-	return nil
+	})
+	return err
 }
 
 const certificateDuration = time.Hour * 24 * 365 * 10
 
 // NewTLSKey generates a new RSA TLS key and certificate and writes it to a file.
@@ -191,12 +188,9 @@ func NewTLSKey(tlsKeyPath, tlsCertPath string) error {
 		return err
 	}
 	defer keyOut.Close() // nolint: errcheck
-	if err = pem.Encode(keyOut, &pem.Block{
+	err = pem.Encode(keyOut, &pem.Block{
 		Type:  "RSA PRIVATE KEY",
 		Bytes: x509.MarshalPKCS1PrivateKey(priv),
-	}); err != nil {
-		return err
-	}
-	return nil
+	})
+	return err
 }

View file

@@ -347,9 +347,5 @@ func fillDisplayName(
 	// Use the m.room.third_party_invite event to fill the "displayname" and
 	// update the m.room.member event's content with it
 	content.ThirdPartyInvite.DisplayName = thirdPartyInviteContent.DisplayName
-	if err := builder.SetContent(content); err != nil {
-		return err
-	}
-	return nil
+	return builder.SetContent(content)
 }
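Note: a related variant of the same cleanup. When the error-returning call is the last statement, the `if err := f(); err != nil { return err }; return nil` wrapper reduces to returning the call directly. A small sketch with hypothetical names:

package main

import (
	"encoding/json"
	"fmt"
)

// contentHolder mimics a builder with a SetContent-style method that can fail;
// the type and method names are illustrative only.
type contentHolder struct {
	content json.RawMessage
}

func (b *contentHolder) setContent(v interface{}) error {
	data, err := json.Marshal(v)
	if err != nil {
		return err
	}
	b.content = data
	return nil
}

// fillVerbose is the form gosimple flags.
func fillVerbose(b *contentHolder, v interface{}) error {
	if err := b.setContent(v); err != nil {
		return err
	}
	return nil
}

// fillSimple returns the call directly, as the diff above now does.
func fillSimple(b *contentHolder, v interface{}) error {
	return b.setContent(v)
}

func main() {
	b := &contentHolder{}
	fmt.Println(fillVerbose(b, map[string]string{"displayname": "Alice"}))
	fmt.Println(fillSimple(b, map[string]string{"displayname": "Bob"}))
}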

View file

@@ -55,7 +55,7 @@ func GetPathFromBase64Hash(base64Hash types.Base64Hash, absBasePath config.Path)
 	// check if the absolute absBasePath is a prefix of the absolute filePath
 	// if so, no directory escape has occurred and the filePath is valid
 	// Note: absBasePath is already absolute
-	if strings.HasPrefix(filePath, string(absBasePath)) == false {
+	if !strings.HasPrefix(filePath, string(absBasePath)) {
 		return "", fmt.Errorf("Invalid filePath (not within absBasePath %v): %v", absBasePath, filePath)
 	}

View file

@@ -80,11 +80,7 @@ func SelectThumbnail(desired types.ThumbnailSize, thumbnails []*types.ThumbnailM
 		fitness := calcThumbnailFitness(types.ThumbnailSize(thumbnailSize), nil, desired)
 		if isBetter := fitness.betterThan(bestFit, desired.ResizeMethod == "crop"); isBetter {
 			bestFit = fitness
-			chosenThumbnailSize = &types.ThumbnailSize{
-				Width:        thumbnailSize.Width,
-				Height:       thumbnailSize.Height,
-				ResizeMethod: thumbnailSize.ResizeMethod,
-			}
+			chosenThumbnailSize = (*types.ThumbnailSize)(&thumbnailSize)
 		}
 	}
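Note: the rewrite above replaces a field-by-field copy with a pointer conversion. When two named struct types share an identical underlying type, Go permits converting a pointer to one into a pointer to the other. A minimal sketch of that rule with illustrative type names (not the repository's actual definitions):

package main

import "fmt"

// Two distinct named types with the same underlying struct type,
// standing in for a config-side size and an API-side size.
type configSize struct {
	Width        int
	Height       int
	ResizeMethod string
}

type apiSize struct {
	Width        int
	Height       int
	ResizeMethod string
}

func main() {
	sizes := []configSize{
		{Width: 32, Height: 32, ResizeMethod: "crop"},
		{Width: 640, Height: 480, ResizeMethod: "scale"},
	}

	var chosen *apiSize
	for i := range sizes {
		// Instead of copying each field into a new apiSize, convert the
		// pointer; this is legal because the underlying struct types are
		// identical. Indexing the slice (rather than taking the address of
		// a loop copy) keeps the pointer valid after the loop.
		chosen = (*apiSize)(&sizes[i])
	}
	fmt.Println(*chosen)
}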

View file

@@ -156,7 +156,7 @@ func createThumbnail(src types.Path, img image.Image, config types.ThumbnailSize
 	logger.WithFields(log.Fields{
 		"ActualWidth":  width,
 		"ActualHeight": height,
-		"processTime":  time.Now().Sub(start),
+		"processTime":  time.Since(start),
 	}).Info("Generated thumbnail")
 
 	stat, err := os.Stat(string(dst))
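Note: `time.Since(start)` is the standard-library shorthand for `time.Now().Sub(start)`, which is the substitution made above. A tiny self-contained sketch:

package main

import (
	"fmt"
	"time"
)

func main() {
	start := time.Now()
	time.Sleep(10 * time.Millisecond)

	// Equivalent expressions; gosimple prefers the shorthand.
	verbose := time.Now().Sub(start)
	short := time.Since(start)

	fmt.Println(verbose, short)
}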

View file

@@ -155,7 +155,7 @@ func (r *downloadRequest) jsonErrorResponse(w http.ResponseWriter, res util.JSON
 // Validate validates the downloadRequest fields
 func (r *downloadRequest) Validate() *util.JSONResponse {
-	if mediaIDRegex.MatchString(string(r.MediaMetadata.MediaID)) == false {
+	if !mediaIDRegex.MatchString(string(r.MediaMetadata.MediaID)) {
 		return &util.JSONResponse{
 			Code: 404,
 			JSON: jsonerror.NotFound(fmt.Sprintf("mediaId must be a non-empty string using only characters in %v", mediaIDCharacters)),
@@ -337,7 +337,7 @@ func (r *downloadRequest) getThumbnailFile(
 	thumbnail, thumbnailSize = thumbnailer.SelectThumbnail(r.ThumbnailSize, thumbnails, thumbnailSizes)
 	// If dynamicThumbnails is true and we are not over-loaded then we would have generated what was requested above.
 	// So we don't try to generate a pre-generated thumbnail here.
-	if thumbnailSize != nil && dynamicThumbnails == false {
+	if thumbnailSize != nil && !dynamicThumbnails {
 		r.Logger.WithFields(log.Fields{
 			"Width":  thumbnailSize.Width,
 			"Height": thumbnailSize.Height,
@@ -525,7 +525,7 @@ func (r *downloadRequest) fetchRemoteFileAndStoreMetadata(
 	// If the file is a duplicate (has the same hash as an existing file) then
 	// there is valid metadata in the database for that file. As such we only
 	// remove the file if it is not a duplicate.
-	if duplicate == false {
+	if !duplicate {
 		finalDir := filepath.Dir(string(finalPath))
 		fileutils.RemoveDir(types.Path(finalDir), r.Logger)
 	}

View file

@@ -226,7 +226,7 @@ func (r *uploadRequest) storeFileAndMetadata(tmpDir types.Path, absBasePath conf
 	// If the file is a duplicate (has the same hash as an existing file) then
 	// there is valid metadata in the database for that file. As such we only
 	// remove the file if it is not a duplicate.
-	if duplicate == false {
+	if !duplicate {
 		fileutils.RemoveDir(types.Path(path.Dir(string(finalPath))), r.Logger)
 	}
 	return &util.JSONResponse{

View file

@@ -102,10 +102,7 @@ func fillPublicRoomsReq(httpReq *http.Request, request *publicRoomReq) *util.JSO
 		request.Since = httpReq.FormValue("since")
 		return nil
 	} else if httpReq.Method == "POST" {
-		if reqErr := httputil.UnmarshalJSONRequest(httpReq, request); reqErr != nil {
-			return reqErr
-		}
-		return nil
+		return httputil.UnmarshalJSONRequest(httpReq, request)
 	}
 
 	return &util.JSONResponse{

View file

@@ -217,11 +217,7 @@ func (r *RoomserverAliasAPI) sendUpdatedAliasesEvent(
 	var inputRes api.InputRoomEventsResponse
 
 	// Send the request
-	if err := r.InputAPI.InputRoomEvents(ctx, &inputReq, &inputRes); err != nil {
-		return err
-	}
-	return nil
+	return r.InputAPI.InputRoomEvents(ctx, &inputReq, &inputRes)
 }
 
 // SetupHTTP adds the RoomserverAliasAPI handlers to the http.ServeMux.

View file

@@ -102,8 +102,7 @@ type latestEventsUpdater struct {
 }
 
 func (u *latestEventsUpdater) doUpdateLatestEvents() error {
-	var prevEvents []gomatrixserverlib.EventReference
-	prevEvents = u.event.PrevEvents()
+	prevEvents := u.event.PrevEvents()
 	oldLatest := u.updater.LatestEvents()
 	u.lastEventIDSent = u.updater.LastEventIDSent()
 	u.oldStateNID = u.updater.CurrentStateSnapshotNID()
@@ -194,11 +193,8 @@ func (u *latestEventsUpdater) latestState() error {
 	u.stateBeforeEventRemoves, u.stateBeforeEventAdds, err = state.DifferenceBetweeenStateSnapshots(
 		u.ctx, u.db, u.newStateNID, u.stateAtEvent.BeforeStateSnapshotNID,
 	)
-	if err != nil {
-		return err
-	}
-	return nil
+	return err
 }
 
 func calculateLatest(
 	oldLatest []types.StateAtEventAndReference,
@@ -211,7 +207,7 @@ func calculateLatest(
 	for _, l := range oldLatest {
 		keep := true
 		for _, prevEvent := range prevEvents {
-			if l.EventID == prevEvent.EventID && bytes.Compare(l.EventSHA256, prevEvent.EventSHA256) == 0 {
+			if l.EventID == prevEvent.EventID && bytes.Equal(l.EventSHA256, prevEvent.EventSHA256) {
 				// This event can be removed from the latest events cause we've found an event that references it.
 				// (If an event is referenced by another event then it can't be one of the latest events in the room
 				// because we have an event that comes after it)
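Note: two further simplifications appear in the hunks above: a `var` declaration immediately followed by an assignment merges into a single `:=` declaration, and `bytes.Compare(a, b) == 0` becomes `bytes.Equal(a, b)`, which states the intent directly. A standalone sketch with illustrative values:

package main

import (
	"bytes"
	"fmt"
)

func main() {
	// Before: var refs []string; refs = fetchRefs()
	// After:  a single short variable declaration.
	refs := []string{"$event1", "$event2"}

	a := []byte("sha256-of-event")
	b := []byte("sha256-of-event")

	// bytes.Compare asks for an ordering just to test equality;
	// bytes.Equal expresses the equality check directly.
	verbose := bytes.Compare(a, b) == 0
	simple := bytes.Equal(a, b)

	fmt.Println(refs, verbose, simple)
}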

View file

@@ -461,9 +461,7 @@ func (d *SyncServerDatabase) fetchMissingStateEvents(
 	if len(stateEvents) != len(missing) {
 		return nil, fmt.Errorf("failed to map all event IDs to events: (got %d, wanted %d)", len(stateEvents), len(missing))
 	}
-	for _, e := range stateEvents {
-		events = append(events, e)
-	}
+	events = append(events, stateEvents...)
 	return events, nil
 }