package dlna

import (
	"context"
	"encoding/xml"
	"errors"
	"fmt"
	"net/http"
	"net/url"
	"os"
	"path"
	"path/filepath"
	"regexp"
	"strings"

	"github.com/anacrolix/dms/dlna"
	"github.com/anacrolix/dms/upnp"
	"github.com/rclone/rclone/cmd/serve/dlna/upnpav"
	"github.com/rclone/rclone/fs"
	"github.com/rclone/rclone/vfs"
)

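// Implements the UPnP ContentDirectory service for the DLNA server.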
type contentDirectoryService struct {
	*server
	upnp.Eventing
}

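// Returns the SystemUpdateID value reported to clients. The server's process
// ID is used, so the value is stable for the lifetime of the process.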
func (cds *contentDirectoryService) updateIDString() string {
	return fmt.Sprintf("%d", uint32(os.Getpid()))
}

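// Matches mime types that are presented as playable media (video, audio and image).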
var mediaMimeTypeRegexp = regexp.MustCompile("^(video|audio|image)/")

// Turns the given entry and DMS host into a UPnP object. A nil object is
// returned if the entry is not of interest.
func (cds *contentDirectoryService) cdsObjectToUpnpavObject(cdsObject object, fileInfo vfs.Node, resources vfs.Nodes, host string) (ret interface{}, err error) {
	obj := upnpav.Object{
		ID:         cdsObject.ID(),
		Restricted: 1,
		ParentID:   cdsObject.ParentID(),
	}

	if fileInfo.IsDir() {
		defaultChildCount := 1
		obj.Class = "object.container.storageFolder"
		obj.Title = fileInfo.Name()
		return upnpav.Container{
			Object:     obj,
			ChildCount: &defaultChildCount,
		}, nil
	}

	if !fileInfo.Mode().IsRegular() {
		return
	}

	// Read the mime type from the fs.Object if possible,
	// otherwise fall back to working out what it is from the file path.
	var mimeType string
	if o, ok := fileInfo.DirEntry().(fs.Object); ok {
		mimeType = fs.MimeType(context.TODO(), o)
		// If the backend doesn't know what the mime type is then
		// try getting it from the file name.
		if mimeType == "application/octet-stream" {
			mimeType = fs.MimeTypeFromName(fileInfo.Name())
		}
	} else {
		mimeType = fs.MimeTypeFromName(fileInfo.Name())
	}

	mediaType := mediaMimeTypeRegexp.FindStringSubmatch(mimeType)
	if mediaType == nil {
		return
	}

	obj.Class = "object.item." + mediaType[1] + "Item"
	obj.Title = fileInfo.Name()
	obj.Date = upnpav.Timestamp{Time: fileInfo.ModTime()}

	item := upnpav.Item{
		Object: obj,
		Res:    make([]upnpav.Resource, 0, 1),
	}

	item.Res = append(item.Res, upnpav.Resource{
		URL: (&url.URL{
			Scheme: "http",
			Host:   host,
			Path:   path.Join(resPath, cdsObject.Path),
		}).String(),
		ProtocolInfo: fmt.Sprintf("http-get:*:%s:%s", mimeType, dlna.ContentFeatures{
			SupportRange: true,
		}.String()),
		Size: uint64(fileInfo.Size()),
	})

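	// Add any associated resources (external subtitles, for example) as extra
	// resource entries on the item.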
	for _, resource := range resources {
		subtitleURL := (&url.URL{
			Scheme: "http",
			Host:   host,
			Path:   path.Join(resPath, resource.Path()),
		}).String()

		// Read the mime type from the fs.Object if possible,
		// otherwise fall back to working out what it is from the file path.
		var mimeType string
		if o, ok := resource.DirEntry().(fs.Object); ok {
			mimeType = fs.MimeType(context.TODO(), o)
			// If the backend doesn't know what the mime type is then
			// try getting it from the file name.
			if mimeType == "application/octet-stream" {
				mimeType = fs.MimeTypeFromName(resource.Name())
			}
		} else {
			mimeType = fs.MimeTypeFromName(resource.Name())
		}

		item.Res = append(item.Res, upnpav.Resource{
			URL:          subtitleURL,
			ProtocolInfo: fmt.Sprintf("http-get:*:%s:*", mimeType),
		})
	}

	ret = item
	return
}

// Returns all the upnpav objects in a directory.
func (cds *contentDirectoryService) readContainer(o object, host string) (ret []interface{}, err error) {
	node, err := cds.vfs.Stat(o.Path)
	if err != nil {
		return
	}

	if !node.IsDir() {
		err = errors.New("not a directory")
		return
	}

	dir := node.(*vfs.Dir)
	dirEntries, err := dir.ReadDirAll()
	if err != nil {
		err = errors.New("failed to list directory")
		return
	}

	// if there's a "Subs" child directory, add its children to the list as well,
	// so mediaWithResources is able to find them.
	for _, node := range dirEntries {
		if strings.EqualFold(node.Name(), "Subs") && node.IsDir() {
			subtitleDir := node.(*vfs.Dir)
			subtitleEntries, err := subtitleDir.ReadDirAll()
			if err != nil {
				err = errors.New("failed to list subtitle directory")
				return nil, err
			}
			dirEntries = append(dirEntries, subtitleEntries...)
		}
	}

	dirEntries, mediaResources := mediaWithResources(dirEntries)
	for _, de := range dirEntries {
		child := object{
			path.Join(o.Path, de.Name()),
		}
		obj, err := cds.cdsObjectToUpnpavObject(child, de, mediaResources[de], host)
		if err != nil {
			fs.Errorf(cds, "error with %s: %s", child.FilePath(), err)
			continue
		}
		if obj == nil {
			fs.Debugf(cds, "unrecognized file type: %s", de)
			continue
		}
		ret = append(ret, obj)
	}

	return
}

// Given a list of nodes, separate them into potential media items and any
// associated resources (external subtitles, for example).
//
// The result is a slice of potential media nodes (in their original order) and
// a map of the associated resource nodes for each media node, if any.
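//
// For example, given video.mkv, video.srt and video.en.srt, both subtitle
// files are returned as resources associated with video.mkv.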
func mediaWithResources(nodes vfs.Nodes) (vfs.Nodes, map[vfs.Node]vfs.Nodes) {
	media, mediaResources := vfs.Nodes{}, make(map[vfs.Node]vfs.Nodes)

	// First, separate out the subtitles and media into maps, keyed by their lowercase base names.
	mediaByName, subtitlesByName := make(map[string]vfs.Nodes), make(map[string]vfs.Nodes)
	for _, node := range nodes {
		baseName, ext := splitExt(strings.ToLower(node.Name()))
		switch ext {
		case ".srt", ".ass", ".ssa", ".sub", ".idx", ".sup", ".jss", ".txt", ".usf", ".cue", ".vtt", ".css":
			// Strictly, .idx should accompany .sub and .css should accompany .vtt,
			// otherwise they should be culled, and their mime types are not
			// consistent, but these mismatches do not cause errors.
			subtitlesByName[baseName] = append(subtitlesByName[baseName], node)
		default:
			mediaByName[baseName] = append(mediaByName[baseName], node)
			media = append(media, node)
		}
	}

	// Find the associated media file for each subtitle
	for baseName, nodes := range subtitlesByName {
		// Find a media file with the same basename (video.mp4 for video.srt)
		mediaNodes, found := mediaByName[baseName]
		if !found {
			// Or basename of the basename (video.mp4 for video.en.srt)
			baseName, _ := splitExt(baseName)
			mediaNodes, found = mediaByName[baseName]
		}

		// Just advise if no match found
		if !found {
			fs.Infof(nodes, "could not find associated media for subtitle: %s", baseName)
			fs.Infof(mediaByName, "mediaByName is this, baseName is %s", baseName)
			continue
		}

		// Associate with all potential media nodes
		fs.Debugf(mediaNodes, "associating subtitle: %s", baseName)
		for _, mediaNode := range mediaNodes {
			mediaResources[mediaNode] = append(mediaResources[mediaNode], nodes...)
		}
	}

	return media, mediaResources
}

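// Holds the arguments of a ContentDirectory Browse request.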
type browse struct {
	ObjectID       string
	BrowseFlag     string
	Filter         string
	StartingIndex  int
	RequestedCount int
}

// Returns the ContentDirectory object for the given ObjectID.
func (cds *contentDirectoryService) objectFromID(id string) (o object, err error) {
	o.Path, err = url.QueryUnescape(id)
	if err != nil {
		return
	}
	if o.Path == "0" {
		o.Path = "/"
	}
	o.Path = path.Clean(o.Path)
	if !path.IsAbs(o.Path) {
		err = fmt.Errorf("bad ObjectID %v", o.Path)
		return
	}
	return
}

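// Handles a ContentDirectory SOAP action, returning the response arguments as a map.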
func (cds *contentDirectoryService) Handle(action string, argsXML []byte, r *http.Request) (map[string]string, error) {
	host := r.Host

	switch action {
	case "GetSystemUpdateID":
		return map[string]string{
			"Id": cds.updateIDString(),
		}, nil
	case "GetSortCapabilities":
		return map[string]string{
			"SortCaps": "dc:title",
		}, nil
	case "Browse":
		var browse browse
		if err := xml.Unmarshal(argsXML, &browse); err != nil {
			return nil, err
		}
		obj, err := cds.objectFromID(browse.ObjectID)
		if err != nil {
			return nil, upnp.Errorf(upnpav.NoSuchObjectErrorCode, "%s", err.Error())
		}
		switch browse.BrowseFlag {
		case "BrowseDirectChildren":
			objs, err := cds.readContainer(obj, host)
			if err != nil {
				return nil, upnp.Errorf(upnpav.NoSuchObjectErrorCode, "%s", err.Error())
			}
			totalMatches := len(objs)
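			// Apply the requested StartingIndex, clamped to the number of results.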
			objs = objs[func() (low int) {
				low = browse.StartingIndex
				if low > len(objs) {
					low = len(objs)
				}
				return
			}():]
			if browse.RequestedCount != 0 && browse.RequestedCount < len(objs) {
				objs = objs[:browse.RequestedCount]
			}
			result, err := xml.Marshal(objs)
			if err != nil {
				return nil, err
			}
			return map[string]string{
				"TotalMatches":   fmt.Sprint(totalMatches),
				"NumberReturned": fmt.Sprint(len(objs)),
				"Result":         didlLite(string(result)),
				"UpdateID":       cds.updateIDString(),
			}, nil
		case "BrowseMetadata":
			node, err := cds.vfs.Stat(obj.Path)
			if err != nil {
				return nil, err
			}
			// TODO: External subtitles won't appear in the metadata here, but probably should.
			upnpObject, err := cds.cdsObjectToUpnpavObject(obj, node, vfs.Nodes{}, host)
			if err != nil {
				return nil, err
			}
			result, err := xml.Marshal(upnpObject)
			if err != nil {
				return nil, err
			}
			return map[string]string{
				"TotalMatches":   "1",
				"NumberReturned": "1",
				"Result":         didlLite(string(result)),
				"UpdateID":       cds.updateIDString(),
			}, nil
		default:
			return nil, upnp.Errorf(upnp.ArgumentValueInvalidErrorCode, "unhandled browse flag: %v", browse.BrowseFlag)
		}
	case "GetSearchCapabilities":
		return map[string]string{
			"SearchCaps": "",
		}, nil
	// Samsung Extensions
	case "X_GetFeatureList":
		return map[string]string{
			"FeatureList": `<Features xmlns="urn:schemas-upnp-org:av:avs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="urn:schemas-upnp-org:av:avs http://www.upnp.org/schemas/av/avs.xsd">
<Feature name="samsung.com_BASICVIEW" version="1">
	<container id="0" type="object.item.imageItem"/>
	<container id="0" type="object.item.audioItem"/>
	<container id="0" type="object.item.videoItem"/>
</Feature>
</Features>`}, nil
	case "X_SetBookmark":
		// just ignore
		return map[string]string{}, nil
	default:
		return nil, upnp.InvalidActionError
	}
}

// Represents a ContentDirectory object.
type object struct {
	Path string // The cleaned, absolute path for the object relative to the server.
}

// Returns the actual local filesystem path for the object.
func (o *object) FilePath() string {
	return filepath.FromSlash(o.Path)
}

// Returns the ObjectID for the object. This is used in various ContentDirectory actions.
func (o object) ID() string {
	if !path.IsAbs(o.Path) {
		fs.Panicf(nil, "Relative object path: %s", o.Path)
	}
	if len(o.Path) == 1 {
		return "0"
	}
	return url.QueryEscape(o.Path)
}

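// Returns true if the object is the root of the served directory tree.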
func (o *object) IsRoot() bool {
	return o.Path == "/"
}

// Returns the object's parent ObjectID. Fortunately it can be deduced from the
// ObjectID (for now).
func (o object) ParentID() string {
	if o.IsRoot() {
		return "-1"
	}
	o.Path = path.Dir(o.Path)
	return o.ID()
}