tis 2017-12-26 klockan 01:12 +0000 skrev Colin NG:
> +static int resolve_content_path(AVFormatContext *s, const char *url, 
> xmlNodePtr *baseurl_nodes, int n_baseurl_nodes) {
> +
> +    char *tmp_str = av_mallocz(MAX_URL_SIZE);
> +    char *path = av_mallocz(MAX_URL_SIZE);
> +    char *mpdName = NULL;
> +    xmlNodePtr  node = NULL;
> +    char *baseurl = NULL;
> +    char *root_url = NULL;
> +    char *text = NULL;
> +
> +    int isRootHttp = 0;
> +    char token ='/';
> +    int start =  0;
> +    int rootId = 0;
> +    int updated = 0;
> +    int size = 0;
> +    int i;
> +
> +    if (!tmp_str || !path) {
> +        updated = AVERROR(ENOMEM);
> +        goto end;
> +    }
> +
> +    av_strlcpy(tmp_str, url, strlen(url) + 1);

MAX_URL_SIZE maybe? Is there some way url can be too long?

> +    mpdName = strtok (tmp_str, "/");
> +    while (mpdName = strtok (NULL, "/")) {
> +        size = strlen(mpdName);
> +    }

strtok() isn't thread safe. I forget whether we guarantee thread safety
at this stage in lavf

> +
> +    av_strlcpy (path, url, strlen(url) - size + 1);

Similarly here. Plus strlen(url) being computed again

> +    for (rootId = n_baseurl_nodes - 1; rootId > 0; rootId --) {
> +        if (!(node = baseurl_nodes[rootId])) {
> +            continue;
> +        }
> +        if (ishttp(xmlNodeGetContent(node))) {
> +            break;
> +        }
> +    }
> +
> +    node = baseurl_nodes[rootId];
> +    baseurl = xmlNodeGetContent(node);
> +    root_url = (av_strcasecmp(baseurl, "")) ? baseurl : path;
> +    if (node) {
> +        xmlNodeSetContent(node, root_url);
> +    }
> +
> +    size = strlen(root_url);
> +    isRootHttp = ishttp(root_url);
> +
> +    if (root_url[size - 1] == token) {
> +        av_strlcat(root_url, "/", size + 2);
> +        size += 2;
> +    }
> +
> +    for (i = 0; i < n_baseurl_nodes; ++i) {
> +        if (i == rootId) {
> +            continue;
> +        }
> +        text = xmlNodeGetContent(baseurl_nodes[i]);
> +        if (text) {
> +            memset(tmp_str, 0, strlen(tmp_str));
> +            if (!ishttp(text) && isRootHttp) {
> +                av_strlcpy(tmp_str, root_url, size + 1);
> +            }
> +            start = (text[0] == token);
> +            av_strlcat(tmp_str, text + start, MAX_URL_SIZE);
> +            xmlNodeSetContent(baseurl_nodes[i], tmp_str);
> +            updated = 1;
> +            xmlFree(text);
> +        }
> +    }
> +
> +end:
> +    av_free(path);
> +    av_free(tmp_str);
> +    return updated;
> +
> +}

I don't really like lavf doing URL parsing/manipulation like this when
there are (supposedly) mature libraries to do it. Especially after this
year's Blackhat revelations of exploits making use of how whitespace,
unicode, slashes, percent encoding etc. are handled differently in every
library [1]. But I'm also not familiar with DASH, so I can't say what
the typical thing to do would be

/Tomas

[1] https://www.blackhat.com/docs/us-17/thursday/us-17-Tsai-A-New-Era-Of-SSRF-Exploiting-URL-Parser-In-Trending-Programming-Languages.pdf

Attachment: signature.asc
Description: This is a digitally signed message part

_______________________________________________
ffmpeg-devel mailing list
ffmpeg-devel@ffmpeg.org
http://ffmpeg.org/mailman/listinfo/ffmpeg-devel

Reply via email to